Python同时读取多个文件

最近使用Python多线程进行异步读取文件,读取30多个文件消耗了大概90s时间,相比于for循环顺序读取反而慢了一大截,所以写了下面的测试程序。

为什么多线程会慢这么多?大概是因为GIL的缘故(另外为每个文件都新建一个线程本身也有不小的开销)。至于解决办法,可以使用多进程或者异步IO进行读取,目前只看了多进程。

print(file_lists)

['2.txt',
'3.txt',
'5.txt',
'4.txt',
'9.txt',
'7.txt',
'6.txt',
'8.txt',
'1.txt',
'10.txt']

Sequential

# Baseline: read every file one after another on the main thread.
for name in file_lists:
    read_file(os.path.join('data', name))

executed in 184ms

Multi-Threading

import operator
import csv
import time
import os
import threading
from time import ctime
def read_file(file_dir):
    """Read the text file at *file_dir* and return its lines.

    Parameters:
        file_dir: path to the file to read.

    Returns:
        list[str]: the file's lines, newline characters included
        (same as ``f.readlines()``).

    The original version opened the file without ever closing it,
    leaking the handle; ``with`` guarantees it is closed even if the
    read raises. Returning the lines (instead of discarding them) is
    backward compatible — existing callers ignore the return value.
    """
    with open(file_dir, 'r') as f:
        return f.readlines()
# def read_docx(file_dir):
# Spawn one thread per file in ./data, start them all, then wait for
# every one of them to finish before reporting completion.
threads = []
file_lists = os.listdir('data')
for file in file_lists:
    t = threading.Thread(target=read_file, args=(os.path.join('data', file),))
    threads.append(t)
for thr in threads:
    thr.start()
# BUG FIX: the original looped `for the in threads:` but called
# `thr.join()`, so it repeatedly joined only the last-started thread
# and never waited for the others — the "all over" message could print
# before the reads were done. Join each thread via the loop variable.
for thr in threads:
    thr.join()
print("all over %s" % ctime())

executed in 360ms

Multi-Process

from multiprocessing import Process

# Build one Process per file (not started here), mirroring the
# threaded version above but with real OS processes.
processes = [
    Process(target=read_file, args=(os.path.join('data', name),))
    for name in file_lists
]