# 1. Synchronous execution with a process pool --------------
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
import os
import time
import random


def task(n, delay=None):
    """Square *n* after simulating a blocking I/O wait.

    Parameters:
        n: the number to square.
        delay: seconds to sleep; ``None`` (the default) keeps the original
            random 1-3 second sleep, while an explicit value makes the
            function deterministic for testing.

    Returns:
        ``n ** 2``.

    NOTE: the sleep simulates I/O-bound work, which is usually better
    served by threads; a process pool adds startup overhead here.
    """
    print('[%s] is running' % os.getpid())
    time.sleep(random.randint(1, 3) if delay is None else delay)
    return n ** 2


if __name__ == '__main__':
    start = time.time()
    # With hundreds of tasks we cannot spawn one process per task, so a
    # bounded pool (default size: the CPU count) limits concurrency.
    pool = ProcessPoolExecutor()
    for i in range(10):
        # .result() blocks until this task finishes, so the loop submits
        # tasks strictly one after another -- synchronous, equivalent to
        # multiprocessing's apply().
        result = pool.submit(task, i).result()
    pool.shutdown()  # like close() + join(): wait for all workers to finish
    print('=' * 30)
    print(time.time() - start)  # ~17.4s observed: the sleeps run back to back
# 2. Asynchronous execution with a process pool -----------
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
import os
import time
import random


def task(n, delay=None):
    """Square *n* after simulating a blocking I/O wait.

    Parameters:
        n: the number to square.
        delay: seconds to sleep; ``None`` (the default) keeps the original
            random 1-3 second sleep, while an explicit value makes the
            function deterministic for testing.

    Returns:
        ``n ** 2``.

    NOTE: the sleep simulates I/O-bound work, which is usually better
    served by threads; a process pool adds startup overhead here.
    """
    print('[%s] is running' % os.getpid())
    time.sleep(random.randint(1, 3) if delay is None else delay)
    return n ** 2


if __name__ == '__main__':
    start = time.time()
    # A bounded pool (default size: the CPU count) keeps hundreds of tasks
    # from spawning hundreds of processes.
    pool = ProcessPoolExecutor()
    futures = []
    for i in range(10):
        # submit() returns a Future immediately without waiting for the
        # result -- asynchronous, equivalent to multiprocessing's
        # apply_async().
        futures.append(pool.submit(task, i))
    pool.shutdown()  # like close() + join(): wait for every task to finish
    print('=' * 30)
    # All futures are done after shutdown(), so .result() returns at once.
    print([f.result() for f in futures])
    print(time.time() - start)
# 3. Asynchronous execution with a thread pool
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
from threading import current_thread  # currentThread() is a deprecated alias
import os
import time
import random


def task(n, delay=None):
    """Square *n* after simulating a blocking I/O wait.

    Prints the worker thread's name and the process id -- every line shows
    the same pid because all threads share one process.

    Parameters:
        n: the number to square.
        delay: seconds to sleep; ``None`` (the default) keeps the original
            random 1-3 second sleep, while an explicit value makes the
            function deterministic for testing.

    Returns:
        ``n ** 2``.
    """
    print('%s:%s is running' % (current_thread().name, os.getpid()))
    time.sleep(random.randint(1, 3) if delay is None else delay)
    return n ** 2


if __name__ == '__main__':
    start = time.time()
    # Default max_workers is min(32, os.cpu_count() + 4) on Python 3.8+
    # (it was cpu_count() * 5 on older versions).  Threads suit this
    # I/O-bound workload, so the pool finishes much faster than processes.
    pool = ThreadPoolExecutor()
    futures = []
    for i in range(10):
        # submit() is asynchronous, like multiprocessing's apply_async().
        futures.append(pool.submit(task, i))
    pool.shutdown()  # wait=True by default: like close() + join()
    print('=' * 30)
    print([f.result() for f in futures])
    print(time.time() - start)  # ~3s observed: the sleeps overlap