Locks in processes and process pools
1. Processes
2. Data sharing
3. Locks
4. Process pools
5. Modules (web scraping)
- requests
- bs4 (BeautifulSoup)
6. Coroutines
# 1. Supplement: special methods of classes
# (1) obj.xxx        -> __getattr__ is called, but only when normal attribute lookup fails
# (2) obj.xxx = xxx  -> __setattr__ is called for every assignment
# class Foo(object):
#     info = {}  # class attribute, shared by all instances
#     def __init__(self, name):
#         self.name = name  # this assignment goes through __setattr__ (normally object.__setattr__, here the override below)
#     def __setattr__(self, key, value):
#         self.info[key] = value
#     def __getattr__(self, item):
#         return self.info[item]
# obj = Foo('lisa')
# obj.age = 18
# print(obj.info)   # {'name': 'lisa', 'age': 18}
# print(obj.age)    # 18, found via __getattr__
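# A minimal sketch (hypothetical Bar class, not in the original notes) showing the asymmetry:
# __setattr__ intercepts every assignment, __getattr__ only fires when normal lookup fails.
# class Bar(object):
#     def __setattr__(self, key, value):
#         print('__setattr__ called for', key)
#         object.__setattr__(self, key, value)  # store normally to avoid recursion
#     def __getattr__(self, item):
#         print('__getattr__ called for', item)
#         return None
# b = Bar()
# b.x = 1           # __setattr__ called for x
# print(b.x)        # found by normal lookup, __getattr__ NOT called -> 1
# print(b.missing)  # normal lookup fails -> __getattr__ called -> None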
# I. Processes
# 1. Data is not shared between processes
# import multiprocessing
# data_list = []
# def task(arg):
#     data_list.append(arg)
#     print(data_list)  # each child prints a list containing only its own item
# def run():
#     for i in range(1, 11):
#         p = multiprocessing.Process(target=task, args=(i,))
#         p.start()
# if __name__ == '__main__':
#     run()
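# For contrast, a minimal sketch (not in the original notes) with threading: threads live
# in one process, so they really do append to the same list.
# import threading
# data_list = []
# def task(arg):
#     data_list.append(arg)
#     print(data_list)  # the list keeps growing: all threads share it
# for i in range(1, 11):
#     t = threading.Thread(target=task, args=(i,))
#     t.start()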
# 2. Common features:
# By default the main process waits for its child processes to finish (no join() is needed for that)
# import time
# import multiprocessing
# data_list = []
# def task(arg):
#     data_list.append(arg)
#     time.sleep(2)
#     print(data_list)
# def run():
#     for i in range(1, 11):
#         p = multiprocessing.Process(target=task, args=(i,))
#         p.start()
# if __name__ == '__main__':
#     run()
# (1) daemon: the main process no longer waits for the children; daemonic children are terminated when the main process exits
# import time
# import multiprocessing
# data_list = []
# def task(arg):
#     data_list.append(arg)
#     time.sleep(2)
#     print(data_list)
# def run():
#     for i in range(1, 11):
#         p = multiprocessing.Process(target=task, args=(i,))
#         p.daemon = True  # for processes this is an attribute; not setDaemon(True) as with old-style threads
#         p.start()
# if __name__ == '__main__':
#     run()
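# A minimal sketch (not in the original notes) to see the daemon effect: if the main process
# exits immediately, the daemonic children are killed before they can print; give the main
# process some time and their output appears.
# import time
# import multiprocessing
# def task(arg):
#     time.sleep(1)
#     print('child', arg)
# if __name__ == '__main__':
#     for i in range(3):
#         p = multiprocessing.Process(target=task, args=(i,))
#         p.daemon = True
#         p.start()
#     # time.sleep(2)  # uncomment to let the daemonic children finish and print
#     print('main exits')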
# (2) join() with no argument: the main process waits here until the child finishes before moving on
# import time
# import multiprocessing
# data_list = []
# def task(arg):
#     data_list.append(arg)
#     time.sleep(2)
#     print(data_list)
# def run():
#     for i in range(1, 11):
#         p = multiprocessing.Process(target=task, args=(i,))
#         p.start()
#         p.join()  # joining inside the loop makes the children run one after another
#         print('done')
# if __name__ == '__main__':
#     run()
# Output:
# [1]
# done
# [2]
# done
# [3]
# done
# [4]
# done
# [5]
# done
# [6]
# done
# [7]
# done
# [8]
# done
# [9]
# done
# [10]
# done
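# A minimal sketch (not in the original notes) of the more common pattern: start all the
# children first, then join them all, so they run concurrently instead of one by one.
# import time
# import multiprocessing
# def task(arg):
#     time.sleep(2)
#     print(arg)
# if __name__ == '__main__':
#     children = []
#     for i in range(1, 11):
#         p = multiprocessing.Process(target=task, args=(i,))
#         p.start()
#         children.append(p)
#     for p in children:
#         p.join()          # total wait is about 2 seconds, not 20
#     print('all children finished')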
#
# (3) join(timeout): the main process waits here at most that many seconds, then moves on whether or not the child has finished
# import time
# import multiprocessing
# data_list = []
# def task(arg):
#     data_list.append(arg)
#     time.sleep(2)
#     print(data_list)
# def run():
#     for i in range(1, 11):
#         p = multiprocessing.Process(target=task, args=(i,))
#         p.start()
#         p.join(1)  # wait at most 1 second for this child
#         print('done')
# if __name__ == '__main__':
#     run()
# Output:
# done
# done
# [1]
# done
# [2]
# done
# [3]
# done
# [4]
# done
# [5]
# done
# [6]
# done
# [7]
# done
# [8]
# done
# [9]
# [10]
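# join(timeout) only stops waiting; it does not stop the child. A minimal sketch (not in the
# original notes) of checking a slow child with is_alive() and force-stopping it with terminate():
# import time
# import multiprocessing
# def task():
#     time.sleep(5)
# if __name__ == '__main__':
#     p = multiprocessing.Process(target=task)
#     p.start()
#     p.join(1)              # give it 1 second
#     if p.is_alive():       # still running after the timeout
#         p.terminate()      # force it to stop
#         p.join()
#     print('main continues')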
# (4) Process name: unlike threads there is no setName()/getName(); use the name attribute directly
# import time
# import multiprocessing
# data_list = []
# def task(arg):
#     data_list.append(arg)
#     p = multiprocessing.current_process()
#     name = p.name
#     time.sleep(2)
#     print(name, data_list)
# def run():
#     p = multiprocessing.Process(target=task, args=(1,))
#     p.name = '去玩儿'   # set the name before start()
#     p.start()
#     print('done')
# if __name__ == '__main__':
#     run()
# 3. As with threads, besides the plain multiprocessing.Process(target=...) form, a process can also be created by subclassing
# import multiprocessing
# class MyProcess(multiprocessing.Process):
#     def run(self):
#         print('current process: %s' % multiprocessing.current_process())
# def task():
#     p1 = MyProcess()
#     p1.start()
#     p2 = MyProcess()
#     p2.start()
# if __name__ == '__main__':
#     task()
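# A minimal sketch (not in the original notes) of passing arguments to a Process subclass:
# override __init__, call super().__init__(), and use the stored value inside run().
# import multiprocessing
# class MyProcess(multiprocessing.Process):
#     def __init__(self, arg):
#         super().__init__()
#         self.arg = arg
#     def run(self):
#         print(self.name, 'got', self.arg)
# if __name__ == '__main__':
#     for i in range(3):
#         MyProcess(i).start()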
# II. Sharing data between processes
# Queue
# Linux (fork: children inherit the module-level queue):
# import multiprocessing
# q = multiprocessing.Queue()
# def task(arg):
#     q.put(arg)
# def run():
#     for i in range(10):
#         p = multiprocessing.Process(target=task, args=(i,))
#         p.start()
#     while 1:
#         print(q.get())   # blocks forever once the 10 items have been read
# run()
# Windows (spawn: pass the queue explicitly as an argument):
# import multiprocessing
# def task(arg, q):
#     q.put(arg)
# if __name__ == '__main__':
#     q = multiprocessing.Queue()
#     for i in range(10):
#         p = multiprocessing.Process(target=task, args=(i, q))
#         p.start()
#     while 1:
#         print(q.get())   # blocks forever once the 10 items have been read
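# The while 1 loop above never ends. A minimal sketch (not in the original notes) that reads
# exactly as many items as were put, then joins the children:
# import multiprocessing
# def task(arg, q):
#     q.put(arg)
# if __name__ == '__main__':
#     q = multiprocessing.Queue()
#     children = []
#     for i in range(10):
#         p = multiprocessing.Process(target=task, args=(i, q))
#         p.start()
#         children.append(p)
#     for _ in range(10):
#         print(q.get())   # exactly 10 gets, so the loop terminates
#     for p in children:
#         p.join()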
# 1. Manager
# Linux
# import multiprocessing
# m = multiprocessing.Manager()
# dic = m.dict()   # a special dict proxy, shared by all processes
# def task(arg):
#     dic[arg] = 100
# def run():
#     for i in range(10):
#         p = multiprocessing.Process(target=task, args=(i,))
#         p.start()
#     while 1:
#         print(dic.items())
# run()
# Windows
# import time
# import multiprocessing
# def task(arg, dic):
#     dic[arg] = arg
# if __name__ == '__main__':
#     m = multiprocessing.Manager()
#     dic = m.dict()   # the data lives in the Manager's server process; children reach it through a proxy
#     def run():
#         for i in range(10):
#             p = multiprocessing.Process(target=task, args=(i, dic))
#             p.start()
#     run()
#     time.sleep(10)   # without this the main process may exit first; the Manager shuts down with it, and children that are still running can no longer connect to it, so they raise an error
#     print(dic)
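# A minimal sketch (not in the original notes) that replaces the sleep with join(), so the
# main process waits exactly as long as needed and the Manager stays alive until the children finish:
# import multiprocessing
# def task(arg, dic):
#     dic[arg] = arg
# if __name__ == '__main__':
#     with multiprocessing.Manager() as m:
#         dic = m.dict()
#         children = []
#         for i in range(10):
#             p = multiprocessing.Process(target=task, args=(i, dic))
#             p.start()
#             children.append(p)
#         for p in children:
#             p.join()          # all children are done; no race with the Manager shutting down
#         print(dict(dic))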
#
# III. Process locks (a lock is only needed when the processes actually share data)
# import time
# def task(arg, lock):
#     lock.acquire()
#     time.sleep(1)
#     print(arg)
#     lock.release()
# if __name__ == '__main__':
#     import multiprocessing
#     lock = multiprocessing.RLock()
#     def run():
#         for i in range(1, 11):
#             p = multiprocessing.Process(target=task, args=(i, lock,))
#             p.start()
#     run()
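# A minimal sketch (not in the original notes) of what the lock is actually for: protecting a
# shared value (here a multiprocessing.Value) so that concurrent increments are not lost.
# import multiprocessing
# def task(counter, lock):
#     for _ in range(1000):
#         with lock:                 # without the lock, increments can be lost
#             counter.value += 1
# if __name__ == '__main__':
#     lock = multiprocessing.Lock()
#     counter = multiprocessing.Value('i', 0)   # shared integer
#     children = [multiprocessing.Process(target=task, args=(counter, lock)) for _ in range(4)]
#     for p in children:
#         p.start()
#     for p in children:
#         p.join()
#     print(counter.value)   # reliably 4000 with the lock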
# IV. Process pools
# import time
# from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
# def task(arg):
#     time.sleep(1)
#     print(arg)
# if __name__ == '__main__':
#     pool = ProcessPoolExecutor(5)       # at most 5 worker processes
#     for i in range(16):
#         pool.submit(task, i)
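# A minimal sketch (not in the original notes) of collecting return values: submit() gives back
# a Future, result() blocks until that task is done, and map() is the shortcut for the same pattern.
# from concurrent.futures import ProcessPoolExecutor
# def task(arg):
#     return arg * arg
# if __name__ == '__main__':
#     with ProcessPoolExecutor(5) as pool:      # the with-block waits for all workers on exit
#         futures = [pool.submit(task, i) for i in range(16)]
#         print([f.result() for f in futures])  # squares of 0..15
#         print(list(pool.map(task, range(16))))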
Change the world, change yourself!