'''
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
import time, random, os

def task(name, n):
    print('%s%s is running' % (name, os.getpid()))
    time.sleep(random.randint(1, 3))
    return n ** 2

if __name__ == '__main__':
    # print(os.cpu_count())
    p = ProcessPoolExecutor(4)

    # Two ways to submit tasks:
    # Synchronous call: after submitting a task, wait in place until the task has run all
    #   the way to completion and its result is in hand, then move on to the next line of
    #   code. This makes the tasks run serially.
    # Asynchronous call: after submitting a task, do not wait in place (so what about the
    #   result???) but go straight to the next line of code. This makes the tasks run
    #   concurrently.
    l = []
    for i in range(10):
        # Synchronous submit
        # res = p.submit(task, 'process pid: ', i).result()
        # print(res)

        # Asynchronous submit
        future = p.submit(task, 'process pid: ', i)
        l.append(future)
    p.shutdown(wait=True)  # close the pool to new submissions and wait here until every task in the pool has finished
    for future in l:
        print(future.result())
    print('main')
'''

'''
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
import time, random, os
import requests

def get(url):
    print('%s GET %s' % (os.getpid(), url))
    time.sleep(3)
    response = requests.get(url)
    if response.status_code == 200:
        res = response.text
    else:
        res = 'download failed'
    parse(res)

def parse(res):
    time.sleep(1)
    print('%s parsed a result of length %s' % (os.getpid(), len(res)))

if __name__ == '__main__':
    urls = [
        'https://www.baidu.com',
        'https://www.sina.com.cn',
        'https://www.tmall.com',
        'https://www.jd.com',
        'https://www.python.org',
        'https://www.openstack.org',
        'https://www.baidu.com',
        'https://www.baidu.com',
        'https://www.baidu.com',
    ]

    p = ProcessPoolExecutor(9)
    l = []
    start = time.time()
    for url in urls:
        future = p.submit(get, url)
        l.append(future)
    p.shutdown(wait=True)
    print('main', time.time() - start)
'''

'''
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
import time, random, os
import requests

def get(url):
    print('%s GET %s' % (os.getpid(), url))
    time.sleep(3)
    response = requests.get(url)
    if response.status_code == 200:
        res = response.text
    else:
        res = 'download failed'
    return res

def parse(future):
    time.sleep(1)
    res = future.result()
    print('%s parsed a result of length %s' % (os.getpid(), len(res)))

if __name__ == '__main__':
    urls = [
        'https://www.baidu.com',
        'https://www.sina.com.cn',
        'https://www.tmall.com',
        'https://www.jd.com',
        'https://www.python.org',
        'https://www.openstack.org',
        'https://www.baidu.com',
        'https://www.baidu.com',
        'https://www.baidu.com',
    ]

    p = ProcessPoolExecutor(9)
    start = time.time()
    for url in urls:
        # Asynchronous call: after submitting a task, do not wait in place but go straight
        # to the next line of code, so the tasks run concurrently; the future object is
        # handed to the callback automatically once the task has finished.
        future = p.submit(get, url)
        future.add_done_callback(parse)  # parse is triggered automatically when the task finishes and receives the future object as its argument
    p.shutdown(wait=True)
    print('main', time.time() - start)
    print('main', os.getpid())
'''

# Final version: the same callback pattern as above, but with a thread pool;
# current_thread().name shows which thread runs each piece of work.
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
from threading import current_thread
import time, random, os
import requests

def get(url):
    print('%s GET %s' % (current_thread().name, url))
    time.sleep(3)
    response = requests.get(url)
    if response.status_code == 200:
        res = response.text
    else:
        res = 'download failed'
    return res

def parse(future):
    time.sleep(1)
    res = future.result()
    print('%s parsed a result of length %s' % (current_thread().name, len(res)))

if __name__ == '__main__':
    urls = [
        'https://www.baidu.com',
        'https://www.sina.com.cn',
        'https://www.tmall.com',
        'https://www.jd.com',
        'https://www.python.org',
        'https://www.openstack.org',
        'https://www.baidu.com',
        'https://www.baidu.com',
        'https://www.baidu.com',
    ]

    p = ThreadPoolExecutor(4)
    start = time.time()
    for url in urls:
        future = p.submit(get, url)
        future.add_done_callback(parse)
    p.shutdown(wait=True)
    print('main', current_thread().name, time.time() - start)
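
A related pattern worth a quick sketch: besides collecting futures in a list and calling result() after shutdown(wait=True), or registering a callback with add_done_callback, the standard library's concurrent.futures.as_completed yields each future as soon as it finishes, so results can be handled in completion order. The square task below is only a stand-in for illustration, not something taken from the examples above.

from concurrent.futures import ThreadPoolExecutor, as_completed
import time, random

def square(n):
    # stand-in task: sleep for a bit, then return the square
    time.sleep(random.randint(1, 3))
    return n ** 2

if __name__ == '__main__':
    # the "with" block closes the pool and waits for all tasks on exit,
    # just like calling shutdown(wait=True) by hand
    with ThreadPoolExecutor(4) as pool:
        futures = [pool.submit(square, i) for i in range(10)]
        for future in as_completed(futures):  # futures are yielded in the order they finish
            print(future.result())

Compared with the first example above, the visible difference is that results print in completion order rather than submission order; pool.map(square, range(10)) is the even shorter form when results are wanted back in submission order.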
Original post: https://www.cnblogs.com/xuqidong/p/13592773.html