|
我查到的原因是因为导入的Queue是queue.Queue所以我便改成了from multiprocessing import Queue,Process
但是依旧报错,因为英语水平有限,无法看懂overflow上的全部回答,所以只能进入论坛问问各位前辈,望帮助
- if __name__ == "__main__":
-
- queue1 = Queue()
- queue2 = Queue()
- threadList1 = []
- threadList2 = []
- ipList1 = []
- ipList2 = []
- lockList1 = []
- lockList2 = []
- for i in range(0,10):
- lock1 = threading.Lock()
- lockList1.append(lock1)
- lock2 = threading.Lock()
- lockList2.append(lock2)
- for i in range(1,10):
- lockList1[i].acquire()
- lockList2[i].acquire()
-
-
- p1 = Process(target = startrun, args = (queue1, threadList1, lockList1, ipList1, 0, 200))
- p2 = Process(target = startrun, args = (queue2, threadList2, lockList2, ipList2, 200, 450))
- p1.start()
- p2.start()
- p1.join()
- p2.join()
复制代码- TypeError Traceback (most recent call last)
- <ipython-input-13-07db18c7f19b> in <module>()
- 218 p1 = Process(target = startrun, args = (queue1, threadList1, lockList1, ipList1, 0, 200))
- 219 p2 = Process(target = startrun, args = (queue2, threadList2, lockList2, ipList2, 200, 450))
- --> 220 p1.start()
- 221 p2.start()
- 222 p1.join()
- ~\Anaconda3\lib\multiprocessing\process.py in start(self)
- 103 'daemonic processes are not allowed to have children'
- 104 _cleanup()
- --> 105 self._popen = self._Popen(self)
- 106 self._sentinel = self._popen.sentinel
- 107 # Avoid a refcycle if the target function holds an indirect
- ~\Anaconda3\lib\multiprocessing\context.py in _Popen(process_obj)
- 221 @staticmethod
- 222 def _Popen(process_obj):
- --> 223 return _default_context.get_context().Process._Popen(process_obj)
- 224
- 225 class DefaultContext(BaseContext):
- ~\Anaconda3\lib\multiprocessing\context.py in _Popen(process_obj)
- 320 def _Popen(process_obj):
- 321 from .popen_spawn_win32 import Popen
- --> 322 return Popen(process_obj)
- 323
- 324 class SpawnContext(BaseContext):
- ~\Anaconda3\lib\multiprocessing\popen_spawn_win32.py in __init__(self, process_obj)
- 63 try:
- 64 reduction.dump(prep_data, to_child)
- ---> 65 reduction.dump(process_obj, to_child)
- 66 finally:
- 67 set_spawning_popen(None)
- ~\Anaconda3\lib\multiprocessing\reduction.py in dump(obj, file, protocol)
- 58 def dump(obj, file, protocol=None):
- 59 '''Replacement for pickle.dump() using ForkingPickler.'''
- ---> 60 ForkingPickler(file, protocol).dump(obj)
- 61
- 62 #
- TypeError: can't pickle _thread.lock objects
复制代码
而我运用进程的原因是因为之前写好的爬虫爬取豆瓣电影是速度过慢才让我萌生尝试同时运行俩个爬虫的想法,
(之前写的爬虫过慢的原因是因为我不懂得如何有效率的使用付费的ip代理,因为我只学了半个月,所以只能将ip放入list中,用添加删除的方式取得有效ip)
|
|