The code below has some issues. The aim is to append each result of new_df() to a list, e.g. out.
import pandas as pd
import random
import time
from multiprocessing import Pool

def new_df(rows=10000):  # proxy for a complex dataframe
    temp = pd.DataFrame({'a': [''.join(chr(random.randint(65, 122)) for _ in range(200))
                               for _ in range(rows)]})
    temp['b'] = temp['a'].str.lower()
    temp['c'] = temp['a'].str.upper()
    return temp

pool = Pool(4)
start = time.time()
out = pool.map(new_df, [9999, 10000, 10001, 10002])
print(f"{time.time() - start} sec")
Issue (running the script from Visual Studio Code on Windows):
RuntimeError:
        An attempt has been made to start a new process before the
        current process has finished its bootstrapping phase.

        This probably means that you are not using fork to start your
        child processes and you have forgotten to use the proper idiom
        in the main module:

            if __name__ == '__main__':
                freeze_support()
                ...

        The "freeze_support()" line can be omitted if the program
        is not going to be frozen to produce an executable.
Traceback:

Traceback (most recent call last):
  File "<string>", line 1, in <module>
  File "C:\Mambaforge\lib\multiprocessing\spawn.py", line 116, in spawn_main
    exitcode = _main(fd, parent_sentinel)
  File "C:\Mambaforge\lib\multiprocessing\spawn.py", line 125, in _main
    prepare(preparation_data)
  File "C:\Mambaforge\lib\multiprocessing\spawn.py", line 236, in prepare
    _fixup_main_from_path(data['init_main_from_path'])
  File "C:\Mambaforge\lib\multiprocessing\spawn.py", line 287, in _fixup_main_from_path
    main_content = runpy.run_path(main_path,
  File "C:\Mambaforge\lib\runpy.py", line 268, in run_path
    return _run_module_code(code, init_globals, run_name,
  File "C:\Mambaforge\lib\runpy.py", line 97, in _run_module_code
    _run_code(code, mod_globals, init_globals,
  File "C:\Mambaforge\lib\runpy.py", line 87, in _run_code
    exec(code, run_globals)
  File "c:\Users\XXX\untitled0.py", line 13, in <module>
    pool = Pool(4)
  File "C:\Mambaforge\lib\multiprocessing\context.py", line 119, in Pool
    return Pool(processes, initializer, initargs, maxtasksperchild,
  File "C:\Mambaforge\lib\multiprocessing\pool.py", line 212, in __init__
    self._repopulate_pool()
  File "C:\Mambaforge\lib\multiprocessing\pool.py", line 303, in _repopulate_pool
    return self._repopulate_pool_static(self._ctx, self.Process,
  File "C:\Mambaforge\lib\multiprocessing\pool.py", line 326, in _repopulate_pool_static
    w.start()
  File "C:\Mambaforge\lib\multiprocessing\process.py", line 121, in start
    self._popen = self._Popen(self)
  File "C:\Mambaforge\lib\multiprocessing\context.py", line 327, in _Popen
    return Popen(process_obj)
  File "C:\Mambaforge\lib\multiprocessing\popen_spawn_win32.py", line 45, in __init__
    prep_data = spawn.get_preparation_data(process_obj._name)
  File "C:\Mambaforge\lib\multiprocessing\spawn.py", line 154, in get_preparation_data
    _check_not_importing_main()
  File "C:\Mambaforge\lib\multiprocessing\spawn.py", line 134, in _check_not_importing_main
    raise RuntimeError('''
Answer:
On Windows, multiprocessing uses the spawn start method, so each worker re-imports the main module; the unguarded Pool(4) at module level re-runs during that import, which raises the RuntimeError above. The fix is exactly the idiom the error message describes: guard the pool creation behind if __name__ == '__main__'. Code reconstructed to use the main-module idiom:
import pandas as pd
import random
import time
from multiprocessing import Pool

def new_df(rows=10000):  # proxy for a complex dataframe
    temp = pd.DataFrame({'a': [''.join(chr(random.randint(65, 122)) for _ in range(200))
                               for _ in range(rows)]})
    temp['b'] = temp['a'].str.lower()
    temp['c'] = temp['a'].str.upper()
    return temp

def main():
    start = time.perf_counter()
    with Pool(4) as pool:
        # pool.map returns a list with one DataFrame per input value
        out = pool.map(new_df, [9999, 10000, 10001, 10002])
    print(f"{time.perf_counter() - start:.2f}s")

if __name__ == '__main__':
    main()
Output:
1.24s
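Since pool.map already returns its results as a list, out above is exactly the list the question asks for, in input order. If the goal is literally to append each result as it finishes, Pool.imap_unordered yields results in completion order instead; a minimal sketch of that variant (it assumes the same imports and new_df definition as above):

def main():
    out = []
    with Pool(4) as pool:
        # append each DataFrame as soon as its worker finishes
        for df in pool.imap_unordered(new_df, [9999, 10000, 10001, 10002]):
            out.append(df)
    # the pieces can then be combined into a single DataFrame, if desired
    combined = pd.concat(out, ignore_index=True)
    print(combined.shape)

if __name__ == '__main__':
    main()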