Why doesn't LoadLibrary work in Worker function when using multiprocessing?
I'm trying to reduce the program's running time by calling a DLL from the mpCalcDllWorker function, but the DLL returns garbage values. When the same DLL is called without multiprocessing (in a single process), it works flawlessly.
Here is a simplified example.
import ctypes
import multiprocessing as mp
import numpy as np


def mpCalcDllWorker(nodes, queue, arrD):
    """ Worker process that does the calculation through the DLL
    """
    dll_fn = "***"
    lib = ctypes.cdll.LoadLibrary(dll_fn)
    print("lib:", lib)
    # NumPy view over the shared result buffer
    D = np.reshape(np.frombuffer(arrD), 1000000)
    while True:
        job = queue.get()
        if job is None:
            break
        start = job[0]
        stop = job[1]
        lib.dll_add_ts.restype = ctypes.c_double
        ret = lib.dll_add_ts(nodes.ctypes.data_as(ctypes.c_void_p),
                             ctypes.c_int(start), ctypes.c_int(stop))
        print("start", start, "stop", stop, ret)
        print("nodes", nodes)
        D[start] = ret
        queue.task_done()
    # account for the None sentinel pulled off the queue
    queue.task_done()
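For context, the ctypes call above assumes the DLL exports something like double dll_add_ts(double *nodes, int start, int stop); the real DLL path is elided ("***"), so the library name and the signature in the sketch below are assumptions. This is a standalone sketch of the same declaration, with argtypes spelled out as well as restype:

# Hedged sketch of the ctypes setup used in mpCalcDllWorker.
# Assumption: the DLL exports double dll_add_ts(double *nodes, int start, int stop);
# "mylib.dll" is a placeholder for the elided path.
import ctypes
import numpy as np

lib = ctypes.cdll.LoadLibrary("mylib.dll")     # placeholder DLL name
lib.dll_add_ts.restype = ctypes.c_double       # the function returns a double
lib.dll_add_ts.argtypes = [ctypes.c_void_p,    # pointer to the nodes buffer
                           ctypes.c_int,       # start index
                           ctypes.c_int]       # stop index

nodes = np.arange(100, dtype=np.float64)       # illustrative input
ret = lib.dll_add_ts(nodes.ctypes.data_as(ctypes.c_void_p), 0, 10)
print(ret)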
def mpCalcWorker(nodes, queue, arrD):
    """ Worker process for the multiprocessing calculations
    """
    D = np.reshape(np.frombuffer(arrD), 1000000)
    while True:
        job = queue.get()
        if job is None:
            break
        start = job[0]
        stop = job[1]
        D[start] = sum(nodes[start:stop])
        queue.task_done()
    queue.task_done()
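Both workers rely on the same shared-memory pattern: the parent allocates an mp.RawArray of c_double and every process wraps it with np.frombuffer, so writes land in the same buffer without copying. A minimal, self-contained sketch of just that pattern (the size and values are illustrative):

import ctypes
import multiprocessing as mp
import numpy as np

# Shared, lock-free buffer for 1,000,000 doubles, allocated in the parent.
arrD = mp.RawArray(ctypes.c_double, 1000000)

# Any process that receives arrD can view it as a NumPy array without copying;
# np.frombuffer defaults to float64, which matches c_double.
D = np.frombuffer(arrD)
D[0] = 3.14
print(D.shape, D[0])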
def mpCalcSums(arr, res, tp=0):
    # allocate shared array
    arrD = mp.RawArray(ctypes.c_double, 1000000)
    # set up jobs
    nCPU = mp.cpu_count()
    jobs = []
    for i in range(100000):
        jobs.append((i, i + 10000))
    queue = mp.JoinableQueue()
    for job in jobs:
        queue.put(job)
    for i in range(nCPU):
        queue.put(None)
    # run workers
    workers = []
    for i in range(nCPU):
        if tp == 0:
            worker = mp.Process(target=mpCalcWorker, args=(arr, queue, arrD))
        if tp == 1:
            worker = mp.Process(target=mpCalcDllWorker, args=(arr, queue, arrD))
        workers.append(worker)
        # worker.daemon = True - does not affect the error
        worker.start()
    queue.join()
    # make an array from the shared memory
    D = np.reshape(np.frombuffer(arrD), res.shape)
    return D
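The original post does not show how mpCalcSums is invoked; this is a sketch under assumed inputs (the arrays are placeholders, and only res.shape is used for the final reshape). The __main__ guard is the standard requirement when multiprocessing spawns new processes, e.g. on Windows:

if __name__ == "__main__":
    # Illustrative inputs: 1,000,000 input values and a result array
    # whose shape is reused when unpacking the shared memory.
    arr = np.random.rand(1000000)
    res = np.zeros(1000000)

    D_py = mpCalcSums(arr, res, tp=0)    # pure-Python worker: results are correct
    D_dll = mpCalcSums(arr, res, tp=1)   # DLL worker: results come back as garbage
    print(D_py[:5])
    print(D_dll[:5])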