How to fix 'TypeError: can't pickle module objects' during multiprocessing?

I am trying to implement multiprocessing, but I am having difficulties accessing information from the scans object that I am passing through the pool.map() function.

Before multiprocessing (this works perfectly):

for sc in scans:
    my_file = scans[sc].resources['DICOM'].files[0]

After multiprocessing (does not work, error shown below):

import os
from multiprocessing import Pool

def process(the_scan):
    my_file = the_scan.resources['DICOM'].files[0]

def another_method():
    ...
    pool = Pool(os.cpu_count())
    pool.map(process, [scans[sc] for sc in scans])

another_method()
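
To rule out the multiprocessing machinery itself, here is a minimal sketch (hypothetical names, plain strings instead of the XNAT scan objects) that runs fine; pool.map() pickles every item in the iterable before handing it to a worker process, and plain strings pickle without trouble:

# minimal sketch, not the XNAT code: passing picklable strings works
import os
from multiprocessing import Pool

def process_id(scan_id):
    return scan_id.lower()

if __name__ == '__main__':
    with Pool(os.cpu_count()) as pool:
        print(pool.map(process_id, ['ID_7a037c2d39']))  # ['id_7a037c2d39']

So the problem seems to be what is inside the scan objects, not how pool.map() is called.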

More information about the scans object:

print(type(scans))
#<class 'xnat.core.XNATListing'>

print(scans)
#<XNATListing {(ID_7a037c2d39, unknown): <CtScanData unknown (ID_7a037c2d39)>}>

print(sc)
#ID_7a037c2d39

print(scans[sc])
#<CtScanData unknown (ID_7a037c2d39)> /data/projects/test/subjects/XNAT_S00004/experiments/XNAT_E00004/scans/ID_7a037c2d39/resources/5/files/ID_0a0eebac2.dcm

The error I am getting with the 'After multiprocessing' code:

---> 24         pool.map(process, [scans[sc] for sc in scans])

~/opt/anaconda3/lib/python3.7/multiprocessing/pool.py in map(self, func, iterable, chunksize)
    266         in a list that is returned.
    267         '''
--> 268         return self._map_async(func, iterable, mapstar, chunksize).get()
    269 
    270     def starmap(self, func, iterable, chunksize=None):

~/opt/anaconda3/lib/python3.7/multiprocessing/pool.py in get(self, timeout)
    655             return self._value
    656         else:
--> 657             raise self._value
    658 
    659     def _set(self, i, obj):

~/opt/anaconda3/lib/python3.7/multiprocessing/pool.py in _handle_tasks(taskqueue, put, outqueue, pool, cache)
    429                         break
    430                     try:
--> 431                         put(task)
    432                     except Exception as e:
    433                         job, idx = task[:2]

~/opt/anaconda3/lib/python3.7/multiprocessing/connection.py in send(self, obj)
    204         self._check_closed()
    205         self._check_writable()
--> 206         self._send_bytes(_ForkingPickler.dumps(obj))
    207 
    208     def recv_bytes(self, maxlength=None):

~/opt/anaconda3/lib/python3.7/multiprocessing/reduction.py in dumps(cls, obj, protocol)
     49     def dumps(cls, obj, protocol=None):
     50         buf = io.BytesIO()
---> 51         cls(buf, protocol).dump(obj)
     52         return buf.getbuffer()
     53 

TypeError: can't pickle module objects
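
My best guess at what is going on, as a minimal sketch (hypothetical class, not the xnat library): an object whose state holds a module reference, the way a live connection/session object typically does, cannot be pickled, and that is the same kind of TypeError shown above:

# minimal reproduction sketch, unrelated to xnat: an instance whose state
# contains a module reference fails to pickle
import io
import pickle

class HoldsModule:
    def __init__(self):
        self.session_module = io  # stand-in for an internal module/session reference

try:
    pickle.dumps(HoldsModule())
except TypeError as exc:
    print(exc)  # e.g. "cannot pickle 'module' object"

If that is the cause, what is the recommended way to hand these scan objects (or something equivalent) to the worker processes?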