Skip to content

Cloud Pickle cannot serialize some objects. #74

@arita37

Description

@arita37

I use map to execute some code.

############## Testing of ipyparallel on DEAP  ###################################
# DEAP GP setup: a minimizing single-objective fitness, individuals are
# primitive trees carrying that fitness.
creator.create("FitnessMin", base.Fitness, weights=(-1.0,))
creator.create("Individual", gp.PrimitiveTree, fitness=creator.FitnessMin)

toolbox = base.Toolbox()

# Using parallel processing via ipyparallel.
# (Fixed: the original paste had stray markdown "**" bold markers around this
# section, which made the snippet a SyntaxError; also split the combined
# import onto one import per line.)
import ipyparallel as ipp
import time

rc = ipp.Client()
# pool = rc.load_balanced_view()
rc[:].use_cloudpickle()  # ship closures with cloudpickle instead of plain pickle
pool = rc[:]
toolbox.register("map", pool.map)  # DEAP will use the parallel map for evaluation


toolbox.register("expr", gp.genHalfAndHalf, pset=pset, min_=1, max_=2)
toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.expr)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
toolbox.register("compile", gp.compile, pset=pset)


def evalSymbReg(individual, points):
    """Return the mean squared error of *individual* on *points* as a 1-tuple.

    The target is the polynomial x**4 + x**3 + x**2 + x; the tree expression
    is compiled into a callable before being compared against it.
    """
    func = toolbox.compile(expr=individual)
    squared_errors = [(func(x) - x**4 - x**3 - x**2 - x) ** 2 for x in points]
    # Trailing comma: DEAP fitnesses are tuples, even for a single objective.
    return math.fsum(squared_errors) / len(points),


# Register the evolutionary operators: evaluation over 20 sample points in
# [-1.0, 1.0), tournament selection, one-point crossover, and uniform mutation
# drawing replacement subtrees from genFull.
toolbox.register("evaluate", evalSymbReg, points=[i / 10. for i in range(-10, 10)])
toolbox.register("select", tools.selTournament, tournsize=3)
toolbox.register("mate", gp.cxOnePoint)
toolbox.register("expr_mut", gp.genFull, min_=0, max_=2)
toolbox.register("mutate", gp.mutUniform, expr=toolbox.expr_mut, pset=pset)

# Limit tree height to 17 after both variation operators to curb bloat.
for variation_op in ("mate", "mutate"):
    toolbox.decorate(
        variation_op,
        gp.staticLimit(key=operator.attrgetter("height"), max_value=17),
    )

def main():
    """Run one eaSimple GP search and return (population, logbook, hall of fame)."""
    random.seed(318)  # fixed seed so runs are reproducible

    population = toolbox.population(n=300)
    hall_of_fame = tools.HallOfFame(1)

    # Track fitness values and tree sizes under one multi-statistics object.
    fitness_stats = tools.Statistics(lambda ind: ind.fitness.values)
    size_stats = tools.Statistics(len)
    multi_stats = tools.MultiStatistics(fitness=fitness_stats, size=size_stats)
    for label, aggregate in (("avg", np.mean), ("std", np.std),
                             ("min", np.min), ("max", np.max)):
        multi_stats.register(label, aggregate)

    # cxpb=0.5, mutpb=0.1, ngen=40
    population, logbook = algorithms.eaSimple(
        population, toolbox, 0.5, 0.1, 40,
        stats=multi_stats, halloffame=hall_of_fame, verbose=True,
    )
    return population, logbook, hall_of_fame

I got this error:

---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-11-978da9be5b87> in <module>()
      1 if __name__ == "__main__":
----> 2     pop, log, hof= main()

<ipython-input-10-6ff69ab06682> in main()
     15 
     16     pop, log = algorithms.eaSimple(pop, toolbox, 0.5, 0.1, 40, stats=mstats,
---> 17                                    halloffame=hof, verbose=True)
     18     # print log
     19     return pop, log, hof

D:\_devs\Python01\Anaconda27\lib\site-packages\deap\algorithms.pyc in eaSimple(population, toolbox, cxpb, mutpb, ngen, stats, halloffame, verbose)
    145     # Evaluate the individuals with an invalid fitness
    146     invalid_ind = [ind for ind in population if not ind.fitness.valid]
--> 147     fitnesses = toolbox.map(toolbox.evaluate, invalid_ind)
    148     for ind, fit in zip(invalid_ind, fitnesses):
    149         ind.fitness.values = fit

<decorator-gen-141> in map(self, f, *sequences, **kwargs)

D:\_devs\Python01\Anaconda27\lib\site-packages\ipyparallel\client\view.pyc in sync_results(f, self, *args, **kwargs)
     48     self._in_sync_results = True
     49     try:
---> 50         ret = f(self, *args, **kwargs)
     51     finally:
     52         self._in_sync_results = False

D:\_devs\Python01\Anaconda27\lib\site-packages\ipyparallel\client\view.pyc in map(self, f, *sequences, **kwargs)
    613         assert len(sequences) > 0, "must have some sequences to map onto!"
    614         pf = ParallelFunction(self, f, block=block, **kwargs)
--> 615         return pf.map(*sequences)
    616 
    617     @sync_results

D:\_devs\Python01\Anaconda27\lib\site-packages\ipyparallel\client\remotefunction.pyc in map(self, *sequences)
    283         and mismatched sequence lengths will be padded with None.
    284         """
--> 285         return self(*sequences, __ipp_mapping=True)
    286 
    287 __all__ = ['remote', 'parallel', 'RemoteFunction', 'ParallelFunction']

<decorator-gen-131> in __call__(self, *sequences, **kwargs)

D:\_devs\Python01\Anaconda27\lib\site-packages\ipyparallel\client\remotefunction.pyc in sync_view_results(f, self, *args, **kwargs)
     74     view = self.view
     75     if view._in_sync_results:
---> 76         return f(self, *args, **kwargs)
     77     view._in_sync_results = True
     78     try:

D:\_devs\Python01\Anaconda27\lib\site-packages\ipyparallel\client\remotefunction.pyc in __call__(self, *sequences, **kwargs)
    257             view = self.view if balanced else client[t]
    258             with view.temp_flags(block=False, **self.flags):
--> 259                 ar = view.apply(f, *args)
    260                 ar.owner = False
    261 

D:\_devs\Python01\Anaconda27\lib\site-packages\ipyparallel\client\view.pyc in apply(self, f, *args, **kwargs)
    209         ``f(*args, **kwargs)``.
    210         """
--> 211         return self._really_apply(f, args, kwargs)
    212 
    213     def apply_async(self, f, *args, **kwargs):

<decorator-gen-140> in _really_apply(self, f, args, kwargs, targets, block, track)

D:\_devs\Python01\Anaconda27\lib\site-packages\ipyparallel\client\view.pyc in sync_results(f, self, *args, **kwargs)
     48     self._in_sync_results = True
     49     try:
---> 50         ret = f(self, *args, **kwargs)
     51     finally:
     52         self._in_sync_results = False

<decorator-gen-139> in _really_apply(self, f, args, kwargs, targets, block, track)

D:\_devs\Python01\Anaconda27\lib\site-packages\ipyparallel\client\view.pyc in save_ids(f, self, *args, **kwargs)
     33     n_previous = len(self.client.history)
     34     try:
---> 35         ret = f(self, *args, **kwargs)
     36     finally:
     37         nmsgs = len(self.client.history) - n_previous

D:\_devs\Python01\Anaconda27\lib\site-packages\ipyparallel\client\view.pyc in _really_apply(self, f, args, kwargs, targets, block, track)
    555         for ident in _idents:
    556             future = self.client.send_apply_request(self._socket, f, args, kwargs, track=track,
--> 557                                     ident=ident)
    558             futures.append(future)
    559         if track:

D:\_devs\Python01\Anaconda27\lib\site-packages\ipyparallel\client\client.pyc in send_apply_request(self, socket, f, args, kwargs, metadata, track, ident)
   1387         bufs = serialize.pack_apply_message(f, args, kwargs,
   1388             buffer_threshold=self.session.buffer_threshold,
-> 1389             item_threshold=self.session.item_threshold,
   1390         )
   1391 

D:\_devs\Python01\Anaconda27\lib\site-packages\ipyparallel\serialize\serialize.pyc in pack_apply_message(f, args, kwargs, buffer_threshold, item_threshold)
    164 
    165     arg_bufs = list(chain.from_iterable(
--> 166         serialize_object(arg, buffer_threshold, item_threshold) for arg in args))
    167 
    168     kw_keys = sorted(kwargs.keys())

D:\_devs\Python01\Anaconda27\lib\site-packages\ipyparallel\serialize\serialize.pyc in <genexpr>((arg,))
    164 
    165     arg_bufs = list(chain.from_iterable(
--> 166         serialize_object(arg, buffer_threshold, item_threshold) for arg in args))
    167 
    168     kw_keys = sorted(kwargs.keys())

D:\_devs\Python01\Anaconda27\lib\site-packages\ipyparallel\serialize\serialize.pyc in serialize_object(obj, buffer_threshold, item_threshold)
    110         buffers.extend(_extract_buffers(cobj, buffer_threshold))
    111 
--> 112     buffers.insert(0, pickle.dumps(cobj, PICKLE_PROTOCOL))
    113     return buffers
    114 

D:\_devs\Python01\Anaconda27\lib\site-packages\cloudpickle\cloudpickle.pyc in dumps(obj, protocol)
    627 
    628     cp = CloudPickler(file,protocol)
--> 629     cp.dump(obj)
    630 
    631     return file.getvalue()

D:\_devs\Python01\Anaconda27\lib\site-packages\cloudpickle\cloudpickle.pyc in dump(self, obj)
    105         self.inject_addons()
    106         try:
--> 107             return Pickler.dump(self, obj)
    108         except RuntimeError as e:
    109             if 'recursion' in e.args[0]:

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in dump(self, obj)
    222         if self.proto >= 2:
    223             self.write(PROTO + chr(self.proto))
--> 224         self.save(obj)
    225         self.write(STOP)
    226 

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save(self, obj)
    329 
    330         # Save the reduce() output and finally memoize the object
--> 331         self.save_reduce(obj=obj, *rv)
    332 
    333     def persistent_id(self, obj):

D:\_devs\Python01\Anaconda27\lib\site-packages\cloudpickle\cloudpickle.pyc in save_reduce(self, func, args, state, listitems, dictitems, obj)
    527         else:
    528             save(func)
--> 529             save(args)
    530             write(pickle.REDUCE)
    531 

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save(self, obj)
    284         f = self.dispatch.get(t)
    285         if f:
--> 286             f(self, obj) # Call unbound method with explicit self
    287             return
    288 

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save_tuple(self, obj)
    552         if n <= 3 and proto >= 2:
    553             for element in obj:
--> 554                 save(element)
    555             # Subtle.  Same as in the big comment below.
    556             if id(obj) in memo:

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save(self, obj)
    284         f = self.dispatch.get(t)
    285         if f:
--> 286             f(self, obj) # Call unbound method with explicit self
    287             return
    288 

D:\_devs\Python01\Anaconda27\lib\site-packages\cloudpickle\cloudpickle.pyc in save_function(self, obj, name)
    203                 or getattr(obj.__code__, 'co_filename', None) == '<stdin>'
    204                 or themodule is None):
--> 205             self.save_function_tuple(obj)
    206             return
    207         else:

D:\_devs\Python01\Anaconda27\lib\site-packages\cloudpickle\cloudpickle.pyc in save_function_tuple(self, func)
    251 
    252         # save the rest of the func data needed by _fill_function
--> 253         save(f_globals)
    254         save(defaults)
    255         save(dct)

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save(self, obj)
    284         f = self.dispatch.get(t)
    285         if f:
--> 286             f(self, obj) # Call unbound method with explicit self
    287             return
    288 

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save_dict(self, obj)
    653 
    654         self.memoize(obj)
--> 655         self._batch_setitems(obj.iteritems())
    656 
    657     dispatch[DictionaryType] = save_dict

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in _batch_setitems(self, items)
    685                 for k, v in tmp:
    686                     save(k)
--> 687                     save(v)
    688                 write(SETITEMS)
    689             elif n:

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save(self, obj)
    329 
    330         # Save the reduce() output and finally memoize the object
--> 331         self.save_reduce(obj=obj, *rv)
    332 
    333     def persistent_id(self, obj):

D:\_devs\Python01\Anaconda27\lib\site-packages\cloudpickle\cloudpickle.pyc in save_reduce(self, func, args, state, listitems, dictitems, obj)
    545 
    546         if state is not None:
--> 547             save(state)
    548             write(pickle.BUILD)
    549 

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save(self, obj)
    284         f = self.dispatch.get(t)
    285         if f:
--> 286             f(self, obj) # Call unbound method with explicit self
    287             return
    288 

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save_dict(self, obj)
    653 
    654         self.memoize(obj)
--> 655         self._batch_setitems(obj.iteritems())
    656 
    657     dispatch[DictionaryType] = save_dict

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in _batch_setitems(self, items)
    685                 for k, v in tmp:
    686                     save(k)
--> 687                     save(v)
    688                 write(SETITEMS)
    689             elif n:

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save(self, obj)
    329 
    330         # Save the reduce() output and finally memoize the object
--> 331         self.save_reduce(obj=obj, *rv)
    332 
    333     def persistent_id(self, obj):

D:\_devs\Python01\Anaconda27\lib\site-packages\cloudpickle\cloudpickle.pyc in save_reduce(self, func, args, state, listitems, dictitems, obj)
    545 
    546         if state is not None:
--> 547             save(state)
    548             write(pickle.BUILD)
    549 

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save(self, obj)
    284         f = self.dispatch.get(t)
    285         if f:
--> 286             f(self, obj) # Call unbound method with explicit self
    287             return
    288 

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save_tuple(self, obj)
    566         write(MARK)
    567         for element in obj:
--> 568             save(element)
    569 
    570         if id(obj) in memo:

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save(self, obj)
    284         f = self.dispatch.get(t)
    285         if f:
--> 286             f(self, obj) # Call unbound method with explicit self
    287             return
    288 

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save_dict(self, obj)
    653 
    654         self.memoize(obj)
--> 655         self._batch_setitems(obj.iteritems())
    656 
    657     dispatch[DictionaryType] = save_dict

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in _batch_setitems(self, items)
    685                 for k, v in tmp:
    686                     save(k)
--> 687                     save(v)
    688                 write(SETITEMS)
    689             elif n:

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save(self, obj)
    329 
    330         # Save the reduce() output and finally memoize the object
--> 331         self.save_reduce(obj=obj, *rv)
    332 
    333     def persistent_id(self, obj):

D:\_devs\Python01\Anaconda27\lib\site-packages\cloudpickle\cloudpickle.pyc in save_reduce(self, func, args, state, listitems, dictitems, obj)
    545 
    546         if state is not None:
--> 547             save(state)
    548             write(pickle.BUILD)
    549 

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save(self, obj)
    284         f = self.dispatch.get(t)
    285         if f:
--> 286             f(self, obj) # Call unbound method with explicit self
    287             return
    288 

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save_dict(self, obj)
    653 
    654         self.memoize(obj)
--> 655         self._batch_setitems(obj.iteritems())
    656 
    657     dispatch[DictionaryType] = save_dict

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in _batch_setitems(self, items)
    685                 for k, v in tmp:
    686                     save(k)
--> 687                     save(v)
    688                 write(SETITEMS)
    689             elif n:

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save(self, obj)
    284         f = self.dispatch.get(t)
    285         if f:
--> 286             f(self, obj) # Call unbound method with explicit self
    287             return
    288 

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save_dict(self, obj)
    653 
    654         self.memoize(obj)
--> 655         self._batch_setitems(obj.iteritems())
    656 
    657     dispatch[DictionaryType] = save_dict

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in _batch_setitems(self, items)
    684                 write(MARK)
    685                 for k, v in tmp:
--> 686                     save(k)
    687                     save(v)
    688                 write(SETITEMS)

D:\_devs\Python01\Anaconda27\lib\pickle.pyc in save(self, obj)
    304             reduce = getattr(obj, "__reduce_ex__", None)
    305             if reduce:
--> 306                 rv = reduce(self.proto)
    307             else:
    308                 reduce = getattr(obj, "__reduce__", None)

TypeError: can't pickle member_descriptor objects

Metadata

Metadata

Assignees

No one assigned

    Labels

    No labels
    No labels

    Type

    No type

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions