pickle.dumps

Here are examples of the Python API pickle.dumps, taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

200 Examples
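
As a quick orientation before the project examples, here is a minimal sketch of the call itself (not taken from any of the projects below): pickle.dumps(obj, protocol) serializes an object to a byte string and pickle.loads reverses it; the optional protocol argument selects the pickle format, with -1 meaning the highest protocol available.

import pickle

data = {'a': 1, 'b': 2}

# Round trip with the default protocol
blob = pickle.dumps(data)
assert pickle.loads(blob) == data

# Explicit protocol; -1 selects the highest available
assert pickle.loads(pickle.dumps(data, -1)) == data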

Example 1

Project: pyfilesystem
Source File: __init__.py
    def test_pickling(self):
        if self.fs.getmeta('pickle_contents', True):
            self.fs.setcontents("test1", b("hello world"))
            fs2 = pickle.loads(pickle.dumps(self.fs))
            self.assert_(fs2.isfile("test1"))
            fs3 = pickle.loads(pickle.dumps(self.fs, -1))
            self.assert_(fs3.isfile("test1"))
        else:
            # Just make sure it doesn't throw an exception
            fs2 = pickle.loads(pickle.dumps(self.fs))

Example 2

Project: performance
Source File: bm_pickle.py
def bench_pickle_dict(loops, pickle, options):
    range_it = xrange(loops)
    # micro-optimization: use fast local variables
    protocol = options.protocol
    obj = MICRO_DICT
    t0 = perf.perf_counter()

    for _ in range_it:
        # 5 dumps dict
        pickle.dumps(obj, protocol)
        pickle.dumps(obj, protocol)
        pickle.dumps(obj, protocol)
        pickle.dumps(obj, protocol)
        pickle.dumps(obj, protocol)

    return perf.perf_counter() - t0

Example 3

Project: toolz
Source File: test_serialization.py
def test_flip():
    flip = pickle.loads(pickle.dumps(toolz.functoolz.flip))
    assert flip is toolz.functoolz.flip
    g1 = flip(f)
    g2 = pickle.loads(pickle.dumps(g1))
    assert g1(1, 2) == g2(1, 2) == f(2, 1)
    g1 = flip(f)(1)
    g2 = pickle.loads(pickle.dumps(g1))
    assert g1(2) == g2(2) == f(2, 1)

Example 4

Project: imagrium
Source File: test_bool.py
    def test_pickle(self):
        import pickle
        self.assertIs(pickle.loads(pickle.dumps(True)), True)
        self.assertIs(pickle.loads(pickle.dumps(False)), False)
        self.assertIs(pickle.loads(pickle.dumps(True, True)), True)
        self.assertIs(pickle.loads(pickle.dumps(False, True)), False)

Example 5

Project: imagrium
Source File: test_bool.py
    def test_picklevalues(self):
        import pickle, cPickle

        # Test for specific backwards-compatible pickle values
        self.assertEqual(pickle.dumps(True), "I01\n.")
        self.assertEqual(pickle.dumps(False), "I00\n.")
        self.assertEqual(cPickle.dumps(True), "I01\n.")
        self.assertEqual(cPickle.dumps(False), "I00\n.")
        self.assertEqual(pickle.dumps(True, True), "I01\n.")
        self.assertEqual(pickle.dumps(False, True), "I00\n.")
        self.assertEqual(cPickle.dumps(True, True), "I01\n.")
        self.assertEqual(cPickle.dumps(False, True), "I00\n.")

Example 6

Project: babble
Source File: test_bool.py
    def test_pickle(self):
        import pickle
        self.assertIs(pickle.loads(pickle.dumps(True)), True)
        self.assertIs(pickle.loads(pickle.dumps(False)), False)
        self.assertIs(pickle.loads(pickle.dumps(True, True)), True)
        self.assertIs(pickle.loads(pickle.dumps(False, True)), False)

Example 7

Project: babble
Source File: test_bool.py
    def test_picklevalues(self):
        import pickle, cPickle

        # Test for specific backwards-compatible pickle values
        self.assertEqual(pickle.dumps(True), "I01\n.")
        self.assertEqual(pickle.dumps(False), "I00\n.")
        self.assertEqual(cPickle.dumps(True), "I01\n.")
        self.assertEqual(cPickle.dumps(False), "I00\n.")
        self.assertEqual(pickle.dumps(True, True), "I01\n.")
        self.assertEqual(pickle.dumps(False, True), "I00\n.")
        self.assertEqual(cPickle.dumps(True, True), "I01\n.")
        self.assertEqual(cPickle.dumps(False, True), "I00\n.")

Example 8

Project: freezegun
Source File: test_pickle.py
def test_pickle_real_date():
    real_date = datetime.date(1970, 2, 1)
    assert pickle.loads(pickle.dumps(real_date)) == real_date

    freezer = freeze_time("1970-01-01")
    freezer.start()
    fake_date = datetime.datetime.now()
    assert pickle.loads(pickle.dumps(fake_date)) == fake_date
    pickle.loads(pickle.dumps(real_date))
    freezer.stop()

    assert pickle.loads(pickle.dumps(fake_date)) == fake_date
    assert pickle.loads(pickle.dumps(real_date)) == real_date

Example 9

Project: zorro
Source File: test_zmq.py
    @interactive(setup_req)
    def testRequests(self):
        ctx = zmq.Context(1)
        sock = ctx.socket(zmq.REP)
        sock.bind('ipc:///tmp/zorro-pool-test')
        self.assertEqual(sock.recv_multipart(),
            [b'hello', pickle.dumps('Test')])
        sock.send_multipart([b'_result', pickle.dumps('hi')])
        self.assertEqual(sock.recv_multipart(),
            [b'jim.hello', pickle.dumps('John')])
        sock.send_multipart([b'_result', pickle.dumps('hihi')])
        self.assertEqual(sock.recv_multipart(), [b'jim.hello'])
        sock.send_multipart([b'_error', b'bad_method'])
        self.assertEqual(sock.recv_multipart(), [b'jim.hello_world'])
        sock.send_multipart([b'_exception',
            repr(ValueError('test')).encode('ascii')])

Example 10

Project: plumbum
Source File: test_local.py
    def test_pickle(self):
        path1 = local.path('.')
        path2 = local.path('~')
        assert pickle.loads(pickle.dumps(self.longpath)) == self.longpath
        assert pickle.loads(pickle.dumps(path1)) == path1
        assert pickle.loads(pickle.dumps(path2)) == path2

Example 11

Project: TrustRouter
Source File: test_bool.py
    def test_pickle(self):
        import pickle
        self.assertIs(pickle.loads(pickle.dumps(True)), True)
        self.assertIs(pickle.loads(pickle.dumps(False)), False)
        self.assertIs(pickle.loads(pickle.dumps(True, True)), True)
        self.assertIs(pickle.loads(pickle.dumps(False, True)), False)

Example 12

Project: TrustRouter
Source File: test_bool.py
    def test_picklevalues(self):
        # Test for specific backwards-compatible pickle values
        import pickle
        self.assertEqual(pickle.dumps(True, protocol=0), b"I01\n.")
        self.assertEqual(pickle.dumps(False, protocol=0), b"I00\n.")
        self.assertEqual(pickle.dumps(True, protocol=1), b"I01\n.")
        self.assertEqual(pickle.dumps(False, protocol=1), b"I00\n.")
        self.assertEqual(pickle.dumps(True, protocol=2), b'\x80\x02\x88.')
        self.assertEqual(pickle.dumps(False, protocol=2), b'\x80\x02\x89.')

Example 13

Project: sqlconstruct
Source File: tests.py
    def test_object_pickling(self):
        ref = {'a': 1, 'b': 2}

        o1 = pickle.loads(pickle.dumps(Object(ref), 0))
        self.assertIs(type(o1), Object)
        self.assertEqual(dict(o1), ref)

        o2 = pickle.loads(pickle.dumps(Object(ref), 1))
        self.assertIs(type(o2), Object)
        self.assertEqual(dict(o2), ref)

        o3 = pickle.loads(pickle.dumps(Object(ref), 2))
        self.assertIs(type(o3), Object)
        self.assertEqual(dict(o3), ref)

Example 14

Project: brython
Source File: test_bool.py
    def test_pickle(self):
        import pickle
        self.assertIs(pickle.loads(pickle.dumps(True)), True)
        self.assertIs(pickle.loads(pickle.dumps(False)), False)
        self.assertIs(pickle.loads(pickle.dumps(True, True)), True)
        self.assertIs(pickle.loads(pickle.dumps(False, True)), False)

Example 15

Project: brython
Source File: test_bool.py
    def test_picklevalues(self):
        # Test for specific backwards-compatible pickle values
        import pickle
        self.assertEqual(pickle.dumps(True, protocol=0), b"I01\n.")
        self.assertEqual(pickle.dumps(False, protocol=0), b"I00\n.")
        self.assertEqual(pickle.dumps(True, protocol=1), b"I01\n.")
        self.assertEqual(pickle.dumps(False, protocol=1), b"I00\n.")
        self.assertEqual(pickle.dumps(True, protocol=2), b'\x80\x02\x88.')
        self.assertEqual(pickle.dumps(False, protocol=2), b'\x80\x02\x89.')

Example 16

Project: chipsec
Source File: test_bool.py
    def test_pickle(self):
        import pickle
        self.assertIs(pickle.loads(pickle.dumps(True)), True)
        self.assertIs(pickle.loads(pickle.dumps(False)), False)
        self.assertIs(pickle.loads(pickle.dumps(True, True)), True)
        self.assertIs(pickle.loads(pickle.dumps(False, True)), False)

Example 17

Project: chipsec
Source File: test_bool.py
    def test_picklevalues(self):
        import pickle, cPickle

        # Test for specific backwards-compatible pickle values
        self.assertEqual(pickle.dumps(True), "I01\n.")
        self.assertEqual(pickle.dumps(False), "I00\n.")
        self.assertEqual(cPickle.dumps(True), "I01\n.")
        self.assertEqual(cPickle.dumps(False), "I00\n.")
        self.assertEqual(pickle.dumps(True, True), "I01\n.")
        self.assertEqual(pickle.dumps(False, True), "I00\n.")
        self.assertEqual(cPickle.dumps(True, True), "I01\n.")
        self.assertEqual(cPickle.dumps(False, True), "I00\n.")

Example 18

Project: apscheduler
Source File: mongodb_store.py
    def add_job(self, job):
        job_dict = job.__getstate__()
        job_dict['trigger'] = Binary(pickle.dumps(job.trigger, self.pickle_protocol))
        job_dict['args'] = Binary(pickle.dumps(job.args, self.pickle_protocol))
        job_dict['kwargs'] = Binary(pickle.dumps(job.kwargs, self.pickle_protocol))
        job.id = self.collection.insert(job_dict)
        self.jobs.append(job)
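
Examples 19, 21 and 25 below repeat this pattern: the pickled trigger, args and kwargs are wrapped in bson.Binary so MongoDB stores the raw bytes verbatim. A minimal sketch of the round trip (hypothetical field name, assuming pymongo's bson package):

import pickle
from bson import Binary

# Binary subclasses bytes, so the wrapped pickle comes back out unchanged
record = {'trigger': Binary(pickle.dumps(['cron', '*/5'], 2))}
assert pickle.loads(record['trigger']) == ['cron', '*/5']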

Example 19

Project: CouchPotatoServer
Source File: mongodb_store.py
    def add_job(self, job):
        job_dict = job.__getstate__()
        job_dict['trigger'] = Binary(pickle.dumps(job.trigger,
                                                  self.pickle_protocol))
        job_dict['args'] = Binary(pickle.dumps(job.args,
                                               self.pickle_protocol))
        job_dict['kwargs'] = Binary(pickle.dumps(job.kwargs,
                                                 self.pickle_protocol))
        job.id = self.collection.insert(job_dict)
        self.jobs.append(job)

Example 20

Project: netaddr
Source File: test_eui.py
def test_eui_pickle_support():
    eui1 = EUI('00-00-00-01-02-03')
    eui2 = pickle.loads(pickle.dumps(eui1))
    assert eui1 == eui2

    eui1 = EUI('00-00-00-01-02-03', dialect=mac_cisco)
    eui2 = pickle.loads(pickle.dumps(eui1))
    assert eui1 == eui2
    assert eui1.dialect == eui2.dialect

    oui1 = EUI('00-00-00-01-02-03').oui
    oui2 = pickle.loads(pickle.dumps(oui1))
    assert oui1 == oui2
    assert oui1.records == oui2.records

    iab1 = EUI('00-50-C2-00-1F-FF').iab
    iab2 = pickle.loads(pickle.dumps(iab1))
    assert iab1 == iab2
    assert iab1.record == iab2.record

Example 21

Project: LazyLibrarian
Source File: mongodb_store.py
    def add_job(self, job):
        job_dict = job.__getstate__()
        job_dict['trigger'] = Binary(pickle.dumps(job.trigger,
                                                  self.pickle_protocol))
        job_dict['args'] = Binary(pickle.dumps(job.args,
                                               self.pickle_protocol))
        job_dict['kwargs'] = Binary(pickle.dumps(job.kwargs,
                                                 self.pickle_protocol))
        job.id = self.collection.insert(job_dict)
        self.jobs.append(job)

Example 22

Project: jeeves
Source File: ConcreteCache.py
    def get_cache_key(self, ctxt, val, pathvars):
        """Makes a cache key string by hashing the state of the context, value,
        and path variables involved in the concretization.

        :param ctxt: Output channel (viewer).
        :type ctxt: T, where policies have type T -> bool
        :param val: Value to concretize.
        :type val: FExpr
        :param pathvars: Path variables involved in the concretization.
        :type pathvars: PathVars
        :returns: A cache key string built from the hashed, pickled state, or
        the empty string when caching is disabled.
        """
        if self._should_cache:
            return str(hash(pickle.dumps(ctxt))) + "__" + \
                str(hash(pickle.dumps(val))) + "__" + \
                str(hash(pickle.dumps(pathvars)))
        else:
            return ""

Example 23

Project: kbengine
Source File: test_bool.py
    def test_pickle(self):
        import pickle
        self.assertIs(pickle.loads(pickle.dumps(True)), True)
        self.assertIs(pickle.loads(pickle.dumps(False)), False)
        self.assertIs(pickle.loads(pickle.dumps(True, True)), True)
        self.assertIs(pickle.loads(pickle.dumps(False, True)), False)

Example 24

Project: kbengine
Source File: test_bool.py
    def test_picklevalues(self):
        # Test for specific backwards-compatible pickle values
        import pickle
        self.assertEqual(pickle.dumps(True, protocol=0), b"I01\n.")
        self.assertEqual(pickle.dumps(False, protocol=0), b"I00\n.")
        self.assertEqual(pickle.dumps(True, protocol=1), b"I01\n.")
        self.assertEqual(pickle.dumps(False, protocol=1), b"I00\n.")
        self.assertEqual(pickle.dumps(True, protocol=2), b'\x80\x02\x88.')
        self.assertEqual(pickle.dumps(False, protocol=2), b'\x80\x02\x89.')

Example 25

Project: maraschino
Source File: mongodb_store.py
    def add_job(self, job):
        job_dict = job.__getstate__()
        job_dict['trigger'] = Binary(pickle.dumps(job.trigger,
                                                  self.pickle_protocol))
        job_dict['args'] = Binary(pickle.dumps(job.args,
                                               self.pickle_protocol))
        job_dict['kwargs'] = Binary(pickle.dumps(job.kwargs,
                                                 self.pickle_protocol))
        job.id = self.collection.insert(job_dict)
        self.jobs.append(job)

Example 26

Project: iot-utilities
Source File: test_bool.py
    def test_picklevalues(self):
        # Test for specific backwards-compatible pickle values
        import pickle
        self.assertEqual(pickle.dumps(True, protocol=0), b"I01\n.")
        self.assertEqual(pickle.dumps(False, protocol=0), b"I00\n.")
        self.assertEqual(pickle.dumps(True, protocol=1), b"I01\n.")
        self.assertEqual(pickle.dumps(False, protocol=1), b"I00\n.")
        self.assertEqual(pickle.dumps(True, protocol=2), b'\x80\x02\x88.')
        self.assertEqual(pickle.dumps(False, protocol=2), b'\x80\x02\x89.')

Example 27

Project: neuroConstruct
Source File: test_bool.py
    def test_picklevalues(self):
        import pickle, cPickle

        # Test for specific backwards-compatible pickle values
        self.assertEqual(pickle.dumps(True), "I01\n.")
        self.assertEqual(pickle.dumps(False), "I00\n.")
        self.assertEqual(cPickle.dumps(True), "I01\n.")
        self.assertEqual(cPickle.dumps(False), "I00\n.")
        self.assertEqual(pickle.dumps(True, True), "I01\n.")
        self.assertEqual(pickle.dumps(False, True), "I00\n.")
        self.assertEqual(cPickle.dumps(True, True), "I01\n.")
        self.assertEqual(cPickle.dumps(False, True), "I00\n.")

Example 28

Project: neuroConstruct
Source File: test_bool.py
    def test_pickle(self):
        import pickle
        self.assertIs(pickle.loads(pickle.dumps(True)), True)
        self.assertIs(pickle.loads(pickle.dumps(False)), False)
        self.assertIs(pickle.loads(pickle.dumps(True, True)), True)
        self.assertIs(pickle.loads(pickle.dumps(False, True)), False)

Example 29

Project: medicare-demo
Source File: test_bool.py
    def test_pickle(self):
        import pickle
        self.assertIs(pickle.loads(pickle.dumps(True)), True)
        self.assertIs(pickle.loads(pickle.dumps(False)), False)
        self.assertIs(pickle.loads(pickle.dumps(True, True)), True)
        self.assertIs(pickle.loads(pickle.dumps(False, True)), False)

Example 30

Project: medicare-demo
Source File: test_bool.py
    def test_picklevalues(self):
        import pickle, cPickle

        # Test for specific backwards-compatible pickle values
        self.assertEqual(pickle.dumps(True), "I01\n.")
        self.assertEqual(pickle.dumps(False), "I00\n.")
        self.assertEqual(cPickle.dumps(True), "I01\n.")
        self.assertEqual(cPickle.dumps(False), "I00\n.")
        self.assertEqual(pickle.dumps(True, True), "I01\n.")
        self.assertEqual(pickle.dumps(False, True), "I00\n.")
        self.assertEqual(cPickle.dumps(True, True), "I01\n.")
        self.assertEqual(cPickle.dumps(False, True), "I00\n.")

Example 31

Project: pyomo
Source File: test_pickle.py
def CreateTestMethod(test_case,
                     modelClass,
                     test_name,
                     symbolic_labels=False):

    # We do not want to test the plugin case on a model
    # class it is not capable of handling
    if not modelClass().validateCapabilities(test_case):
        return None

    # Skip this test if the solver is not available on the system
    if not test_case.available:
        def skipping_test(self):
            return self.skipTest('Solver unavailable: '
                                 +test_case.name+' ('+test_case.io+')')
        return skipping_test

    def pickle_test(self):

        #self.skipTest("Ignoring pickling tests for now")

        # Instantiate the model class
        model_class = modelClass()

        # Make sure we start from a new solver plugin
        # each time. We don't want them to maintain
        # some state that carries over between tests
        opt, io_options = test_case.initialize()

        try:
            if test_case.io == 'nl':
                self.assertEqual(opt.problem_format(), ProblemFormat.nl)
            elif test_case.io == 'lp':
                self.assertEqual(opt.problem_format(), ProblemFormat.cpxlp)
            elif test_case.io == 'python':
                self.assertEqual(opt.problem_format(), None)

            # check that the solver plugin is at least as capable as the
            # test_case advertises, otherwise the plugin capabilities need
            # to be changed or the test case should be removed
            if not all(opt.has_capability(tag)
                       for tag in test_case.capabilities):
                self.fail("Actual plugin capabilities are less than "
                          "that of the of test case for the plugin: "
                          +test_case.name+' ('+test_case.io+')')
        finally:
            opt.deactivate()

        # Create the model instance and send to the solver
        model_class.generateModel()
        model_class.warmstartModel()

        model = model_class.model
        self.assertTrue(model is not None)

        test_suffixes = [] if model_class.disableSuffixTests() else \
                        test_case.import_suffixes

        for suffix in test_suffixes:
            setattr(model,suffix,Suffix(direction=Suffix.IMPORT))

        def _solve(_model):
            _opt, io_options = test_case.initialize()
            try:
                if isinstance(_opt, PersistentSolver):
                    _opt.compile_instance(_model,
                                          symbolic_solver_labels=symbolic_labels)
                if _opt.warm_start_capable():
                    return _opt.solve(_model,
                                      symbolic_solver_labels=symbolic_labels,
                                      warmstart=True,
                                      **io_options)
                else:
                    return _opt.solve(_model,
                                      symbolic_solver_labels=symbolic_labels,
                                      **io_options)
            finally:
                _opt.deactivate()
            del _opt

        results = _solve(model)

        instance1 = model.clone()
        # try to pickle then unpickle instance
        instance2 = pickle.loads(pickle.dumps(instance1))
        self.assertNotEqual(id(instance1),id(instance2))

        # try to solve the original instance
        results1 = _solve(instance1)
        #instance1.solutions.load(results1)

        # try to solve the unpickled instance
        results2 = _solve(instance2)
        #instance2.solutions.load(results2)

        # try to pickle the instance and results,
        # then unpickle and load results
        inst, res = pickle.loads(pickle.dumps([instance1,results1]))
        #inst.solutions.load(res)

        # try to pickle the instance and results,
        # then unpickle and load results
        inst, res = pickle.loads(pickle.dumps([instance2,results2]))
        #inst.solutions.load(res)

    return pickle_test

Example 32

Project: performance
Source File: bm_pickle.py
def bench_unpickle(loops, pickle, options):
    pickled_dict = pickle.dumps(DICT, options.protocol)
    pickled_tuple = pickle.dumps(TUPLE, options.protocol)
    pickled_dict_group = pickle.dumps(DICT_GROUP, options.protocol)
    range_it = xrange(loops)

    # micro-optimization: use fast local variables
    loads = pickle.loads
    objs = (pickled_dict, pickled_tuple, pickled_dict_group)

    t0 = perf.perf_counter()
    for _ in range_it:
        for obj in objs:
            # 20 loads dict
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)

    return perf.perf_counter() - t0

Example 33

Project: uwsgi_tasks
Source File: tasks.py
    def get_message_content(self):
        base_message_dict = self.__getstate__()
        base_message_dict['setup'] = pickle.dumps(self.setup)

        for key in self.spooler_default_arguments:
            if key in self.setup:
                base_message_dict[key] = self.setup[key]

        # datetime and timedelta conversion
        at = base_message_dict.get('at')

        if at:
            if isinstance(at, timedelta):
                at += datetime.utcnow()

            if isinstance(at, datetime):
                at = calendar.timegm(at.timetuple())

            base_message_dict['at'] = str(at)

        logger.debug('Spooler base parameters: "%r"', base_message_dict)

        message_dict = base_message_dict.copy()
        message_dict.update({
            'args': pickle.dumps(self.args),
            'kwargs': pickle.dumps(self.kwargs)
        })

        if len(repr(message_dict)) >= UWSGI_MAXIMUM_MESSAGE_SIZE:
            # message too long for spooler - we have to use `body` parameter
            message_dict = base_message_dict
            message_dict['body'] = pickle.dumps({
                'args': self.args,
                'kwargs': self.kwargs,
            })

        return self._encode_message(message_dict)

Example 34

Project: PySyncObj
Source File: test_syncobj3.py
def test_doChangeClusterUT1():
	removeFiles(['dump1.bin'])

	baseAddr = getNextAddr()
	oterAddr = getNextAddr()

	o1 = TestObj(baseAddr, ['localhost:1235', oterAddr], dumpFile='dump1.bin', dynamicMembershipChange=True)
	__checkParnerNodeExists(o1, 'localhost:1238', False)
	__checkParnerNodeExists(o1, 'localhost:1239', False)
	__checkParnerNodeExists(o1, 'localhost:1235', True)

	noop = _bchr(_COMMAND_TYPE.NO_OP)
	member = _bchr(_COMMAND_TYPE.MEMBERSHIP)

	# Check regular configuration change - adding
	o1._onMessageReceived('localhost:12345', {
		'type': 'append_entries',
		'term': 1,
		'prevLogIdx': 1,
		'prevLogTerm': 0,
		'commit_index': 2,
		'entries': [(noop, 2, 1), (noop, 3, 1), (member + pickle.dumps(['add', 'localhost:1238']), 4, 1)]
	})
	__checkParnerNodeExists(o1, 'localhost:1238', True)
	__checkParnerNodeExists(o1, 'localhost:1239', False)

	# Check rollback adding
	o1._onMessageReceived('localhost:1236', {
		'type': 'append_entries',
		'term': 2,
		'prevLogIdx': 2,
		'prevLogTerm': 1,
		'commit_index': 3,
		'entries': [(noop, 3, 2), (member + pickle.dumps(['add', 'localhost:1239']), 4, 2)]
	})
	__checkParnerNodeExists(o1, 'localhost:1238', False)
	__checkParnerNodeExists(o1, 'localhost:1239', True)
	__checkParnerNodeExists(o1, oterAddr, True)

	# Check regular configuration change - removing
	o1._onMessageReceived('localhost:1236', {
		'type': 'append_entries',
		'term': 2,
		'prevLogIdx': 4,
		'prevLogTerm': 2,
		'commit_index': 4,
		'entries': [(member + pickle.dumps(['rem', 'localhost:1235']), 5, 2)]
	})

	__checkParnerNodeExists(o1, 'localhost:1238', False)
	__checkParnerNodeExists(o1, 'localhost:1239', True)
	__checkParnerNodeExists(o1, 'localhost:1235', False)


	# Check log compaction
	o1._forceLogCompaction()
	doTicks([o1], 0.5)
	o1._destroy()

	o2 = TestObj(oterAddr, [baseAddr, 'localhost:1236'], dumpFile='dump1.bin', dynamicMembershipChange=True)
	doTicks([o2], 0.5)

	__checkParnerNodeExists(o2, oterAddr, False)
	__checkParnerNodeExists(o2, baseAddr, True)
	__checkParnerNodeExists(o2, 'localhost:1238', False)
	__checkParnerNodeExists(o2, 'localhost:1239', True)
	__checkParnerNodeExists(o2, 'localhost:1235', False)
	o2._destroy()

Example 35

Project: PyRFC
Source File: test_server.py
    def test_server(self):
        server = Server(config={'debug': True}, **config._sections['gateway'])
        test = _Testing()

        # Install two functions
        func_desc_conn = self.conn.get_function_description("STFC_CONNECTION")
        server.install_function(
            func_desc_conn,
            my_stfc_connection
        )
        func_desc_chan = self.conn.get_function_description("STFC_CHANGING")
        server.install_function(
            func_desc_chan,
            my_stfc_changing
        )

        # Lookup test
        func_desc_invalid = test.get_srv_func_desc("NOT_VALID")
        self.assertEqual(func_desc_invalid, 17, "Return code for unknown func_desc should be RFC_NOT_FOUND (17).")

        func_desc_conn2 = test.get_srv_func_desc("STFC_CONNECTION")
        self.assertEqual(pickle.dumps(func_desc_conn), pickle.dumps(func_desc_conn2))

        func_desc_chan2 = test.get_srv_func_desc("STFC_CHANGING")
        self.assertEqual(pickle.dumps(func_desc_chan), pickle.dumps(func_desc_chan2))

        # Invocation test
        result = test.invoke_srv_function("STFC_CONNECTION", REQUTEXT="request_text")
        self.assertEqual(result['ECHOTEXT'], "request_text")
        self.assertEqual(result['RESPTEXT'], u"Local server here.")

        result = test.invoke_srv_function("STFC_CHANGING", START_VALUE=23, COUNTER=7)
        self.assertEqual(result['COUNTER'], 17) # COUNTER = COUNTER + 10
        self.assertEqual(result['RESULT'], 40) # RESULT = START_VALUE + (COUNTER + 10)

        server.close()

Example 36

Project: scikit-learn
Source File: test_base.py
def test_pickle_version_warning():
    # check that warnings are raised when unpickling in a different version

    # first, check no warning when in the same version:
    iris = datasets.load_iris()
    tree = DecisionTreeClassifier().fit(iris.data, iris.target)
    tree_pickle = pickle.dumps(tree)
    assert_true(b"version" in tree_pickle)
    assert_no_warnings(pickle.loads, tree_pickle)

    # check that warning is raised on different version
    tree = TreeBadVersion().fit(iris.data, iris.target)
    tree_pickle_other = pickle.dumps(tree)
    message = ("Trying to unpickle estimator TreeBadVersion from "
               "version {0} when using version {1}. This might lead to "
               "breaking code or invalid results. "
               "Use at your own risk.".format("something",
                                              sklearn.__version__))
    assert_warns_message(UserWarning, message, pickle.loads, tree_pickle_other)

    # check that not including any version also works:
    # TreeNoVersion has no getstate, like pre-0.18
    tree = TreeNoVersion().fit(iris.data, iris.target)

    tree_pickle_noversion = pickle.dumps(tree)
    assert_false(b"version" in tree_pickle_noversion)
    message = message.replace("something", "pre-0.18")
    message = message.replace("TreeBadVersion", "TreeNoVersion")
    # check we got the warning about using pre-0.18 pickle
    assert_warns_message(UserWarning, message, pickle.loads,
                         tree_pickle_noversion)

    # check that no warning is raised for external estimators
    TreeNoVersion.__module__ = "notsklearn"
    assert_no_warnings(pickle.loads, tree_pickle_noversion)

Example 37

Project: django-evolution
Source File: __init__.py
def evolution(app, created_models, verbosity=1, **kwargs):
    """
    A hook into syncdb's post_syncdb signal that is used to notify the user
    if a model evolution is necessary.
    """
    default_db = DEFAULT_DB_ALIAS

    db = kwargs.get('db', default_db)
    proj_sig = create_project_sig(db)
    signature = pickle.dumps(proj_sig)

    using_args = {
        'using': db,
    }

    try:
        latest_version = \
            django_evolution.Version.objects.current_version(using=db)
    except django_evolution.Version.DoesNotExist:
        # We need to create a baseline version.
        if verbosity > 0:
            print "Installing baseline version"

        latest_version = django_evolution.Version(signature=signature)
        latest_version.save(**using_args)

        for a in get_apps():
            install_baseline(a, latest_version, using_args, verbosity)

    unapplied = get_unapplied_evolutions(app, db)

    if unapplied:
        print style.NOTICE('There are unapplied evolutions for %s.'
                           % app.__name__.split('.')[-2])

    # Evolutions are checked over the entire project, so we only need to check
    # once. We do this check when Django Evolutions itself is synchronized.
    if app == django_evolution:
        old_proj_sig = pickle.loads(str(latest_version.signature))

        # If any models or apps have been added, a baseline must be set
        # for those new models
        changed = False
        new_apps = []

        for app_name, new_app_sig in proj_sig.items():
            if app_name == '__version__':
                # Ignore the __version__ tag
                continue

            old_app_sig = old_proj_sig.get(app_name, None)

            if old_app_sig is None:
                # App has been added
                old_proj_sig[app_name] = proj_sig[app_name]
                new_apps.append(app_name)
                changed = True
            else:
                for model_name, new_model_sig in new_app_sig.items():
                    old_model_sig = old_app_sig.get(model_name, None)

                    if old_model_sig is None:
                        # Model has been added
                        old_proj_sig[app_name][model_name] = \
                            proj_sig[app_name][model_name]
                        changed = True

        if changed:
            if verbosity > 0:
                print "Adding baseline version for new models"

            latest_version = \
                django_evolution.Version(signature=pickle.dumps(old_proj_sig))
            latest_version.save(**using_args)

            for app_name in new_apps:
                app = get_app(app_name, True)

                if app:
                    install_baseline(app, latest_version, using_args,
                                     verbosity)

        # TODO: Model introspection step goes here.
        # # If the current database state doesn't match the last
        # # saved signature (as reported by latest_version),
        # # then we need to update the Evolution table.
        # actual_sig = introspect_project_sig()
        # actual = pickle.dumps(actual_sig)
        # if actual != latest_version.signature:
        #     nudge = Version(signature=actual)
        #     nudge.save()
        #     latest_version = nudge

        diff = Diff(old_proj_sig, proj_sig)

        if not diff.is_empty():
            print style.NOTICE(
                'Project signature has changed - an evolution is required')

            if verbosity > 1:
                old_proj_sig = pickle.loads(str(latest_version.signature))
                print diff

Example 38

Project: hubplus
Source File: __init__.py
def evolution(app, created_models, verbosity=1, **kwargs):
    """
    A hook into syncdb's post_syncdb signal that is used to notify the user
    if a model evolution is necessary.
    """
    proj_sig = create_project_sig()
    signature = pickle.dumps(proj_sig)

    try:
        latest_version = django_evolution.Version.objects.latest('when')
    except django_evolution.Version.DoesNotExist:
        # We need to create a baseline version.
        if verbosity > 0:
            print "Installing baseline version"

        latest_version = django_evolution.Version(signature=signature)
        latest_version.save()

        for a in get_apps():
            app_label = a.__name__.split('.')[-2]
            sequence = get_evolution_sequence(a)
            if sequence:
                if verbosity > 0:
                    print 'Evolutions in %s baseline:' % app_label,', '.join(sequence)
            for evo_label in sequence:
                evolution = django_evolution.Evolution(app_label=app_label, 
                                                       label=evo_label,
                                                       version=latest_version)
                evolution.save()

    unapplied = get_unapplied_evolutions(app)
    if unapplied:
        print style.NOTICE('There are unapplied evolutions for %s.' % app.__name__.split('.')[-2])
        
    # Evolutions are checked over the entire project, so we only need to 
    # check once. We do this check when Django Evolutions itself is synchronized.
    if app == django_evolution:        
        old_proj_sig = pickle.loads(str(latest_version.signature))
        
        # If any models have been added, a baseline must be set 
        # for those new models
        changed = False
        for app_name, new_app_sig in proj_sig.items():
            if app_name == '__version__':
                # Ignore the __version__ tag
                continue
            old_app_sig = old_proj_sig.get(app_name, None)
            if old_app_sig is None:
                # App has been added
                old_proj_sig[app_name] = proj_sig[app_name]
                changed = True
                continue
            for model_name, new_model_sig in new_app_sig.items():
                old_model_sig = old_app_sig.get(model_name, None)
                if old_model_sig is None:
                    # Model has been added
                    old_proj_sig[app_name][model_name] = proj_sig[app_name][model_name]
                    changed = True
        
        if changed:
            if verbosity > 0:
                print "Adding baseline version for new models"
            latest_version = django_evolution.Version(signature=pickle.dumps(old_proj_sig))
            latest_version.save()

        # TODO: Model introspection step goes here. 
        # # If the current database state doesn't match the last 
        # # saved signature (as reported by latest_version),
        # # then we need to update the Evolution table.
        # actual_sig = introspect_project_sig()
        # actual = pickle.dumps(actual_sig)
        # if actual != latest_version.signature:
        #     nudge = Version(signature=actual)
        #     nudge.save()
        #     latest_version = nudge
        
        diff = Diff(old_proj_sig, proj_sig)
        if not diff.is_empty():
            print style.NOTICE('Project signature has changed - an evolution is required')
            if verbosity > 1:
                old_proj_sig = pickle.loads(str(latest_version.signature))
                print diff

Example 39

Project: PythonScript
Source File: test_dbtables.py
    def test02(self):
        tabname = "test02"
        col0 = 'coolness factor'
        col1 = 'but can it fly?'
        col2 = 'Species'

        import sys
        if sys.version_info[0] < 3 :
            testinfo = [
                {col0: pickle.dumps(8, 1), col1: 'no', col2: 'Penguin'},
                {col0: pickle.dumps(-1, 1), col1: 'no', col2: 'Turkey'},
                {col0: pickle.dumps(9, 1), col1: 'yes', col2: 'SR-71A Blackbird'}
            ]
        else :
            testinfo = [
                {col0: pickle.dumps(8, 1).decode("iso8859-1"),
                    col1: 'no', col2: 'Penguin'},
                {col0: pickle.dumps(-1, 1).decode("iso8859-1"),
                    col1: 'no', col2: 'Turkey'},
                {col0: pickle.dumps(9, 1).decode("iso8859-1"),
                    col1: 'yes', col2: 'SR-71A Blackbird'}
            ]

        try:
            self.tdb.Drop(tabname)
        except dbtables.TableDBError:
            pass
        self.tdb.CreateTable(tabname, [col0, col1, col2])
        for row in testinfo :
            self.tdb.Insert(tabname, row)

        import sys
        if sys.version_info[0] < 3 :
            values = self.tdb.Select(tabname, [col2],
                conditions={col0: lambda x: pickle.loads(x) >= 8})
        else :
            values = self.tdb.Select(tabname, [col2],
                conditions={col0: lambda x:
                    pickle.loads(bytes(x, "iso8859-1")) >= 8})

        self.assertEqual(len(values), 2)
        if values[0]['Species'] == 'Penguin' :
            self.assertEqual(values[1]['Species'], 'SR-71A Blackbird')
        elif values[0]['Species'] == 'SR-71A Blackbird' :
            self.assertEqual(values[1]['Species'], 'Penguin')
        else :
            if verbose:
                print "values= %r" % (values,)
            raise RuntimeError("Wrong values returned!")

Example 40

Project: esky
Source File: __init__.py
    def run(self,pipe):
        self.target.sudo_proxy = None
        pipe.write(b("READY"))
        try:
            #  Process incoming commands in a loop.
            while True:
                try:
                    methname = pipe.read().decode("ascii")
                    if methname == "CLOSE":
                        pipe.write(b("CLOSING"))
                        break
                    else:
                        argtypes = _get_sudo_argtypes(self.target,methname)
                        iterator = _get_sudo_iterator(self.target,methname)
                        if argtypes is None:
                            msg = "attribute '%s' not allowed from sudo"
                            raise AttributeError(msg % (methname,))
                        method = getattr(self.target,methname)
                        args = []
                        for t in argtypes:
                            if t is str:
                                args.append(pipe.read().decode("ascii"))
                            else:
                                args.append(t(pipe.read()))
                        try:
                            res = method(*args)
                        except Exception, e:
                            pipe.write(pickle.dumps((False,e)))
                        else:
                            if not iterator:
                                pipe.write(pickle.dumps((True,res)))
                            else:
                                try:
                                    for item in res:
                                        pipe.write(pickle.dumps((True,item)))
                                except Exception, e:
                                    pipe.write(pickle.dumps((False,e)))
                                else:
                                    SI = StopIteration
                                    pipe.write(pickle.dumps((False,SI)))
                except EOFError:
                    break
            #  Stay alive until the pipe is closed, but don't execute
            #  any further commands.
            while True:
                try:
                    pipe.read()
                except EOFError:
                    break
        finally:
            pipe.close()

Example 41

Project: mythboxee
Source File: mythboxee.py
	def _GetDbRecordings(self):
		self.log("def(_GetDbRecordings): Start =========================================================")

		# Create a connection to TheTVDB.com API
		t = tvdb_api.Tvdb(apikey=self.tvdb_apikey)

		# Setup some of the variables we need.
		titles = []
		banners = {}
		series = {}
		shows = {}

		# Grab the recordings from the backend
		self.recs = self.be.getRecordings()

		# Generate the Fingerprint
		finger_titles = []
		for rec in self.recs:
			rectitle = rec.title.encode('utf-8')
			if rectitle not in finger_titles:
				finger_titles.append(rectitle)
		finger_titles.sort()

		fingerprint = str(md5.new(str(finger_titles)).hexdigest())
		
		self.log("def(_GetDbRecordings): " + fingerprint)
		
		if self.config.GetValue("cache.fingerprint") == fingerprint:
			self.log("def(_GetDbRecordings): Fingerprint Matches, Retrieving Recordings from the Cache")
			self._GetCacheRecordings()
		else:
			self.log("def(_GetDbRecordings): New Fingerprint, Retrieving Recordings from the Database")
			self.config.SetValue("cache.fingerprint", fingerprint)

			x = 0
			for recording in self.recs:
				# Check for title, and if not encode it utf-8
				if recording.title == None:
					title = ""
				else:
					title = recording.title.encode('utf-8')

				# Check for subtitle, and if not encode it utf-8
				if recording.subtitle == None:
					subtitle = ""
				else:
					subtitle = recording.subtitle.encode('utf-8')

				# Check for description, and if not encode it utf-8
				if recording.description == None:
					description = ""
				else:
					description = recording.description.encode('utf-8')

				if title not in titles:
					titles.append(title)
					shows[title] = []

				# Check to see if we have a valid banner for the show, if not try and get one.
				if title not in self.banners:
					self.banners[title] = self.GetRecordingArtwork(title)
				else:
					if self.banners[title] == "mb_artwork_error.png":
						self.banners[title] = self.GetRecordingArtwork(title)

				# Check to see if we have a valid series id for the show, if not try and get one.
				if title not in self.series:
					self.series[title] = self.GetRecordingSeriesID(title)
				else:
					if self.series[title] == 00000:
						self.series[title] = self.GetRecordingSeriesID(title)

				single = [title, subtitle, description, str(recording.chanid), str(recording.airdate), str(recording.starttime), str(recording.endtime), str(recording.filename.rsplit('/',1)[-1]), recording.getRecorded().watched, x]
				shows[title].append(single)
				x = x + 1

			## Set our global variables
			self.titles = titles
			self.shows = shows
			
			# Sort the titles so we are in alphabetical order
			self.titles.sort()

			# Lets cache our findings for now and the time we cached them.
			self.config.SetValue("cache.time", str(time.time()))
			self.config.SetValue("cache.titles", pickle.dumps(titles))
			self.config.SetValue("cache.titlecount", str(len(titles)))
			self.config.SetValue("cache.banners", pickle.dumps(self.banners))
			self.config.SetValue("cache.series", pickle.dumps(self.series))
			self.config.SetValue("cache.shows", pickle.dumps(shows))
			self.config.SetValue("cache.changed", "true")

		self.log("def(GetRecordings): End ===========================================================")

Example 42

Project: protocyt
Source File: main.py
    def test_time(size, stop):
        count = sum(size**i for i in range(stop+1))
        yield 'count', count

        def wrap_testing(name, tester):
            print(name, end='')
            start = time()
            counter = 0
            while time() - start < 1 or counter < 10:
                counter+=1
                tester()
            end = time()
            print()
            result = 1000 * (end - start) / counter
            print('{0:.3f} milliseconds per {1}'.format(result, name))

            yield name, result

        print('Initialization [{0}-{1}] {2}'.format(size, stop, count))
        tree = createNode(size, stop)

        def test_serialize():
            tree.serialize(bytearray())

        def test_deserialize(ba):
            Node.deserialize(ba)

        def test_dumps():
            pickle.dumps(tree, 2)

        def test_loads(data):
            pickle.loads(data)


        ba = bytearray()
        tree.serialize(ba)
        data = pickle.dumps(tree, 2)

        tests = (
            ('serialize', test_serialize),
            ('deserialize', partial(test_deserialize, ba)),
            ('pickle.dumps', test_dumps),
            ('pickle.loads', partial(test_loads, data)),
            )

        for name, tester in tests:
            for _ in wrap_testing(name, tester):
                yield _

Example 43

Project: datafari
Source File: test_dbtables.py
    def test02(self):
        tabname = "test02"
        col0 = 'coolness factor'
        col1 = 'but can it fly?'
        col2 = 'Species'

        import sys
        if sys.version_info[0] < 3 :
            testinfo = [
                {col0: pickle.dumps(8, 1), col1: 'no', col2: 'Penguin'},
                {col0: pickle.dumps(-1, 1), col1: 'no', col2: 'Turkey'},
                {col0: pickle.dumps(9, 1), col1: 'yes', col2: 'SR-71A Blackbird'}
            ]
        else :
            testinfo = [
                {col0: pickle.dumps(8, 1).decode("iso8859-1"),
                    col1: 'no', col2: 'Penguin'},
                {col0: pickle.dumps(-1, 1).decode("iso8859-1"),
                    col1: 'no', col2: 'Turkey'},
                {col0: pickle.dumps(9, 1).decode("iso8859-1"),
                    col1: 'yes', col2: 'SR-71A Blackbird'}
            ]

        try:
            self.tdb.Drop(tabname)
        except dbtables.TableDBError:
            pass
        self.tdb.CreateTable(tabname, [col0, col1, col2])
        for row in testinfo :
            self.tdb.Insert(tabname, row)

        import sys
        if sys.version_info[0] < 3 :
            values = self.tdb.Select(tabname, [col2],
                conditions={col0: lambda x: pickle.loads(x) >= 8})
        else :
            values = self.tdb.Select(tabname, [col2],
                conditions={col0: lambda x:
                    pickle.loads(bytes(x, "iso8859-1")) >= 8})

        self.assertEqual(len(values), 2)
        if values[0]['Species'] == 'Penguin' :
            self.assertEqual(values[1]['Species'], 'SR-71A Blackbird')
        elif values[0]['Species'] == 'SR-71A Blackbird' :
            self.assertEqual(values[1]['Species'], 'Penguin')
        else :
            if verbose:
                print "values= %r" % (values,)
            raise RuntimeError("Wrong values returned!")

Example 44

Project: gsutil
Source File: test_metrics.py
  def testRetryableErrorMediaCollection(self):
    """Tests that retryable errors are collected on JSON media operations."""
    # Retryable errors will only be collected with the JSON API.
    if self.test_api != ApiSelector.JSON:
      return unittest.skip('Retryable errors are only collected in JSON')

    boto_config_for_test = [('GSUtil', 'resumable_threshold', str(ONE_KIB))]
    bucket_uri = self.CreateBucket()
    # For the resumable upload exception, we need to ensure at least one
    # callback occurs.
    halt_size = START_CALLBACK_PER_BYTES * 2
    fpath = self.CreateTempFile(contents='a' * halt_size)

    # Test that the retry function for data transfers catches and logs an error.
    test_callback_file = self.CreateTempFile(contents=pickle.dumps(
        _ResumableUploadRetryHandler(5, apitools_exceptions.BadStatusCodeError,
                                     ('unused', 'unused', 'unused'))))
    with SetBotoConfigForTest(boto_config_for_test):
      metrics_list = self._RunGsUtilWithAnalyticsOutput(
          ['cp', '--testcallbackfile', test_callback_file,
           fpath, suri(bucket_uri)])
      self._CheckParameterValue('Event Category',
                                metrics._GA_ERRORRETRY_CATEGORY, metrics_list)
      self._CheckParameterValue('Event Action', 'BadStatusCodeError',
                                metrics_list)
      self._CheckParameterValue('Retryable Errors', '1', metrics_list)
      self._CheckParameterValue('Num Retryable Service Errors', '1',
                                metrics_list)

    # Test that the ResumableUploadStartOverException in copy_helper is caught.
    test_callback_file = self.CreateTempFile(
        contents=pickle.dumps(_JSONForceHTTPErrorCopyCallbackHandler(5, 404)))
    with SetBotoConfigForTest(boto_config_for_test):
      metrics_list = self._RunGsUtilWithAnalyticsOutput(
          ['cp', '--testcallbackfile', test_callback_file,
           fpath, suri(bucket_uri)])
      self._CheckParameterValue(
          'Event Category', metrics._GA_ERRORRETRY_CATEGORY, metrics_list)
      self._CheckParameterValue(
          'Event Action', 'ResumableUploadStartOverException', metrics_list)
      self._CheckParameterValue('Retryable Errors', '1', metrics_list)
      self._CheckParameterValue(
          'Num Retryable Service Errors', '1', metrics_list)

    # Test retryable error collection in a multithread/multiprocess situation.
    test_callback_file = self.CreateTempFile(
        contents=pickle.dumps(_JSONForceHTTPErrorCopyCallbackHandler(5, 404)))
    with SetBotoConfigForTest(boto_config_for_test):
      metrics_list = self._RunGsUtilWithAnalyticsOutput(
          ['-m', 'cp', '--testcallbackfile',
           test_callback_file, fpath, suri(bucket_uri)])
      self._CheckParameterValue('Event Category',
                                metrics._GA_ERRORRETRY_CATEGORY, metrics_list)
      self._CheckParameterValue(
          'Event Action', 'ResumableUploadStartOverException', metrics_list)
      self._CheckParameterValue('Retryable Errors', '1', metrics_list)
      self._CheckParameterValue(
          'Num Retryable Service Errors', '1', metrics_list)

Example 45

Project: ilastik-0.5
Source File: dataImpex.py
    @staticmethod
    def exportOverlay(filename, format, overlayItem, timeOffset = 0, sliceOffset = 0, channelOffset = 0):
        if format == "h5":
            filename = filename + "." + format
            f = h5py.File(filename, 'w')
            path = overlayItem.key
            #pathparts = path.split("/")
            #pathparts.pop()
            #prevgr = f.create_group(pathparts.pop(0))
            #for item in pathparts:
            prevgr = f.create_group("volume")
            #try:
            data = numpy.ndarray(overlayItem._data.shape, overlayItem._data.dtype)
            data[0,:,:,:,:] = overlayItem._data[0,:,:,:,:]
            dataset = prevgr.create_dataset("data", compression = "gzip", data=data)
            dataset.attrs["overlayKey"] = str(overlayItem.key)
            dataset.attrs["overlayColor"] = pickle.dumps(overlayItem.color)
            dataset.attrs["overlayColortable"] = pickle.dumps(overlayItem.colorTable)
            dataset.attrs["overlayMin"] = pickle.dumps(overlayItem.min)
            dataset.attrs["overlayMax"] = pickle.dumps(overlayItem.max)
            dataset.attrs["overlayAutoadd"] = pickle.dumps(overlayItem.autoAdd)
            dataset.attrs["overlayAutovisible"] = pickle.dumps(overlayItem.autoVisible)
            dataset.attrs["overlayAlpha"] = pickle.dumps(overlayItem.alpha)
            #overlayItemReference.name, data=overlayItemReference.overlayItem._data[0,:,:,:,:])
            #except Exception, e:
            #    print e
            f.close()
            return
        
        if overlayItem._data.shape[1]>1:
            #3d _data
            for t in range(overlayItem._data.shape[0]):
                for z in range(overlayItem._data.shape[3]):
                    for c in range(overlayItem._data.shape[-1]):
                        fn = filename
                        data = overlayItem._data[t,:,:,z,c]
                        if overlayItem._data.shape[0]>1:
                            fn = fn + ("_time%03i" %(t+timeOffset))
                        fn = fn + ("_z%05i" %(z+sliceOffset))
                        if overlayItem._data.shape[-1]>1:
                            fn = fn + ("_channel%03i" %(c+channelOffset))
                        fn = fn + "." + format
                        
                        dtype_ = None
                        if data.dtype == numpy.float32:
                            mi = data.min()
                            ma = data.max()
                            if mi >= 0 and 1 < ma <= 255:
                                data = data.astype(numpy.uint8)
                                dtype_ = 'NATIVE'
                            else:
                                dtype_ = numpy.uint8
                        
                        vigra.impex.writeImage(data.swapaxes(1,0), fn, dtype=dtype_)
                        print "Exported file ", fn
        else:
            for t in range(overlayItem._data.shape[0]):
                for c in range(overlayItem._data.shape[-1]):
                    fn = filename
                    data = overlayItem._data[t, 0, :, :, c]
                    if overlayItem._data.shape[0]>1:
                        fn = fn + ("_time%03i" %(t+timeOffset))
                    if overlayItem._data.shape[-1]>1:
                        fn = fn + ("_channel%03i" %(c+channelOffset))
                    fn = fn + "." + format
                    
                    # dtype option for tif images when dtype is not uint8
                    # specifying dtype in the write function leads to scaling!
                    # be careful nbyte also scales, which is typically fine
                    dtype_ = None  # initialize like the 3d branch above so dtype_ is always defined
                    if data.dtype == numpy.float32:
                        mi = data.min()
                        ma = data.max()
                        if mi >= 0 and 1 < ma <= 255:
                            data = data.astype(numpy.uint8)
                            dtype_ = 'NATIVE'
                        else:
                            dtype_ = numpy.uint8
                    
                    vigra.impex.writeImage(data, fn, dtype=dtype_)
                    print "Exported file ", fn

Example 46

Project: popcorn_maker
Source File: multiprocess.py
    def run(self, test):
        """
        Execute the test (which may be a test suite). If the test is a suite,
        distribute it out among as many processes as have been configured, at
        as fine a level as is possible given the context fixtures defined in
        the suite or any sub-suites.

        """
        log.debug("%s.run(%s) (%s)", self, test, os.getpid())
        wrapper = self.config.plugins.prepareTest(test)
        if wrapper is not None:
            test = wrapper

        # plugins can decorate or capture the output stream
        wrapped = self.config.plugins.setOutputStream(self.stream)
        if wrapped is not None:
            self.stream = wrapped

        testQueue = Queue()
        resultQueue = Queue()
        tasks = []
        completed = []
        workers = []
        to_teardown = []
        shouldStop = Event()

        result = self._makeResult()
        start = time.time()

        # dispatch and collect results
        # put indexes only on queue because tests aren't picklable
        for case in self.nextBatch(test):
            log.debug("Next batch %s (%s)", case, type(case))
            if (isinstance(case, nose.case.Test) and
                isinstance(case.test, failure.Failure)):
                log.debug("Case is a Failure")
                case(result) # run here to capture the failure
                continue
            # handle shared fixtures
            if isinstance(case, ContextSuite) and case.context is failure.Failure:
                log.debug("Case is a Failure")
                case(result) # run here to capture the failure
                continue
            elif isinstance(case, ContextSuite) and self.sharedFixtures(case):
                log.debug("%s has shared fixtures", case)
                try:
                    case.setUp()
                except (KeyboardInterrupt, SystemExit):
                    raise
                except:
                    log.debug("%s setup failed", sys.exc_info())
                    result.addError(case, sys.exc_info())
                else:
                    to_teardown.append(case)
                    for _t in case:
                        test_addr = self.addtask(testQueue,tasks,_t)
                        log.debug("Queued shared-fixture test %s (%s) to %s",
                                  len(tasks), test_addr, testQueue)

            else:
                test_addr = self.addtask(testQueue,tasks,case)
                log.debug("Queued test %s (%s) to %s",
                          len(tasks), test_addr, testQueue)

        log.debug("Starting %s workers", self.config.multiprocess_workers)
        for i in range(self.config.multiprocess_workers):
            currentaddr = Value('c',bytes_(''))
            currentstart = Value('d',0.0)
            keyboardCaught = Event()
            p = Process(target=runner, args=(i, testQueue, resultQueue,
                                             currentaddr, currentstart,
                                             keyboardCaught, shouldStop,
                                             self.loaderClass,
                                             result.__class__,
                                             pickle.dumps(self.config)))
            p.currentaddr = currentaddr
            p.currentstart = currentstart
            p.keyboardCaught = keyboardCaught
            # p.setDaemon(True)
            p.start()
            workers.append(p)
            log.debug("Started worker process %s", i+1)

        total_tasks = len(tasks)
        # need to keep track of the next time to check for timeouts in case
        # more than one process times out at the same time.
        nexttimeout=self.config.multiprocess_timeout
        while tasks:
            log.debug("Waiting for results (%s/%s tasks), next timeout=%.3fs",
                      len(completed), total_tasks,nexttimeout)
            try:
                iworker, addr, newtask_addrs, batch_result = resultQueue.get(
                                                        timeout=nexttimeout)
                log.debug('Results received for worker %d, %s, new tasks: %d',
                          iworker,addr,len(newtask_addrs))
                try:
                    try:
                        tasks.remove(addr)
                    except ValueError:
                        log.warn('worker %s failed to remove from tasks: %s',
                                 iworker,addr)
                    total_tasks += len(newtask_addrs)
                    for newaddr in newtask_addrs:
                        tasks.append(newaddr)
                except KeyError:
                    log.debug("Got result for unknown task? %s", addr)
                    log.debug("current: %s",str(list(tasks)[0]))
                else:
                    completed.append([addr,batch_result])
                self.consolidate(result, batch_result)
                if (self.config.stopOnError
                    and not result.wasSuccessful()):
                    # set the stop condition
                    shouldStop.set()
                    break
                if self.config.multiprocess_restartworker:
                    log.debug('joining worker %s',iworker)
                    # wait for the worker; it is not critical if it cannot
                    # be joined, since workers that add to testQueue will
                    # not terminate until all their items have been read
                    workers[iworker].join(timeout=1)
                    if not shouldStop.is_set() and not testQueue.empty():
                        log.debug('starting new process on worker %s',iworker)
                        currentaddr = Value('c',bytes_(''))
                        currentstart = Value('d',time.time())
                        keyboardCaught = Event()
                        workers[iworker] = Process(target=runner,
                                                   args=(iworker, testQueue,
                                                         resultQueue,
                                                         currentaddr,
                                                         currentstart,
                                                         keyboardCaught,
                                                         shouldStop,
                                                         self.loaderClass,
                                                         result.__class__,
                                                         pickle.dumps(self.config)))
                        workers[iworker].currentaddr = currentaddr
                        workers[iworker].currentstart = currentstart
                        workers[iworker].keyboardCaught = keyboardCaught
                        workers[iworker].start()
            except Empty:
                log.debug("Timed out with %s tasks pending "
                          "(empty testQueue=%d): %s",
                          len(tasks),testQueue.empty(),str(tasks))
                any_alive = False
                for iworker, w in enumerate(workers):
                    if w.is_alive():
                        worker_addr = bytes_(w.currentaddr.value,'ascii')
                        timeprocessing = time.time() - w.currentstart.value
                        if ( len(worker_addr) == 0
                                and timeprocessing > self.config.multiprocess_timeout-0.1):
                            log.debug('worker %d has finished its work item, '
                                      'but is not exiting? do we wait for it?',
                                      iworker)
                        else:
                            any_alive = True
                        if (len(worker_addr) > 0
                            and timeprocessing > self.config.multiprocess_timeout-0.1):
                            log.debug('timed out worker %s: %s',
                                      iworker,worker_addr)
                            w.currentaddr.value = bytes_('')
                            # If the process is stuck in C++ code, a single
                            # SIGINT might not raise a Python KeyboardInterrupt
                            # exception; therefore, send repeated signals until
                            # an exception is caught. If this takes too long,
                            # terminate the process.
                            w.keyboardCaught.clear()
                            startkilltime = time.time()
                            while not w.keyboardCaught.is_set() and w.is_alive():
                                if time.time()-startkilltime > self.waitkilltime:
                                    # have to terminate...
                                    log.error("terminating worker %s",iworker)
                                    w.terminate()
                                    currentaddr = Value('c',bytes_(''))
                                    currentstart = Value('d',time.time())
                                    keyboardCaught = Event()
                                    workers[iworker] = Process(target=runner,
                                        args=(iworker, testQueue, resultQueue,
                                              currentaddr, currentstart,
                                              keyboardCaught, shouldStop,
                                              self.loaderClass,
                                              result.__class__,
                                              pickle.dumps(self.config)))
                                    workers[iworker].currentaddr = currentaddr
                                    workers[iworker].currentstart = currentstart
                                    workers[iworker].keyboardCaught = keyboardCaught
                                    workers[iworker].start()
                                    # there is a small probability that the
                                    # terminated process might send a result,
                                    # which has to be specially handled or
                                    # else processes might get orphaned.
                                    w = workers[iworker]
                                    break
                                os.kill(w.pid, signal.SIGINT)
                                time.sleep(0.1)
                if not any_alive and testQueue.empty():
                    log.debug("All workers dead")
                    break
            nexttimeout=self.config.multiprocess_timeout
            for w in workers:
                if w.is_alive() and len(w.currentaddr.value) > 0:
                    timeprocessing = time.time()-w.currentstart.value
                    if timeprocessing <= self.config.multiprocess_timeout:
                        nexttimeout = min(nexttimeout,
                            self.config.multiprocess_timeout-timeprocessing)

        log.debug("Completed %s tasks (%s remain)", len(completed), len(tasks))

        for case in to_teardown:
            log.debug("Tearing down shared fixtures for %s", case)
            try:
                case.tearDown()
            except (KeyboardInterrupt, SystemExit):
                raise
            except:
                result.addError(case, sys.exc_info())

        stop = time.time()

        # write the report first, since shutting down the processes can freeze
        result.printErrors()
        result.printSummary(start, stop)
        self.config.plugins.finalize(result)

        log.debug("Tell all workers to stop")
        for w in workers:
            if w.is_alive():
                testQueue.put('STOP', block=False)

        # wait for the workers to end
        try:
            for iworker,worker in enumerate(workers):
                if worker.is_alive():
                    log.debug('joining worker %s',iworker)
                    worker.join()#10)
                    if worker.is_alive():
                        log.debug('failed to join worker %s',iworker)
        except KeyboardInterrupt:
            log.info('parent received ctrl-c')
            for worker in workers:
                worker.terminate()
                worker.join()

        return result
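
Note how pickle.dumps(self.config) appears three times above: once for the initial workers and once in each restart path. The test cases themselves are never pickled (only their addresses are queued); the configuration is the one object each worker rebuilds with pickle.loads. A minimal sketch of that hand-off, with a plain dict standing in for the nose Config object (worker and the dict contents are illustrative, not nose API):

import pickle
from multiprocessing import Process, Queue

def worker(pickled_config, results):
    # the child rebuilds its own copy of the parent's configuration
    config = pickle.loads(pickled_config)
    results.put(config['verbosity'])

if __name__ == '__main__':
    config = {'verbosity': 2}  # stand-in for the real Config object
    results = Queue()
    p = Process(target=worker, args=(pickle.dumps(config), results))
    p.start()
    print(results.get())       # -> 2
    p.join()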

Example 48

Project: rjsmin
Source File: shell.py
View license
def _filepipespawn(infile, outfile, argv, env):
    """ File Pipe spawn """
    try:
        import subprocess
    except ImportError:
        subprocess = None
    import pickle as _pickle
    fd, name = mkstemp('.py')
    try:
        _os.write(fd, ("""
import os
import pickle
import sys

infile = pickle.loads(%(infile)s)
outfile = pickle.loads(%(outfile)s)
argv = pickle.loads(%(argv)s)
env = pickle.loads(%(env)s)

if infile is not None:
    infile = open(infile, 'rb')
    os.dup2(infile.fileno(), 0)
    infile.close()
if outfile is not None:
    outfile = open(outfile, 'wb')
    os.dup2(outfile.fileno(), 1)
    outfile.close()

pid = os.spawnve(os.P_NOWAIT, argv[0], argv, env)
result = os.waitpid(pid, 0)[1]
sys.exit(result & 7)
        """.strip() + "\n") % {
            'infile': repr(_pickle.dumps(_os.path.abspath(infile))),
            'outfile': repr(_pickle.dumps(_os.path.abspath(outfile))),
            'argv': repr(_pickle.dumps(argv)),
            'env': repr(_pickle.dumps(env)),
        })
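        # close the descriptor and set fd to None in one statement,
        # so the finally block below does not try to close it again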
        fd, _ = None, _os.close(fd)
        if _sys.platform == 'win32':
            argv = []
            for arg in [_sys.executable, name]:
                if ' ' in arg or arg.startswith('"'):
                    arg = '"%s"' % arg.replace('"', '\\"')
                argv.append(arg)
            argv = ' '.join(argv)
            close_fds = False
            shell = True
        else:
            argv = [_sys.executable, name]
            close_fds = True
            shell = False

        if subprocess is None:
            pid = _os.spawnve(_os.P_NOWAIT, argv[0], argv, env)
            return _os.waitpid(pid, 0)[1]
        else:
            p = subprocess.Popen(
                argv, env=env, shell=shell, close_fds=close_fds
            )
            return p.wait()
    finally:
        try:
            if fd is not None:
                _os.close(fd)
        finally:
            _os.unlink(name)
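
The notable move in this example is repr(_pickle.dumps(...)): each value is serialized and embedded as a byte-string literal in the generated helper script, so the spawned interpreter can rebuild it with pickle.loads without any extra IPC channel. A minimal sketch of the same round trip, using exec in place of the temp-file-and-spawn machinery (purely for illustration):

import pickle

env = {'PATH': '/usr/bin'}
source = (
    "import pickle\n"
    "env = pickle.loads(%s)\n"
    "print(sorted(env.keys()))\n"
) % repr(pickle.dumps(env))
exec(source)  # the real code writes this to a temp file and spawns it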

Example 50

Project: numba
Source File: test_serialize.py
View license
    @tag('important')
    def test_reuse(self):
        """
        Check that deserializing the same function multiple times re-uses
        the same dispatcher object.

        Note that "same function" is intentionally under-specified.
        """
        func = closure(5)
        pickled = pickle.dumps(func)
        func2 = closure(6)
        pickled2 = pickle.dumps(func2)

        f = pickle.loads(pickled)
        g = pickle.loads(pickled)
        h = pickle.loads(pickled2)
        self.assertIs(f, g)
        self.assertEqual(f(2, 3), 10)
        g.disable_compile()
        self.assertEqual(g(2, 4), 11)

        self.assertIsNot(f, h)
        self.assertEqual(h(2, 3), 11)

        # Now make sure the original object doesn't exist when deserializing
        func = closure(7)
        func(42, 43)
        pickled = pickle.dumps(func)
        del func
        gc.collect()

        f = pickle.loads(pickled)
        g = pickle.loads(pickled)
        self.assertIs(f, g)
        self.assertEqual(f(2, 3), 12)
        g.disable_compile()
        self.assertEqual(g(2, 4), 13)
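
What makes assertIs(f, g) pass is that deserialization goes through a lookup rather than constructing a fresh object each time. A minimal sketch of one way to get that behavior, using __reduce__ and a module-level registry (_registry, _rebuild and Dispatcher are illustrative names, not numba's actual implementation, which additionally re-creates the dispatcher when the original has been garbage collected):

import pickle

_registry = {}  # hypothetical cache, keyed by a stable identifier

def _rebuild(key):
    # called by pickle.loads; every pickle of the same object
    # resolves back to the one live instance
    return _registry[key]

class Dispatcher(object):
    def __init__(self, key):
        self._key = key
        _registry[key] = self

    def __reduce__(self):
        # pickle as (callable, args) so loading calls _rebuild(key)
        return (_rebuild, (self._key,))

d = Dispatcher('closure-5')
blob = pickle.dumps(d)
assert pickle.loads(blob) is pickle.loads(blob) is d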