nose.tools.assert_equal

Here are the examples of the python api nose.tools.assert_equal taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

200 Examples

Example 1

Project: angrop
Source File: test_rop.py
View license
def assert_mem_access_equal(m1, m2):
    """Assert that two memory-access records are equivalent.

    Dependency/controller collections are compared as sets, so ordering
    differences between the two records do not matter; the scalar fields
    (constants and sizes) are compared directly.
    """
    # Order-insensitive collection fields.
    for attr in ("addr_dependencies", "addr_controllers",
                 "data_dependencies", "data_controllers"):
        nose.tools.assert_equal(set(getattr(m1, attr)), set(getattr(m2, attr)))
    # Exact scalar fields.
    for attr in ("addr_constant", "data_constant", "addr_size", "data_size"):
        nose.tools.assert_equal(getattr(m1, attr), getattr(m2, attr))

Example 2

Project: angrop
Source File: test_rop.py
View license
def test_rop_x86_64():
    """Find ROP gadgets in the x86-64 datadep_test binary, compare them to the
    pickled reference set, then build and execute a minimal register-setting
    chain and check the resulting register values."""
    b = angr.Project(os.path.join(public_bin_location, "x86_64/datadep_test"))
    rop = b.analyses.ROP()
    rop.find_gadgets_single_threaded()

    # check gadgets against the pickled ground truth
    # FIX: the original leaked the file handle via pickle.load(open(...));
    # use a context manager so the file is closed deterministically.
    with open(os.path.join(test_data_location, "datadep_test_gadgets"), "rb") as gadget_file:
        test_gadgets, _ = pickle.load(gadget_file)
    compare_gadgets(rop.gadgets, test_gadgets)

    # test creating a rop chain that sets two registers
    chain = rop.set_regs(rbp=0x1212, rbx=0x1234567890123456)
    # smallest possible chain: 24 bytes
    nose.tools.assert_equal(chain.payload_len, 24)
    # chain is correct: executing it really sets rbp/rbx
    result_state = execute_chain(b, chain)
    nose.tools.assert_equal(result_state.se.any_int(result_state.regs.rbp), 0x1212)
    nose.tools.assert_equal(result_state.se.any_int(result_state.regs.rbx), 0x1234567890123456)

Example 3

Project: splunk-app-splunkgit
Source File: test_memory.py
View license
def test_memory_ignore():
    """Arguments listed in ``ignore`` must not participate in the cache key."""
    mem = Memory(cachedir=env['dir'], verbose=0)
    call_log = list()

    @mem.cache(ignore=['y'])
    def z(x, y=1):
        call_log.append(1)

    # The decorator records which arguments it ignores.
    yield nose.tools.assert_equal, z.ignore, ['y']

    # Since y is ignored, repeating or varying it must never recompute.
    z(0, y=1)
    yield nose.tools.assert_equal, len(call_log), 1
    z(0, y=1)
    yield nose.tools.assert_equal, len(call_log), 1
    z(0, y=2)
    yield nose.tools.assert_equal, len(call_log), 1

Example 4

Project: sed_eval
Source File: test_util.py
View license
def test_match_event_roll_lengths():
    """Event rolls of unequal length are brought to a common shape."""
    long_roll = numpy.zeros((100, 10))
    short_roll = numpy.zeros((10, 10))

    matched_a, matched_b = sed_eval.util.match_event_roll_lengths(long_roll, short_roll)

    # Both outputs share one shape...
    nose.tools.assert_equal(matched_a.shape[0], matched_b.shape[0])
    nose.tools.assert_equal(matched_a.shape[1], matched_b.shape[1])
    # ...matching the longer input's length and both inputs' widths.
    nose.tools.assert_equal(matched_b.shape[0], long_roll.shape[0])
    nose.tools.assert_equal(matched_a.shape[1], long_roll.shape[1])
    nose.tools.assert_equal(matched_b.shape[1], short_roll.shape[1])

Example 5

Project: borg
Source File: test_instance.py
View license
def test_cnf_parse_simple():
    """Parse a small CNF file and verify variable/clause counts and clauses."""
    with open(path_to("example.simple.cnf")) as cnf_file:
        parsed = borg.domains.sat.instance.parse_sat_file(cnf_file)

    # 4 variables, 2 clauses, with the exact literals from the file.
    nose.tools.assert_equal(parsed.N, 4)
    nose.tools.assert_equal(parsed.M, 2)
    nose.tools.assert_equal(parsed.to_clauses(), [[4, -1], [3, 2]])

Example 6

Project: freeipa
Source File: test_ipautil.py
View license
    def test_setdefault(self):
        """setdefault looks keys up case-insensitively and inserts defaults."""
        # Existing key: stored value wins, the default is ignored.
        nose.tools.assert_equal(self.cidict.setdefault("KEY1", "default"), "val1")

        # Missing key: the default is inserted and returned,
        # retrievable under any casing.
        assert "KEY4" not in self.cidict
        nose.tools.assert_equal(self.cidict.setdefault("KEY4", "default"), "default")
        assert "KEY4" in self.cidict
        nose.tools.assert_equal(self.cidict["key4"], "default")

        # Missing key with no explicit default: None is inserted.
        assert "KEY5" not in self.cidict
        nose.tools.assert_equal(self.cidict.setdefault("KEY5"), None)
        assert "KEY5" in self.cidict
        nose.tools.assert_equal(self.cidict["key5"], None)

Example 7

Project: freeipa
Source File: test_ipautil.py
View license
    def test_pop(self):
        """pop removes keys case-insensitively and honors dict.pop semantics."""
        # Popping an existing key returns its value and removes it.
        nose.tools.assert_equal(self.cidict.pop("KEY1", "default"), "val1")
        assert "key1" not in self.cidict

        nose.tools.assert_equal(self.cidict.pop("KEY2"), "val2")
        assert "key2" not in self.cidict

        # Missing key: default is returned; without a default, KeyError.
        nose.tools.assert_equal(self.cidict.pop("key4", "default"), "default")
        with nose.tools.assert_raises(KeyError):
            self.cidict.pop("key4")

Example 8

View license
    def b_check_collision_data(self, b_obj):
        """Verify the collision settings that should have been added to b_obj."""
        # Game-level flag first, then the nifcollision properties.
        nose.tools.assert_equal(b_obj.game.use_collision_bounds, True)

        collision = b_obj.nifcollision
        expected_settings = (
            ("use_blender_properties", True),
            ("motion_system", "MO_SYS_FIXED"),
            ("oblivion_layer", "OL_STATIC"),
            ("col_filter", 0),
            ("havok_material", "HAV_MAT_WOOD"),
        )
        for attr, expected in expected_settings:
            nose.tools.assert_equal(getattr(collision, attr), expected)

Example 9

Project: blender_nif_plugin
Source File: test_geometry.py
View license
    def n_check_transform(self, n_geom):
        """Check n_geom's translation, rotation, and scale against expectations.

        Rotation and scale are floating point, so they are compared within
        self.EPSILON of the expected values.
        """
        nose.tools.assert_equal(n_geom.translation.as_tuple(), (20.0, 20.0, 20.0))  # location

        n_rot_eul = mathutils.Matrix(n_geom.rotation.as_tuple()).to_euler()
        # BUG FIX: the original compared the *signed* difference to EPSILON,
        # which passes for any value smaller than expected (e.g. a rotation of
        # -1000 rad). Compare the absolute deviation instead.
        nose.tools.assert_true(abs(n_rot_eul.x - math.radians(30.0)) < self.EPSILON)  # x rotation
        nose.tools.assert_true(abs(n_rot_eul.y - math.radians(60.0)) < self.EPSILON)  # y rotation
        nose.tools.assert_true(abs(n_rot_eul.z - math.radians(90.0)) < self.EPSILON)  # z rotation

        nose.tools.assert_true(abs(n_geom.scale - 0.75) < self.EPSILON)  # scale

Example 10

Project: cle
Source File: test_overlap.py
View license
def test_overlap():
    """Adding two objects at a conflicting base must rebase one of them."""
    binary_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        '../../binaries/tests/i386/manysum')
    loader = cle.Loader(binary_path, auto_load_libs=False)
    nose.tools.assert_equal(loader.main_bin.rebase_addr, 0)
    nose.tools.assert_equal(loader.main_bin.get_min_addr(), 0x8048000)

    # Both mock objects request base 0x8047000; obj1 is twice as large.
    obj1 = MockBackend(0x8047000, 0x2000, custom_arch=loader.main_bin.arch)
    obj2 = MockBackend(0x8047000, 0x1000, custom_arch=loader.main_bin.arch)
    loader.add_object(obj1)
    loader.add_object(obj2)

    # obj2 keeps the requested base; obj1 was pushed past the main binary.
    nose.tools.assert_equal(obj2.rebase_addr, 0x8047000)
    nose.tools.assert_greater(obj1.rebase_addr, 0x8048000)

Example 11

Project: cle
Source File: test_patched_stream.py
View license
def test_patched_stream():
    """PatchedStream overlays patch bytes on reads from a backing stream."""
    backing = StringIO.StringIO('0123456789abcdef')

    def make_patched(patches, position):
        # Build a fresh PatchedStream over the shared backing stream,
        # positioned where the read should start.
        ps = cle.PatchedStream(backing, patches)
        ps.seek(position)
        return ps

    # Full read with a two-byte patch at offset 2.
    nose.tools.assert_equal(make_patched([(2, 'AA')], 0).read(), '01AA456789abcdef')
    # Partial read that ends inside the patch.
    nose.tools.assert_equal(make_patched([(2, 'AA')], 0).read(3), '01A')
    # Partial read that starts inside the patch.
    nose.tools.assert_equal(make_patched([(2, 'AA')], 3).read(3), 'A45')
    # Oversized patch at offset -1: output is clamped to the stream length.
    nose.tools.assert_equal(
        make_patched([(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')], 0).read(),
        'A'*0x10)

Example 12

Project: pyvex
Source File: test.py
View license
def test_irstmt_noop():
    """A lifted nop instruction yields an Ist_NoOp statement, and fresh or
    deep-copied NoOp statements have the same type."""
    lifted_block = pyvex.IRSB(bytes='\x90\x5d\xc3')
    lifted_nop = lifted_block.statements[0]
    fresh_nop = pyvex.IRStmt.NoOp()
    copied_nop = fresh_nop.deepCopy()

    nose.tools.assert_equal(lifted_nop.tag, "Ist_NoOp")
    nose.tools.assert_equal(type(lifted_nop), type(fresh_nop))
    nose.tools.assert_equal(type(lifted_nop), type(copied_nop))

Example 13

Project: pyvex
Source File: test.py
View license
def test_irstmt_imark():
    """IMark exposes addr/len/delta, allows mutation, rejects bad arguments,
    and deep-copies to the same type."""
    mark = pyvex.IRStmt.IMark(1, 2, 3)
    nose.tools.assert_equal(mark.tag, "Ist_IMark")

    # Constructor arguments land in the expected fields.
    for field, expected in (("addr", 1), ("len", 2), ("delta", 3)):
        nose.tools.assert_equal(getattr(mark, field), expected)

    # Every field is writable.
    for field in ("addr", "len", "delta"):
        setattr(mark, field, 5)
        nose.tools.assert_equal(getattr(mark, field), 5)

    # A single tuple is not a valid argument list for the constructor.
    nose.tools.assert_raises(Exception, pyvex.IRStmt.IMark, ())
    nose.tools.assert_equal(type(mark), type(mark.deepCopy()))

Example 14

Project: stolos
Source File: test_stolos.py
View license
@with_setup
def test_retry_failed_task(
        app1, job_id1, job_id2, log, tasks_json_tmpfile):
    """
    Retry failed tasks up to max num retries and then remove self from queue

    Tasks should maintain proper task state throughout.
    """
    # create 2 tasks in same queue
    enqueue(app1, job_id1)
    enqueue(app1, job_id2, validate_queued=False)
    nose.tools.assert_equal(2, get_qb_status(app1, job_id1)['app_qsize'])
    # cycle_queue returns job_id1 here — presumably it is at the queue head
    # and cycling moves it to the back so job_id2 runs next; confirm against
    # the cycle_queue helper if in doubt.
    nose.tools.assert_equal(job_id1, cycle_queue(app1))
    # run job_id2 and have it fail (the injected bash command cannot succeed)
    run_code(
        log, tasks_json_tmpfile, app1,
        extra_opts='--bash_cmd "&& notacommand...fail" ')
    # ensure we still have both items in the queue
    nose.tools.assert_true(get_qb_status(app1, job_id1)['in_queue'])
    nose.tools.assert_true(get_qb_status(app1, job_id2)['in_queue'])
    # ensure the failed task is sent to back of the queue
    nose.tools.assert_equal(2, get_qb_status(app1, job_id1)['app_qsize'])
    nose.tools.assert_equal(job_id1, cycle_queue(app1))
    # run and fail n times, where n = max failures (--max_retry 1)
    run_code(
        log, tasks_json_tmpfile, app1,
        extra_opts='--max_retry 1 --bash_cmd "&& notacommand...fail"')
    # verify that job_id2 is removed from queue
    validate_one_queued_task(app1, job_id1)
    # verify that job_id2 state is 'failed' and job_id1 is still pending
    validate_one_failed_task(app1, job_id2)

Example 15

Project: splunk-app-splunkgit
Source File: test_memory.py
View license
def test_memory_integration():
    """ Simple test of memory lazy evaluation.
    """
    accumulator = list()
    # Rmk: this function has the same name as a module-level function,
    # thus it serves as a test to see that both are identified
    # as different.

    def f(l):
        # Side effect lets the test count how many times f actually ran.
        accumulator.append(1)
        return l

    for test in check_identity_lazy(f, accumulator):
        yield test

    # Now test clearing
    memory = Memory(cachedir=env['dir'], verbose=0)
    # First clear the cache directory, to check that our code can
    # handle that
    # NOTE: this line would raise an exception, as the database file is still
    # open; we ignore the error since we want to test what happens if the
    # directory disappears
    shutil.rmtree(env['dir'], ignore_errors=True)
    g = memory.cache(f)
    g(1)
    g.clear(warn=False)
    current_accumulator = len(accumulator)
    # After clearing, the next call must recompute f exactly once.
    out = g(1)
    yield nose.tools.assert_equal, len(accumulator), \
                current_accumulator + 1
    # Also, check that Memory.eval works similarly (cached: no extra call)
    yield nose.tools.assert_equal, memory.eval(f, 1), out
    yield nose.tools.assert_equal, len(accumulator), \
                current_accumulator + 1

Example 16

Project: angr
Source File: test_function_manager.py
View license
def test_amd64():
    """Build an accurate CFG for the amd64 fauxware binary and validate the
    recovered knowledge base: the set of functions, main()'s blocks, call
    sites, call targets/returns, and main's transition-graph edges."""
    logging.getLogger('angr.analyses.cfg').setLevel(logging.DEBUG)

    fauxware_amd64 = angr.Project(test_location + "/x86_64/fauxware")

    # Ground-truth values for the fauxware binary (addresses in both hex and,
    # for call targets, Python-2 long decimal literals).
    EXPECTED_FUNCTIONS = { 0x4004e0, 0x400510, 0x400520, 0x400530, 0x400540, 0x400550, 0x400560, 0x400570,
                           0x400580, 0x4005ac, 0x400640, 0x400664, 0x4006ed, 0x4006fd, 0x40071d, 0x4007e0,
                           0x400880 }
    EXPECTED_BLOCKS = { 0x40071D, 0x40073E, 0x400754, 0x40076A, 0x400774, 0x40078A, 0x4007A0, 0x4007B3, 0x4007C7,
                        0x4007C9, 0x4007BD, 0x4007D3 }
    EXPECTED_CALLSITES = { 0x40071D, 0x40073E, 0x400754, 0x40076A, 0x400774, 0x40078A, 0x4007A0, 0x4007BD, 0x4007C9 }
    EXPECTED_CALLSITE_TARGETS = { 4195600L, 4195632L, 4195632L, 4195600L, 4195632L, 4195632L, 4195940L, 4196077L,
                                  4196093L }
    EXPECTED_CALLSITE_RETURNS = { 0x40073e, 0x400754, 0x40076a, 0x400774, 0x40078a, 0x4007a0, 0x4007b3, 0x4007c7,
                                  None }

    fauxware_amd64.analyses.CFGAccurate()
    # Only functions below 0x500000 (presumably the main binary's range) are
    # compared against the expected set.
    nose.tools.assert_equal(set([ k for k in fauxware_amd64.kb.functions.keys() if k < 0x500000 ]), EXPECTED_FUNCTIONS)

    main = fauxware_amd64.kb.functions.function(name='main')
    nose.tools.assert_equal(main.startpoint.addr, 0x40071D)
    nose.tools.assert_equal(set(main.block_addrs), EXPECTED_BLOCKS)
    nose.tools.assert_equal([0x4007D3], [bl.addr for bl in main.endpoints])
    nose.tools.assert_equal(set(main.get_call_sites()), EXPECTED_CALLSITES)
    nose.tools.assert_equal(set(map(main.get_call_target, main.get_call_sites())), EXPECTED_CALLSITE_TARGETS)
    nose.tools.assert_equal(set(map(main.get_call_return, main.get_call_sites())), EXPECTED_CALLSITE_RETURNS)
    nose.tools.assert_true(main.has_return)

    rejected = fauxware_amd64.kb.functions.function(name='rejected')
    nose.tools.assert_equal(rejected.returning, False)

    # transition graph
    main_g = main.transition_graph
    main_g_edges_ = main_g.edges(data=True)

    # Convert nodes those edges from blocks to addresses
    main_g_edges = [ ]
    for src_node, dst_node, data in main_g_edges_:
        main_g_edges.append((src_node.addr, dst_node.addr, data))

    nose.tools.assert_true((0x40071d, 0x400510, {'type': 'call'}) in main_g_edges)
    nose.tools.assert_true((0x40071d, 0x40073e, {'type': 'fake_return', 'confirmed': True, 'outside': False}) in
                           main_g_edges
                           )
    nose.tools.assert_true((0x40073e, 0x400530, {'type': 'call'}) in main_g_edges)
    nose.tools.assert_true((0x40073e, 0x400754, {'type': 'fake_return', 'confirmed': True, 'outside': False}) in main_g_edges)

    # rejected() does not return, so its fake_return edge is not 'confirmed'
    nose.tools.assert_true((0x4007c9, 0x4006fd, {'type': 'call'}) in main_g_edges)
    nose.tools.assert_true((0x4007c9, 0x4007d3, {'type': 'fake_return', 'outside': False}) in main_g_edges)

Example 17

Project: angr
Source File: test_function_manager.py
View license
def test_amd64():
    """Build an accurate CFG for the amd64 fauxware binary and validate the
    recovered knowledge base: the set of functions, main()'s blocks, call
    sites, call targets/returns, and main's transition-graph edges.

    NOTE(review): this is a byte-identical duplicate of the previous example.
    """
    logging.getLogger('angr.analyses.cfg').setLevel(logging.DEBUG)

    fauxware_amd64 = angr.Project(test_location + "/x86_64/fauxware")

    # Ground-truth values for the fauxware binary (addresses in both hex and,
    # for call targets, Python-2 long decimal literals).
    EXPECTED_FUNCTIONS = { 0x4004e0, 0x400510, 0x400520, 0x400530, 0x400540, 0x400550, 0x400560, 0x400570,
                           0x400580, 0x4005ac, 0x400640, 0x400664, 0x4006ed, 0x4006fd, 0x40071d, 0x4007e0,
                           0x400880 }
    EXPECTED_BLOCKS = { 0x40071D, 0x40073E, 0x400754, 0x40076A, 0x400774, 0x40078A, 0x4007A0, 0x4007B3, 0x4007C7,
                        0x4007C9, 0x4007BD, 0x4007D3 }
    EXPECTED_CALLSITES = { 0x40071D, 0x40073E, 0x400754, 0x40076A, 0x400774, 0x40078A, 0x4007A0, 0x4007BD, 0x4007C9 }
    EXPECTED_CALLSITE_TARGETS = { 4195600L, 4195632L, 4195632L, 4195600L, 4195632L, 4195632L, 4195940L, 4196077L,
                                  4196093L }
    EXPECTED_CALLSITE_RETURNS = { 0x40073e, 0x400754, 0x40076a, 0x400774, 0x40078a, 0x4007a0, 0x4007b3, 0x4007c7,
                                  None }

    fauxware_amd64.analyses.CFGAccurate()
    # Only functions below 0x500000 (presumably the main binary's range) are
    # compared against the expected set.
    nose.tools.assert_equal(set([ k for k in fauxware_amd64.kb.functions.keys() if k < 0x500000 ]), EXPECTED_FUNCTIONS)

    main = fauxware_amd64.kb.functions.function(name='main')
    nose.tools.assert_equal(main.startpoint.addr, 0x40071D)
    nose.tools.assert_equal(set(main.block_addrs), EXPECTED_BLOCKS)
    nose.tools.assert_equal([0x4007D3], [bl.addr for bl in main.endpoints])
    nose.tools.assert_equal(set(main.get_call_sites()), EXPECTED_CALLSITES)
    nose.tools.assert_equal(set(map(main.get_call_target, main.get_call_sites())), EXPECTED_CALLSITE_TARGETS)
    nose.tools.assert_equal(set(map(main.get_call_return, main.get_call_sites())), EXPECTED_CALLSITE_RETURNS)
    nose.tools.assert_true(main.has_return)

    rejected = fauxware_amd64.kb.functions.function(name='rejected')
    nose.tools.assert_equal(rejected.returning, False)

    # transition graph
    main_g = main.transition_graph
    main_g_edges_ = main_g.edges(data=True)

    # Convert nodes those edges from blocks to addresses
    main_g_edges = [ ]
    for src_node, dst_node, data in main_g_edges_:
        main_g_edges.append((src_node.addr, dst_node.addr, data))

    nose.tools.assert_true((0x40071d, 0x400510, {'type': 'call'}) in main_g_edges)
    nose.tools.assert_true((0x40071d, 0x40073e, {'type': 'fake_return', 'confirmed': True, 'outside': False}) in
                           main_g_edges
                           )
    nose.tools.assert_true((0x40073e, 0x400530, {'type': 'call'}) in main_g_edges)
    nose.tools.assert_true((0x40073e, 0x400754, {'type': 'fake_return', 'confirmed': True, 'outside': False}) in main_g_edges)

    # rejected() does not return, so its fake_return edge is not 'confirmed'
    nose.tools.assert_true((0x4007c9, 0x4006fd, {'type': 'call'}) in main_g_edges)
    nose.tools.assert_true((0x4007c9, 0x4007d3, {'type': 'fake_return', 'outside': False}) in main_g_edges)

Example 18

Project: claripy
Source File: test_vsa.py
View license
def test_wrapped_intervals():
    """Exercise wrapped-interval (StridedInterval) semantics in the VSA backend.

    Covers signed/unsigned bound conversion, south-/north-pole and bipolar
    splitting, the arithmetic operators (+, -, *, /), zero/sign extension,
    and signed/unsigned comparisons, all against hand-computed intervals.
    """
    #SI = claripy.StridedInterval

    # Disable the use of DiscreteStridedIntervalSet
    claripy.vsa.strided_interval.allow_dsis = False

    #
    # Signedness/unsignedness conversion
    #

    si1 = claripy.SI(bits=32, stride=1, lower_bound=0, upper_bound=0xffffffff)
    nose.tools.assert_equal(vsa_model(si1)._signed_bounds(), [ (0x0, 0x7fffffff), (-0x80000000, -0x1) ])
    nose.tools.assert_equal(vsa_model(si1)._unsigned_bounds(), [ (0x0, 0xffffffff) ])

    #
    # Pole-splitting
    #

    # south-pole splitting
    si1 = claripy.SI(bits=32, stride=1, lower_bound=-1, upper_bound=1)
    si_list = vsa_model(si1)._ssplit()
    nose.tools.assert_equal(len(si_list), 2)
    nose.tools.assert_true(
        si_list[0].identical(vsa_model(claripy.SI(bits=32, stride=1, lower_bound=-1, upper_bound=-1))))
    nose.tools.assert_true(
        si_list[1].identical(vsa_model(claripy.SI(bits=32, stride=1, lower_bound=0, upper_bound=1))))

    # north-pole splitting
    si1 = claripy.SI(bits=32, stride=1, lower_bound=-1, upper_bound=-3)
    si_list = vsa_model(si1)._nsplit()
    nose.tools.assert_equal(len(si_list), 2)
    nose.tools.assert_true(
        si_list[0].identical(vsa_model(claripy.SI(bits=32, stride=1, lower_bound=-1, upper_bound=0x7fffffff))))
    nose.tools.assert_true(
        si_list[1].identical(vsa_model(claripy.SI(bits=32, stride=1, lower_bound=0x80000000, upper_bound=-3))))

    # north-pole splitting, episode 2
    si1 = claripy.SI(bits=32, stride=3, lower_bound=3, upper_bound=0)
    si_list = vsa_model(si1)._nsplit()
    nose.tools.assert_equal(len(si_list), 2)
    nose.tools.assert_true(
        si_list[0].identical(vsa_model(claripy.SI(bits=32, stride=3, lower_bound=3, upper_bound=0x7ffffffe))))
    nose.tools.assert_true(
        si_list[1].identical(vsa_model(claripy.SI(bits=32, stride=3, lower_bound=0x80000001, upper_bound=0))))

    # bipolar splitting
    si1 = claripy.SI(bits=32, stride=1, lower_bound=-2, upper_bound=-8)
    si_list = vsa_model(si1)._psplit()
    nose.tools.assert_equal(len(si_list), 3)
    nose.tools.assert_true(
        si_list[0].identical(vsa_model(claripy.SI(bits=32, stride=1, lower_bound=-2, upper_bound=-1))))
    nose.tools.assert_true(
        si_list[1].identical(vsa_model(claripy.SI(bits=32, stride=1, lower_bound=0, upper_bound=0x7fffffff))))
    nose.tools.assert_true(
        si_list[2].identical(vsa_model(claripy.SI(bits=32, stride=1, lower_bound=0x80000000, upper_bound=-8))))

    #
    # Addition
    #

    # Plain addition
    si1 = claripy.SI(bits=32, stride=1, lower_bound=-1, upper_bound=1)
    si2 = claripy.SI(bits=32, stride=1, lower_bound=-1, upper_bound=1)
    si3 = claripy.SI(bits=32, stride=1, lower_bound=-2, upper_bound=2)
    nose.tools.assert_true(claripy.backends.vsa.identical(si1 + si2, si3))
    si4 = claripy.SI(bits=32, stride=1, lower_bound=0xfffffffe, upper_bound=2)
    nose.tools.assert_true(claripy.backends.vsa.identical(si1 + si2, si4))
    si5 = claripy.SI(bits=32, stride=1, lower_bound=2, upper_bound=-2)
    nose.tools.assert_false(claripy.backends.vsa.identical(si1 + si2, si5))

    # Addition with overflowing cardinality
    si1 = claripy.SI(bits=8, stride=1, lower_bound=0, upper_bound=0xfe)
    si2 = claripy.SI(bits=8, stride=1, lower_bound=0xfe, upper_bound=0xff)
    nose.tools.assert_true(vsa_model((si1 + si2)).is_top)

    # Addition that shouldn't get a TOP
    si1 = claripy.SI(bits=8, stride=1, lower_bound=0, upper_bound=0xfe)
    si2 = claripy.SI(bits=8, stride=1, lower_bound=0, upper_bound=0)
    nose.tools.assert_false(vsa_model((si1 + si2)).is_top)

    #
    # Subtraction
    #

    si1 = claripy.SI(bits=8, stride=1, lower_bound=10, upper_bound=15)
    si2 = claripy.SI(bits=8, stride=1, lower_bound=11, upper_bound=12)
    si3 = claripy.SI(bits=8, stride=1, lower_bound=-2, upper_bound=4)
    nose.tools.assert_true(claripy.backends.vsa.identical(si1 - si2, si3))

    #
    # Multiplication
    #

    # integer multiplication
    si1 = claripy.SI(bits=32, to_conv=0xffff)
    si2 = claripy.SI(bits=32, to_conv=0x10000)
    si3 = claripy.SI(bits=32, to_conv=0xffff0000)
    nose.tools.assert_true(claripy.backends.vsa.identical(si1 * si2, si3))

    # intervals multiplication
    si1 = claripy.SI(bits=32, stride=1, lower_bound=10, upper_bound=15)
    si2 = claripy.SI(bits=32, stride=1, lower_bound=20, upper_bound=30)
    si3 = claripy.SI(bits=32, stride=1, lower_bound=200, upper_bound=450)
    nose.tools.assert_true(claripy.backends.vsa.identical(si1 * si2, si3))

    #
    # Division
    #

    # integer division
    si1 = claripy.SI(bits=32, to_conv=10)
    si2 = claripy.SI(bits=32, to_conv=5)
    si3 = claripy.SI(bits=32, to_conv=2)
    nose.tools.assert_true(claripy.backends.vsa.identical(si1 / si2, si3))

    si3 = claripy.SI(bits=32, to_conv=0)
    nose.tools.assert_true(claripy.backends.vsa.identical(si2 / si1, si3))

    # intervals division
    si1 = claripy.SI(bits=32, stride=1, lower_bound=10, upper_bound=100)
    si2 = claripy.SI(bits=32, stride=1, lower_bound=10, upper_bound=20)
    si3 = claripy.SI(bits=32, stride=1, lower_bound=0, upper_bound=10)
    nose.tools.assert_true(claripy.backends.vsa.identical(si1 / si2, si3))

    #
    # Extension
    #

    # zero-extension
    si1 = claripy.SI(bits=8, stride=1, lower_bound=0, upper_bound=0xfd)
    si_zext = si1.zero_extend(32 - 8)
    si_zext_ = claripy.SI(bits=32, stride=1, lower_bound=0x0, upper_bound=0xfd)
    nose.tools.assert_true(claripy.backends.vsa.identical(si_zext, si_zext_))

    # sign-extension
    si1 = claripy.SI(bits=8, stride=1, lower_bound=0, upper_bound=0xfd)
    si_sext = si1.sign_extend(32 - 8)
    si_sext_ = claripy.SI(bits=32, stride=1, lower_bound=0xffffff80, upper_bound=0x7f)
    nose.tools.assert_true(claripy.backends.vsa.identical(si_sext, si_sext_))

    #
    # Comparisons
    #

    # -1 == 0xff
    si1 = claripy.SI(bits=8, stride=1, lower_bound=-1, upper_bound=-1)
    si2 = claripy.SI(bits=8, stride=1, lower_bound=0xff, upper_bound=0xff)
    nose.tools.assert_true(claripy.backends.vsa.is_true(si1 == si2))

    # -2 != 0xff
    si1 = claripy.SI(bits=8, stride=1, lower_bound=-2, upper_bound=-2)
    si2 = claripy.SI(bits=8, stride=1, lower_bound=0xff, upper_bound=0xff)
    nose.tools.assert_true(claripy.backends.vsa.is_true(si1 != si2))

    # [-2, -1] < [1, 2] (signed arithmetic)
    si1 = claripy.SI(bits=8, stride=1, lower_bound=1, upper_bound=2)
    si2 = claripy.SI(bits=8, stride=1, lower_bound=-2, upper_bound=-1)
    nose.tools.assert_true(claripy.backends.vsa.is_true(si2.SLT(si1)))

    # [-2, -1] <= [1, 2] (signed arithmetic)
    nose.tools.assert_true(claripy.backends.vsa.is_true(si2.SLE(si1)))

    # [0xfe, 0xff] > [1, 2] (unsigned arithmetic)
    nose.tools.assert_true(claripy.backends.vsa.is_true(si2.UGT(si1)))

    # [0xfe, 0xff] >= [1, 2] (unsigned arithmetic)
    nose.tools.assert_true(claripy.backends.vsa.is_true(si2.UGE(si1)))

Example 19

Project: claripy
Source File: test_vsa.py
View license
def test_wrapped_intervals():
    """Exercise wrapped-interval (StridedInterval) semantics in the VSA backend.

    Covers signed/unsigned bound conversion, south-/north-pole and bipolar
    splitting, the arithmetic operators (+, -, *, /), zero/sign extension,
    and signed/unsigned comparisons, all against hand-computed intervals.

    NOTE(review): this is a byte-identical duplicate of the previous example.
    """
    #SI = claripy.StridedInterval

    # Disable the use of DiscreteStridedIntervalSet
    claripy.vsa.strided_interval.allow_dsis = False

    #
    # Signedness/unsignedness conversion
    #

    si1 = claripy.SI(bits=32, stride=1, lower_bound=0, upper_bound=0xffffffff)
    nose.tools.assert_equal(vsa_model(si1)._signed_bounds(), [ (0x0, 0x7fffffff), (-0x80000000, -0x1) ])
    nose.tools.assert_equal(vsa_model(si1)._unsigned_bounds(), [ (0x0, 0xffffffff) ])

    #
    # Pole-splitting
    #

    # south-pole splitting
    si1 = claripy.SI(bits=32, stride=1, lower_bound=-1, upper_bound=1)
    si_list = vsa_model(si1)._ssplit()
    nose.tools.assert_equal(len(si_list), 2)
    nose.tools.assert_true(
        si_list[0].identical(vsa_model(claripy.SI(bits=32, stride=1, lower_bound=-1, upper_bound=-1))))
    nose.tools.assert_true(
        si_list[1].identical(vsa_model(claripy.SI(bits=32, stride=1, lower_bound=0, upper_bound=1))))

    # north-pole splitting
    si1 = claripy.SI(bits=32, stride=1, lower_bound=-1, upper_bound=-3)
    si_list = vsa_model(si1)._nsplit()
    nose.tools.assert_equal(len(si_list), 2)
    nose.tools.assert_true(
        si_list[0].identical(vsa_model(claripy.SI(bits=32, stride=1, lower_bound=-1, upper_bound=0x7fffffff))))
    nose.tools.assert_true(
        si_list[1].identical(vsa_model(claripy.SI(bits=32, stride=1, lower_bound=0x80000000, upper_bound=-3))))

    # north-pole splitting, episode 2
    si1 = claripy.SI(bits=32, stride=3, lower_bound=3, upper_bound=0)
    si_list = vsa_model(si1)._nsplit()
    nose.tools.assert_equal(len(si_list), 2)
    nose.tools.assert_true(
        si_list[0].identical(vsa_model(claripy.SI(bits=32, stride=3, lower_bound=3, upper_bound=0x7ffffffe))))
    nose.tools.assert_true(
        si_list[1].identical(vsa_model(claripy.SI(bits=32, stride=3, lower_bound=0x80000001, upper_bound=0))))

    # bipolar splitting
    si1 = claripy.SI(bits=32, stride=1, lower_bound=-2, upper_bound=-8)
    si_list = vsa_model(si1)._psplit()
    nose.tools.assert_equal(len(si_list), 3)
    nose.tools.assert_true(
        si_list[0].identical(vsa_model(claripy.SI(bits=32, stride=1, lower_bound=-2, upper_bound=-1))))
    nose.tools.assert_true(
        si_list[1].identical(vsa_model(claripy.SI(bits=32, stride=1, lower_bound=0, upper_bound=0x7fffffff))))
    nose.tools.assert_true(
        si_list[2].identical(vsa_model(claripy.SI(bits=32, stride=1, lower_bound=0x80000000, upper_bound=-8))))

    #
    # Addition
    #

    # Plain addition
    si1 = claripy.SI(bits=32, stride=1, lower_bound=-1, upper_bound=1)
    si2 = claripy.SI(bits=32, stride=1, lower_bound=-1, upper_bound=1)
    si3 = claripy.SI(bits=32, stride=1, lower_bound=-2, upper_bound=2)
    nose.tools.assert_true(claripy.backends.vsa.identical(si1 + si2, si3))
    si4 = claripy.SI(bits=32, stride=1, lower_bound=0xfffffffe, upper_bound=2)
    nose.tools.assert_true(claripy.backends.vsa.identical(si1 + si2, si4))
    si5 = claripy.SI(bits=32, stride=1, lower_bound=2, upper_bound=-2)
    nose.tools.assert_false(claripy.backends.vsa.identical(si1 + si2, si5))

    # Addition with overflowing cardinality
    si1 = claripy.SI(bits=8, stride=1, lower_bound=0, upper_bound=0xfe)
    si2 = claripy.SI(bits=8, stride=1, lower_bound=0xfe, upper_bound=0xff)
    nose.tools.assert_true(vsa_model((si1 + si2)).is_top)

    # Addition that shouldn't get a TOP
    si1 = claripy.SI(bits=8, stride=1, lower_bound=0, upper_bound=0xfe)
    si2 = claripy.SI(bits=8, stride=1, lower_bound=0, upper_bound=0)
    nose.tools.assert_false(vsa_model((si1 + si2)).is_top)

    #
    # Subtraction
    #

    si1 = claripy.SI(bits=8, stride=1, lower_bound=10, upper_bound=15)
    si2 = claripy.SI(bits=8, stride=1, lower_bound=11, upper_bound=12)
    si3 = claripy.SI(bits=8, stride=1, lower_bound=-2, upper_bound=4)
    nose.tools.assert_true(claripy.backends.vsa.identical(si1 - si2, si3))

    #
    # Multiplication
    #

    # integer multiplication
    si1 = claripy.SI(bits=32, to_conv=0xffff)
    si2 = claripy.SI(bits=32, to_conv=0x10000)
    si3 = claripy.SI(bits=32, to_conv=0xffff0000)
    nose.tools.assert_true(claripy.backends.vsa.identical(si1 * si2, si3))

    # intervals multiplication
    si1 = claripy.SI(bits=32, stride=1, lower_bound=10, upper_bound=15)
    si2 = claripy.SI(bits=32, stride=1, lower_bound=20, upper_bound=30)
    si3 = claripy.SI(bits=32, stride=1, lower_bound=200, upper_bound=450)
    nose.tools.assert_true(claripy.backends.vsa.identical(si1 * si2, si3))

    #
    # Division
    #

    # integer division
    si1 = claripy.SI(bits=32, to_conv=10)
    si2 = claripy.SI(bits=32, to_conv=5)
    si3 = claripy.SI(bits=32, to_conv=2)
    nose.tools.assert_true(claripy.backends.vsa.identical(si1 / si2, si3))

    si3 = claripy.SI(bits=32, to_conv=0)
    nose.tools.assert_true(claripy.backends.vsa.identical(si2 / si1, si3))

    # intervals division
    si1 = claripy.SI(bits=32, stride=1, lower_bound=10, upper_bound=100)
    si2 = claripy.SI(bits=32, stride=1, lower_bound=10, upper_bound=20)
    si3 = claripy.SI(bits=32, stride=1, lower_bound=0, upper_bound=10)
    nose.tools.assert_true(claripy.backends.vsa.identical(si1 / si2, si3))

    #
    # Extension
    #

    # zero-extension
    si1 = claripy.SI(bits=8, stride=1, lower_bound=0, upper_bound=0xfd)
    si_zext = si1.zero_extend(32 - 8)
    si_zext_ = claripy.SI(bits=32, stride=1, lower_bound=0x0, upper_bound=0xfd)
    nose.tools.assert_true(claripy.backends.vsa.identical(si_zext, si_zext_))

    # sign-extension
    si1 = claripy.SI(bits=8, stride=1, lower_bound=0, upper_bound=0xfd)
    si_sext = si1.sign_extend(32 - 8)
    si_sext_ = claripy.SI(bits=32, stride=1, lower_bound=0xffffff80, upper_bound=0x7f)
    nose.tools.assert_true(claripy.backends.vsa.identical(si_sext, si_sext_))

    #
    # Comparisons
    #

    # -1 == 0xff
    si1 = claripy.SI(bits=8, stride=1, lower_bound=-1, upper_bound=-1)
    si2 = claripy.SI(bits=8, stride=1, lower_bound=0xff, upper_bound=0xff)
    nose.tools.assert_true(claripy.backends.vsa.is_true(si1 == si2))

    # -2 != 0xff
    si1 = claripy.SI(bits=8, stride=1, lower_bound=-2, upper_bound=-2)
    si2 = claripy.SI(bits=8, stride=1, lower_bound=0xff, upper_bound=0xff)
    nose.tools.assert_true(claripy.backends.vsa.is_true(si1 != si2))

    # [-2, -1] < [1, 2] (signed arithmetic)
    si1 = claripy.SI(bits=8, stride=1, lower_bound=1, upper_bound=2)
    si2 = claripy.SI(bits=8, stride=1, lower_bound=-2, upper_bound=-1)
    nose.tools.assert_true(claripy.backends.vsa.is_true(si2.SLT(si1)))

    # [-2, -1] <= [1, 2] (signed arithmetic)
    nose.tools.assert_true(claripy.backends.vsa.is_true(si2.SLE(si1)))

    # [0xfe, 0xff] > [1, 2] (unsigned arithmetic)
    nose.tools.assert_true(claripy.backends.vsa.is_true(si2.UGT(si1)))

    # [0xfe, 0xff] >= [1, 2] (unsigned arithmetic)
    nose.tools.assert_true(claripy.backends.vsa.is_true(si2.UGE(si1)))

Example 20

Project: simuvex
Source File: test_memory.py
View license
def test_abstract_memory():
    """Exercise simuvex's abstract (region-based) memory model.

    Covers: loading/storing byte constants and StridedIntervals in the
    'global' region, default-value policies for uninitialized reads, and
    merging of states that stored different values at the same address.
    """
    # Concrete backing for the first four bytes of the global region.
    initial_memory = {0: 'A', 1: 'B', 2: 'C', 3: 'D'}

    s = SimState(mode='static',
                 arch="AMD64",
                 memory_backer=initial_memory,
                 add_options={simuvex.o.ABSTRACT_SOLVER, simuvex.o.ABSTRACT_MEMORY})
    se = s.se

    def to_vs(region, offset):
        # Build a ValueSet address pointing at `offset` inside `region`.
        return s.se.VS(s.arch.bits, region, 0, offset)

    # Load a single-byte constant from global region
    expr = s.memory.load(to_vs('global', 2), 1)
    nose.tools.assert_equal(s.se.any_int(expr), 0x43)
    nose.tools.assert_equal(s.se.max_int(expr), 0x43)
    nose.tools.assert_equal(s.se.min_int(expr), 0x43)

    # Store a single-byte constant to global region
    s.memory.store(to_vs('global', 1), s.se.BVV(ord('D'), 8), 1)
    expr = s.memory.load(to_vs('global', 1), 1)
    nose.tools.assert_equal(s.se.any_int(expr), 0x44)

    # Store a single-byte StridedInterval to global region
    si_0 = s.se.BVS('unnamed', 8, 10, 20, 2)
    s.memory.store(to_vs('global', 4), si_0)

    # Load the single-byte StridedInterval from global region
    expr = s.memory.load(to_vs('global', 4), 1)
    nose.tools.assert_equal(s.se.min_int(expr), 10)
    nose.tools.assert_equal(s.se.max_int(expr), 20)
    nose.tools.assert_equal(s.se.any_n_int(expr, 100), [10, 12, 14, 16, 18, 20])

    # Store a two-byte StridedInterval object to global region
    si_1 = s.se.BVS('unnamed', 16, 10, 20, 2)
    s.memory.store(to_vs('global', 5), si_1)

    # Load the two-byte StridedInterval object from global region
    expr = s.memory.load(to_vs('global', 5), 2)
    nose.tools.assert_true(claripy.backends.vsa.identical(expr, si_1))

    # Store a four-byte StridedInterval object to global region
    si_2 = s.se.BVS('unnamed', 32, 8000, 9000, 2)
    s.memory.store(to_vs('global', 7), si_2)

    # Load the four-byte StridedInterval object from global region
    expr = s.memory.load(to_vs('global', 7), 4)
    nose.tools.assert_true(claripy.backends.vsa.identical(expr, s.se.BVS('unnamed', 32, 8000, 9000, 2)))

    # Test default values
    s.options.remove(simuvex.o.SYMBOLIC_INITIAL_VALUES)
    expr = s.memory.load(to_vs('global', 100), 4)
    nose.tools.assert_true(claripy.backends.vsa.identical(expr, s.se.BVS('unnamed', 32, 0, 0, 0)))

    # Test default values (symbolic)
    s.options.add(simuvex.o.SYMBOLIC_INITIAL_VALUES)
    expr = s.memory.load(to_vs('global', 104), 4)
    nose.tools.assert_true(claripy.backends.vsa.identical(expr, s.se.BVS('unnamed', 32, 0, 0xffffffff, 1)))
    nose.tools.assert_true(claripy.backends.vsa.identical(expr, s.se.BVS('unnamed', 32, -0x80000000, 0x7fffffff, 1)))

    #
    # Merging
    #

    # Merging two one-byte values
    s.memory.store(to_vs('function_merge', 0), s.se.BVS('unnamed', 8, 0x10, 0x10, 0))
    a = s.copy()
    a.memory.store(to_vs('function_merge', 0), s.se.BVS('unnamed', 8, 0x20, 0x20, 0))

    b = s.merge(a)[0]
    expr = b.memory.load(to_vs('function_merge', 0), 1)
    nose.tools.assert_true(claripy.backends.vsa.identical(expr, s.se.BVS('unnamed', 8, 0x10, 0x20, 0x10)))

    #  |  MO(value_0)  |
    #  |  MO(value_1)  |
    # 0x20          0x24
    # Merge one byte in value_0/1 means merging the entire MemoryObject
    a = s.copy()
    a.memory.store(to_vs('function_merge', 0x20), se.SI(bits=32, stride=0, lower_bound=0x100000, upper_bound=0x100000))
    b = s.copy()
    b.memory.store(to_vs('function_merge', 0x20), se.SI(bits=32, stride=0, lower_bound=0x100001, upper_bound=0x100001))
    c = a.merge(b)[0]
    expr = c.memory.load(to_vs('function_merge', 0x20), 4)
    nose.tools.assert_true(claripy.backends.vsa.identical(expr, se.SI(bits=32, stride=1, lower_bound=0x100000, upper_bound=0x100001)))
    c_mem = c.memory.regions['function_merge'].memory.mem
    # All four byte slots must be backed by one and the same merged
    # MemoryObject (previously 0x20 was listed twice, so 0x21 went unchecked).
    object_set = set([c_mem[0x20], c_mem[0x21], c_mem[0x22], c_mem[0x23]])
    nose.tools.assert_equal(len(object_set), 1)

    a = s.copy()
    a.memory.store(to_vs('function_merge', 0x20), se.SI(bits=32, stride=0x100000, lower_bound=0x100000, upper_bound=0x200000))
    b = s.copy()
    b.memory.store(to_vs('function_merge', 0x20), se.SI(bits=32, stride=0, lower_bound=0x300000, upper_bound=0x300000))
    c = a.merge(b)[0]
    expr = c.memory.load(to_vs('function_merge', 0x20), 4)
    nose.tools.assert_true(claripy.backends.vsa.identical(expr, se.SI(bits=32, stride=0x100000, lower_bound=0x100000, upper_bound=0x300000)))
    # Re-fetch the byte map: `c` was rebound above, so the previous c_mem
    # referred to the earlier merged state and made this check vacuous.
    c_mem = c.memory.regions['function_merge'].memory.mem
    object_set = set([c_mem[0x20], c_mem[0x21], c_mem[0x22], c_mem[0x23]])
    nose.tools.assert_equal(len(object_set), 1)

    #
    # Widening
    #

    a = s.se.SI(bits=32, stride=1, lower_bound=1, upper_bound=2)
    b = s.se.SI(bits=32, stride=1, lower_bound=1, upper_bound=3)
    a = a.reversed
    b = b.reversed

Example 21

Project: simuvex
Source File: test_memory.py
View license
def test_abstract_memory():
    """Exercise simuvex's abstract (region-based) memory model.

    Covers: loading/storing byte constants and StridedIntervals in the
    'global' region, default-value policies for uninitialized reads, and
    merging of states that stored different values at the same address.
    """
    # Concrete backing for the first four bytes of the global region.
    initial_memory = {0: 'A', 1: 'B', 2: 'C', 3: 'D'}

    s = SimState(mode='static',
                 arch="AMD64",
                 memory_backer=initial_memory,
                 add_options={simuvex.o.ABSTRACT_SOLVER, simuvex.o.ABSTRACT_MEMORY})
    se = s.se

    def to_vs(region, offset):
        # Build a ValueSet address pointing at `offset` inside `region`.
        return s.se.VS(s.arch.bits, region, 0, offset)

    # Load a single-byte constant from global region
    expr = s.memory.load(to_vs('global', 2), 1)
    nose.tools.assert_equal(s.se.any_int(expr), 0x43)
    nose.tools.assert_equal(s.se.max_int(expr), 0x43)
    nose.tools.assert_equal(s.se.min_int(expr), 0x43)

    # Store a single-byte constant to global region
    s.memory.store(to_vs('global', 1), s.se.BVV(ord('D'), 8), 1)
    expr = s.memory.load(to_vs('global', 1), 1)
    nose.tools.assert_equal(s.se.any_int(expr), 0x44)

    # Store a single-byte StridedInterval to global region
    si_0 = s.se.BVS('unnamed', 8, 10, 20, 2)
    s.memory.store(to_vs('global', 4), si_0)

    # Load the single-byte StridedInterval from global region
    expr = s.memory.load(to_vs('global', 4), 1)
    nose.tools.assert_equal(s.se.min_int(expr), 10)
    nose.tools.assert_equal(s.se.max_int(expr), 20)
    nose.tools.assert_equal(s.se.any_n_int(expr, 100), [10, 12, 14, 16, 18, 20])

    # Store a two-byte StridedInterval object to global region
    si_1 = s.se.BVS('unnamed', 16, 10, 20, 2)
    s.memory.store(to_vs('global', 5), si_1)

    # Load the two-byte StridedInterval object from global region
    expr = s.memory.load(to_vs('global', 5), 2)
    nose.tools.assert_true(claripy.backends.vsa.identical(expr, si_1))

    # Store a four-byte StridedInterval object to global region
    si_2 = s.se.BVS('unnamed', 32, 8000, 9000, 2)
    s.memory.store(to_vs('global', 7), si_2)

    # Load the four-byte StridedInterval object from global region
    expr = s.memory.load(to_vs('global', 7), 4)
    nose.tools.assert_true(claripy.backends.vsa.identical(expr, s.se.BVS('unnamed', 32, 8000, 9000, 2)))

    # Test default values
    s.options.remove(simuvex.o.SYMBOLIC_INITIAL_VALUES)
    expr = s.memory.load(to_vs('global', 100), 4)
    nose.tools.assert_true(claripy.backends.vsa.identical(expr, s.se.BVS('unnamed', 32, 0, 0, 0)))

    # Test default values (symbolic)
    s.options.add(simuvex.o.SYMBOLIC_INITIAL_VALUES)
    expr = s.memory.load(to_vs('global', 104), 4)
    nose.tools.assert_true(claripy.backends.vsa.identical(expr, s.se.BVS('unnamed', 32, 0, 0xffffffff, 1)))
    nose.tools.assert_true(claripy.backends.vsa.identical(expr, s.se.BVS('unnamed', 32, -0x80000000, 0x7fffffff, 1)))

    #
    # Merging
    #

    # Merging two one-byte values
    s.memory.store(to_vs('function_merge', 0), s.se.BVS('unnamed', 8, 0x10, 0x10, 0))
    a = s.copy()
    a.memory.store(to_vs('function_merge', 0), s.se.BVS('unnamed', 8, 0x20, 0x20, 0))

    b = s.merge(a)[0]
    expr = b.memory.load(to_vs('function_merge', 0), 1)
    nose.tools.assert_true(claripy.backends.vsa.identical(expr, s.se.BVS('unnamed', 8, 0x10, 0x20, 0x10)))

    #  |  MO(value_0)  |
    #  |  MO(value_1)  |
    # 0x20          0x24
    # Merge one byte in value_0/1 means merging the entire MemoryObject
    a = s.copy()
    a.memory.store(to_vs('function_merge', 0x20), se.SI(bits=32, stride=0, lower_bound=0x100000, upper_bound=0x100000))
    b = s.copy()
    b.memory.store(to_vs('function_merge', 0x20), se.SI(bits=32, stride=0, lower_bound=0x100001, upper_bound=0x100001))
    c = a.merge(b)[0]
    expr = c.memory.load(to_vs('function_merge', 0x20), 4)
    nose.tools.assert_true(claripy.backends.vsa.identical(expr, se.SI(bits=32, stride=1, lower_bound=0x100000, upper_bound=0x100001)))
    c_mem = c.memory.regions['function_merge'].memory.mem
    # NOTE(review): c_mem[0x20] appears twice below — 0x21 was presumably
    # intended, so byte 0x21 goes unchecked; verify and fix upstream.
    object_set = set([ c_mem[0x20], c_mem[0x20], c_mem[0x22], c_mem[0x23]])
    nose.tools.assert_equal(len(object_set), 1)

    a = s.copy()
    a.memory.store(to_vs('function_merge', 0x20), se.SI(bits=32, stride=0x100000, lower_bound=0x100000, upper_bound=0x200000))
    b = s.copy()
    b.memory.store(to_vs('function_merge', 0x20), se.SI(bits=32, stride=0, lower_bound=0x300000, upper_bound=0x300000))
    c = a.merge(b)[0]
    expr = c.memory.load(to_vs('function_merge', 0x20), 4)
    nose.tools.assert_true(claripy.backends.vsa.identical(expr, se.SI(bits=32, stride=0x100000, lower_bound=0x100000, upper_bound=0x300000)))
    # NOTE(review): c_mem still refers to the *previous* merged state (`c`
    # was rebound above without refreshing c_mem), so this check is vacuous.
    object_set = set([c_mem[0x20], c_mem[0x20], c_mem[0x22], c_mem[0x23]])
    nose.tools.assert_equal(len(object_set), 1)

    #
    # Widening
    #

    a = s.se.SI(bits=32, stride=1, lower_bound=1, upper_bound=2)
    b = s.se.SI(bits=32, stride=1, lower_bound=1, upper_bound=3)
    a = a.reversed
    b = b.reversed

Example 22

Project: simuvex
Source File: test_string.py
View license
def test_memcpy():
    """Test the simulated memcpy over all combinations of concrete/symbolic
    source, destination, and length, including a huge fully-symbolic copy."""
    l.info("concrete src, concrete dst, concrete len")
    l.debug("... full copy")
    s = SimState(arch="AMD64", mode="symbolic")
    dst = s.se.BVV(0x41414141, 32)
    dst_addr = s.se.BVV(0x1000, 64)
    src = s.se.BVV(0x42424242, 32)
    src_addr = s.se.BVV(0x2000, 64)

    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)
    memcpy(s, inline=True, arguments=[dst_addr, src_addr, s.se.BVV(4, 64)])
    new_dst = s.memory.load(dst_addr, 4, endness='Iend_BE')
    nose.tools.assert_equal(s.se.any_n_str(new_dst, 2), [ "BBBB" ])

    l.info("giant copy")
    s = SimState(arch="AMD64", mode="symbolic", remove_options=simuvex.o.simplification)
    s.memory._maximum_symbolic_size = 0x2000000
    size = s.se.BVV(0x1000000, 64)
    dst_addr = s.se.BVV(0x2000000, 64)
    src_addr = s.se.BVV(0x4000000, 64)

    # A giant copy should alias the memory objects, not byte-copy them.
    memcpy(s, inline=True, arguments=[dst_addr, src_addr, size])
    nose.tools.assert_is(s.memory.load(dst_addr, size), s.memory.load(src_addr, size))

    l.debug("... partial copy")
    # NOTE: reuses dst/src values from the first section at the giant-copy
    # addresses; only 2 of the 4 bytes should be overwritten.
    s = SimState(arch="AMD64", mode="symbolic")
    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)
    memcpy(s, inline=True, arguments=[dst_addr, src_addr, s.se.BVV(2, 64)])
    new_dst = s.memory.load(dst_addr, 4, endness='Iend_BE')
    nose.tools.assert_equal(s.se.any_n_str(new_dst, 2), [ "BBAA" ])

    l.info("symbolic src, concrete dst, concrete len")
    s = SimState(arch="AMD64", mode="symbolic")
    dst = s.se.BVV(0x41414141, 32)
    dst_addr = s.se.BVV(0x1000, 64)
    src = s.se.BVS("src", 32)
    src_addr = s.se.BVV(0x2000, 64)

    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)

    # make sure it copies it all: dst must now be constrained equal to src
    memcpy(s, inline=True, arguments=[dst_addr, src_addr, s.se.BVV(4, 64)])
    nose.tools.assert_true(s.satisfiable())
    s.add_constraints(src != s.memory.load(dst_addr, 4))
    nose.tools.assert_false(s.satisfiable())

    l.info("symbolic src, concrete dst, symbolic len")
    s = SimState(arch="AMD64", mode="symbolic")
    dst = s.se.BVV(0x41414141, 32)
    dst_addr = s.se.BVV(0x1000, 64)
    src = s.se.BVS("src", 32)
    src_addr = s.se.BVV(0x2000, 64)
    cpylen = s.se.BVS("len", 64)

    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)
    memcpy(s, inline=True, arguments=[dst_addr, src_addr, cpylen])
    result = s.memory.load(dst_addr, 4, endness='Iend_BE')

    # With len == 1 only the first byte becomes symbolic (256 possibilities).
    s1 = s.copy()
    s1.add_constraints(cpylen == 1)
    nose.tools.assert_true(s1.se.unique(s1.memory.load(dst_addr+1, 3)))
    nose.tools.assert_equal(len(s1.se.any_n_int(s1.memory.load(dst_addr, 1), 300)), 256)

    # With len == 2 the first two bytes are symbolic, the tail stays "AA".
    s2 = s.copy()
    s2.add_constraints(cpylen == 2)
    nose.tools.assert_equal(len(s2.se.any_n_int(result[31:24], 300)), 256)
    nose.tools.assert_equal(len(s2.se.any_n_int(result[23:16], 300)), 256)
    nose.tools.assert_equal(s2.se.any_n_str(result[15:0], 300), [ 'AA' ])

    l.info("concrete src, concrete dst, symbolic len")
    dst = s2.se.BVV(0x41414141, 32)
    dst_addr = s2.se.BVV(0x1000, 64)
    src = s2.se.BVV(0x42424242, 32)
    src_addr = s2.se.BVV(0x2000, 64)

    s = SimState(arch="AMD64", mode="symbolic")
    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)
    cpylen = s.se.BVS("len", 64)

    # len in [0, 4] yields every prefix-overwrite of "AAAA" by "BBBB".
    s.add_constraints(s.se.ULE(cpylen, 4))
    memcpy(s, inline=True, arguments=[dst_addr, src_addr, cpylen])
    new_dst = s.memory.load(dst_addr, 4, endness='Iend_BE')
    nose.tools.assert_items_equal(s.se.any_n_str(new_dst, 300), [ 'AAAA', 'BAAA', 'BBAA', 'BBBA', 'BBBB' ])

Example 23

Project: simuvex
Source File: test_string.py
View license
def test_strncpy():
    """Test the simulated strncpy over concrete/symbolic sources and lengths,
    checking NUL-termination-aware copy semantics."""
    l.info("concrete src, concrete dst, concrete len")
    l.debug("... full copy")
    s = SimState(arch="AMD64", mode="symbolic")
    dst = s.se.BVV(0x41414100, 32)
    dst_addr = s.se.BVV(0x1000, 64)
    src = s.se.BVV(0x42420000, 32)
    src_addr = s.se.BVV(0x2000, 64)

    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)
    strncpy(s, inline=True, arguments=[dst_addr, src_addr, s.se.BVV(3, 64)])
    new_dst = s.memory.load(dst_addr, 4, endness='Iend_BE')
    # "BB" plus the terminating NUL get copied (3 bytes).
    nose.tools.assert_equal(s.se.any_str(new_dst), "BB\x00\x00")

    l.debug("... partial copy")
    s = SimState(arch="AMD64", mode="symbolic")
    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)
    strncpy(s, inline=True, arguments=[dst_addr, src_addr, s.se.BVV(2, 64)])
    new_dst = s.memory.load(dst_addr, 4, endness='Iend_BE')
    # With n == 2 the NUL is not copied; the old 'A' at offset 2 survives.
    nose.tools.assert_equal(s.se.any_n_str(new_dst, 2), [ "BBA\x00" ])

    l.info("symbolic src, concrete dst, concrete len")
    s = SimState(arch="AMD64", mode="symbolic")
    dst = s.se.BVV(0x41414100, 32)
    dst_addr = s.se.BVV(0x1000, 64)
    src = s.se.BVS("src", 32)
    src_addr = s.se.BVV(0x2000, 64)

    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)

    # make sure it copies it all
    s.add_constraints(strlen(s, inline=True, arguments=[src_addr]).ret_expr == 2)

    # sanity check
    s_false = s.copy()
    s_false.add_constraints(strlen(s_false, inline=True, arguments=[src_addr]).ret_expr == 3)
    nose.tools.assert_false(s_false.satisfiable())

    strncpy(s, inline=True, arguments=[dst_addr, src_addr, 3])
    nose.tools.assert_true(s.satisfiable())
    c = strcmp(s, inline=True, arguments=[dst_addr, src_addr]).ret_expr

    nose.tools.assert_items_equal(s.se.any_n_int(c, 10), [0])

    l.info("symbolic src, concrete dst, symbolic len")
    s = SimState(arch="AMD64", mode="symbolic")
    dst = s.se.BVV(0x41414100, 32)
    dst_addr = s.se.BVV(0x1000, 64)
    src = s.se.BVS("src", 32)
    src_addr = s.se.BVV(0x2000, 64)
    maxlen = s.se.BVS("len", 64)

    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)

    # make sure it copies it all
    s.add_constraints(strlen(s, inline=True, arguments=[src_addr]).ret_expr == 2)
    strncpy(s, inline=True, arguments=[dst_addr, src_addr, maxlen])
    c = strcmp(s, inline=True, arguments=[dst_addr, src_addr]).ret_expr

    # Strings compare equal only when the NUL was copied, i.e. maxlen >= 3.
    s_match = s.copy()
    s_match.add_constraints(c == 0)
    nose.tools.assert_equal(s_match.se.min_int(maxlen), 3)

    s_nomatch = s.copy()
    s_nomatch.add_constraints(c != 0)
    nose.tools.assert_equal(s_nomatch.se.max_int(maxlen), 2)

    l.info("concrete src, concrete dst, symbolic len")
    l.debug("... full copy")
    s = SimState(arch="AMD64", mode="symbolic")

    dst = s.se.BVV(0x41414100, 32)
    dst_addr = s.se.BVV(0x1000, 64)
    src = s.se.BVV(0x42420000, 32)
    src_addr = s.se.BVV(0x2000, 64)
    maxlen = s.se.BVS("len", 64)

    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)
    strncpy(s, inline=True, arguments=[dst_addr, src_addr, maxlen])
    r = s.memory.load(dst_addr, 4, endness='Iend_BE')
    # Every prefix of "BB\x00" overwriting "AAA\x00" is reachable.
    nose.tools.assert_items_equal(s.se.any_n_str(r, 10), [ "AAA\x00", 'BAA\x00', 'BBA\x00', 'BB\x00\x00' ] )

Example 24

Project: simuvex
Source File: test_string.py
View license
def test_memcpy():
    """Test the simulated memcpy over all combinations of concrete/symbolic
    source, destination, and length, including a huge fully-symbolic copy."""
    l.info("concrete src, concrete dst, concrete len")
    l.debug("... full copy")
    s = SimState(arch="AMD64", mode="symbolic")
    dst = s.se.BVV(0x41414141, 32)
    dst_addr = s.se.BVV(0x1000, 64)
    src = s.se.BVV(0x42424242, 32)
    src_addr = s.se.BVV(0x2000, 64)

    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)
    memcpy(s, inline=True, arguments=[dst_addr, src_addr, s.se.BVV(4, 64)])
    new_dst = s.memory.load(dst_addr, 4, endness='Iend_BE')
    nose.tools.assert_equal(s.se.any_n_str(new_dst, 2), [ "BBBB" ])

    l.info("giant copy")
    s = SimState(arch="AMD64", mode="symbolic", remove_options=simuvex.o.simplification)
    s.memory._maximum_symbolic_size = 0x2000000
    size = s.se.BVV(0x1000000, 64)
    dst_addr = s.se.BVV(0x2000000, 64)
    src_addr = s.se.BVV(0x4000000, 64)

    # A giant copy should alias the memory objects, not byte-copy them.
    memcpy(s, inline=True, arguments=[dst_addr, src_addr, size])
    nose.tools.assert_is(s.memory.load(dst_addr, size), s.memory.load(src_addr, size))

    l.debug("... partial copy")
    # NOTE(review): dst/src here are the 4-byte values from the first
    # section, stored at the giant-copy addresses — verify that's intended.
    s = SimState(arch="AMD64", mode="symbolic")
    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)
    memcpy(s, inline=True, arguments=[dst_addr, src_addr, s.se.BVV(2, 64)])
    new_dst = s.memory.load(dst_addr, 4, endness='Iend_BE')
    nose.tools.assert_equal(s.se.any_n_str(new_dst, 2), [ "BBAA" ])

    l.info("symbolic src, concrete dst, concrete len")
    s = SimState(arch="AMD64", mode="symbolic")
    dst = s.se.BVV(0x41414141, 32)
    dst_addr = s.se.BVV(0x1000, 64)
    src = s.se.BVS("src", 32)
    src_addr = s.se.BVV(0x2000, 64)

    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)

    # make sure it copies it all
    memcpy(s, inline=True, arguments=[dst_addr, src_addr, s.se.BVV(4, 64)])
    nose.tools.assert_true(s.satisfiable())
    s.add_constraints(src != s.memory.load(dst_addr, 4))
    nose.tools.assert_false(s.satisfiable())

    l.info("symbolic src, concrete dst, symbolic len")
    s = SimState(arch="AMD64", mode="symbolic")
    dst = s.se.BVV(0x41414141, 32)
    dst_addr = s.se.BVV(0x1000, 64)
    src = s.se.BVS("src", 32)
    src_addr = s.se.BVV(0x2000, 64)
    cpylen = s.se.BVS("len", 64)

    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)
    memcpy(s, inline=True, arguments=[dst_addr, src_addr, cpylen])
    result = s.memory.load(dst_addr, 4, endness='Iend_BE')

    # make sure it copies it all
    # With len == 1 only the first byte becomes symbolic (256 possibilities).
    s1 = s.copy()
    s1.add_constraints(cpylen == 1)
    nose.tools.assert_true(s1.se.unique(s1.memory.load(dst_addr+1, 3)))
    nose.tools.assert_equals(len(s1.se.any_n_int(s1.memory.load(dst_addr, 1), 300)), 256)

    # With len == 2 the first two bytes are symbolic, the tail stays "AA".
    s2 = s.copy()
    s2.add_constraints(cpylen == 2)
    nose.tools.assert_equals(len(s2.se.any_n_int(result[31:24], 300)), 256)
    nose.tools.assert_equals(len(s2.se.any_n_int(result[23:16], 300)), 256)
    nose.tools.assert_equals(s2.se.any_n_str(result[15:0], 300), [ 'AA' ])

    l.info("concrete src, concrete dst, symbolic len")
    dst = s2.se.BVV(0x41414141, 32)
    dst_addr = s2.se.BVV(0x1000, 64)
    src = s2.se.BVV(0x42424242, 32)
    src_addr = s2.se.BVV(0x2000, 64)

    s = SimState(arch="AMD64", mode="symbolic")
    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)
    cpylen = s.se.BVS("len", 64)

    # len in [0, 4] yields every prefix-overwrite of "AAAA" by "BBBB".
    s.add_constraints(s.se.ULE(cpylen, 4))
    memcpy(s, inline=True, arguments=[dst_addr, src_addr, cpylen])
    new_dst = s.memory.load(dst_addr, 4, endness='Iend_BE')
    nose.tools.assert_items_equal(s.se.any_n_str(new_dst, 300), [ 'AAAA', 'BAAA', 'BBAA', 'BBBA', 'BBBB' ])

Example 25

Project: simuvex
Source File: test_string.py
View license
def test_strncpy():
    """Test the simulated strncpy over concrete/symbolic sources and lengths,
    checking NUL-termination-aware copy semantics."""
    l.info("concrete src, concrete dst, concrete len")
    l.debug("... full copy")
    s = SimState(arch="AMD64", mode="symbolic")
    dst = s.se.BVV(0x41414100, 32)
    dst_addr = s.se.BVV(0x1000, 64)
    src = s.se.BVV(0x42420000, 32)
    src_addr = s.se.BVV(0x2000, 64)

    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)
    strncpy(s, inline=True, arguments=[dst_addr, src_addr, s.se.BVV(3, 64)])
    new_dst = s.memory.load(dst_addr, 4, endness='Iend_BE')
    # "BB" plus the terminating NUL get copied (3 bytes).
    nose.tools.assert_equal(s.se.any_str(new_dst), "BB\x00\x00")

    l.debug("... partial copy")
    s = SimState(arch="AMD64", mode="symbolic")
    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)
    strncpy(s, inline=True, arguments=[dst_addr, src_addr, s.se.BVV(2, 64)])
    new_dst = s.memory.load(dst_addr, 4, endness='Iend_BE')
    # With n == 2 the NUL is not copied; the old 'A' at offset 2 survives.
    nose.tools.assert_equal(s.se.any_n_str(new_dst, 2), [ "BBA\x00" ])

    l.info("symbolic src, concrete dst, concrete len")
    s = SimState(arch="AMD64", mode="symbolic")
    dst = s.se.BVV(0x41414100, 32)
    dst_addr = s.se.BVV(0x1000, 64)
    src = s.se.BVS("src", 32)
    src_addr = s.se.BVV(0x2000, 64)

    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)

    # make sure it copies it all
    s.add_constraints(strlen(s, inline=True, arguments=[src_addr]).ret_expr == 2)

    # sanity check
    s_false = s.copy()
    s_false.add_constraints(strlen(s_false, inline=True, arguments=[src_addr]).ret_expr == 3)
    nose.tools.assert_false(s_false.satisfiable())

    strncpy(s, inline=True, arguments=[dst_addr, src_addr, 3])
    nose.tools.assert_true(s.satisfiable())
    c = strcmp(s, inline=True, arguments=[dst_addr, src_addr]).ret_expr

    nose.tools.assert_items_equal(s.se.any_n_int(c, 10), [0])

    l.info("symbolic src, concrete dst, symbolic len")
    s = SimState(arch="AMD64", mode="symbolic")
    dst = s.se.BVV(0x41414100, 32)
    dst_addr = s.se.BVV(0x1000, 64)
    src = s.se.BVS("src", 32)
    src_addr = s.se.BVV(0x2000, 64)
    maxlen = s.se.BVS("len", 64)

    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)

    # make sure it copies it all
    s.add_constraints(strlen(s, inline=True, arguments=[src_addr]).ret_expr == 2)
    strncpy(s, inline=True, arguments=[dst_addr, src_addr, maxlen])
    c = strcmp(s, inline=True, arguments=[dst_addr, src_addr]).ret_expr

    # Strings compare equal only when the NUL was copied, i.e. maxlen >= 3.
    s_match = s.copy()
    s_match.add_constraints(c == 0)
    nose.tools.assert_equals(s_match.se.min_int(maxlen), 3)

    s_nomatch = s.copy()
    s_nomatch.add_constraints(c != 0)
    nose.tools.assert_equals(s_nomatch.se.max_int(maxlen), 2)

    l.info("concrete src, concrete dst, symbolic len")
    l.debug("... full copy")
    s = SimState(arch="AMD64", mode="symbolic")

    dst = s.se.BVV(0x41414100, 32)
    dst_addr = s.se.BVV(0x1000, 64)
    src = s.se.BVV(0x42420000, 32)
    src_addr = s.se.BVV(0x2000, 64)
    maxlen = s.se.BVS("len", 64)

    s.memory.store(dst_addr, dst)
    s.memory.store(src_addr, src)
    strncpy(s, inline=True, arguments=[dst_addr, src_addr, maxlen])
    r = s.memory.load(dst_addr, 4, endness='Iend_BE')
    #print repr(r.se.any_n_str(10))
    # Every prefix of "BB\x00" overwriting "AAA\x00" is reachable.
    nose.tools.assert_items_equal(s.se.any_n_str(r, 10), [ "AAA\x00", 'BAA\x00', 'BBA\x00', 'BB\x00\x00' ] )

Example 26

Project: pyblish-base
Source File: test_logic.py
View license
@with_setup(setup_empty, teardown)
def test_order():
    """Ordering with util.publish works fine"""

    # Shared log: each plug-in appends its digit when it runs, so the final
    # string records the actual execution order.
    call_log = {"#": "0"}

    class SelectInstance(pyblish.api.Selector):
        def process_context(self, context):
            call_log["#"] += "1"
            instance = context.create_instance("MyInstance")
            instance.set_data("family", "myFamily")

    class Validator1(pyblish.plugin.Validator):
        def process_instance(self, instance):
            call_log["#"] += "2"

    class Validator2(pyblish.plugin.Validator):
        order = pyblish.plugin.Validator.order + 0.1

        def process_instance(self, instance):
            call_log["#"] += "3"

    class Validator3(pyblish.plugin.Validator):
        order = pyblish.plugin.Validator.order + 0.2

        def process_instance(self, instance):
            call_log["#"] += "4"

    class Extractor1(pyblish.plugin.Extractor):
        def process_instance(self, instance):
            call_log["#"] += "5"

    class Extractor2(pyblish.plugin.Extractor):
        order = pyblish.plugin.Extractor.order + 0.1

        def process_instance(self, instance):
            call_log["#"] += "6"

    # Register in reverse of the expected execution order; publish() must
    # still run the plug-ins sorted by their `order` attribute.
    registration_order = [Extractor2, Extractor1, Validator3,
                          Validator2, Validator1, SelectInstance]
    for each in registration_order:
        pyblish.plugin.register_plugin(each)

    pyblish.util.publish()
    assert_equal(call_log["#"], "0123456")

Example 27

Project: pycassa
Source File: test_columnfamily.py
View license
    @requireOPP
    def test_get_range_batching(self):
        """get_range() must return exactly row_count rows (or all rows when
        row_count is omitted) regardless of the internal buffer_size used
        for paging through the keyspace."""
        cf.truncate()

        keys = []
        columns = {'c': 'v'}

        def check_range(expected, **range_kwargs):
            # Iterate a range with the given kwargs; every yielded key must
            # be one we inserted, and the total must equal `expected`.
            count = 0
            for k, v in cf.get_range(**range_kwargs):
                assert_true(k in keys, 'key "%s" should be in keys' % k)
                count += 1
            assert_equal(count, expected)

        for i in range(100, 201):
            keys.append('key%d' % i)
            cf.insert('key%d' % i, columns)

        for i in range(201, 301):
            cf.insert('key%d' % i, columns)

        # Only the first 100 keys are expected while row_count == 100.
        for buffer_size in (10, 1000, 150, 7, 2):
            check_range(100, row_count=100, buffer_size=buffer_size)

        # Put the remaining keys in our list
        for i in range(201, 301):
            keys.append('key%d' % i)

        # A row_count larger than the data returns all 201 rows.
        for buffer_size in (2, 7, 200, 10000):
            check_range(201, row_count=10000, buffer_size=buffer_size)

        # Don't give a row count; all 201 rows come back.
        for buffer_size in (2, 77, 200, 10000):
            check_range(201, buffer_size=buffer_size)

        cf.truncate()

Example 28

Project: pycassa
Source File: test_connection_pooling.py
View license
    def test_queue_pool_threadlocal(self):
        """With use_threadlocal=True, repeated get() calls from one thread
        reuse a single connection, while separate threads check out and
        return distinct connections from the prefilled pool."""
        stats_logger = StatsLoggerWithListStorage()
        # Prefilled pool of 5; stats are observed through the listener.
        pool = ConnectionPool(pool_size=5, max_overflow=5, recycle=10000,
                         prefill=True, pool_timeout=0.01, timeout=1,
                         keyspace='PycassaTestKeyspace', credentials=_credentials,
                         listeners=[stats_logger], use_threadlocal=True)
        conns = []

        assert_equal(stats_logger.stats['created']['success'], 5)
        # These connections should all be the same
        for i in range(10):
            conns.append(pool.get())
        assert_equal(stats_logger.stats['created']['success'], 5)
        assert_equal(stats_logger.stats['checked_out'], 1)

        # Returning the same threadlocal connection repeatedly only counts
        # as a single check-in.
        for i in range(0, 5):
            pool.return_conn(conns[i])
        assert_equal(stats_logger.stats['checked_in'], 1)
        for i in range(5, 10):
            pool.return_conn(conns[i])
        assert_equal(stats_logger.stats['checked_in'], 1)

        conns = []

        assert_equal(stats_logger.stats['created']['success'], 5)
        # A single connection should come from the pool
        for i in range(5):
            conns.append(pool.get())
        assert_equal(stats_logger.stats['created']['success'], 5)
        assert_equal(stats_logger.stats['checked_out'], 2)

        for conn in conns:
            pool.return_conn(conn)

        conns = []
        threads = []
        stats_logger.reset()

        def checkout_return():
            # Each thread gets its own threadlocal connection, holds it
            # briefly, then returns it.
            conn = pool.get()
            time.sleep(1)
            pool.return_conn(conn)

        for i in range(5):
            threads.append(threading.Thread(target=checkout_return))
            threads[-1].start()
        for thread in threads:
            thread.join()

        assert_equal(stats_logger.stats['created']['success'], 0) # Still 5 connections in pool
        assert_equal(stats_logger.stats['checked_out'], 5)
        assert_equal(stats_logger.stats['checked_in'], 5)

        # These should come from the pool
        threads = []
        for i in range(5):
            threads.append(threading.Thread(target=checkout_return))
            threads[-1].start()
        for thread in threads:
            thread.join()
        assert_equal(stats_logger.stats['created']['success'], 0)
        assert_equal(stats_logger.stats['checked_out'], 10)
        assert_equal(stats_logger.stats['checked_in'], 10)

        pool.dispose()

Example 29

Project: pycassa
Source File: test_pool_logger.py
View license
    def test_pool(self):
        """Check that the StatsLogger listener observes creations, checkouts,
        check-ins, overflow disposal, and double-return errors correctly."""
        listener = StatsLogger()
        # pool_size=5 prefilled, plus up to 5 overflow connections.
        pool = ConnectionPool(pool_size=5, max_overflow=5, recycle=10000,
                              prefill=True, pool_timeout=0.1, timeout=1,
                              keyspace='PycassaTestKeyspace', credentials=_credentials,
                              listeners=[listener], use_threadlocal=False)
        conns = []
        for i in range(10):
            conns.append(pool.get())
        assert_equal(listener.stats['created']['success'], 10)
        assert_equal(listener.stats['created']['failure'], 0)
        assert_equal(listener.stats['checked_out'], 10)
        assert_equal(listener.stats['opened'], {'current': 10, 'max': 10})

        # Pool is maxed out now
        assert_raises(NoConnectionAvailable, pool.get)
        assert_equal(listener.stats['created']['success'], 10)
        assert_equal(listener.stats['checked_out'], 10)
        assert_equal(listener.stats['opened'], {'current': 10, 'max': 10})
        assert_equal(listener.stats['at_max'], 1)

        # Returning the first 5 refills the pool without disposing anything.
        for i in range(0, 5):
            pool.return_conn(conns[i])
        assert_equal(listener.stats['disposed']['success'], 0)
        assert_equal(listener.stats['checked_in'], 5)
        assert_equal(listener.stats['opened'], {'current': 5, 'max': 10})

        # The next 5 are overflow connections and get disposed on return.
        for i in range(5, 10):
            pool.return_conn(conns[i])
        assert_equal(listener.stats['disposed']['success'], 5)
        assert_equal(listener.stats['checked_in'], 10)

        conns = []

        # These connections should come from the pool
        for i in range(5):
            conns.append(pool.get())
        assert_equal(listener.stats['created']['success'], 10)
        assert_equal(listener.stats['checked_out'], 15)

        # But these will need to be made
        for i in range(5):
            conns.append(pool.get())
        assert_equal(listener.stats['created']['success'], 15)
        assert_equal(listener.stats['checked_out'], 20)

        assert_equal(listener.stats['disposed']['success'], 5)
        for i in range(10):
            conns[i].return_to_pool()
        assert_equal(listener.stats['checked_in'], 20)
        assert_equal(listener.stats['disposed']['success'], 10)

        # Returning an already-returned connection raises and leaves the
        # stats untouched.
        assert_raises(InvalidRequestError, conns[0].return_to_pool)
        assert_equal(listener.stats['checked_in'], 20)
        assert_equal(listener.stats['disposed']['success'], 10)

        # NOTE(review): leftover Python 2 debug print — consider removing.
        print "in test:", id(conns[-1])
        conns[-1].return_to_pool()
        assert_equal(listener.stats['checked_in'], 20)
        assert_equal(listener.stats['disposed']['success'], 10)

        pool.dispose()

Example 30

Project: pystruct
Source File: test_latent_svm.py
View license
def test_switch_to_ad3():
    """Smoke test: LatentSSVM/OneSlackSSVM switching inference from 'qpbo'
    to 'ad3' mid-fit.

    Uses a deliberately imperfect latent initialization so the solver has
    real work to do, then checks that the switch happened, predictions are
    exact on the training set, and the test score is high but not perfect.
    """
    if not get_installed(['qpbo']) or not get_installed(['ad3']):
        return
    X, Y = generate_crosses(n_samples=20, noise=5, n_crosses=1, total_size=8)
    X_test, Y_test = X[10:], Y[10:]
    X, Y = X[:10], Y[:10]

    crf = LatentGridCRF(n_states_per_label=2,
                        inference_method='qpbo')
    crf.initialize(X, Y)
    H_init = crf.init_latent(X, Y)

    # Corrupt ~30% of the latent init (round down to the even sub-state)
    # to make the starting point less perfect.
    np.random.seed(0)
    mask = np.random.uniform(size=H_init.shape) > .7
    H_init[mask] = 2 * (H_init[mask] / 2)

    base_ssvm = OneSlackSSVM(crf, inactive_threshold=1e-8, cache_tol=.0001,
                             inference_cache=50, max_iter=10000,
                             switch_to=('ad3', {'branch_and_bound': True}),
                             C=10. ** 3)
    clf = LatentSSVM(base_ssvm)

    # Silence ad3's C-level output by pointing fd 1 at /dev/null.
    # BUG FIX: the original leaked both the /dev/null file object and the
    # duplicated stdout fd, and never restored stdout if fit() raised.
    devnull = None
    oldstdout_fno = None
    try:
        devnull = open('/dev/null', 'w')
        oldstdout_fno = os.dup(sys.stdout.fileno())
        os.dup2(devnull.fileno(), 1)
        replaced_stdout = True
    except Exception:
        replaced_stdout = False

    try:
        clf.fit(X, Y, H_init=H_init)
    finally:
        if replaced_stdout:
            os.dup2(oldstdout_fno, 1)
        if oldstdout_fno is not None:
            os.close(oldstdout_fno)
        if devnull is not None:
            devnull.close()

    assert_equal(clf.model.inference_method[0], 'ad3')

    Y_pred = clf.predict(X)

    assert_array_equal(np.array(Y_pred), Y)
    # test that score is not always 1
    assert_true(.98 < clf.score(X_test, Y_test) < 1)

Example 31

Project: RMG-Py
Source File: isomorphismTest.py
View license
def run_parameter_tests():
    """Nose test generator: compare isomorphism results for single-atom
    molecules/groups over combinations of element, unpaired electrons
    and charge.

    Each combination is checked eagerly; failures are re-yielded as
    ``(failed, args)`` so nose reports every failing combination instead
    of aborting at the first one, and a final ``(success,)`` guarantees
    that at least one test is always yielded.
    """

    def failed(*args):
        # Placeholder test re-raised by nose with the offending args.
        raise AssertionError

    def exception(exc):
        # Placeholder test re-raising an unexpected (non-assertion) error.
        raise exc

    def success():
        # Trivial passing test so the generator never yields nothing.
        assert_equal(True, True)

    def isIsomorphic_mol_atom_types(e1, e2, u1, u2, c1, c2):
        """Check Molecule.isIsomorphic between two single-atom molecules
        against the expected result of the parameter comparison
        (element / unpaired electrons / charge)."""
        mol1, adjList1 = createMolecule(e1, u1, c1)
        mol2, adjList2 = createMolecule(e2, u2, c2)

        exp = mol_atom_type_comparison(e1, e2, u1, u2, c1, c2)
        err = "\nGraph 1: {0},\nGraph 2: {1}. \nExpected: {2}".format(adjList1, adjList2, exp)

        if mol1 is not None and mol2 is not None:
            calc = mol1.isIsomorphic(mol2)
            assert_equal(calc, exp, err)

    def findIsomorphisms_mol_atom_types(e1, e2, u1, u2, c1, c2):
        """Check that Molecule.findIsomorphism agrees with the expected
        comparison of two single-atom molecules."""
        mol1, adjList1 = createMolecule(e1, u1, c1)
        mol2, adjList2 = createMolecule(e2, u2, c2)

        exp = mol_atom_type_comparison(e1, e2, u1, u2, c1, c2)
        err = "\nGraph 1: {0},\nGraph 2: {1}. \nExpected: {2}".format(adjList1, adjList2, exp)

        if mol1 is not None and mol2 is not None:
            calc = len(mol1.findIsomorphism(mol2)) > 0
            assert_equal(calc, exp, err)

    def isSubgraphIsomorphic_mol_atom_types(e1, e2, u1, u2, c1, c2):
        """Check Molecule.isSubgraphIsomorphic of a single-atom molecule
        against a single-atom group built from the same parameters."""
        mol1, adjList1 = createMolecule(e1, u1, c1)
        group1, adjList2 = createGroup(e2, u2, c2)

        # String comparison gives us the expected value.
        exp = mol_atom_type_comparison(e1, e2, u1, u2, c1, c2)
        err = "\nGraph 1: {0},\nGraph 2: {1}. \nExpected: {2}".format(adjList1, adjList2, exp)

        if mol1 is not None and group1 is not None:
            calc = mol1.isSubgraphIsomorphic(group1)
            assert_equal(calc, exp, err)

    def findSubgraphIsomorphisms_mol_atom_types(e1, e2, u1, u2, c1, c2):
        """Check Molecule.findSubgraphIsomorphisms of a single-atom
        molecule against a single-atom group."""
        mol1, adjList1 = createMolecule(e1, u1, c1)
        group1, adjList2 = createGroup(e2, u2, c2)

        # String comparison gives us the expected value.
        exp = mol_atom_type_comparison(e1, e2, u1, u2, c1, c2)
        err = "\nGraph 1: {0},\nGraph 2: {1}. \nExpected: {2}".format(adjList1, adjList2, exp)

        if mol1 is not None and group1 is not None:
            calc = len(mol1.findSubgraphIsomorphisms(group1)) > 0
            assert_equal(calc, exp, err)

    output = load_cases_molecule_atom_types()
    for args in output:
        try:
            isIsomorphic_mol_atom_types(*args)
            findIsomorphisms_mol_atom_types(*args)
            isSubgraphIsomorphic_mol_atom_types(*args)
            # BUG FIX: this check was defined above but never invoked.
            findSubgraphIsomorphisms_mol_atom_types(*args)

        except AssertionError:
            yield (failed, args)

        except Exception as e:
            yield (exception, e)

    def isIsomorphic_mol_group_atom_types(e1, e2, u1, u2, c1, c2):
        """Molecule-vs-group subgraph isomorphism where the expected value
        comes from comparing the *perceived* atom types of both graphs."""
        mol1, adjList1 = createMolecule(e1, u1, c1)
        group1, adjList2 = createGroup(e2, u2, c2)
        if mol1 is not None and group1 is not None:
            a1 = mol1.atoms[0].atomType
            a2 = group1.atoms[0].atomType[0]
            # String comparison gives us the expected value.
            exp = group_atom_type_comparison(a1, a2, u1, u2, c1, c2)
            err = "\nGraph 1: {0},\nGraph 2: {1}. \nExpected: {2}".format(adjList1, adjList2, exp)

            calc = mol1.isSubgraphIsomorphic(group1)
            assert_equal(calc, exp, err)

    def findSubgraphIsomorphisms_mol_group_atom_types(e1, e2, u1, u2, c1, c2):
        """Same check as above, but through findSubgraphIsomorphisms."""
        mol1, adjList1 = createMolecule(e1, u1, c1)
        group1, adjList2 = createGroup(e2, u2, c2)
        if mol1 is not None and group1 is not None:
            a1 = mol1.atoms[0].atomType
            a2 = group1.atoms[0].atomType[0]
            exp = group_atom_type_comparison(a1, a2, u1, u2, c1, c2)
            err = "\nGraph 1: {0},\nGraph 2: {1}. \nExpected: {2}".format(adjList1, adjList2, exp)

            calc = len(mol1.findSubgraphIsomorphisms(group1)) > 0
            assert_equal(calc, exp, err)

    output = load_cases_group_atom_types()
    for args in output:
        try:
            isIsomorphic_mol_group_atom_types(*args)
            findSubgraphIsomorphisms_mol_group_atom_types(*args)
        except AssertionError:
            yield (failed, args)
        except Exception as e:
            yield (exception, e)

    # Make sure that one test is always returned
    yield (success, )

Example 32

Project: meza
Source File: test_process.py
View license
    def test_merge(self):
        """Exercise pr.merge: combining dicts with various predicates,
        reduction operators, and defaults."""
        # Default merge: later records win on key collisions.
        merged = pr.merge([{'a': 1, 'b': 2}, {'b': 10, 'c': 11}])
        nt.assert_equal({'a': 1, 'b': 10, 'c': 11}, merged)

        records = [{'a': 1, 'b': 2, 'c': 3}, {'b': 4, 'c': 5, 'd': 6}]

        # Sum values over all keys.
        merged = pr.merge(records, pred=bool, op=sum)
        nt.assert_equal({u'a': 1, u'c': 8, u'b': 6, u'd': 6}, merged)

        # Keep the first non-None value seen for each key.
        first_non_none = lambda pair: next(filter(partial(is_not, None), pair))
        merged = pr.merge(
            records, pred=bool, op=first_non_none, default=None)
        nt.assert_equal({u'a': 1, u'b': 2, u'c': 3, u'd': 6}, merged)

        # Average the values (only reliable for exactly 2 records).
        merged = pr.merge(records, pred=bool, op=stats.mean, default=None)
        nt.assert_equal({u'a': 1, u'b': 3.0, u'c': 4.0, u'd': 6.0}, merged)

        # Restrict combining to key 'b' only.
        merged = pr.merge(records, pred='b', op=sum)
        nt.assert_equal({u'a': 1, u'b': 6, u'c': 5, u'd': 6}, merged)

        # Restrict combining to keys whose value matches that of 'b'.
        merged = pr.merge(records, pred=itemgetter('b'), op=sum)
        nt.assert_equal({u'a': 1, u'b': 6, u'c': 5, u'd': 6}, merged)

        # Mean via separate sum and count passes works for any number of
        # records.
        records = [
            {'a': 1, 'b': 4, 'c': 0},
            {'a': 2, 'b': 5, 'c': 2},
            {'a': 3, 'b': 6, 'd': 7}]

        counted = defaultdict(int)
        for record in records:
            for key in record.keys():
                counted[key] += 1
        nt.assert_equal({u'a': 3, u'b': 3, u'c': 2, u'd': 1}, counted)

        summed = pr.merge(records, pred=bool, op=sum)
        nt.assert_equal({u'a': 6, u'b': 15, u'c': 2, u'd': 7}, summed)

        expected = {u'a': 2.0, u'b': 5.0, u'c': 1.0, u'd': 7.0}
        merged = pr.merge(
            [summed, counted], pred=bool, op=ft.fpartial(truediv))
        nt.assert_equal(expected, merged)

        # ...or in a single pass, with a (sum, count) accumulator per key.
        merged = pr.merge(
            records, pred=bool, op=ft.fpartial(ft.sum_and_count),
            default=None)
        result = {key: truediv(*val) for key, val in merged.items()}
        nt.assert_equal(expected, result)

Example 33

Project: cassandra-dtest
Source File: cdc_test.py
View license
def _write_to_cdc_WriteFailure(session, insert_stmt):
    """Insert rows via `insert_stmt` until the space designated for CDC
    commitlogs is overrun and Cassandra raises a WriteFailure.

    Returns the number of rows successfully loaded before the failure.
    Any error other than WriteFailure fails the test, as does taking
    longer than 10 minutes to hit the failure.
    """
    prepared = session.prepare(insert_stmt)
    start = time.time()
    rows_loaded = 0
    rate_limited_debug = get_rate_limited_function(debug, 5)

    while True:
        # We want to fail if inserting data takes too long. Locally this
        # takes about 10s, but let's be generous.
        assert_less_equal(
            (time.time() - start), 600,
            "It's taken more than 10 minutes to reach a WriteFailure trying "
            'to overrun the space designated for CDC commitlogs. This could '
            "be because data isn't being written quickly enough in this "
            'environment, or because C* is failing to reject writes when '
            'it should.'
        )

        # Emits progress at most once every 5 seconds.
        rate_limited_debug(
            '  data load step has lasted {s:.2f}s, '
            'loaded {r} rows'.format(s=(time.time() - start), r=rows_loaded))

        # Fire off 1000 inserts. Errors are collected in the results
        # instead of raised, because we expect (and want) WriteFailures.
        batch_results = list(execute_concurrent(
            session,
            ((prepared, ()) for _ in range(1000)),
            concurrency=500,
            raise_on_first_error=False
        ))

        # Count the statements that completed successfully...
        rows_loaded += sum(1 for success, _ in batch_results if success)
        # ...and insist that every failure is a WriteFailure.
        unexpected = [result for (success, result) in batch_results
                      if not success and not isinstance(result, WriteFailure)]
        assert_equal([], unexpected)

        # A WriteFailure means we've filled the CDC space: log and stop.
        if any(isinstance(result, WriteFailure)
               for (_, result) in batch_results):
            debug("write failed (presumably because we've overrun "
                  'designated CDC commitlog space) after '
                  'loading {r} rows in {s:.2f}s'.format(
                      r=rows_loaded,
                      s=time.time() - start))
            return rows_loaded

Example 34

Project: rootpy
Source File: test_histfactory.py
View license
@requires_ROOT(histfactory.MIN_ROOT_VERSION, exception=SkipTest)
def test_histfactory():
    """End-to-end smoke test of the histfactory wrappers.

    Builds Samples with histo-systematics and norm-factors, combines them
    into Channels and a Measurement, writes the resulting workspace, and
    checks the add/remove accessors plus split_norm_shape.
    """
    # create some Samples
    data = Data('data')
    data.hist = get_random_hist()
    a = Sample('QCD')
    b = Sample('QCD')

    for sample in (a, b):
        sample.hist = get_random_hist()
        # include some histosysts (high/low variation histograms)
        for sysname in ('x', 'y', 'z'):
            histosys = HistoSys(sysname)
            histosys.high = get_random_hist()
            histosys.low = get_random_hist()
            sample.AddHistoSys(histosys)
        # include some normfactors (free normalizations in [0, 2])
        for normname in ('x', 'y', 'z'):
            norm = NormFactor(normname)
            norm.value = 1
            norm.high = 2
            norm.low = 0
            norm.const = False
            sample.AddNormFactor(norm)

    # samples must be compatible here; both `+` and sum() should work
    # (results intentionally unused beyond not raising)
    c = a + b
    c = sum([a, b])

    # create Channels
    channel_a = Channel('VBF')
    channel_a.data = data
    channel_a.AddSample(a)

    channel_b = Channel('VBF')
    channel_b.data = data
    channel_b.AddSample(b)

    # channel combination should also work via `+` and sum()
    combined_channel = channel_a + channel_b
    combined_channel = sum([channel_a, channel_b])

    # create a Measurement
    meas = Measurement('MyAnalysis')
    meas.AddChannel(channel_a)

    # create the workspace containing the model and make sure it writes
    workspace = make_workspace(meas, silence=True)
    with TemporaryFile():
        workspace.Write()

    # sample lookup/removal round-trip
    assert_true(channel_a.GetSample(a.name) is not None)
    channel_a.RemoveSample(a.name)
    assert_true(channel_a.GetSample(a.name) is None)

    # channel lookup/removal round-trip
    assert_true(meas.GetChannel(channel_a.name) is not None)
    meas.RemoveChannel(channel_a.name)
    assert_true(meas.GetChannel(channel_a.name) is None)

    # test split_norm_shape: the norm should capture the scale factors
    # and the shape component should keep the nominal bin content
    nominal = Hist(1, 0, 1)
    nominal.FillRandom('gaus')
    hsys = HistoSys('shape', high=nominal * 1.5, low=nominal * 0.9)
    norm, shape = split_norm_shape(hsys, nominal)
    assert_equal(norm.low, 0.9)
    assert_equal(norm.high, 1.5)
    assert_equal(shape.high[1].value, nominal[1].value)
    assert_equal(shape.low[1].value, nominal[1].value)

Example 35

Project: rootpy
Source File: test_tree.py
View license
@with_setup(create_tree, cleanup)
def test_draw():
    """Exercise Tree.draw: 1D-5D draw expressions, drawing into
    user-supplied histograms, implicit histogram creation, and the
    error cases (mismatched dimensionality, names, and types).
    """
    with root_open(FILE_PATHS[0]) as f:
        tree = f.tree

        # expressions of increasing dimensionality (5D needs 'para')
        tree.draw('a_x')
        tree.draw('a_x:a_y')
        tree.draw('a_x:TMath::Exp(a_y)')
        tree.draw('a_x:a_y:a_z')
        tree.draw('a_x:a_y:a_z:b_x')
        tree.draw('a_x:a_y:a_z:b_x:b_y', options='para')

        h1 = Hist(10, -1, 2, name='h1')
        h2 = Hist2D(10, -1, 2, 10, -1, 2)
        h3 = Hist3D(10, -1, 2, 10, -1, 2, 10, -1, 2)

        # dimensionality does not match
        assert_raises(TypeError, tree.draw, 'a_x:a_y', hist=h1)

        # name does not match
        assert_raises(ValueError, tree.draw, 'a_x>>+something', hist=h1)

        # hist is not a TH1
        assert_raises(TypeError, tree.draw, 'a_x:a_y', hist=ROOT.TGraph())

        # name does match and is fine (just redundant)
        tree.draw('a_x>>h1', hist=h1)
        assert_equal(h1.Integral() > 0, True)
        h1.Reset()
        tree.draw('a_x>>+h1', hist=h1)
        assert_equal(h1.Integral() > 0, True)
        h1.Reset()

        # both binning and hist are specified
        assert_raises(ValueError, tree.draw, 'a_x>>+h1(10, 0, 1)', hist=h1)

        # drawing into an explicit hist of matching dimensionality fills it
        tree.draw('a_x', hist=h1)
        assert_equal(h1.Integral() > 0, True)
        tree.draw('a_x:a_y', hist=h2)
        assert_equal(h2.Integral() > 0, True)
        tree.draw('a_x:a_y:a_z', hist=h3)
        assert_equal(h3.Integral() > 0, True)

        # compound expressions (not just branch names) also fill the hist
        h3.Reset()
        tree.draw('a_x>0:a_y/2:a_z*2', hist=h3)
        assert_equal(h3.Integral() > 0, True)

        # create a histogram implicitly
        hist = tree.draw('a_x:a_y:a_z', create_hist=True)
        assert_equal(hist.Integral() > 0, True)

        # redirection syntax names the created histogram
        hist = tree.draw('a_x:a_y:a_z>>new_hist_1')
        assert_equal(hist.Integral() > 0, True)
        assert_equal(hist.name, 'new_hist_1')

        # create_hist=True is redundant here
        hist = tree.draw('a_x:a_y:a_z>>new_hist_2', create_hist=True)
        assert_equal(hist.Integral() > 0, True)
        assert_equal(hist.name, 'new_hist_2')

        # test list/tuple expression: all three forms must be equivalent
        hist1 = tree.draw('a_x:a_y:a_z', create_hist=True)
        hist2 = tree.draw(['a_x', 'a_y', 'a_z'], create_hist=True)
        hist3 = tree.draw(('a_x', 'a_y', 'a_z'), create_hist=True)
        assert_equal(hist1.Integral() > 0, True)
        assert_equal(hist2.Integral(), hist1.Integral())
        assert_equal(hist3.Integral(), hist1.Integral())
Example 36

Project: root_numpy
Source File: tests.py
View license
def test_vector():
    """Check root2array conversion of vector<T> and vector<vector<T>>
    branches: resulting dtypes, element dtypes, and a sample of values.

    BUG FIX: the deprecated aliases ``np.bool`` and ``np.object`` (removed
    in NumPy 1.24) are replaced with ``np.bool_`` / ``np.object_``; dtype
    comparisons are unchanged in meaning.
    """
    a = rnp.root2array(load('vector.root')).view(np.recarray)
    # every vector branch becomes an object-dtype column
    types = [
        ('v_i', 'O'),
        ('v_f', 'O'),
        ('v_F', 'O'),
        ('v_d', 'O'),
        ('v_l', 'O'),
        ('v_c', 'O'),
        ('v_b', 'O'),
        ('vv_i', 'O'),
        ('vv_f', 'O'),
        ('vv_F', 'O'),
        ('vv_d', 'O'),
        ('vv_l', 'O'),
        ('vv_c', 'O'),
        ('vv_b', 'O'),
    ]
    assert_equal(a.dtype, types)

    # element dtypes of the flat vectors
    assert_equal(a.v_i[0].dtype, np.int32)
    assert_equal(a.v_f[0].dtype, np.float32)
    assert_equal(a.v_F[0].dtype, np.float32)
    assert_equal(a.v_d[0].dtype, np.float64)
    assert_equal(a.v_l[0].dtype, np.int64)
    assert_equal(a.v_c[0].dtype, np.int8)
    assert_equal(a.v_b[0].dtype, np.bool_)

    # assert that wrapper array is object-dtype
    assert_equal(a.vv_i[0].dtype, np.object_)
    assert_equal(a.vv_f[0].dtype, np.object_)
    assert_equal(a.vv_F[0].dtype, np.object_)
    assert_equal(a.vv_d[0].dtype, np.object_)
    assert_equal(a.vv_l[0].dtype, np.object_)
    assert_equal(a.vv_c[0].dtype, np.object_)
    assert_equal(a.vv_b[0].dtype, np.object_)

    # inner vectors keep their element dtype
    assert_equal(a.vv_i[0][0].dtype, np.int32)
    assert_equal(a.vv_f[0][0].dtype, np.float32)
    assert_equal(a.vv_F[0][0].dtype, np.float32)
    assert_equal(a.vv_d[0][0].dtype, np.float64)
    assert_equal(a.vv_l[0][0].dtype, np.int64)
    assert_equal(a.vv_c[0][0].dtype, np.int8)
    assert_equal(a.vv_b[0][0].dtype, np.bool_)

    # check a few values
    assert_equal(a.v_i[0][0], 1)
    assert_equal(a.v_i[1][1], 3)
    assert_equal(a.v_i[-2][0], 9)
    assert_equal(a.v_i[-2][-1], 17)

    assert_equal(a.v_f[0][0], 2.0)
    assert_equal(a.v_f[1][1], 5.0)
    assert_equal(a.v_f[-2][0], 18.0)
    assert_equal(a.v_f[-2][-1], 26.0)

    assert_equal(a.v_F[0][0], 2.0)
    assert_equal(a.v_F[1][1], 5.0)
    assert_equal(a.v_F[-2][0], 18.0)
    assert_equal(a.v_F[-2][-1], 26.0)

    # more strict conditioning for numpy arrays (element-wise equality)
    def assert_equal_array(arr1, arr2):
        return assert_equal((arr1 == arr2).all(), True,
            "array mismatch: {0} != {1}".format(arr1, arr2))

    assert_equal_array(a.vv_i[0][0], np.array([1], dtype=np.int32) )
    assert_equal_array(a.vv_i[1][1], np.array([2, 3], dtype=np.int32) )
    assert_equal_array(a.vv_i[-2][0], np.array([9], dtype=np.int32) )
    assert_equal_array(a.vv_i[-2][-1],
                       np.array([ 9, 10, 11, 12, 13, 14, 15, 16, 17],
                                dtype=np.int32))

    assert_equal_array(a.vv_f[0][0], np.array([ 2.], dtype=np.float32) )
    assert_equal_array(a.vv_f[1][1], np.array([ 4.,  5.], dtype=np.float32) )
    assert_equal_array(a.vv_f[-2][0], np.array([ 18.], dtype=np.float32) )
    assert_equal_array(a.vv_f[-2][-1],
                       np.array([ 18.,  19.,  20.,  21.,  22.,
                                  23.,  24.,  25.,  26.],
                                dtype=np.float32))

    assert_equal_array(a.vv_F[0][0], np.array([ 2.], dtype=np.float32) )
    assert_equal_array(a.vv_F[1][1], np.array([ 4.,  5.], dtype=np.float32) )
    assert_equal_array(a.vv_F[-2][0], np.array([ 18.], dtype=np.float32) )
    assert_equal_array(a.vv_F[-2][-1],
                       np.array([ 18.,  19.,  20.,  21.,  22.,
                                  23.,  24.,  25.,  26.],
                                dtype=np.float32))

Example 37

Project: root_numpy
Source File: tests.py
View license
def test_stretch():
    """Check rnp.stretch over variable-length and fixed-length array
    columns: dtypes, lengths, return_indices, and error cases.

    BUG FIX: the deprecated scalar aliases ``np.int`` and ``np.float``
    (removed in NumPy 1.24) are replaced with ``np.int_`` / ``np.float64``,
    which are the dtypes the old aliases resolved to.
    """
    # one scalar, three variable-length and three fixed-length columns
    arr = np.empty(5,
        dtype=[
            ('scalar', np.int_),
            ('vl1', 'O'),
            ('vl2', 'O'),
            ('vl3', 'O'),
            ('fl1', np.int_, (2, 2)),
            ('fl2', np.float64, (2, 3)),
            ('fl3', np.double, (3, 2))])

    for i in range(arr.shape[0]):
        # vl1/vl2 grow with the row index; vl3 has constant length 2
        vl1 = np.array(range(i + 1), dtype=np.int_)
        vl2 = np.array(range(i + 2), dtype=np.float64) * 2
        vl3 = np.array(range(2), dtype=np.double) * 3
        fl1 = np.array(range(4), dtype=np.int_).reshape((2, 2))
        fl2 = np.array(range(6), dtype=np.float64).reshape((2, 3))
        fl3 = np.array(range(6), dtype=np.double).reshape((3, 2))
        arr[i] = (i, vl1, vl2, vl3, fl1, fl2, fl3)

    # no array columns included
    assert_raises(RuntimeError, rnp.stretch, arr, ['scalar',])

    # lengths don't match
    assert_raises(ValueError, rnp.stretch, arr, ['scalar', 'vl1', 'vl2',])
    assert_raises(ValueError, rnp.stretch, arr, ['scalar', 'fl1', 'fl3',])
    assert_raises(ValueError, rnp.stretch, arr)

    # variable-length stretch: scalar is repeated per inner element
    stretched = rnp.stretch(arr, ['scalar', 'vl1',])
    assert_equal(stretched.dtype,
                 [('scalar', np.int_),
                  ('vl1', np.int_)])
    assert_equal(stretched.shape[0], 15)
    assert_array_equal(
        stretched['scalar'],
        np.repeat(arr['scalar'], np.vectorize(len)(arr['vl1'])))

    # fixed-length stretch: the first array axis is unrolled
    stretched = rnp.stretch(arr, ['scalar', 'vl3', 'fl1', 'fl2',])
    assert_equal(stretched.dtype,
                 [('scalar', np.int_),
                  ('vl3', np.double),
                  ('fl1', np.int_, (2,)),
                  ('fl2', np.float64, (3,))])
    assert_equal(stretched.shape[0], 10)
    assert_array_equal(
        stretched['scalar'], np.repeat(arr['scalar'], 2))

    # optional argument return_indices maps each output row to its
    # position within the original inner array
    stretched, idx = rnp.stretch(arr, ['scalar', 'vl1'], return_indices=True)
    assert_equal(stretched.shape[0], idx.shape[0])

    from_arr = list(map(lambda x: x['vl1'][0], arr))
    from_stretched = stretched[idx == 0]['vl1']
    assert_array_equal(from_arr, from_stretched)

    # stretch single field and produce unstructured output
    stretched = rnp.stretch(arr, 'vl1')
    assert_equal(stretched.dtype, np.int_)

Example 38

Project: root_numpy
Source File: tests.py
View license
def test_rec2array():
    """Check rnp.rec2array conversion of structured arrays: scalar fields,
    field subsets, single-field simplification, array fields, and the
    length-mismatch error.

    BUG FIX: the deprecated alias ``np.bool`` (removed in NumPy 1.24) is
    replaced with ``np.bool_``; the dtype is unchanged.
    """
    # scalar fields
    a = np.array([
        (12345, 2., 2.1, True),
        (3, 4., 4.2, False),],
        dtype=[
            ('x', np.int32),
            ('y', np.float32),
            ('z', np.float64),
            ('w', np.bool_)])

    arr = rnp.rec2array(a)
    assert_array_equal(arr,
        np.array([
            [12345, 2, 2.1, 1],
            [3, 4, 4.2, 0]]))

    # selecting a subset of fields preserves their order
    arr = rnp.rec2array(a, fields=['x', 'y'])
    assert_array_equal(arr,
        np.array([
            [12345, 2],
            [3, 4]]))

    # single scalar field: a list keeps the trailing axis...
    arr = rnp.rec2array(a, fields=['x'])
    assert_array_equal(arr, np.array([[12345], [3]], dtype=np.int32))
    # ...while a bare string drops it (simplified)
    arr = rnp.rec2array(a, fields='x')
    assert_array_equal(arr, np.array([12345, 3], dtype=np.int32))

    # case where array has single record
    assert_equal(rnp.rec2array(a[:1]).shape, (1, 4))
    assert_equal(rnp.rec2array(a[:1], fields=['x']).shape, (1, 1))
    assert_equal(rnp.rec2array(a[:1], fields='x').shape, (1,))

    # array fields are stacked along a new trailing axis
    a = np.array([
        ([1, 2, 3], [4.5, 6, 9.5],),
        ([4, 5, 6], [3.3, 7.5, 8.4],),],
        dtype=[
            ('x', np.int32, (3,)),
            ('y', np.float32, (3,))])

    arr = rnp.rec2array(a)
    assert_array_almost_equal(arr,
        np.array([[[1, 4.5],
                   [2, 6],
                   [3, 9.5]],
                  [[4, 3.3],
                   [5, 7.5],
                   [6, 8.4]]]))

    # array fields with mismatched lengths cannot be stacked
    a = np.array([
        ([1, 2], [4.5, 6, 9.5],),
        ([4, 5], [3.3, 7.5, 8.4],),],
        dtype=[
            ('x', np.int32, (2,)),
            ('y', np.float32, (3,))])
    assert_raises(ValueError, rnp.rec2array, a)

    # single array field
    arr = rnp.rec2array(a, fields=['y'])
    assert_array_almost_equal(arr,
        np.array([[[4.5], [6], [9.5]],
                  [[3.3], [7.5], [8.4]]]))
    # single array field simplified
    arr = rnp.rec2array(a, fields='y')
    assert_array_almost_equal(arr,
        np.array([[4.5, 6, 9.5],
                  [3.3, 7.5, 8.4]]))

    # case where array has single record
    assert_equal(rnp.rec2array(a[:1], fields=['y']).shape, (1, 3, 1))
    assert_equal(rnp.rec2array(a[:1], fields='y').shape, (1, 3))

Example 39

Project: synapsePythonClient
Source File: test_wikis.py
View license
def test_wikiAttachment():
    """Integration test for Synapse wikis with attachments: create a wiki
    with a file handle and an attachment, add a sub-wiki, round-trip both
    through getWiki, update markdown/title, inspect headers and
    attachments, and verify deletion.

    Relies on module-level `syn`, `project`, and `schedule_for_cleanup`
    fixtures (defined elsewhere in this file).
    """
    # Upload a file to be attached to a Wiki
    filename = utils.make_bogus_data_file()
    attachname = utils.make_bogus_data_file()
    schedule_for_cleanup(filename)
    schedule_for_cleanup(attachname)
    fileHandle = syn._uploadToFileHandleService(filename)

    # Create and store a Wiki 
    # The constructor should accept both file handles and file paths
    md = """
    This is a test wiki
    =======================

    Blabber jabber blah blah boo.
    """
    wiki = Wiki(owner=project, title='A Test Wiki', markdown=md, 
                fileHandles=[fileHandle['id']], 
                attachments=[attachname])
    wiki = syn.store(wiki)
    
    # Create a Wiki sub-page
    subwiki = Wiki(owner=project, title='A sub-wiki', 
                   markdown='nothing', parentWikiId=wiki.id)
    subwiki = syn.store(subwiki)
    
    # Retrieve the root Wiki from Synapse
    wiki2 = syn.getWiki(project)
    ## due to the new wiki api, we'll get back some new properties,
    ## namely markdownFileHandleId and markdown_path, so only compare
    ## properties that are in the first object
    for property_name in wiki:
        assert_equal(wiki[property_name], wiki2[property_name])

    # Retrieve the sub Wiki from Synapse (same property-subset comparison)
    wiki2 = syn.getWiki(project, subpageId=subwiki.id)
    for property_name in wiki:
        assert_equal(subwiki[property_name], wiki2[property_name])

    # Try making an update: new title, appended markdown
    wiki['title'] = 'A New Title'
    wiki['markdown'] = wiki['markdown'] + "\nNew stuff here!!!\n"
    wiki = syn.store(wiki)
    wiki = syn.getWiki(project)
    assert wiki['title'] == 'A New Title'
    assert wiki['markdown'].endswith("\nNew stuff here!!!\n")

    # Check the Wiki's metadata: root + sub-page, order not guaranteed
    headers = syn.getWikiHeaders(project)
    assert len(headers) == 2
    assert headers[0]['title'] in (wiki['title'], subwiki['title'])

    # Both uploaded files should appear among the attachments
    file_handles = syn.getWikiAttachments(wiki)
    file_names = [fh['fileName'] for fh in file_handles]
    assert all( os.path.basename(fn) in file_names for fn in [filename, attachname] )

    # # Retrieve the file attachment
    # tmpdir = tempfile.mkdtemp()
    # file_props = syn._downloadWikiAttachment(project, wiki, 
    #                         os.path.basename(filename), dest_dir=tmpdir)
    # path = file_props['path']
    # assert os.path.exists(path)
    # assert filecmp.cmp(original_path, path)

    # Clean up; deleting the wikis should make getWiki fail afterwards
    # syn._deleteFileHandle(fileHandle)
    syn.delete(subwiki)
    syn.delete(wiki)
    assert_raises(SynapseHTTPError, syn.getWiki, project)

Example 40

Project: pybossa
Source File: test_category_api.py
View license
    @with_context
    def test_category_post(self):
        """Test API Category creation and auth"""
        admin = UserFactory.create()
        user = UserFactory.create()
        name = u'Category'
        category = dict(
            name=name,
            short_name='category',
            description=u'description')
        data = json.dumps(category)
        # no api-key
        url = '/api/category'
        res = self.app.post(url, data=data)
        err = json.loads(res.data)
        err_msg = 'Should not be allowed to create'
        assert res.status_code == 401, err_msg
        assert err['action'] == 'POST', err_msg
        assert err['exception_cls'] == 'Unauthorized', err_msg

        # now a real user but not admin
        res = self.app.post(url + '?api_key=' + user.api_key, data=data)
        err = json.loads(res.data)
        err_msg = 'Should not be allowed to create'
        assert res.status_code == 403, err_msg
        assert err['action'] == 'POST', err_msg
        assert err['exception_cls'] == 'Forbidden', err_msg

        # now as an admin
        res = self.app.post(url + '?api_key=' + admin.api_key,
                            data=data)
        err = json.loads(res.data)
        err_msg = 'Admin should be able to create a Category'
        assert res.status_code == 200, err_msg
        cat = project_repo.get_category_by(short_name=category['short_name'])
        assert err['id'] == cat.id, err_msg
        assert err['name'] == category['name'], err_msg
        assert err['short_name'] == category['short_name'], err_msg
        assert err['description'] == category['description'], err_msg

        # test re-create should fail
        res = self.app.post(url + '?api_key=' + admin.api_key,
                            data=data)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'POST', err
        assert err['exception_cls'] == "DBIntegrityError", err

        # test create with non-allowed fields should fail
        data = dict(name='fail', short_name='fail', wrong=15)
        res = self.app.post(url + '?api_key=' + admin.api_key,
                            data=data)
        err = json.loads(res.data)
        err_msg = "ValueError exception should be raised"
        assert res.status_code == 415, err
        assert err['action'] == 'POST', err
        assert err['status'] == 'failed', err
        assert err['exception_cls'] == "ValueError", err_msg
        # Now with a JSON object but not valid
        data = json.dumps(data)
        res = self.app.post(url + '?api_key=' + user.api_key,
                            data=data)
        err = json.loads(res.data)
        err_msg = "TypeError exception should be raised"
        assert err['action'] == 'POST', err_msg
        assert err['status'] == 'failed', err_msg
        assert err['exception_cls'] == "TypeError", err_msg
        assert res.status_code == 415, err_msg

        # test update
        data = {'name': 'My New Title'}
        datajson = json.dumps(data)
        ## anonymous
        res = self.app.put(url + '/%s' % cat.id,
                           data=data)
        error_msg = 'Anonymous should not be allowed to update'
        assert_equal(res.status, '401 UNAUTHORIZED', error_msg)
        error = json.loads(res.data)
        assert error['status'] == 'failed', error
        assert error['action'] == 'PUT', error
        assert error['exception_cls'] == 'Unauthorized', error

        ### real user but not allowed as not admin!
        url = '/api/category/%s?api_key=%s' % (cat.id, user.api_key)
        res = self.app.put(url, data=datajson)
        error_msg = 'Should not be able to update projects of others'
        assert_equal(res.status, '403 FORBIDDEN', error_msg)
        error = json.loads(res.data)
        assert error['status'] == 'failed', error
        assert error['action'] == 'PUT', error
        assert error['exception_cls'] == 'Forbidden', error

        # Now as an admin
        res = self.app.put('/api/category/%s?api_key=%s' % (cat.id, admin.api_key),
                           data=datajson)
        assert_equal(res.status, '200 OK', res.data)
        out2 = project_repo.get_category(cat.id)
        assert_equal(out2.name, data['name'])
        out = json.loads(res.data)
        assert out.get('status') is None, error
        assert out.get('id') == cat.id, error

        # With fake data
        data['algo'] = 13
        datajson = json.dumps(data)
        res = self.app.put('/api/category/%s?api_key=%s' % (cat.id, admin.api_key),
                           data=datajson)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'TypeError', err

        # With not JSON data
        datajson = data
        res = self.app.put('/api/category/%s?api_key=%s' % (cat.id, admin.api_key),
                           data=datajson)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'ValueError', err

        # With wrong args in the URL
        data = dict(
            name='Category3',
            short_name='category3',
            description=u'description3')

        datajson = json.dumps(data)
        res = self.app.put('/api/category/%s?api_key=%s&search=select1' % (cat.id, admin.api_key),
                           data=datajson)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'AttributeError', err

        # test delete
        ## anonymous
        res = self.app.delete(url + '/%s' % cat.id, data=data)
        error_msg = 'Anonymous should not be allowed to delete'
        assert_equal(res.status, '401 UNAUTHORIZED', error_msg)
        error = json.loads(res.data)
        assert error['status'] == 'failed', error
        assert error['action'] == 'DELETE', error
        assert error['target'] == 'category', error
        ### real user but not admin
        url = '/api/category/%s?api_key=%s' % (cat.id, user.api_key)
        res = self.app.delete(url, data=datajson)
        error_msg = 'Should not be able to delete apps of others'
        assert_equal(res.status, '403 FORBIDDEN', error_msg)
        error = json.loads(res.data)
        assert error['status'] == 'failed', error
        assert error['action'] == 'DELETE', error
        assert error['target'] == 'category', error

        # As admin
        url = '/api/category/%s?api_key=%s' % (cat.id, admin.api_key)
        res = self.app.delete(url, data=datajson)

        assert_equal(res.status, '204 NO CONTENT', res.data)

        # delete a category that does not exist
        url = '/api/category/5000?api_key=%s' % admin.api_key
        res = self.app.delete(url, data=datajson)
        error = json.loads(res.data)
        assert res.status_code == 404, error
        assert error['status'] == 'failed', error
        assert error['action'] == 'DELETE', error
        assert error['target'] == 'category', error
        assert error['exception_cls'] == 'NotFound', error

        # delete a category that does not exist
        url = '/api/category/?api_key=%s' % admin.api_key
        res = self.app.delete(url, data=datajson)
        assert res.status_code == 404, error

Example 41

Project: pybossa
Source File: test_project_api.py
View license
    @with_context
    def test_project_post(self):
        """Test API project creation and auth.

        Exercises POST/PUT/DELETE on /api/project: anonymous requests get
        401, non-owners get 403, owners and header-authenticated users
        succeed, and malformed payloads (duplicates, unknown fields,
        non-JSON bodies, empty required fields, stray query args) produce
        the expected error envelopes.
        """
        users = UserFactory.create_batch(2)
        CategoryFactory.create()
        name = u'XXXX Project'
        data = dict(
            name=name,
            short_name='xxxx-project',
            description='description',
            owner_id=1,
            long_description=u'Long Description\n================')
        data = json.dumps(data)
        # no api-key
        res = self.app.post('/api/project', data=data)
        assert_equal(res.status, '401 UNAUTHORIZED',
                     'Should not be allowed to create')
        # now a real user
        res = self.app.post('/api/project?api_key=' + users[1].api_key,
                            data=data)
        out = project_repo.get_by(name=name)
        assert out, out
        # NOTE: pass `out` as the assertion message; the previous
        # `assert_equal(...), out` built a discarded tuple instead.
        assert_equal(out.short_name, 'xxxx-project', out)
        assert_equal(out.owner.name, 'user2')
        id_ = out.id

        # now a real user with headers auth
        headers = [('Authorization', users[1].api_key)]
        new_project = dict(
            name=name + '2',
            short_name='xxxx-project2',
            description='description2',
            owner_id=1,
            long_description=u'Long Description\n================')
        new_project = json.dumps(new_project)
        res = self.app.post('/api/project', headers=headers,
                            data=new_project)
        out = project_repo.get_by(name=name + '2')
        assert out, out
        assert_equal(out.short_name, 'xxxx-project2', out)
        assert_equal(out.owner.name, 'user2')
        ## Test that a default category is assigned to the project
        assert out.category_id, "No category assigned to project"
        id_ = out.id

        # test re-create should fail
        res = self.app.post('/api/project?api_key=' + users[1].api_key,
                            data=data)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'POST', err
        assert err['exception_cls'] == "DBIntegrityError", err

        # test create with non-allowed fields should fail
        data = dict(name='fail', short_name='fail', link='hateoas', wrong=15)
        res = self.app.post('/api/project?api_key=' + users[1].api_key,
                            data=data)
        err = json.loads(res.data)
        err_msg = "ValueError exception should be raised"
        assert res.status_code == 415, err
        assert err['action'] == 'POST', err
        assert err['status'] == 'failed', err
        assert err['exception_cls'] == "ValueError", err_msg
        # Now with a JSON object but not valid
        data = json.dumps(data)
        res = self.app.post('/api/project?api_key=' + users[1].api_key,
                            data=data)
        err = json.loads(res.data)
        err_msg = "TypeError exception should be raised"
        assert err['action'] == 'POST', err_msg
        assert err['status'] == 'failed', err_msg
        assert err['exception_cls'] == "TypeError", err_msg
        assert res.status_code == 415, err_msg

        # test update
        data = {'name': 'My New Title', 'links': 'hateoas'}
        datajson = json.dumps(data)
        ## anonymous
        res = self.app.put('/api/project/%s' % id_, data=data)
        error_msg = 'Anonymous should not be allowed to update'
        assert_equal(res.status, '401 UNAUTHORIZED', error_msg)
        error = json.loads(res.data)
        assert error['status'] == 'failed', error
        assert error['action'] == 'PUT', error
        assert error['exception_cls'] == 'Unauthorized', error

        ### real user but not allowed as not owner!
        non_owner = UserFactory.create()
        url = '/api/project/%s?api_key=%s' % (id_, non_owner.api_key)
        res = self.app.put(url, data=datajson)
        error_msg = 'Should not be able to update projects of others'
        assert_equal(res.status, '403 FORBIDDEN', error_msg)
        error = json.loads(res.data)
        assert error['status'] == 'failed', error
        assert error['action'] == 'PUT', error
        assert error['exception_cls'] == 'Forbidden', error

        res = self.app.put('/api/project/%s?api_key=%s' % (id_, users[1].api_key),
                           data=datajson)

        # with hateoas links
        assert_equal(res.status, '200 OK', res.data)
        out2 = project_repo.get(id_)
        assert_equal(out2.name, data['name'])
        out = json.loads(res.data)
        assert out.get('status') is None, error
        assert out.get('id') == id_, error

        # without hateoas links
        del data['links']
        newdata = json.dumps(data)
        res = self.app.put('/api/project/%s?api_key=%s' % (id_, users[1].api_key),
                           data=newdata)

        assert_equal(res.status, '200 OK', res.data)
        out2 = project_repo.get(id_)
        assert_equal(out2.name, data['name'])
        out = json.loads(res.data)
        assert out.get('status') is None, error
        assert out.get('id') == id_, error

        # With wrong id
        res = self.app.put('/api/project/5000?api_key=%s' % users[1].api_key,
                           data=datajson)
        assert_equal(res.status, '404 NOT FOUND', res.data)
        error = json.loads(res.data)
        assert error['status'] == 'failed', error
        assert error['action'] == 'PUT', error
        assert error['exception_cls'] == 'NotFound', error

        # With fake data
        data['algo'] = 13
        datajson = json.dumps(data)
        res = self.app.put('/api/project/%s?api_key=%s' % (id_, users[1].api_key),
                           data=datajson)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'TypeError', err

        # With empty fields
        data.pop('algo')
        data['name'] = None
        datajson = json.dumps(data)
        res = self.app.put('/api/project/%s?api_key=%s' % (id_, users[1].api_key),
                           data=datajson)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'DBIntegrityError', err

        data['name'] = ''
        datajson = json.dumps(data)
        res = self.app.put('/api/project/%s?api_key=%s' % (id_, users[1].api_key),
                           data=datajson)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'DBIntegrityError', err

        data['name'] = 'something'
        data['short_name'] = ''
        datajson = json.dumps(data)
        res = self.app.put('/api/project/%s?api_key=%s' % (id_, users[1].api_key),
                           data=datajson)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'DBIntegrityError', err

        # With not JSON data
        datajson = data
        res = self.app.put('/api/project/%s?api_key=%s' % (id_, users[1].api_key),
                           data=datajson)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'ValueError', err

        # With wrong args in the URL
        data = dict(
            name=name,
            short_name='xxxx-project',
            long_description=u'Long Description\n================')

        datajson = json.dumps(data)
        res = self.app.put('/api/project/%s?api_key=%s&search=select1' % (id_, users[1].api_key),
                           data=datajson)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'AttributeError', err

        # test delete
        ## anonymous
        res = self.app.delete('/api/project/%s' % id_, data=data)
        error_msg = 'Anonymous should not be allowed to delete'
        assert_equal(res.status, '401 UNAUTHORIZED', error_msg)
        error = json.loads(res.data)
        assert error['status'] == 'failed', error
        assert error['action'] == 'DELETE', error
        assert error['target'] == 'project', error
        ### real user but not allowed as not owner!
        url = '/api/project/%s?api_key=%s' % (id_, non_owner.api_key)
        res = self.app.delete(url, data=datajson)
        error_msg = 'Should not be able to delete projects of others'
        assert_equal(res.status, '403 FORBIDDEN', error_msg)
        error = json.loads(res.data)
        assert error['status'] == 'failed', error
        assert error['action'] == 'DELETE', error
        assert error['target'] == 'project', error

        url = '/api/project/%s?api_key=%s' % (id_, users[1].api_key)
        res = self.app.delete(url, data=datajson)

        assert_equal(res.status, '204 NO CONTENT', res.data)

        # delete a project that does not exist
        url = '/api/project/5000?api_key=%s' % users[1].api_key
        res = self.app.delete(url, data=datajson)
        error = json.loads(res.data)
        assert res.status_code == 404, error
        assert error['status'] == 'failed', error
        assert error['action'] == 'DELETE', error
        assert error['target'] == 'project', error
        assert error['exception_cls'] == 'NotFound', error

        # delete a project that does not exist (no id in the URL)
        url = '/api/project/?api_key=%s' % users[1].api_key
        res = self.app.delete(url, data=datajson)
        assert res.status_code == 404, error

Example 42

Project: pybossa
Source File: test_taskrun_api.py
View license
    @with_context
    def test_taskrun_update(self):
        """Test TaskRun API update works.

        TaskRuns are effectively immutable through the API: anonymous
        taskruns can never be updated (401), unrelated users get 403, and
        even the owner and the admin receive 403 on PUT. Non-JSON bodies,
        unknown query args and extra fields are also rejected.
        """
        admin = UserFactory.create()
        owner = UserFactory.create()
        non_owner = UserFactory.create()
        project = ProjectFactory.create(owner=owner)
        task = TaskFactory.create(project=project)
        anonymous_taskrun = AnonymousTaskRunFactory.create(task=task, info='my task result')
        user_taskrun = TaskRunFactory.create(task=task, user=owner, info='my task result')

        task_run = dict(project_id=project.id, task_id=task.id, info='another result')
        datajson = json.dumps(task_run)

        # anonymous user
        # No one can update anonymous TaskRuns
        url = '/api/taskrun/%s' % anonymous_taskrun.id
        res = self.app.put(url, data=datajson)
        assert anonymous_taskrun, anonymous_taskrun
        assert_equal(anonymous_taskrun.user, None)
        error_msg = 'Should not be allowed to update'
        assert_equal(res.status, '401 UNAUTHORIZED', error_msg)

        # real user but not allowed as not owner!
        url = '/api/taskrun/%s?api_key=%s' % (user_taskrun.id, non_owner.api_key)
        res = self.app.put(url, data=datajson)
        error_msg = 'Should not be able to update TaskRuns of others'
        assert_equal(res.status, '403 FORBIDDEN', error_msg)

        # real user (owner): fetch the taskrun first so the payload can
        # carry the hateoas link/links fields the API returns.
        url = '/api/taskrun/%s?api_key=%s' % (user_taskrun.id, owner.api_key)
        out = self.app.get(url, follow_redirects=True)
        task = json.loads(out.data)
        datajson = json.loads(datajson)
        datajson['link'] = task['link']
        datajson['links'] = task['links']
        datajson = json.dumps(datajson)
        url = '/api/taskrun/%s?api_key=%s' % (user_taskrun.id, owner.api_key)
        res = self.app.put(url, data=datajson)
        out = json.loads(res.data)
        # Even the owner cannot update an existing taskrun.
        assert_equal(res.status, '403 FORBIDDEN', res.data)

        # PUT with not JSON data
        res = self.app.put(url, data=task_run)
        err = json.loads(res.data)
        assert res.status_code == 403, err
        assert err['status'] == 'failed', err
        assert err['target'] == 'taskrun', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'Forbidden', err

        # PUT with not allowed args
        res = self.app.put(url + "&foo=bar", data=json.dumps(task_run))
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['target'] == 'taskrun', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'AttributeError', err

        # PUT with fake data
        task_run['wrongfield'] = 13
        res = self.app.put(url, data=json.dumps(task_run))
        err = json.loads(res.data)
        assert res.status_code == 403, err
        assert err['status'] == 'failed', err
        assert err['target'] == 'taskrun', err
        assert err['action'] == 'PUT', err
        assert err['exception_cls'] == 'Forbidden', err
        task_run.pop('wrongfield')

        # root user: admins are not allowed to modify taskruns either.
        url = '/api/taskrun/%s?api_key=%s' % (user_taskrun.id, admin.api_key)
        res = self.app.put(url, data=datajson)
        assert_equal(res.status, '403 FORBIDDEN', res.data)

Example 43

Project: pybossa
Source File: test_task_api.py
View license
    @with_context
    def test_task_post(self):
        """Test API Task creation.

        Anonymous users get 401, non-owners get 403, while the project
        owner and the admin can create tasks. Non-JSON bodies, unknown
        query args and extra fields are rejected with 415 and the
        corresponding exception class in the error envelope.
        """
        admin = UserFactory.create()
        user = UserFactory.create()
        non_owner = UserFactory.create()
        project = ProjectFactory.create(owner=user)
        data = dict(project_id=project.id, info='my task data')
        root_data = dict(project_id=project.id, info='my root task data')

        # anonymous user
        # no api-key
        res = self.app.post('/api/task', data=json.dumps(data))
        error_msg = 'Should not be allowed to create'
        assert_equal(res.status, '401 UNAUTHORIZED', error_msg)

        ### real user but not allowed as not owner!
        res = self.app.post('/api/task?api_key=' + non_owner.api_key,
                            data=json.dumps(data))

        error_msg = 'Should not be able to post tasks for projects of others'
        assert_equal(res.status, '403 FORBIDDEN', error_msg)

        # now a real user
        res = self.app.post('/api/task?api_key=' + user.api_key,
                            data=json.dumps(data))
        assert res.data, res
        datajson = json.loads(res.data)
        out = task_repo.get_task(datajson['id'])
        assert out, out
        # NOTE: pass `out` as the assertion message; the previous
        # `assert_equal(...), out` built a discarded tuple instead.
        assert_equal(out.info, 'my task data', out)
        assert_equal(out.project_id, project.id)

        # now the root user
        res = self.app.post('/api/task?api_key=' + admin.api_key,
                            data=json.dumps(root_data))
        assert res.data, res
        datajson = json.loads(res.data)
        out = task_repo.get_task(datajson['id'])
        assert out, out
        assert_equal(out.info, 'my root task data', out)
        assert_equal(out.project_id, project.id)

        # POST with not JSON data
        url = '/api/task?api_key=%s' % user.api_key
        res = self.app.post(url, data=data)
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['target'] == 'task', err
        assert err['action'] == 'POST', err
        assert err['exception_cls'] == 'ValueError', err

        # POST with not allowed args
        res = self.app.post(url + '&foo=bar', data=json.dumps(data))
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['target'] == 'task', err
        assert err['action'] == 'POST', err
        assert err['exception_cls'] == 'AttributeError', err

        # POST with fake data
        data['wrongfield'] = 13
        res = self.app.post(url, data=json.dumps(data))
        err = json.loads(res.data)
        assert res.status_code == 415, err
        assert err['status'] == 'failed', err
        assert err['target'] == 'task', err
        assert err['action'] == 'POST', err
        assert err['exception_cls'] == 'TypeError', err

Example 44

Project: scikit-beam
Source File: test_roi.py
View license
def test_rings():
    """Smoke-test roi.ring_edges / roi.rings for uniform, scalar, per-ring
    and zero spacing, then verify the ring count, monotonically growing
    ring areas, and the ValueError paths for inconsistent arguments."""
    centre = (100., 100.)
    shape = (200, 205)
    q_first = 10.
    q_width = 5.
    n_rings = 7  # number of Q rings
    uniform_gap = 5.0
    per_ring_gaps = [2.5, 3.0, 5.8]

    def _pixels_per_roi(labels):
        # Pixel count per labelled ring (background label 0 dropped).
        mask, _ = roi.extract_label_indices(labels)
        return np.bincount(mask, minlength=(np.max(mask) + 1))[1:]

    # uniform spacing between rings
    edges = roi.ring_edges(q_first, width=q_width, spacing=uniform_gap,
                           num_rings=n_rings)
    print("edges there is same spacing between rings ", edges)
    label_array = roi.rings(edges, centre, shape)
    print("label_array there is same spacing between rings", label_array)
    _pixels_per_roi(label_array)

    # uniform spacing given as a scalar literal
    edges = roi.ring_edges(q_first, width=q_width, spacing=2.5,
                           num_rings=n_rings)
    print("edges there is same spacing between rings ", edges)
    label_array = roi.rings(edges, centre, shape)
    print("label_array there is same spacing between rings", label_array)
    _pixels_per_roi(label_array)

    # different spacing between consecutive rings
    edges = roi.ring_edges(q_first, width=q_width, spacing=per_ring_gaps,
                           num_rings=4)
    print("edges when there is different spacing between rings", edges)
    label_array = roi.rings(edges, centre, shape)
    print("label_array there is different spacing between rings", label_array)
    _pixels_per_roi(label_array)

    # no spacing between rings at all
    edges = roi.ring_edges(q_first, width=q_width, num_rings=n_rings)
    print("edges", edges)
    label_array = roi.rings(edges, centre, shape)
    print("label_array", label_array)
    _pixels_per_roi(label_array)

    # Did we draw the right number of rings?
    print(np.unique(label_array))
    actual_num_rings = len(np.unique(label_array)) - 1
    assert_equal(actual_num_rings, n_rings)

    # Does each ring have more pixels than the last, being larger?
    ring_areas = np.bincount(label_array.ravel())[1:]
    area_comparison = np.diff(ring_areas)
    print(area_comparison)
    assert_true(np.all(area_comparison > 0))

    # Test various illegal inputs
    assert_raises(ValueError,
                  lambda: roi.ring_edges(1, 2))  # need num_rings
    # width incompatible with num_rings
    assert_raises(ValueError,
                  lambda: roi.ring_edges(1, [1, 2, 3], num_rings=2))
    # too few spacings
    assert_raises(ValueError,
                  lambda: roi.ring_edges(1, [1, 2, 3], [1]))
    # too many spacings
    assert_raises(ValueError,
                  lambda: roi.ring_edges(1, [1, 2, 3], [1, 2, 3]))
    # num_rings conflicts with width, spacing
    assert_raises(ValueError,
                  lambda: roi.ring_edges(1, [1, 2, 3], [1, 2], 5))
    bad_edges = [[5, 7], [1, 2]]
    assert_raises(ValueError, roi.rings, bad_edges, center=(4, 4),
                  shape=(20, 20))

Example 45

Project: scikit-beam
Source File: test_utils.py
View license
def test_subtract_reference_images():
    """Exercise core.subtract_reference_images with list/array inputs and
    verify both the number of returned images and the subtraction sums.

    Builds 10 constant images (image k is all k's) and a randomized
    dark/reference flag sequence that never has two dark frames in a row.
    """
    num_images = 10
    img_dims = 200
    ones = np.ones((img_dims, img_dims))
    # image k has every pixel equal to k
    img_lst = [ones * _ for _ in range(num_images)]
    img_arr = np.asarray(img_lst)
    is_dark_lst = [True]
    is_dark = False
    was_dark = True
    # Randomly mark frames as dark, but never two dark frames in a row.
    while len(is_dark_lst) < num_images:
        if was_dark:
            is_dark = False
        else:
            is_dark = np.random.rand() > 0.5
        was_dark = is_dark
        is_dark_lst.append(is_dark)

    is_dark_arr = np.asarray(is_dark_lst)
    # make sure that a list of 2d images can be passed in
    core.subtract_reference_images(imgs=img_lst, is_reference=is_dark_arr)
    # make sure that the reference arr can actually be a list
    core.subtract_reference_images(imgs=img_arr, is_reference=is_dark_lst)
    # make sure that both input arrays can actually be lists
    core.subtract_reference_images(imgs=img_arr, is_reference=is_dark_lst)

    # test that the number of returned images is equal to the expected number
    # of returned images
    num_expected_images = is_dark_lst.count(False)
    # subtract an additional value if the last image is a reference image
    # num_expected_images -= is_dark_lst[len(is_dark_lst)-1]
    subtracted = core.subtract_reference_images(img_lst, is_dark_lst)
    try:
        assert_equal(num_expected_images, len(subtracted))
    except AssertionError as ae:
        # Dump the randomized inputs so a failure is reproducible by eye.
        print('is_dark_lst: {0}'.format(is_dark_lst))
        print('num_expected_images: {0}'.format(num_expected_images))
        print('len(subtracted): {0}'.format(len(subtracted)))
        six.reraise(AssertionError, ae, sys.exc_info()[2])
    # test that the image subtraction values are behaving as expected
    img_sum_lst = [img_dims * img_dims * val for val in range(num_images)]
    expected_return_val = 0
    dark_val = 0
    # Each non-dark frame contributes (its sum - the last dark frame's sum).
    for idx, (is_dark, img_val) in enumerate(zip(is_dark_lst, img_sum_lst)):
        if is_dark:
            dark_val = img_val
        else:
            expected_return_val = expected_return_val - dark_val + img_val
    # test that the image subtraction was actually processed correctly
    return_sum = sum(subtracted)
    # Collapse nested sums until a scalar remains.
    try:
        while True:
            return_sum = sum(return_sum)
    except TypeError:
        # thrown when return_sum is a single number
        pass

    try:
        assert_equal(expected_return_val, return_sum)
    except AssertionError as ae:
        print('is_dark_lst: {0}'.format(is_dark_lst))
        print('expected_return_val: {0}'.format(expected_return_val))
        print('return_sum: {0}'.format(return_sum))
        six.reraise(AssertionError, ae, sys.exc_info()[2])

Example 46

Project: scikit-image
Source File: test_testing.py
View license
def test_skipper():
    """doctest_skip_parser rewrites '# skip if <cond>' doctest markers to
    '# doctest: +SKIP' (or strips them) according to the current values of
    the HAVE_* globals, and raises NameError for an undefined flag."""
    def func():
        pass

    class klass():

        def __init__(self):
            self.me = "I think, therefore..."

    docstring = \
        """ Header

            >>> something # skip if not HAVE_AMODULE
            >>> something + else
            >>> a = 1 # skip if not HAVE_BMODULE
            >>> something2   # skip if HAVE_AMODULE
        """
    func.__doc__ = klass.__doc__ = docstring

    global HAVE_AMODULE, HAVE_BMODULE
    HAVE_AMODULE = False
    HAVE_BMODULE = True

    # The parser mutates and returns the same object.
    parsed_func = doctest_skip_parser(func)
    parsed_klass = doctest_skip_parser(klass)
    assert_true(parsed_func is func)
    assert_true(parsed_klass is klass)

    expected = \
        """ Header

            >>> something # doctest: +SKIP
            >>> something + else
            >>> a = 1
            >>> something2
        """
    assert_equal(parsed_func.__doc__, expected)
    assert_equal(parsed_klass.__doc__, expected)

    # Flip both flags and re-parse fresh docstrings.
    HAVE_AMODULE = True
    HAVE_BMODULE = False
    func.__doc__ = klass.__doc__ = docstring
    parsed_func = doctest_skip_parser(func)
    parsed_klass = doctest_skip_parser(klass)

    assert_true(parsed_func is func)
    expected = \
        """ Header

            >>> something
            >>> something + else
            >>> a = 1 # doctest: +SKIP
            >>> something2   # doctest: +SKIP
        """
    assert_equal(parsed_func.__doc__, expected)
    assert_equal(parsed_klass.__doc__, expected)

    # A marker referencing a missing flag must raise NameError.
    del HAVE_AMODULE
    func.__doc__ = klass.__doc__ = docstring
    assert_raises(NameError, doctest_skip_parser, func)
    assert_raises(NameError, doctest_skip_parser, klass)

Example 47

Project: py-earth
Source File: test_knot_search.py
View license
def test_outcome_dependent_data():
    """Validate SingleWeightDependentData / SingleOutcomeDependentData.

    Checks that incremental updates keep Q_t orthonormal, that updating
    past capacity returns -1, that downdate followed by update restores
    the prior state, and that reweighting resynchronizes theta with a
    fresh QR factorization of the reweighted basis.
    """
    np.random.seed(10)
    m = 1000
    max_terms = 100
    y = np.random.normal(size=m)
    w = np.random.normal(size=m) ** 2
    weight = SingleWeightDependentData.alloc(w, m, max_terms, 1e-16)
    data = SingleOutcomeDependentData.alloc(y, weight, m, max_terms)

    # Test updating: after each column is absorbed, the first k+1 rows of
    # Q_t must stay orthonormal.  (A dead `if k >= 99: 1 + 1` debug hook
    # was removed here.)
    B = np.empty(shape=(m, max_terms))
    for k in range(max_terms):
        b = np.random.normal(size=m)
        B[:, k] = b
        code = weight.update_from_array(b)
        data.update()
        assert_equal(code, 0)
        assert_almost_equal(
            np.dot(weight.Q_t[:k + 1, :], np.transpose(weight.Q_t[:k + 1, :])),
            np.eye(k + 1))
    # Updating beyond max_terms capacity must be refused.
    assert_equal(weight.update_from_array(b), -1)

    # Test downdating: downdate + re-update must reproduce the prior state.
    q = np.array(weight.Q_t).copy()
    theta = np.array(data.theta[:max_terms]).copy()
    weight.downdate()
    data.downdate()
    weight.update_from_array(b)
    data.update()
    assert_almost_equal(q, np.array(weight.Q_t))
    assert_almost_equal(theta, np.array(data.theta[:max_terms]))
    assert_almost_equal(
        np.array(data.theta[:max_terms]), np.dot(weight.Q_t, w * y))
    # Q_t must span the same space as the QR factor of the weighted basis.
    wB = B * w[:, None]
    Q, _ = qr(wB, pivoting=False, mode='economic')
    assert_almost_equal(np.abs(np.dot(weight.Q_t, Q)), np.eye(max_terms))

    # Test that reweighting works
    assert_equal(data.k, max_terms)
    w2 = np.random.normal(size=m) ** 2
    weight.reweight(w2, B, max_terms)
    data.synchronize()
    assert_equal(data.k, max_terms)
    w2B = B * w2[:, None]
    Q2, _ = qr(w2B, pivoting=False, mode='economic')
    assert_almost_equal(np.abs(np.dot(weight.Q_t, Q2)), np.eye(max_terms))
    assert_almost_equal(
        np.array(data.theta[:max_terms]), np.dot(weight.Q_t, w2 * y))

Example 48

View license
    def test_grid(self):
        """Check AzimuthalEquidistant against the USGS reference table.

        Reference values: USGS Professional Paper 1395, pp 196--197,
        Table 30 (unit sphere, projection centred at lon/lat 0/0).
        """
        # USGS Professional Paper 1395, pp 196--197, Table 30
        # Unit sphere so projected coordinates are in radians.
        globe = ccrs.Globe(ellipse=None,
                           semimajor_axis=1.0, semiminor_axis=1.0)
        aeqd = ccrs.AzimuthalEquidistant(central_latitude=0.0,
                                         central_longitude=0.0,
                                         globe=globe)
        geodetic = aeqd.as_geodetic()

        expected = ('+a=1.0 +b=1.0 +proj=aeqd +lon_0=0.0 +lat_0=0.0 '
                    '+x_0=0.0 +y_0=0.0 +no_defs')
        assert_equal(aeqd.proj4_init, expected)

        # Limits should be +/- pi on the unit sphere.
        assert_almost_equal(np.array(aeqd.x_limits),
                            [-3.14159265, 3.14159265], decimal=6)
        assert_almost_equal(np.array(aeqd.y_limits),
                            [-3.14159265, 3.14159265], decimal=6)

        # 10x10 grid of lon/lat points, 0..90 degrees in steps of 10.
        lats, lons = np.mgrid[0:100:10, 0:100:10]
        result = aeqd.transform_points(geodetic, lons.ravel(), lats.ravel())

        # Reference x coordinates from Table 30 (row per latitude).
        expected_x = np.array([
            [0.00000, 0.17453, 0.34907, 0.52360, 0.69813,
             0.87266, 1.04720, 1.22173, 1.39626, 1.57080],
            [0.00000, 0.17275, 0.34546, 0.51807, 0.69054,
             0.86278, 1.03472, 1.20620, 1.37704, 1.54693],
            [0.00000, 0.16736, 0.33454, 0.50137, 0.66762,
             0.83301, 0.99719, 1.15965, 1.31964, 1.47607],
            [0.00000, 0.15822, 0.31607, 0.47314, 0.62896,
             0.78296, 0.93436, 1.08215, 1.22487, 1.36035],
            [0.00000, 0.14511, 0.28959, 0.43276, 0.57386,
             0.71195, 0.84583, 0.97392, 1.09409, 1.20330],
            [0.00000, 0.12765, 0.25441, 0.37931, 0.50127,
             0.61904, 0.73106, 0.83535, 0.92935, 1.00969],
            [0.00000, 0.10534, 0.20955, 0.31145, 0.40976,
             0.50301, 0.58948, 0.66711, 0.73343, 0.78540],
            [0.00000, 0.07741, 0.15362, 0.22740, 0.29744,
             0.36234, 0.42056, 0.47039, 0.50997, 0.53724],
            [0.00000, 0.04281, 0.08469, 0.12469, 0.16188,
             0.19529, 0.22399, 0.24706, 0.26358, 0.27277],
            [0.00000, 0.00000, 0.00000, 0.00000, 0.00000,
             0.00000, 0.00000, 0.00000, 0.00000, 0.00000],
        ]).ravel()
        assert_almost_equal(result[:, 0], expected_x, decimal=5)

        # Reference y coordinates from Table 30 (row per latitude).
        expected_y = np.array([
            [0.00000, 0.00000, 0.00000, 0.00000, 0.00000,
             0.00000, 0.00000, 0.00000, 0.00000, 0.00000],
            [0.17453, 0.17541, 0.17810, 0.18270, 0.18943,
             0.19859, 0.21067, 0.22634, 0.24656, 0.27277],
            [0.34907, 0.35079, 0.35601, 0.36497, 0.37803,
             0.39579, 0.41910, 0.44916, 0.48772, 0.53724],
            [0.52360, 0.52606, 0.53355, 0.54634, 0.56493,
             0.59010, 0.62291, 0.66488, 0.71809, 0.78540],
            [0.69813, 0.70119, 0.71046, 0.72626, 0.74912,
             0.77984, 0.81953, 0.86967, 0.93221, 1.00969],
            [0.87266, 0.87609, 0.88647, 0.90408, 0.92938,
             0.96306, 1.00602, 1.05942, 1.12464, 1.20330],
            [1.04720, 1.05068, 1.06119, 1.07891, 1.10415,
             1.13733, 1.17896, 1.22963, 1.28993, 1.36035],
            [1.22173, 1.22481, 1.23407, 1.24956, 1.27137,
             1.29957, 1.33423, 1.37533, 1.42273, 1.47607],
            [1.39626, 1.39829, 1.40434, 1.41435, 1.42823,
             1.44581, 1.46686, 1.49104, 1.51792, 1.54693],
            [1.57080, 1.57080, 1.57080, 1.57080, 1.57080,
             1.57080, 1.57080, 1.57080, 1.57080, 1.57080],
        ]).ravel()
        assert_almost_equal(result[:, 1], expected_y, decimal=5)

Example 49

Project: cartopy
Source File: test_img_nest.py
View license
def test_intersect():
    """Exercise NestedImageCollection construction and domain queries.

    Builds a three-level pyramid of georeferenced tiles on disk
    (zoom 0 contains zoom 1, which partially/fully contains the
    zoom-2 tiles), then checks collection ordering, ancestry and
    ``find_images`` results.

    BUG FIX: the zoom-level ordering check previously used
    ``assert_true(names, zoom_levels)``, which only asserts that
    ``names`` is truthy (``zoom_levels`` is silently consumed as the
    failure *message* argument). It now uses ``assert_equal`` so the
    intended comparison is actually performed.
    """
    def make_tile(dirpath, stem, x_center, y_center, n_pix):
        # Write a <stem>.tfw world file and a matching n_pix x n_pix
        # <stem>.tif image into dirpath. Pixel size is fixed at 2 so
        # tile extents are easy to reason about in the comments below.
        world = dict(x_pix_size=2, y_rotation=0, x_rotation=0,
                     y_pix_size=2, x_center=x_center, y_center=y_center)
        _save_world(os.path.join(dirpath, stem + '.tfw'), world)
        im = Image.new('RGB', (n_pix, n_pix))
        im.save(os.path.join(dirpath, stem + '.tif'))

    with tests.temp_dir() as base_dir:
        # Zoom level zero.
        # File 1: parent space of all images.
        z_0_dir = os.path.join(base_dir, 'z_0')
        os.mkdir(z_0_dir)
        make_tile(z_0_dir, 'p0', 1, 1, 50)

        # Zoom level one.
        # File 1: complete containment within p0.
        z_1_dir = os.path.join(base_dir, 'z_1')
        os.mkdir(z_1_dir)
        make_tile(z_1_dir, 'p1', 21, 21, 30)

        # Zoom level two.
        z_2_dir = os.path.join(base_dir, 'z_2')
        os.mkdir(z_2_dir)
        # File 1: intersect right edge with p1 left edge.
        make_tile(z_2_dir, 'p2-1', 6, 21, 5)
        # File 2: intersect upper right corner with p1 lower left corner.
        make_tile(z_2_dir, 'p2-2', 6, 6, 5)
        # File 3: complete containment within p1.
        make_tile(z_2_dir, 'p2-3', 41, 41, 5)
        # File 4: overlap with p1 right edge.
        make_tile(z_2_dir, 'p2-4', 76, 61, 5)
        # File 5: overlap with p1 bottom right corner.
        make_tile(z_2_dir, 'p2-5', 76, 76, 5)

        # Provided in reverse order in order to test the area sorting.
        items = [('dummy-z-2', z_2_dir),
                 ('dummy-z-1', z_1_dir),
                 ('dummy-z-0', z_0_dir)]
        nic = cimg_nest.NestedImageCollection.from_configuration('dummy',
                                                                 None,
                                                                 items)

        # The collections must come back sorted largest-area first,
        # despite being supplied in reverse order above.
        names = [collection.name for collection in nic._collections]
        zoom_levels = ['dummy-z-0', 'dummy-z-1', 'dummy-z-2']
        assert_equal(names, zoom_levels)

        # Check all images are loaded.
        for zoom, expected_image_count in zip(zoom_levels, [1, 1, 5]):
            images = nic._collections_by_name[zoom].images
            assert_equal(len(images), expected_image_count)

        # Check the image ancestry: only non-leaf levels have children.
        zoom_levels = ['dummy-z-0', 'dummy-z-1']
        assert_equal(sorted(k[0] for k in nic._ancestry.keys()),
                     zoom_levels)

        # Only tiles fully contained by their parent become children:
        # p1 under p0, and p2-3/p2-4/p2-5 under p1 (p2-1 and p2-2 merely
        # touch p1's boundary).
        expected = [('dummy-z-0', ['p1.tif']),
                    ('dummy-z-1', ['p2-3.tif', 'p2-4.tif', 'p2-5.tif'])]
        for zoom, image_names in expected:
            key = [k for k in nic._ancestry.keys() if k[0] == zoom][0]
            ancestry = nic._ancestry[key]
            fnames = sorted(os.path.basename(item[1].filename)
                            for item in ancestry)
            assert_equal(image_names, fnames)

        # Check image retrieval for specific domains.
        items = [(sgeom.box(20, 20, 80, 80), 3),
                 (sgeom.box(20, 20, 75, 75), 1),
                 (sgeom.box(40, 40, 85, 85), 3)]
        for domain, expected_count in items:
            result = list(nic.find_images(domain, 'dummy-z-2'))
            assert_equal(len(result), expected_count)

Example 50

Project: cartopy
Source File: test_img_nest.py
View license
def test_nest():
    """Build a three-level nested image collection by hand and verify
    its ancestry, subtile lookup, configuration equivalence and
    picklability.
    """
    crs = cimgt.GoogleTiles().crs

    # Scan the three zoom-level directories into individual collections.
    levels = []
    for zoom in (0, 1, 2):
        collection = cimg_nest.ImageCollection('aerial z%d test' % zoom, crs)
        collection.scan_dir_for_imgs(
            os.path.join(_TEST_DATA_DIR, 'z_%d' % zoom),
            glob_pattern='*.png', img_class=RoundedImg)
        levels.append(collection)
    z0, z1, z2 = levels

    # Sanity guard: every z1 image must sit inside the single z0 image,
    # otherwise the nesting logic below cannot work (this would indicate
    # bad floating-point handling of the tfw files).
    for child in z1.images:
        if not z0.images[0].bbox().contains(child.bbox()):
            raise IOError('The test images aren\'t all "contained" by the '
                          'z0 images, the nest cannot possibly work.\n '
                          'img {!s} not contained by {!s}\nExtents: {!s}; '
                          '{!s}'.format(child, z0.images[0], child.extent,
                                        z0.images[0].extent))

    nest_z0_z1 = cimg_nest.NestedImageCollection('aerial test',
                                                 crs,
                                                 [z0, z1])

    nest = cimg_nest.NestedImageCollection('aerial test', crs, [z0, z1, z2])

    z0_key = ('aerial z0 test', z0.images[0])

    assert_true(z0_key in nest_z0_z1._ancestry.keys())
    assert_equal(len(nest_z0_z1._ancestry), 1)

    # Every z1 image should be registered as a child of the sole z0 image.
    for child in z1.images:
        assert_in(('aerial z1 test', child),
                  nest_z0_z1._ancestry[('aerial z0 test', z0.images[0])])

    matches = [img for img in z1.images
               if img.filename.endswith('z_1/x_1_y_0.png')]
    x1_y0_z1, = matches

    assert_equal((1, 0, 1), _tile_from_img(x1_y0_z1))

    subtile_ids = sorted(_tile_from_img(img) for z, img in
                         nest.subtiles(('aerial z1 test', x1_y0_z1)))
    assert_equal([(2, 0, 2), (2, 1, 2), (3, 0, 2), (3, 1, 2)], subtile_ids)

    nest_from_config = gen_nest()
    # The hand-built nest and the one built from configuration must hold
    # the same images and the same ancestry.
    for name in nest_z0_z1._collections_by_name.keys():
        config_collection = nest_from_config._collections_by_name[name]
        for img in nest_z0_z1._collections_by_name[name].images:
            assert_in(img, config_collection.images)

    assert_equal(nest_z0_z1._ancestry, nest_from_config._ancestry)

    # A nest must survive a pickle round-trip with its ancestry intact.
    buffer = io.BytesIO()
    pickle.dump(nest_z0_z1, buffer)
    buffer.seek(0)
    restored = pickle.load(buffer)

    assert_equal(nest_z0_z1._ancestry,
                 restored._ancestry)