Here are examples of the Python API pattern numpy.equal(...).all(), taken from open-source projects. By voting up, you can indicate which examples are most useful and appropriate.
9 Examples
3
Example 1
Project: statsmodels Source File: linalg_covmat.py
def __init__(self, const, tmat, dist):
    """Affine transform of a random vector: ``const + tmat @ rv``.

    Parameters
    ----------
    const : array_like
        Shift vector; its length fixes the dimension of the transform.
    tmat : ndarray
        Transformation matrix; must be square with shape
        ``(len(const), len(const))``.
    dist : object
        Distribution object of the underlying random vector.

    Raises
    ------
    ValueError
        If ``tmat`` is not ``(len(const), len(const))``.
    """
    self.const = const
    self.tmat = tmat
    self.dist = dist
    self.nrv = len(const)
    # nrv broadcasts against both entries of tmat.shape, so this
    # verifies tmat is square with dimension len(const)
    if not np.equal(self.nrv, tmat.shape).all():
        raise ValueError('dimension of const and tmat do not agree')
    # replace the following with a linalgarray class
    self.tmatinv = linalg.inv(tmat)
    # compute the determinant once and derive both cached values from it
    # (original computed np.linalg.det twice)
    det = np.linalg.det(self.tmat)
    self.absdet = np.abs(det)
    self.logabsdet = np.log(np.abs(det))
    # NOTE: removed a dangling, no-op `self.dist` expression statement
    # that ended the original constructor.
3
Example 2
Project: tvb-framework Source File: nifti_importer_test.py
def test_import_nii_without_time_dimension(self):
    """
    This method tests import of a NIFTI file.
    """
    structure = self._import(self.NII_FILE)
    self.assertEqual("T1", structure.weighting)
    data_shape = structure.array_data.shape
    # assertEquals is a deprecated alias; use assertEqual consistently
    self.assertEqual(3, len(data_shape))
    self.assertEqual(64, data_shape[0])
    self.assertEqual(64, data_shape[1])
    self.assertEqual(10, data_shape[2])
    volume = structure.volume
    self.assertIsNotNone(volume)
    # element-wise comparison reduced to a single boolean
    self.assertTrue(numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all())
    self.assertTrue(numpy.equal([3.0, 3.0, 3.0], volume.voxel_size).all())
    self.assertEqual(self.UNKNOWN_STR, volume.voxel_unit)
3
Example 3
Project: tvb-framework Source File: nifti_importer_test.py
def test_import_region_mapping(self):
    """
    This method tests import of a NIFTI file compressed in GZ format.
    """
    to_link_conn = self.datatypeFactory.create_connectivity()[1]
    mapping = self._import(self.GZ_NII_FILE, RegionVolumeMapping, to_link_conn.gid)
    # region indices must lie in [-1, number_of_regions)
    self.assertTrue(-1 <= mapping.array_data.min())
    self.assertTrue(mapping.array_data.max() < to_link_conn.number_of_regions)
    conn = mapping.connectivity
    self.assertIsNotNone(conn)
    # assertEquals is a deprecated alias; use assertEqual consistently
    self.assertEqual(to_link_conn.number_of_regions, conn.number_of_regions)
    volume = mapping.volume
    self.assertIsNotNone(volume)
    self.assertTrue(numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all())
    self.assertTrue(numpy.equal([3.0, 3.0, 3.0], volume.voxel_size).all())
    self.assertEqual(self.UNKNOWN_STR, volume.voxel_unit)
3
Example 4
Project: deep_recommend_system Source File: sparse_feature_cross_op_test.py
def test_hashed_output_v1_has_collision(self):
    """Tests the old version of the fingerprint concatenation has collisions.
    """
    # The last 10 bits of 359 and 1024+359 are identical, so with
    # 1024 buckets every hashed cross of the two rows collides.
    ids = tf.constant([[359], [359 + 1024]])
    row = list(range(10))
    features = tf.constant([row, row])
    crossed = tf.contrib.layers.sparse_feature_cross(
        [features, ids], hashed_output=True, num_buckets=1024)
    dense = tf.sparse_tensor_to_dense(crossed)
    with tf.Session():
        values = dense.eval()
        # Both rows must hash to identical bucket sequences.
        self.assertTrue(numpy.equal(values[0], values[1]).all())
3
Example 5
Project: deep_recommend_system Source File: multinomial_op_test.py
def testOneOpMultipleStepsIndependent(self):
    """Running one sampling op twice should not repeat its output."""
    with self.test_session(use_gpu=self.use_gpu) as sess:
        op, _ = self._make_ops(10)
        # Consecutive runs shouldn't yield identical output.
        first = sess.run(op)
        second = sess.run(op)
        self.assertFalse(np.equal(first, second).all())
3
Example 6
Project: deep_recommend_system Source File: multinomial_op_test.py
def testTwoOpsIndependent(self):
    """Two distinct sampling ops should produce differing samples."""
    with self.test_session(use_gpu=self.use_gpu) as sess:
        op_a, op_b = self._make_ops(32)
        draws = sess.run([op_a, op_b])
        # We expect the two draws to be independent;
        # 1 in 2^32 chance of this assertion failing.
        self.assertFalse(np.equal(draws[0], draws[1]).all())
0
Example 7
Project: tvb-framework Source File: nifti_importer_test.py
def test_import_demo_ts(self):
    """
    This method tests import of a NIFTI file.
    """
    time_series = self._import(self.TIMESERIES_NII_FILE, TimeSeriesVolume)
    # Since self.assertAlmostEquals is not available on all machines
    # we compare floats as follows
    self.assertTrue(abs(1.0 - time_series.sample_period) <= 0.001)
    self.assertEqual("sec", str(time_series.sample_period_unit))
    self.assertEqual(0.0, time_series.start_time)
    self.assertTrue(time_series.title.startswith("NIFTI"))
    data_shape = time_series.read_data_shape()
    # assertEquals is a deprecated alias; use assertEqual consistently
    self.assertEqual(4, len(data_shape))
    # We have 5 time points
    self.assertEqual(5, data_shape[0])
    dimension_labels = time_series.labels_ordering
    self.assertIsNotNone(dimension_labels)
    self.assertEqual(4, len(dimension_labels))
    volume = time_series.volume
    self.assertIsNotNone(volume)
    self.assertTrue(numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all())
    self.assertEqual("mm", volume.voxel_unit)
0
Example 8
Project: tvb-framework Source File: operation_service_test.py
Function: test_reduce_dimension_component
Function: test_reduce_dimension_component
def test_reduce_dimension_component(self):
    """
    This method tests if the data passed to the launch method of
    the NDimensionArrayAdapter adapter is correct. The passed data should be a list
    of arrays with one dimension.
    """
    inserted_count = FlowService().get_available_datatypes(self.test_project.id,
                                                           "tvb.datatypes.arrays.MappedArray")[1]
    self.assertEqual(inserted_count, 0, "Expected to find no data.")
    # create an operation
    algorithm_id = FlowService().get_algorithm_by_module_and_class('tvb.tests.framework.adapters.ndimensionarrayadapter',
                                                                   'NDimensionArrayAdapter').id
    operation = model.Operation(self.test_user.id, self.test_project.id, algorithm_id, 'test params',
                                meta=json.dumps({DataTypeMetaData.KEY_STATE: "RAW_DATA"}),
                                status=model.STATUS_FINISHED)
    operation = dao.store_entity(operation)
    # save the array wrapper in DB
    adapter_instance = NDimensionArrayAdapter()
    PARAMS = {}
    self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
    inserted_data = FlowService().get_available_datatypes(self.test_project.id,
                                                          "tvb.datatypes.arrays.MappedArray")[0]
    self.assertEqual(len(inserted_data), 1, "Problems when inserting data")
    gid = inserted_data[0][2]
    entity = dao.get_datatype_by_gid(gid)
    # from the 3D array do not select any array
    PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
              "input_data_dimensions_0": "requiredDim_1",
              "input_data_dimensions_1": "",
              "input_data_dimensions_2": ""}
    try:
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        self.fail("Test should not pass. The resulted array should be a 1D array.")
    # BUG FIX: self.fail() raises AssertionError, which the original bare
    # `except Exception: pass` swallowed — the test could never fail here.
    # Re-raise AssertionError so a missing launch failure is reported.
    except AssertionError:
        raise
    except Exception:
        # OK, do nothing; we were expecting to produce a 1D array
        pass
    # from the 3D array select only a 1D array
    first_dim = [gid + '_1_0', 'requiredDim_1']
    PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
              "input_data_dimensions_0": first_dim,
              "input_data_dimensions_1": gid + "_2_1"}
    self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
    expected_result = entity.array_data[:, 0, 1]
    actual_result = adapter_instance.launch_param
    self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!")
    self.assertTrue(numpy.equal(actual_result, expected_result).all())
    # from the 3D array select a 2D array
    first_dim = [gid + '_1_0', gid + '_1_1', 'requiredDim_2']
    PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
              "input_data_dimensions_0": first_dim,
              "input_data_dimensions_1": gid + "_2_1"}
    self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
    expected_result = entity.array_data[slice(0, None), [0, 1], 1]
    actual_result = adapter_instance.launch_param
    self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!")
    self.assertTrue(numpy.equal(actual_result, expected_result).all())
    # from 3D array select 1D array by applying SUM function on the first
    # dimension and average function on the second dimension
    PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
              "input_data_dimensions_0": ["requiredDim_1", "func_sum"],
              "input_data_dimensions_1": "func_average",
              "input_data_dimensions_2": ""}
    self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
    aux = numpy.sum(entity.array_data, axis=0)
    expected_result = numpy.average(aux, axis=0)
    actual_result = adapter_instance.launch_param
    self.assertEqual(len(actual_result), len(expected_result), "Not the same size of results!")
    self.assertTrue(numpy.equal(actual_result, expected_result).all())
    # from 3D array select a 2D array and apply op. on the second dimension
    PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
              "input_data_dimensions_0": ["requiredDim_2", "func_sum",
                                          "expected_shape_x,512", "operations_x,>"],
              "input_data_dimensions_1": "",
              "input_data_dimensions_2": ""}
    try:
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        self.fail("Test should not pass! The second dimension of the array should be >512.")
    except AssertionError:
        # same fix: never swallow the failure raised by self.fail()
        raise
    except Exception:
        # OK, do nothing;
        pass
0
Example 9
Project: Z2Pack Source File: test_read_mmn.py
def test_read(compare_data):
    """Check the parsed mmn sample against the stored reference data."""
    def arrays_match(lhs, rhs):
        # element-wise equality reduced to a single boolean
        return np.equal(lhs, rhs).all()

    compare_data(arrays_match, z2pack.fp._read_mmn.get_m('samples/mmn/bi.mmn'))