system_test_utils.unique_resource_id

Here are examples of the Python API system_test_utils.unique_resource_id taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.

22 Examples

Example 1

Project: google-cloud-python
Source File: bigquery.py
View license
    def test_list_datasets(self):
        """Create several datasets, then verify ``list_datasets`` returns them."""
        names = [prefix + unique_resource_id()
                 for prefix in ('new', 'newer', 'newest')]
        for name in names:
            dataset = Config.CLIENT.dataset(name)
            # Creation is retried on 403s (rate-limit responses).
            retry_403(dataset.create)()
            self.to_delete.append(dataset)

        # Fetch every dataset in the project and keep only the ones above.
        iterator = Config.CLIENT.list_datasets()
        listed = list(iterator)
        self.assertIsNone(iterator.next_page_token)
        matches = [dataset for dataset in listed
                   if dataset.name in names and
                   dataset.project == Config.CLIENT.project]
        self.assertEqual(len(matches), len(names))

Example 2

Project: google-cloud-python
Source File: bigtable.py
View license
    def test_create_instance(self):
        """Create a Bigtable instance and confirm it reads back identically."""
        instance_id = 'new' + unique_resource_id('-')
        instance = Config.CLIENT.instance(instance_id, LOCATION_ID)
        operation = instance.create()
        # Ensure cleanup even if the assertions below fail.
        self.instances_to_delete.append(instance)

        # Block until the long-running create operation finishes.
        self.assertTrue(_wait_until_complete(operation))

        # Re-fetch the instance and make sure it matches what was created.
        fetched = Config.CLIENT.instance(instance_id, LOCATION_ID)
        fetched.reload()

        self.assertEqual(instance, fetched)
        self.assertEqual(instance.display_name, fetched.display_name)

Example 3

Project: google-cloud-python
Source File: datastore.py
View license
def setUpModule():
    """Configure the datastore client, targeting the emulator when enabled."""
    emulator_dataset = os.getenv(GCD_DATASET)
    # Isolated namespace so concurrent test runs don't collide.
    namespace = 'ns' + unique_resource_id()
    if emulator_dataset is not None:
        # Emulator: fake credentials plus an un-authorized HTTP object.
        Config.CLIENT = datastore.Client(project=emulator_dataset,
                                         namespace=namespace,
                                         credentials=EmulatorCreds(),
                                         http=httplib2.Http())
    else:
        Config.CLIENT = datastore.Client(namespace=namespace)

Example 4

Project: google-cloud-python
Source File: language.py
View license
def setUpModule():
    """Create the language client plus a scratch GCS bucket for tests."""
    Config.CLIENT = language.Client()
    # Bucket to hold GCS-stored content exercised by the tests.
    Config.TEST_BUCKET = storage.Client().bucket('new' + unique_resource_id())
    # Retry on 429 Too Many Requests in case API requests are rate-limited.
    RetryErrors(exceptions.TooManyRequests)(Config.TEST_BUCKET.create)()

Example 5

Project: google-cloud-python
Source File: monitoring.py
View license
    def test_create_and_delete_metric_descriptor(self):
        """Round-trip: create a custom metric descriptor, then delete it."""
        metric_type = ('custom.googleapis.com/tmp/system_test_example' +
                       unique_resource_id())

        client = monitoring.Client()
        descriptor = client.metric_descriptor(
            metric_type,
            metric_kind=monitoring.MetricKind.GAUGE,
            value_type=monitoring.ValueType.DOUBLE,
            description='System test example -- DELETE ME!',
        )

        # Both calls are retried because the API intermittently 500s;
        # the delete also tolerates a 404.
        retry_500(descriptor.create)()
        retry_404_500(descriptor.delete)()

Example 6

Project: google-cloud-python
Source File: pubsub.py
View license
    def test_create_topic(self):
        """A freshly named topic must not exist until ``create`` is called."""
        name = 'a-new-topic' + unique_resource_id('-')
        topic = Config.CLIENT.topic(name)
        self.assertFalse(topic.exists())
        topic.create()
        self.to_delete.append(topic)
        self.assertTrue(topic.exists())
        self.assertEqual(topic.name, name)

Example 7

Project: google-cloud-python
Source File: pubsub.py
View license
    def test_create_subscription_defaults(self):
        """Create a subscription with default settings and verify it."""
        topic = Config.CLIENT.topic('create-sub-def' + unique_resource_id('-'))
        self.assertFalse(topic.exists())
        topic.create()
        self.to_delete.append(topic)

        sub_name = 'subscribing-now' + unique_resource_id('-')
        subscription = topic.subscription(sub_name)
        self.assertFalse(subscription.exists())
        subscription.create()
        self.to_delete.append(subscription)

        self.assertTrue(subscription.exists())
        self.assertEqual(subscription.name, sub_name)
        self.assertIs(subscription.topic, topic)

Example 8

Project: google-cloud-python
Source File: pubsub.py
View license
    def test_create_subscription_w_ack_deadline(self):
        """Create a subscription with an explicit ack deadline.

        Verifies the non-default ``ack_deadline`` round-trips through the
        API onto the created resource.
        """
        TOPIC_NAME = 'create-sub-ack' + unique_resource_id('-')
        topic = Config.CLIENT.topic(TOPIC_NAME)
        self.assertFalse(topic.exists())
        topic.create()
        self.to_delete.append(topic)
        # Pass '-' as the separator for consistency with the sibling
        # pub/sub tests (was unique_resource_id() with no argument).
        SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-')
        subscription = topic.subscription(SUBSCRIPTION_NAME, ack_deadline=120)
        self.assertFalse(subscription.exists())
        subscription.create()
        self.to_delete.append(subscription)
        self.assertTrue(subscription.exists())
        self.assertEqual(subscription.name, SUBSCRIPTION_NAME)
        # The non-default ack deadline must be reflected on the resource.
        self.assertEqual(subscription.ack_deadline, 120)
        self.assertIs(subscription.topic, topic)

Example 9

Project: google-cloud-python
Source File: pubsub.py
View license
    def test_topic_iam_policy(self):
        """Exercise get/set IAM policy on a topic (when permitted)."""
        from google.cloud.pubsub.iam import PUBSUB_TOPICS_GET_IAM_POLICY
        self._maybe_emulator_skip()
        name = 'test-topic-iam-policy-topic' + unique_resource_id('-')
        topic = Config.CLIENT.topic(name)
        topic.create()

        # Retry / backoff up to 7 seconds (1 + 2 + 4)
        RetryResult(lambda result: result, max_tries=4)(topic.exists)()
        self.to_delete.append(topic)

        # Only attempt the policy round-trip when we hold the permission.
        if topic.check_iam_permissions([PUBSUB_TOPICS_GET_IAM_POLICY]):
            policy = topic.get_iam_policy()
            policy.viewers.add(policy.user('[email protected]'))
            updated = topic.set_iam_policy(policy)
            self.assertEqual(updated.viewers, policy.viewers)

Example 10

Project: google-cloud-python
Source File: speech.py
View license
def setUpModule():
    """Create the speech client plus a scratch GCS bucket for tests."""
    Config.CLIENT = speech.Client()
    Config.USE_GAX = Config.CLIENT._use_gax
    # Bucket to hold GCS-stored content exercised by the tests.
    Config.TEST_BUCKET = storage.Client().bucket('new' + unique_resource_id())
    # Retry on 429 Too Many Requests in case API requests are rate-limited.
    RetryErrors(exceptions.TooManyRequests)(Config.TEST_BUCKET.create)()

Example 11

Project: google-cloud-python
Source File: storage.py
View license
def setUpModule():
    """Create the storage client and a uniquely named scratch bucket."""
    Config.CLIENT = storage.Client()
    # In the **very** rare case the bucket name is reserved, this
    # fails with a ConnectionError.
    Config.TEST_BUCKET = Config.CLIENT.bucket('new' + unique_resource_id())
    retry_429(Config.TEST_BUCKET.create)()

Example 12

Project: google-cloud-python
Source File: storage.py
View license
    def test_create_bucket(self):
        """A new bucket name 404s until ``create_bucket`` is called."""
        name = 'a-new-bucket' + unique_resource_id('-')
        with self.assertRaises(exceptions.NotFound):
            Config.CLIENT.get_bucket(name)
        created = Config.CLIENT.create_bucket(name)
        self.case_buckets_to_delete.append(name)
        self.assertEqual(created.name, name)

Example 13

Project: google-cloud-python
Source File: storage.py
View license
    def test_list_buckets(self):
        """Create several buckets and verify ``list_buckets`` returns them.

        Removes a dead local: ``created_buckets`` was initialized to an
        empty list before the creation loop but never read before being
        rebound by the comprehension below.
        """
        buckets_to_create = [
            'new' + unique_resource_id(),
            'newer' + unique_resource_id(),
            'newest' + unique_resource_id(),
        ]
        for bucket_name in buckets_to_create:
            bucket = Config.CLIENT.bucket(bucket_name)
            # Creation is retried on 429s (rate-limit responses).
            retry_429(bucket.create)()
            self.case_buckets_to_delete.append(bucket_name)

        # Retrieve the buckets and keep only the ones created above.
        all_buckets = Config.CLIENT.list_buckets()
        created_buckets = [bucket for bucket in all_buckets
                           if bucket.name in buckets_to_create]
        self.assertEqual(len(created_buckets), len(buckets_to_create))

Example 14

Project: google-cloud-python
Source File: bigquery.py
View license
def _make_dataset_name(prefix):
    """Return *prefix* with a unique suffix appended."""
    return '{}{}'.format(prefix, unique_resource_id())

Example 15

Project: google-cloud-python
Source File: bigquery.py
View license
    def test_list_tables(self):
        """Insert tables into a fresh dataset and verify listing them."""
        dataset_name = _make_dataset_name('list_tables')
        dataset = Config.CLIENT.dataset(dataset_name)
        self.assertFalse(dataset.exists())

        # Creation is retried on 403s (rate-limit responses).
        retry_403(dataset.create)()
        self.to_delete.append(dataset)

        # An empty dataset lists no tables.
        iterator = dataset.list_tables()
        self.assertEqual(list(iterator), [])
        self.assertIsNone(iterator.next_page_token)

        # Insert some tables to be listed.
        table_names = [prefix + unique_resource_id()
                       for prefix in ('new', 'newer', 'newest')]
        full_name = bigquery.SchemaField('full_name', 'STRING',
                                         mode='REQUIRED')
        age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
        for table_name in table_names:
            table = dataset.table(table_name, schema=[full_name, age])
            table.create()
            # Tables must be deleted before their dataset.
            self.to_delete.insert(0, table)

        # Retrieve the tables and keep only the ones created above.
        iterator = dataset.list_tables()
        listed = list(iterator)
        self.assertIsNone(iterator.next_page_token)
        created = [table for table in listed
                   if (table.name in table_names and
                       table.dataset_name == dataset_name)]
        self.assertEqual(len(created), len(table_names))

Example 16

Project: google-cloud-python
Source File: bigquery.py
View license
    def test_load_table_from_storage_then_dump_table(self):
        """End-to-end: upload a CSV to GCS, load it into BigQuery, read back.

        Steps: write ``ROWS`` to a CSV blob in a scratch bucket, run a
        load job into a pre-created table, poll the job to completion,
        then fetch the rows and compare them with ``ROWS`` (keyed on age,
        since row order is not asserted).
        """
        import csv
        import tempfile
        from google.cloud.storage import Client as StorageClient
        # Shared suffix ties the bucket name and the job name together.
        local_id = unique_resource_id()
        BUCKET_NAME = 'bq_load_test' + local_id
        BLOB_NAME = 'person_ages.csv'
        GS_URL = 'gs://%s/%s' % (BUCKET_NAME, BLOB_NAME)
        ROWS = [
            ('Phred Phlyntstone', 32),
            ('Bharney Rhubble', 33),
            ('Wylma Phlyntstone', 29),
            ('Bhettye Rhubble', 27),
        ]
        TABLE_NAME = 'test_table'

        s_client = StorageClient()

        # In the **very** rare case the bucket name is reserved, this
        # fails with a ConnectionError.
        bucket = s_client.create_bucket(BUCKET_NAME)
        self.to_delete.append(bucket)

        blob = bucket.blob(BLOB_NAME)

        # Write a header row plus ROWS to a temp CSV and upload it.
        with tempfile.TemporaryFile(mode='w+') as csv_file:
            writer = csv.writer(csv_file)
            writer.writerow(('Full Name', 'Age'))
            writer.writerows(ROWS)
            blob.upload_from_file(
                csv_file, rewind=True, content_type='text/csv')

        # Insert at the front so the blob is deleted before its bucket.
        self.to_delete.insert(0, blob)

        dataset = Config.CLIENT.dataset(
            _make_dataset_name('load_gcs_then_dump'))

        # Creation is retried on 403s (rate-limit responses).
        retry_403(dataset.create)()
        self.to_delete.append(dataset)

        full_name = bigquery.SchemaField('full_name', 'STRING',
                                         mode='REQUIRED')
        age = bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED')
        table = dataset.table(TABLE_NAME, schema=[full_name, age])
        table.create()
        self.to_delete.insert(0, table)

        # Load job: the table pre-exists, so never create it, skip the
        # CSV header row, and require the destination to be empty.
        job = Config.CLIENT.load_table_from_storage(
            'bq_load_storage_test_' + local_id, table, GS_URL)
        job.create_disposition = 'CREATE_NEVER'
        job.skip_leading_rows = 1
        job.source_format = 'CSV'
        job.write_disposition = 'WRITE_EMPTY'

        job.begin()

        def _job_done(instance):
            # Retry predicate: true once the job reports a DONE state.
            return instance.state in ('DONE', 'done')

        # Allow for 90 seconds of "warm up" before rows visible.  See:
        # https://cloud.google.com/bigquery/streaming-data-into-bigquery#dataavailability
        # 8 tries -> 1 + 2 + 4 + 8 + 16 + 32 + 64 = 127 seconds
        retry = RetryInstanceState(_job_done, max_tries=8)
        retry(job.reload)()

        # Compare fetched rows to ROWS, both sorted by the age column.
        rows = self._fetch_single_page(table)
        by_age = operator.itemgetter(1)
        self.assertEqual(sorted(rows, key=by_age),
                         sorted(ROWS, key=by_age))

Example 17

Project: google-cloud-python
Source File: logging_.py
View license
    @staticmethod
    def _logger_name():
        """Return a unique, hyphen-separated logger name for system tests."""
        return 'system-tests-logger' + unique_resource_id('-')

Example 18

Project: google-cloud-python
Source File: monitoring.py
View license
    def test_write_point(self):
        """Write one point for a custom metric and query it back.

        Creates a custom metric descriptor, writes a single double value,
        queries the last 5 minutes of timeseries data, and checks that
        exactly one series with the expected metric/resource comes back.
        Finally deletes the descriptor and verifies a second delete raises
        ``NotFound``.
        """
        METRIC_TYPE = ('custom.googleapis.com/tmp/system_test_example' +
                       unique_resource_id())
        METRIC_KIND = monitoring.MetricKind.GAUGE
        VALUE_TYPE = monitoring.ValueType.DOUBLE
        DESCRIPTION = 'System test example -- DELETE ME!'
        VALUE = 3.14

        client = monitoring.Client()
        descriptor = client.metric_descriptor(
            METRIC_TYPE,
            metric_kind=METRIC_KIND,
            value_type=VALUE_TYPE,
            description=DESCRIPTION,
        )

        descriptor.create()

        # Metric and resource with no extra labels.
        metric = client.metric(METRIC_TYPE, {})
        resource = client.resource('global', {})

        # The write is retried because the API intermittently 500s.
        retry_500(client.write_point)(metric, resource, VALUE)

        def _query_timeseries_with_retries():
            # Written points are not immediately queryable: retry both on
            # empty results and on BadRequest until data appears.
            MAX_RETRIES = 7

            def _has_timeseries(result):
                # Retry predicate: true once the query yields any series.
                return len(list(result)) > 0

            retry_result = RetryResult(_has_timeseries,
                                       max_tries=MAX_RETRIES)(client.query)
            return RetryErrors(BadRequest, max_tries=MAX_RETRIES)(retry_result)

        query = _query_timeseries_with_retries()(METRIC_TYPE, minutes=5)
        timeseries_list = list(query)
        self.assertEqual(len(timeseries_list), 1)
        timeseries = timeseries_list[0]
        self.assertEqual(timeseries.metric, metric)
        # project_id label only exists on output.
        del timeseries.resource.labels['project_id']
        self.assertEqual(timeseries.resource, resource)

        descriptor.delete()

        # A second delete must fail: the descriptor is already gone.
        with self.assertRaises(NotFound):
            descriptor.delete()

Example 19

Project: google-cloud-python
Source File: pubsub.py
View license
    def test_list_topics(self):
        """Create topics and poll until listing reflects all of them."""
        before = _consume_topics(Config.CLIENT)
        topic_names = [prefix + unique_resource_id()
                       for prefix in ('new', 'newer', 'newest')]
        for topic_name in topic_names:
            topic = Config.CLIENT.topic(topic_name)
            topic.create()
            self.to_delete.append(topic)

        # Retrieve the topics.
        def _all_created(result):
            # Retry predicate: every newly created topic is listed.
            return len(result) == len(before) + len(topic_names)

        after = RetryResult(_all_created)(_consume_topics)(Config.CLIENT)

        created = [topic for topic in after
                   if topic.name in topic_names and
                   topic.project == Config.CLIENT.project]
        self.assertEqual(len(created), len(topic_names))

Example 20

Project: google-cloud-python
Source File: pubsub.py
View license
    def test_list_subscriptions(self):
        """Create subscriptions on a topic and verify listing them."""
        topic = Config.CLIENT.topic('list-sub' + unique_resource_id('-'))
        topic.create()
        self.to_delete.append(topic)

        # A fresh topic has no subscriptions yet.
        self.assertEqual(len(_consume_subscriptions(topic)), 0)

        sub_names = [prefix + unique_resource_id()
                     for prefix in ('new', 'newer', 'newest')]
        for sub_name in sub_names:
            subscription = topic.subscription(sub_name)
            subscription.create()
            self.to_delete.append(subscription)

        # Retrieve the subscriptions.
        def _all_created(result):
            # Retry predicate: every newly created subscription is listed.
            return len(result) == len(sub_names)

        all_subscriptions = RetryResult(_all_created)(
            _consume_subscriptions)(topic)

        created = [subscription for subscription in all_subscriptions
                   if subscription.name in sub_names]
        self.assertEqual(len(created), len(sub_names))

Example 21

Project: google-cloud-python
Source File: pubsub.py
View license
    def test_message_pull_mode_e2e(self):
        """End-to-end publish/pull: publish two messages and pull both back.

        The topic is created with ``timestamp_messages=True``, and the
        received pair is ordered by the ``timestamp`` attribute before
        the per-message assertions.
        """
        import operator
        TOPIC_NAME = 'message-e2e' + unique_resource_id('-')
        topic = Config.CLIENT.topic(TOPIC_NAME,
                                    timestamp_messages=True)
        self.assertFalse(topic.exists())
        topic.create()
        self.to_delete.append(topic)
        SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-')
        subscription = topic.subscription(SUBSCRIPTION_NAME)
        self.assertFalse(subscription.exists())
        subscription.create()
        self.to_delete.append(subscription)

        MESSAGE_1 = b'MESSAGE ONE'
        # NOTE(review): MESSAGE_2 duplicates MESSAGE_1's payload, so the
        # data assertions below cannot distinguish the two messages --
        # presumably this was meant to be b'MESSAGE TWO'; confirm intent
        # before changing, since distinct payloads would make the test
        # depend on timestamp ordering.
        MESSAGE_2 = b'MESSAGE ONE'
        EXTRA_1 = 'EXTRA 1'
        EXTRA_2 = 'EXTRA 2'
        topic.publish(MESSAGE_1, extra=EXTRA_1)
        topic.publish(MESSAGE_2, extra=EXTRA_2)

        class Hoover(object):
            # Accumulates pulled messages until both have been received.

            def __init__(self):
                self.received = []

            def done(self, *dummy):
                # Retry predicate: stop once both messages are in.
                return len(self.received) == 2

            def suction(self):
                # Pull up to two messages, auto-acking on exit.
                with subscription.auto_ack(max_messages=2) as ack:
                    self.received.extend(ack.values())

        hoover = Hoover()
        retry = RetryInstanceState(hoover.done)
        retry(hoover.suction)()

        # Order the two received messages by their 'timestamp' attribute.
        message1, message2 = sorted(hoover.received,
                                    key=operator.attrgetter('timestamp'))

        self.assertEqual(message1.data, MESSAGE_1)
        self.assertEqual(message1.attributes['extra'], EXTRA_1)
        self.assertIsNotNone(message1.service_timestamp)

        self.assertEqual(message2.data, MESSAGE_2)
        self.assertEqual(message2.attributes['extra'], EXTRA_2)
        self.assertIsNotNone(message2.service_timestamp)

Example 22

Project: google-cloud-python
Source File: pubsub.py
View license
    def test_subscription_iam_policy(self):
        """Exercise get/set IAM policy on a subscription (when permitted)."""
        from google.cloud.pubsub.iam import PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY
        self._maybe_emulator_skip()
        topic = Config.CLIENT.topic(
            'test-sub-iam-policy-topic' + unique_resource_id('-'))
        topic.create()

        # Retry / backoff up to 7 seconds (1 + 2 + 4)
        RetryResult(lambda result: result, max_tries=4)(topic.exists)()
        self.to_delete.append(topic)

        sub_name = 'test-sub-iam-policy-sub' + unique_resource_id('-')
        subscription = topic.subscription(sub_name)
        subscription.create()

        # Retry / backoff up to 7 seconds (1 + 2 + 4)
        RetryResult(lambda result: result, max_tries=4)(subscription.exists)()
        # Insert at the front: subscriptions are deleted before the topic.
        self.to_delete.insert(0, subscription)

        # Only attempt the policy round-trip when we hold the permission.
        if subscription.check_iam_permissions(
                [PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY]):
            policy = subscription.get_iam_policy()
            policy.viewers.add(policy.user('[email protected]'))
            updated = subscription.set_iam_policy(policy)
            self.assertEqual(updated.viewers, policy.viewers)