pytest.mark.run

Here are examples of the Python API pytest.mark.run, taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.

9 Examples

Example 1

Project: osm2vectortiles Source File: integration_test.py
@pytest.mark.run(order=1)
def test_postgis_startup():
    """Start the PostGIS container from a clean slate as the first test."""
    print(PROJECT_DIR)
    compose = DockerCompose()
    compose.remove_all()
    compose.up('postgis')
    # PostGIS can take a long time to get ready; wait before the next test.
    time.sleep(10)

Example 2

Project: osm2vectortiles Source File: integration_test.py
@pytest.mark.run(order=7)
def test_diff_update():
    """Pull and import OSM diffs, then verify at least one tile was marked dirty.

    Runs the update-osm-diff, import-osm-diff and changed-tiles containers,
    then asserts that export/tiles.txt contains at least one changed tile.
    """
    dc = DockerCompose()

    # Pull the latest diffs
    baseurl = 'http://download.geofabrik.de/europe/albania-updates/'
    dc.run(['-e', 'OSM_UPDATE_BASEURL={}'.format(baseurl), 'update-osm-diff'])

    # Import diffs and calculate the changed tiles
    dc.run(['import-osm-diff'])
    dc.run(['changed-tiles'])

    # Read and verify that at least one tile is marked dirty.
    # Use a context manager so the file handle is closed deterministically;
    # the original left it open until garbage collection.
    tile_file = os.path.join(PROJECT_DIR, 'export/tiles.txt')
    print('Checking {} for changed tiles'.format(tile_file))
    with open(tile_file) as fh:
        num_lines = sum(1 for line in fh)
    assert num_lines > 0

Example 3

Project: osm2vectortiles Source File: integration_test.py
@pytest.mark.run(order=8)
def test_diff_jobs():
    """Schedule the changed tiles as rendering jobs."""
    compose = DockerCompose()
    compose.run(['generate-diff-jobs'])

Example 4

Project: pytest-ordering Source File: __init__.py
Function: pytest_collection_modifyitems
def pytest_collection_modifyitems(session, config, items):
    """Reorder collected test items by their ``pytest.mark.run(order=...)`` value.

    Named order marks found in ``orders_map`` are first translated into an
    explicit ``run(order=...)`` mark. Items are then bucketed by order and
    rearranged: non-negative orders run first (ascending), items with no
    order stay in the middle, and negative orders run last (ascending).
    """
    buckets = {}

    for item in items:
        # Translate the first matching named mark into an explicit run order.
        for mark_name, order in orders_map.items():
            if item.get_marker(mark_name):
                item.add_marker(pytest.mark.run(order=order))
                break

        run_mark = item.get_marker('run')
        key = run_mark.kwargs.get('order') if run_mark else None
        buckets.setdefault(key, []).append(item)

    # Items without an explicit order sit between the two sorted groups.
    no_order = buckets.pop(None, [])

    ordered = sorted(buckets.items(), key=operator.itemgetter(0))
    head = [group for key, group in ordered if key >= 0]
    tail = [group for key, group in ordered if key < 0]

    regrouped = head + [no_order] + tail
    items[:] = [item for group in regrouped for item in group]

Example 5

Project: osm2vectortiles Source File: integration_test.py
@pytest.mark.run(order=2)
def test_import_external():
    """Run the import-external container to load external data sources."""
    compose = DockerCompose()
    compose.run(['import-external'])

Example 6

Project: osm2vectortiles Source File: integration_test.py
@pytest.mark.run(order=3)
def test_import_osm():
    """Run the import-osm container to load the OSM extract."""
    compose = DockerCompose()
    compose.run(['import-osm'])

Example 7

Project: osm2vectortiles Source File: integration_test.py
@pytest.mark.run(order=4)
def test_import_sql():
    """Run the import-sql container to apply the SQL layer definitions."""
    compose = DockerCompose()
    compose.run(['import-sql'])

Example 8

Project: osm2vectortiles Source File: integration_test.py
@pytest.mark.run(order=5)
def test_local_export():
    """Export the Albania bbox locally and verify the expected tiles exist."""
    compose = DockerCompose()

    def run_export(bbox, min_zoom, max_zoom):
        # Run one export for the given bounding box and zoom range.
        compose.run([
            '-e', 'BBOX={}'.format(bbox),
            '-e', 'MIN_ZOOM={}'.format(min_zoom),
            '-e', 'MAX_ZOOM={}'.format(max_zoom),
            'export'
        ])

    tile_x, tile_y, tile_z = ALBANIA_TIRANA_TILE
    run_export(ALBANIA_BBOX, tile_z, 14)

    # z14 legitimately has holes where Albania has no data, so only
    # check completeness down to z13.
    exported_mbtiles = os.path.join(PROJECT_DIR, 'export/tiles.mbtiles')
    missing = find_missing_tiles(exported_mbtiles, tile_x, tile_y, tile_z, 13)
    assert missing == []

Example 9

Project: osm2vectortiles Source File: integration_test.py
@pytest.mark.run(order=6)
def test_distributed_worker():
    """Run the distributed export pipeline end to end for one Albania tile.

    Schedules render jobs via RabbitMQ, runs an export worker against S3,
    merges the results into planet.mbtiles, and verifies that no tiles
    below the parent tile's zoom level are missing.
    """
    compose = DockerCompose()

    def enqueue_jobs(x, y, z, job_zoom):
        # Generate render jobs for a single parent tile (not the whole world).
        compose.run([
            '-e', 'TILE_X={}'.format(x),
            '-e', 'TILE_Y={}'.format(y),
            '-e', 'TILE_Z={}'.format(z),
            '-e', 'JOB_ZOOM={}'.format(job_zoom),
            '-e', 'WORLD_JOB=false',
            'generate-jobs'
        ])

    compose.up('rabbitmq')
    time.sleep(10)

    tile_x, tile_y, tile_z = ALBANIA_TIRANA_TILE
    enqueue_jobs(tile_x, tile_y, tile_z, tile_z + 1)

    compose.run([
        '-e', 'BUCKET_NAME={}'.format(BUCKET),
        '-e', 'AWS_ACCESS_KEY_ID={}'.format(AWS_ACCESS_KEY_ID),
        '-e', 'AWS_SECRET_ACCESS_KEY={}'.format(AWS_SECRET_ACCESS_KEY),
        '-e', 'AWS_S3_HOST={}'.format(AWS_S3_HOST),
        'export-worker'
    ])

    # Give time to merge jobs together
    compose.up('merge-jobs')
    time.sleep(20)
    compose.stop('merge-jobs')

    # merge-jobs folds all results into the existing planet.mbtiles; if the
    # MBTiles file contains all the Albania tiles at the job zoom level,
    # the export was successful.
    exported_mbtiles = os.path.join(PROJECT_DIR, 'export/planet.mbtiles')
    print('Checking {} for missing tiles'.format(exported_mbtiles))
    missing = find_missing_tiles(exported_mbtiles, tile_x, tile_y, tile_z, 13)
    assert [t for t in missing if t.z > tile_z] == []