Here are examples of the Python API bokeh.models.WMTSTileSource taken from open-source projects. By voting up, you can indicate which examples are most useful and appropriate.
2 Examples
0
Source: plot_buffer.py
with GNU General Public License v3.0
from eqcorrscan
def define_plot(
    doc: Document,
    rt_client: _StreamingClient,
    channels: list,
    tribe: RealTimeTribe,
    inventory: Inventory,
    detections: list,
    map_options: dict,
    plot_options: dict,
    plot_length: float,
    update_interval: int,
    data_color: str = "grey",
    lowcut: float = 1.0,
    highcut: float = 10.0,
    offline: bool = False,
) -> None:
    """
    Set up a bokeh plot for real-time plotting.

    Defines a moving data stream and a map, and registers a periodic
    callback on ``doc`` that keeps both up to date.

    Parameters
    ----------
    doc
        Bokeh document to edit - usually called as a partial
    rt_client
        RealTimeClient streaming data
    channels
        Channels to plot
    tribe
        Tribe to plot
    inventory
        Inventory to plot
    detections
        Detections to plot - should be a list that is updated in place.
    map_options
        Dictionary of options for the map
    plot_options
        Dictionary of options for plotting in general
    plot_length
        Length of data plot
    update_interval
        Update frequency in seconds
    data_color
        Colour to data stream
    lowcut
        Lowcut for filtering data stream
    highcut
        Highcut for filtering data stream
    offline
        Flag to set time-stamps to data time-stamps if True, else timestamps
        will be real-time
    """
    # Set up the data source: take a private copy of the buffered stream,
    # split masked gaps and detrend before (optionally) filtering.
    Logger.info("Getting stream to define plot")
    stream = rt_client.stream.copy().split().detrend()
    # Choose filter band (and a matching plot title) from lowcut/highcut.
    if lowcut and highcut:
        stream.filter("bandpass", freqmin=lowcut, freqmax=highcut)
        title = "Streaming data: {0}-{1} Hz bandpass".format(lowcut, highcut)
    elif lowcut:
        stream.filter("highpass", lowcut)
        title = "Streaming data: {0} Hz highpass".format(lowcut)
    elif highcut:
        stream.filter("lowpass", highcut)
        title = "Streaming data: {0} Hz lowpass".format(highcut)
    else:
        title = "Raw streaming data"
    stream.merge()
    Logger.info(f"Have the stream: \n{stream}")
    # Collect template origins for the map; alphas start at 0 (invisible)
    # and are raised elsewhere when a template detects.
    template_lats, template_lons, template_alphas, template_ids = (
        [], [], [], [])
    for template in tribe:
        try:
            origin = (template.event.preferred_origin() or
                      template.event.origins[0])
        except IndexError:
            # Template event has no origin - cannot be mapped.
            continue
        template_lats.append(origin.latitude)
        # Longitudes wrapped to [0, 360) - presumably to keep regions that
        # straddle the dateline contiguous; TODO confirm.
        template_lons.append(origin.longitude % 360)
        template_alphas.append(0)
        template_ids.append(template.event.resource_id.id.split("/")[-1])
    # Collect station positions from the inventory for the map.
    station_lats, station_lons, station_ids = ([], [], [])
    for network in inventory:
        for station in network:
            station_lats.append(station.latitude)
            station_lons.append(station.longitude % 360)
            station_ids.append(station.code)
    # Get plot bounds in web mercator (epsg:3857) from WGS84 (epsg:4326).
    Logger.info("Defining map")
    transformer = Transformer.from_crs(
        "epsg:4326", "epsg:3857", always_xy=True)
    try:
        min_lat, min_lon, max_lat, max_lon = (
            min(template_lats + station_lats),
            min(template_lons + station_lons),
            max(template_lats + station_lats),
            max(template_lons + station_lons))
    except ValueError as e:
        # No templates and no stations: min()/max() of empty sequence.
        Logger.error(e)
        Logger.info("Setting map bounds to NZ")
        min_lat, min_lon, max_lat, max_lon = (-47., 165., -34., 179.9)
    Logger.info(f"Map bounds: {min_lon}, {min_lat} - {max_lon}, {max_lat}")
    bottom_left = transformer.transform(min_lon, min_lat)
    top_right = transformer.transform(max_lon, max_lat)
    map_x_range = (bottom_left[0], top_right[0])
    map_y_range = (bottom_left[1], top_right[1])
    # Project template and station coordinates into mercator for plotting.
    template_x, template_y = ([], [])
    for lon, lat in zip(template_lons, template_lats):
        _x, _y = transformer.transform(lon, lat)
        template_x.append(_x)
        template_y.append(_y)
    station_x, station_y = ([], [])
    for lon, lat in zip(station_lons, station_lats):
        _x, _y = transformer.transform(lon, lat)
        station_x.append(_x)
        station_y.append(_y)
    template_source = ColumnDataSource({
        'y': template_y, 'x': template_x,
        'lats': template_lats, 'lons': template_lons,
        'template_alphas': template_alphas, 'id': template_ids})
    station_source = ColumnDataSource({
        'y': station_y, 'x': station_x,
        'lats': station_lats, 'lons': station_lons, 'id': station_ids})
    Logger.info("Allocated data sources")
    trace_sources = {}
    trace_data_range = {}
    # Allocate empty arrays: one ColumnDataSource per channel, with a
    # datetime axis sampled at the trace's delta.
    for channel in channels:
        tr = stream.select(id=channel)[0]
        times = np.arange(
            tr.stats.starttime.datetime,
            (tr.stats.endtime + tr.stats.delta).datetime,
            step=dt.timedelta(seconds=tr.stats.delta))
        data = tr.data
        trace_sources.update(
            {channel: ColumnDataSource({'time': times, 'data': data})})
        trace_data_range.update({channel: (data.min(), data.max())})
    # Set up the map to go on the left side
    Logger.info("Adding features to map")
    map_plot = figure(
        title="Template map", x_range=map_x_range, y_range=map_y_range,
        x_axis_type="mercator", y_axis_type="mercator", **map_options)
    url = 'http://a.basemaps.cartocdn.com/rastertiles/voyager/{Z}/{X}/{Y}.png'
    attribution = "Tiles by Carto, under CC BY 3.0. Data by OSM, under ODbL"
    map_plot.add_tile(WMTSTileSource(url=url, attribution=attribution))
    # Templates as circles whose fill alpha tracks detection recency.
    map_plot.circle(
        x="x", y="y", source=template_source, fill_color="firebrick",
        line_color="grey", line_alpha=.2,
        fill_alpha="template_alphas", size=10)
    # Stations as blue triangles.
    map_plot.triangle(
        x="x", y="y", size=10, source=station_source, color="blue", alpha=1.0)
    # Set up the trace plots
    Logger.info("Setting up streaming plot")
    trace_plots = []
    # In offline mode the x-axis is anchored to the data's own end time
    # instead of wall-clock "now".
    if not offline:
        now = dt.datetime.utcnow()
    else:
        now = max([tr.stats.endtime for tr in stream]).datetime
    # First (top) trace panel carries the title; its plot_height is
    # inflated by 20% to make room, and plot_height is removed from the
    # kwargs passed through to avoid a duplicate keyword.
    p1 = figure(
        y_axis_location="right", title=title,
        x_range=[now - dt.timedelta(seconds=plot_length), now],
        plot_height=int(plot_options["plot_height"] * 1.2),
        **{key: value for key, value in plot_options.items()
           if key != "plot_height"})
    p1.yaxis.axis_label = None
    p1.xaxis.axis_label = None
    p1.min_border_bottom = 0
    p1.min_border_top = 0
    # Hide x tick labels unless this is the only (hence bottom) panel.
    if len(channels) != 1:
        p1.xaxis.major_label_text_font_size = '0pt'
    p1_line = p1.line(
        x="time", y='data', source=trace_sources[channels[0]],
        color=data_color, line_width=1)
    legend = Legend(items=[(channels[0], [p1_line])])
    p1.add_layout(legend, 'right')
    datetick_formatter = DatetimeTickFormatter(
        days=["%m/%d"], months=["%m/%d"],
        hours=["%m/%d %H:%M:%S"], minutes=["%m/%d %H:%M:%S"],
        seconds=["%m/%d %H:%M:%S"], hourmin=["%m/%d %H:%M:%S"],
        minsec=["%m/%d %H:%M:%S"])
    p1.xaxis.formatter = datetick_formatter
    # Add detection lines: vertical dashed red lines at pick times,
    # spanning 90% of the channel's data range.
    Logger.info("Adding detection artists")
    detection_source = _get_pick_times(detections, channels[0])
    detection_source.update(
        {"pick_values": [[
            int(min(stream.select(id=channels[0])[0].data) * .9),
            int(max(stream.select(id=channels[0])[0].data) * .9)]
            for _ in detection_source['picks']]})
    detection_sources = {channels[0]: ColumnDataSource(detection_source)}
    detection_lines = MultiLine(
        xs="picks", ys="pick_values", line_color="red", line_dash="dashed",
        line_width=1)
    p1.add_glyph(detection_sources[channels[0]], detection_lines)
    trace_plots.append(p1)
    # Remaining channels share p1's x_range so all panels pan/zoom together.
    if len(channels) > 1:
        for i, channel in enumerate(channels[1:]):
            p = figure(
                x_range=p1.x_range,
                y_axis_location="right", **plot_options)
            p.yaxis.axis_label = None
            p.xaxis.axis_label = None
            p.min_border_bottom = 0
            # p.min_border_top = 0
            p_line = p.line(
                x="time", y="data", source=trace_sources[channel],
                color=data_color, line_width=1)
            legend = Legend(items=[(channel, [p_line])])
            p.add_layout(legend, 'right')
            p.xaxis.formatter = datetick_formatter
            # Add detection lines
            detection_source = _get_pick_times(detections, channel)
            detection_source.update(
                {"pick_values": [[
                    int(min(stream.select(id=channel)[0].data) * .9),
                    int(max(stream.select(id=channel)[0].data) * .9)]
                    for _ in detection_source['picks']]})
            detection_sources.update({
                channel: ColumnDataSource(detection_source)})
            detection_lines = MultiLine(
                xs="picks", ys="pick_values", line_color="red",
                line_dash="dashed", line_width=1)
            p.add_glyph(detection_sources[channel], detection_lines)
            trace_plots.append(p)
            # Only the bottom-most panel shows x tick labels.
            if i != len(channels) - 2:
                p.xaxis.major_label_text_font_size = '0pt'
    # Map on the left, stacked trace panels on the right.
    plots = gridplot([[map_plot, column(trace_plots)]])

    # Remember how far each channel has been plotted so updates only
    # stream the new samples.
    previous_timestamps = {
        channel: stream.select(id=channel)[0].stats.endtime
        for channel in channels}

    def update() -> None:
        # Periodic callback: stream new samples, refresh detection lines,
        # slide the time window and update template alphas.
        Logger.debug("Plot updating")
        _stream = rt_client.stream.split().detrend()
        if lowcut and highcut:
            _stream.filter("bandpass", freqmin=lowcut, freqmax=highcut)
        elif lowcut:
            _stream.filter("highpass", lowcut)
        elif highcut:
            _stream.filter("lowpass", highcut)
        _stream.merge()
        for _i, _channel in enumerate(channels):
            try:
                _tr = _stream.select(id=_channel)[0]
            except IndexError:
                Logger.debug("No channel for {0}".format(_channel))
                continue
            # Non-zero iff the trace end has moved past the last plotted
            # sample; only the zero/non-zero distinction is used.
            new_samples = int(_tr.stats.sampling_rate * (
                previous_timestamps[_channel] - _tr.stats.endtime))
            if new_samples == 0:
                Logger.debug("No new data for {0}".format(_channel))
                continue
            _new_data = _tr.slice(
                starttime=previous_timestamps[_channel])
            new_times = np.arange(
                _new_data.stats.starttime.datetime,
                (_tr.stats.endtime + _tr.stats.delta).datetime,
                step=dt.timedelta(seconds=_tr.stats.delta))
            # Skip the first sample - it was already plotted as the tail
            # of the previous update.
            new_data = {'time': new_times[1:], 'data': _new_data.data[1:]}
            Logger.debug("Channl: {0}\tNew times: {1}\t New data: {2}".format(
                _tr.id, new_data["time"].shape, new_data["data"].shape))
            # rollover caps the source at plot_length's worth of samples.
            trace_sources[_channel].stream(
                new_data=new_data,
                rollover=int(plot_length * _tr.stats.sampling_rate))
            # Re-draw pick lines over 90% of the current data range.
            new_picks = _get_pick_times(detections, _channel)
            new_picks.update({
                'pick_values': [
                    [int(np.nan_to_num(
                        trace_sources[_channel].data['data']).max() * .9),
                     int(np.nan_to_num(
                        trace_sources[_channel].data['data']).min() * .9)]
                    for _ in new_picks['picks']]})
            detection_sources[_channel].data = new_picks
            previous_timestamps.update({_channel: _tr.stats.endtime})
            Logger.debug("New data plotted for {0}".format(_channel))
        if not offline:
            now = dt.datetime.utcnow()
        else:
            try:
                now = max([tr.stats.endtime for tr in _stream]).datetime
            except ValueError:
                # Empty stream - nothing to re-window yet.
                return
        # Slide the shared x-range window; all panels follow p1.x_range.
        trace_plots[0].x_range.start = now - dt.timedelta(seconds=plot_length)
        trace_plots[0].x_range.end = now
        _update_template_alphas(
            detections, tribe, decay=plot_length, now=now,
            datastream=template_source)

    Logger.info("Adding callback")
    doc.add_periodic_callback(update, update_interval)
    doc.title = "EQcorrscan Real-time plotter"
    doc.add_root(plots)
    Logger.info("Plot defined")
def _update_template_alphas(
0
Source: geoplot.py
with MIT License
from PatrikHlobil
def _add_backgroundtile(
    p, tile_provider, tile_provider_url, tile_attribution, tile_alpha
):
    """Add a background tile to the plot. Either uses predefined Tiles from Bokeh
    (parameter: tile_provider) or user passed a tile_provider_url of the form
    ' < url>/{Z}/{X}/{Y}*.png' or ' < url>/{Z}/{Y}/{X}*.png'.

    Raises ValueError for a malformed URL template, a non-string attribution,
    a non-string provider name, or an unknown provider name. Returns the
    figure ``p`` (with the tile renderer's alpha set to ``tile_alpha``
    when a tile was added)."""
    from bokeh.models import WMTSTileSource

    if tile_provider_url is not None:
        # Custom tile server: the URL must contain a Z/X/Y (or Z/Y/X)
        # template so bokeh can substitute tile coordinates.
        if (
            "/{Z}/{X}/{Y}" not in tile_provider_url
            and "/{Z}/{Y}/{X}" not in tile_provider_url
        ):
            raise ValueError(
                " < tile_provider_url> has to be of the form ' < url>/{Z}/{X}/{Y}*.png' or < url>/{Z}/{Y}/{X}*.png'."
            )
        if not isinstance(tile_attribution, str):
            raise ValueError(" < tile_attribution> has to be a string.")
        t = p.add_tile(
            WMTSTileSource(url=tile_provider_url, attribution=tile_attribution)
        )
        t.alpha = tile_alpha
    elif tile_provider is not None:
        if not isinstance(tile_provider, str):
            raise ValueError(
                f" < tile_provider> only accepts the values: {TILE_PROVIDERS}"
            )
        # Resolve the provider once - the original called
        # _get_background_tile twice (in the condition and again for
        # add_tile), doing the lookup work twice.
        background_tile = _get_background_tile(tile_provider)
        if not background_tile:
            raise ValueError(
                f" < tile_provider> only accepts the values: {TILE_PROVIDERS}"
            )
        t = p.add_tile(background_tile)
        t.alpha = tile_alpha
    # If neither a URL nor a provider was given, the plot is returned
    # unchanged (no background tile).
    return p
def _get_figure(col):