Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions local-root/configs/tileserver-cache.vcl
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,10 @@ sub vcl_backend_response {
set beresp.ttl = 1d;
# Allow stale content while revalidating
set beresp.grace = 5m;
} else if (beresp.http.Content-Type ~ "application/x-protobuf") {
# Vector tiles should have the same TTL
set beresp.ttl = 1d;
set beresp.grace = 5m;
} else {
# Shorter TTL for other content
set beresp.ttl = 5m;
Expand Down
4 changes: 3 additions & 1 deletion local-root/docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,8 @@ services:
environment:
- VARNISH_HTTP_PORT=8000
volumes:
# TODO:
# Changes to this config file aren't pulled in without explicitly restarting the container.
- ./configs/tileserver-cache.vcl:/etc/varnish/default.vcl
tileserver_core:
image: hub.opensciencegrid.org/macrostrat/tileserver:main
Expand Down Expand Up @@ -112,7 +114,7 @@ services:
py-modules: ../py-modules
environment:
- DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@database:5432/${POSTGRES_DB}
- MAPNIK_POOL_SIZE=64
- MAPNIK_POOL_SIZE=8
command: >
uvicorn --host 0.0.0.0 --port 8000
macrostrat.legacy_tileserver:app
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ async def startup_event():
# Tile rendering map pool size
# This controls how many image tiles can be concurrently rendered.
# Database access is somewhat inefficient, so we may need to adjust this.
mapnik_pool_size = int(environ.get("MAPNIK_POOL_SIZE", "64"))
mapnik_pool_size = int(environ.get("MAPNIK_POOL_SIZE", "8"))
log.info(f"Setting up Mapnik map pool with size {mapnik_pool_size}")

app.state.pool = await create_pool_b(url)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,12 +31,17 @@ async def get_image_tile(request: Request, args: CachedTileArgs) -> bytes:
scale = scale_for_zoom(tile.z)
box = Box2d(bbox.left, bbox.top, bbox.right, bbox.bottom)

# TODO: tune PostGIS data sources
# https://github.com/mapnik/mapnik/wiki/PostGIS

async with pool.map_context(scale) as _map:

_map.zoom_to_box(box)

# Render map to image
im = Image(512, 512)
render(_map, im, 2)

# Return image as binary
return im.tostring("png")
res = im.tostring("png")
return res
Original file line number Diff line number Diff line change
Expand Up @@ -8,56 +8,60 @@
from subprocess import CalledProcessError, check_output
from textwrap import dedent

from mapnik import Datasource

from .config import layer_order

__here__ = Path(__file__).parent


def make_carto_stylesheet(scale, db_url):
def make_datasource(db_url, **kwargs):
    """Build a Mapnik ``Datasource`` from a database URL plus extra options.

    Postgres connection credentials (host, port, user, password, dbname)
    are derived from *db_url* via ``get_credentials``; any additional
    keyword arguments are passed straight through to ``Datasource``.
    NOTE(review): a key present in both *kwargs* and the credentials
    would raise a duplicate-keyword ``TypeError`` — assumed not to occur.
    """
    pg_credentials = get_credentials(db_url)
    return Datasource(
        **pg_credentials,
        **kwargs,
    )

line_sql = " UNION ALL ".join(
f"SELECT * FROM lines.{s}" for s in layer_order[scale]

def make_line_datasource(db_url, scale):
    """Build the shared PostGIS datasource for line geometries at *scale*.

    Wraps the line query in a subselect so Mapnik treats it as a table,
    and merges in connection credentials derived from *db_url*.
    """
    subquery = create_line_query(scale)
    options = {
        "type": "postgis",
        "table": f"({subquery}) subset",
        "key_field": "line_id",
        "geometry_field": "geom",
        # Fixed world extent avoids an extent-estimation query at startup.
        "extent_cache": "auto",
        "extent": "-180,-90,180,90",
        "srid": "4326",
    }
    options.update(get_credentials(db_url))
    return Datasource(**options)


def make_polygon_datasource(db_url, scale):
    """Build the shared PostGIS datasource for polygon (map unit) geometries
    at *scale*, combining the polygon query with credentials from *db_url*.
    """
    subquery = create_polygon_query(scale)
    table_expr = f"({subquery}) subset"
    return Datasource(
        type="postgis",
        table=table_expr,
        key_field="map_id",
        geometry_field="geom",
        # Fixed world extent avoids an extent-estimation query at startup.
        extent_cache="auto",
        extent="-180,-90,180,90",
        srid="4326",
        **get_credentials(db_url),
    )


def make_carto_stylesheet(scale, db_url):
pg_credentials = get_credentials(db_url)

cartoCSS = (__here__ / "style.mss").read_text()

webmercator_srs = "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over"

polygon_query = dedent(
f"""
SELECT
z.map_id,
nullif(l.color, '') AS color,
z.geom FROM carto.polygons z
LEFT JOIN maps.map_legend
ON z.map_id = map_legend.map_id
LEFT JOIN maps.legend AS l
ON l.legend_id = map_legend.legend_id
LEFT JOIN maps.sources
ON l.source_id = sources.source_id
WHERE sources.status_code = 'active'
AND l.color IS NOT NULL
AND l.color != ''
AND z.scale = '{scale}'
"""
)

line_query = dedent(
f"""
SELECT
x.line_id,
x.geom,
q.direction,
q.type
FROM carto.lines x
LEFT JOIN ( {line_sql} ) q
ON q.line_id = x.line_id
LEFT JOIN maps.sources ON x.source_id = sources.source_id
WHERE sources.status_code = 'active'
AND x.scale = '{scale}'
"""
)
polygon_query = create_polygon_query(scale)
line_query = create_line_query(scale)

return {
"bounds": [-89, -179, 89, 179],
Expand Down Expand Up @@ -153,3 +157,45 @@ def get_credentials(db_url=None):
"password": db_url.password,
"dbname": db_url.database,
}


def create_polygon_query(scale):
    """Return the SQL selecting colored map polygons for *scale*.

    Joins carto polygons to their legend entry (via ``maps.map_legend``)
    to pick up a display color, keeping only polygons whose source is
    active and whose legend color is non-empty.  The string is later
    interpolated into a PostGIS datasource ``table`` parameter.
    NOTE(review): *scale* is interpolated directly into the SQL; assumed
    to come from trusted config (``layer_order`` keys), not user input.
    """
    return dedent(
        f"""
        SELECT
        z.map_id,
        nullif(l.color, '') AS color,
        z.geom FROM carto.polygons z
        LEFT JOIN maps.map_legend
        ON z.map_id = map_legend.map_id
        LEFT JOIN maps.legend AS l
        ON l.legend_id = map_legend.legend_id
        LEFT JOIN maps.sources
        ON l.source_id = sources.source_id
        WHERE sources.status_code = 'active'
        AND l.color IS NOT NULL
        AND l.color != ''
        AND z.scale = '{scale}'
        """
    )


def create_line_query(scale):
    """Return the SQL selecting line geometries (with type/direction) for *scale*.

    Line attributes live in per-layer tables under the ``lines`` schema;
    ``layer_order[scale]`` lists which layers participate, and they are
    combined with UNION ALL before being joined to the carto geometries.
    Only lines from active sources at the requested scale are kept.
    NOTE(review): *scale* and the layer names are interpolated directly
    into the SQL; assumed to come from trusted config, not user input.
    """
    line_sql = " UNION ALL ".join(
        f"SELECT * FROM lines.{s}" for s in layer_order[scale]
    )
    return dedent(
        f"""
        SELECT
        x.line_id,
        x.geom,
        q.direction,
        q.type
        FROM carto.lines x
        LEFT JOIN ( {line_sql} ) q
        ON q.line_id = x.line_id
        LEFT JOIN maps.sources ON x.source_id = sources.source_id
        WHERE sources.status_code = 'active'
        AND x.scale = '{scale}'
        """
    )
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,11 @@
from macrostrat.utils import get_logger

from .config import scales
from .mapnik_styles import make_mapnik_xml
from .mapnik_styles import (
make_line_datasource,
make_mapnik_xml,
make_polygon_datasource,
)

log = get_logger(__name__)

Expand All @@ -19,6 +23,9 @@ class MapnikMapPool:
storage: dict[str, Queue[Map, None]] = {}
n_instances: int = 4

line_datasources: dict[str, object] = {}
polygon_datasources: dict[str, object] = {}

def __init__(self, n_instances: int = 4):
    # Number of Map objects created per scale when the pool queue is
    # filled; bounds how many tiles render concurrently per scale.
    self.n_instances = n_instances

Expand All @@ -32,19 +39,41 @@ async def setup_queue(self, db: Database, scale: str):
# Fill the queue with Mapnik maps
t = time.time()
_xml = make_mapnik_xml(scale, db.engine.url)

# Set up PostGIS data sources for shared use here

line_datasource = make_line_datasource(db.engine.url, scale)
polygon_datasource = make_polygon_datasource(db.engine.url, scale)

self.line_datasources[scale] = line_datasource
self.polygon_datasources[scale] = polygon_datasource

log.info(f"Generated mapnik XML for scale {scale} in {time.time() - t} seconds")
for _ in range(self.n_instances):
_map = Map(512, 512)
load_map_from_string(_map, _xml)
# Set up shared data sources here
for layer in _map.layers:
if layer.name == f"lines_{scale}":
layer.datasource = line_datasource
elif layer.name == f"units_{scale}":
layer.datasource = polygon_datasource
await q.put(_map)
dt = time.time() - t
log.info(
f"Initialized {self.n_instances} map objects for scale {scale} in {dt} seconds"
)
return q

@asynccontextmanager
async def map_context(self, scale: str) -> Map:
    """Check a Mapnik map for *scale* out of the pool for the duration of
    the ``async with`` block, returning it to the pool afterwards.

    Awaiting ``q.get()`` blocks until an instance is free, so the pool
    size caps concurrent renders for each scale.
    """
    q = self.storage[scale]
    _map = await q.get()
    t = time.time()
    try:
        yield _map
    finally:
        # Always return the map to the pool, even if rendering raised.
        await q.put(_map)
        dt = time.time() - t
        # NOTE(review): dt spans the whole checkout (including the caller's
        # render time), not just the put() call — message is approximate.
        log.debug(f"Returned map to pool for scale {scale} in {dt:.3f} seconds")
2 changes: 1 addition & 1 deletion services/legacy-tileserver/pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "macrostrat.legacy-tileserver"
version = "2.3.0-beta.3"
version = "2.3.0"
description = "Macrostrat legacy tile server"
authors = [{ name = "Daven Quinn", email = "[email protected]" }]
requires-python = ">=3.9,<3.10"
Expand Down
2 changes: 1 addition & 1 deletion services/legacy-tileserver/uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.