
Commit a82a043

Steps to fix SQL parameters
1 parent 1de373c commit a82a043

31 files changed: +208 −167 lines changed
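
The changes below move schema names out of "${project_schema}"-style template tokens and into {project_schema} / {data_schema} / {topo_schema} placeholders, while row values keep the :name bind-parameter form. As a rough illustration of the two layers (not the project's actual helper code), assuming the curly-brace placeholder is filled in client-side before the statement reaches SQLAlchemy, which then binds the :name parameters; the connection URL, schema name, and sample values are hypothetical:

from sqlalchemy import create_engine, text

# Hypothetical connection URL and schema name, for illustration only
engine = create_engine("postgresql://localhost/column_footprint")
data_schema = "project_1_data"

# {data_schema} is a client-side placeholder, filled before execution;
# :col_group_id etc. are bind parameters resolved by the database driver.
sql = """INSERT INTO {data_schema}.column_groups(col_group_id, col_group, col_group_name)
VALUES (:col_group_id, :col_group, :col_group_name);"""

with engine.begin() as conn:
    conn.execute(
        text(sql.format(data_schema=data_schema)),
        {"col_group_id": 1, "col_group": "A", "col_group_name": "Group A"},
    )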
Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
+import logging
+
+disable_loggers = ["macrostrat.database.utils"]
+
+
+def pytest_configure():
+    # Quiet verbose logging
+    for logger_name in disable_loggers:
+        logger = logging.getLogger(logger_name)
+        logger.disabled = True

services/column-footprint-editor/macrostrat/column_footprint_editor/database/__init__.py

Lines changed: 15 additions & 18 deletions
@@ -1,11 +1,12 @@
 from macrostrat.database import Database as BaseDatabase
+from macrostrat.database.utils import get_sql_text
 from pathlib import Path
 from sqlalchemy import create_engine
 from sqlalchemy.orm import sessionmaker

 from .sql_formatter import SqlFormatter
 from ..settings import DATABASE
-from ..utils import config_check, run_docker_config, delete_config
+from ..utils import run_topology_command, delete_config

 here = Path(__file__).parent
 fixtures = here / "fixtures"
@@ -35,7 +36,7 @@ def __init__(self, project=None):

         self.engine = create_engine(DATABASE, echo=True)
         self.Session = sessionmaker(bind=self.engine)
-        self.config = config_check(project)
+        # self.config = config_check(project)
         self.formatter = SqlFormatter(self.project_id)

     def exec_sql(self, sql, params=None, count=None):
@@ -61,17 +62,9 @@ def exec_query(self, filename_or_query, **kwargs):
         """
         from pandas import read_sql

-        if "SELECT" in str(filename_or_query):
-            # We are working with a query string instead of
-            # an SQL file.
-            sql = filename_or_query
-            sql = self.formatter.sql_config_format(sql, self.config)
-        else:
-            with open(filename_or_query) as f:
-                sql = f.read()
-            sql = self.formatter.sql_config_format(sql, self.config)
+        txt = get_sql_text(filename_or_query)

-        return read_sql(sql, self.engine, **kwargs)
+        return read_sql(txt, self.engine, **kwargs)

     #################### db initialization methods ##########################
     def create_project_table(self):
@@ -86,7 +79,7 @@ def clean_topology(self):

     def create_project_schema(self):
         # need to run docker command and then create core tables
-        run_docker_config(self.project_id, "create_tables")
+        run_topology_command(self, self.project_id, "create_tables")
         self.run_sql_file(create_core_table)
         self.create_map_face_view()

@@ -103,7 +96,7 @@ def insert_project_info(self, params={}):
         self.run_sql_file(project_info_insert, params=params)

     def insert_project_column_group(self, params={}):
-        sql = """INSERT INTO ${project_schema}.column_groups(col_group_id, col_group, col_group_name) VALUES(
+        sql = """INSERT INTO {data_schema}.column_groups(col_group_id, col_group, col_group_name) VALUES(
            :col_group_id, :col_group, :col_group_name);"""
         self.run_sql(sql, params)

@@ -117,14 +110,14 @@ def redump_linework_from_edge(self):
         self.run_sql_file(redump_linework_sql)

     def remove_project(self, params={}):
-        run_docker_config(self.project_id, "delete")  # delete topology
+        run_topology_command(self.project_id, "delete")  # delete topology
         self.run_sql_file(remove_project_schema, params={"project_id": self.project_id})
         delete_config(self.project_id)  # remove config file

     ################## db topology methods ##############################

     def update_topology(self):
-        run_docker_config(self.project_id, "update")
+        run_topology_command(self.project_id, "update")

     ###################### Project-Free methods ########################

@@ -142,6 +135,10 @@ def get_next_project_id(self):
         data = self.exec_query(sql).to_dict(orient="records")
         imported_max_id = data[0]["max"]
         all_max_id = data[1]["max"]
+        if imported_max_id is None:
+            imported_max_id = 0
+        if all_max_id is None:
+            all_max_id = 0
         if imported_max_id == all_max_id:
             return imported_max_id + 1000
         else:
@@ -151,9 +148,9 @@ def get_next_col_group_id(self):
         """function to get the next project id that won't conflict with macrostrat"""
         # TODO: unhardcode the max int for project id
         # WARNING: Now this isn't going to be conflict free. Because we split the tables up
-        sql = """SELECT max(col_group_id), 'imported' origin from ${project_schema}.column_groups WHERE col_group_id < 5000
+        sql = """SELECT max(col_group_id), 'imported' origin from {data_schema}.column_groups WHERE col_group_id < 5000
         UNION ALL
-        SELECT max(col_group_id), 'all' origin from ${project_schema}.column_groups;"""
+        SELECT max(col_group_id), 'all' origin from {data_schema}.column_groups;"""

         data = self.exec_query(sql).to_dict(orient="records")
         imported_max_id = data[0]["max"]
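
The main Python change above replaces the hand-rolled string-vs-file detection in exec_query with get_sql_text from macrostrat.database.utils, which the diff imports and passes either a file path or a literal query. A minimal sketch of the simplified flow, assuming a standalone engine rather than the project's Database class and a placeholder connection URL, and assuming get_sql_text returns text that pandas.read_sql can execute (as the diff uses it):

from pandas import read_sql
from sqlalchemy import create_engine
from macrostrat.database.utils import get_sql_text

# Placeholder connection URL, for illustration only
engine = create_engine("postgresql://localhost/column_footprint")

def exec_query(filename_or_query, **kwargs):
    # get_sql_text accepts both a literal query string and a path to a .sql file,
    # so the caller no longer needs to branch on whether "SELECT" appears in the argument.
    txt = get_sql_text(filename_or_query)
    return read_sql(txt, engine, **kwargs)

df = exec_query("SELECT 1 AS one")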
Lines changed: 5 additions & 5 deletions
@@ -1,20 +1,20 @@
-CREATE SCHEMA IF NOT EXISTS ${project_schema};
+CREATE SCHEMA IF NOT EXISTS {project_schema};

-create table if not exists ${project_schema}.column_groups(
+create table if not exists {project_schema}.column_groups(
     id SERIAL PRIMARY KEY,
     col_group_id integer,
     col_group text,
     col_group_name text,
     color text
 );

-CREATE TABLE IF NOT EXISTS ${project_schema}.columns(
+CREATE TABLE IF NOT EXISTS {project_schema}.columns(
     id serial PRIMARY KEY,
     project_id integer,
-    col_group integer REFERENCES ${project_schema}.column_groups(id),
+    col_group integer REFERENCES {project_schema}.column_groups(id),
     col_id integer,
     col_name text,
     description text,
     point geometry,
-    location geometry
+    location geometry
 );
Lines changed: 14 additions & 14 deletions
@@ -1,26 +1,26 @@
-ALTER TABLE ${data_schema}.polygon
+ALTER TABLE {data_schema}.polygon
 ADD COLUMN col_id integer,
-ADD FOREIGN KEY (col_id) REFERENCES ${project_schema}.columns(id);
+ADD FOREIGN KEY (col_id) REFERENCES {project_schema}.columns(id);

-CREATE OR REPLACE VIEW ${project_schema}.column_map_face AS
+CREATE OR REPLACE VIEW {project_schema}.column_map_face AS
 WITH A as(
 SELECT c.id,
-c.project_id,
-c.col_id,
+c.project_id,
+c.col_id,
 c.col_name,
-c.description,
+c.description,
 cg.col_group,
 cg.id as col_group_id,
 cg.col_group_id as col_group_id_macrostrat,
-cg.col_group_name,
+cg.col_group_name,
 cg.color,
-c.point,
-mtm.geometry
-FROM ${topo_schema}.map_face mtm
-LEFT JOIN ${data_schema}.polygon mdp
+c.point,
+mtm.geometry
+FROM {topo_schema}.map_face mtm
+LEFT JOIN {data_schema}.polygon mdp
 ON ST_Contains(mtm.geometry, mdp.geometry)
-LEFT JOIN ${project_schema}.columns c
+LEFT JOIN {project_schema}.columns c
 ON mdp.col_id = c.id
-LEFT JOIN ${project_schema}.column_groups cg
+LEFT JOIN {project_schema}.column_groups cg
 ON cg.id = c.col_group
-) SELECT A.* from A;
+) SELECT A.* from A;
Lines changed: 2 additions & 2 deletions
@@ -1,6 +1,6 @@
 /*
 Updates the existing line, just the geometry
 */
-UPDATE ${data_schema}.linework
+UPDATE {data_schema}.linework
 SET geometry = ST_Multi(ST_GeomFromGeoJSON(:geometry_))
-WHERE id = :id_;
+WHERE id = :id_;
Lines changed: 19 additions & 19 deletions
@@ -1,36 +1,36 @@
 SET session_replication_role = replica;

-UPDATE ${data_schema}.linework
+UPDATE {data_schema}.linework
 SET
 topo = null,
 geometry_hash = null,
 topology_error = null;

-TRUNCATE TABLE ${topo_schema}.face CASCADE;
-TRUNCATE TABLE ${topo_schema}.relation CASCADE;
-TRUNCATE TABLE ${topo_schema}.map_face CASCADE;
+TRUNCATE TABLE {topo_schema}.face CASCADE;
+TRUNCATE TABLE {topo_schema}.relation CASCADE;
+TRUNCATE TABLE {topo_schema}.map_face CASCADE;

-INSERT INTO ${topo_schema}.face (face_id) VALUES (0);
+INSERT INTO {topo_schema}.face (face_id) VALUES (0);

-ALTER SEQUENCE ${topo_schema}.node_node_id_seq RESTART WITH 1;
-ALTER SEQUENCE ${topo_schema}.face_face_id_seq RESTART WITH 1;
-ALTER SEQUENCE ${topo_schema}.edge_data_edge_id_seq RESTART WITH 1;
-ALTER SEQUENCE ${topo_schema}.topogeo_s_1 RESTART WITH 1;
+ALTER SEQUENCE {topo_schema}.node_node_id_seq RESTART WITH 1;
+ALTER SEQUENCE {topo_schema}.face_face_id_seq RESTART WITH 1;
+ALTER SEQUENCE {topo_schema}.edge_data_edge_id_seq RESTART WITH 1;
+ALTER SEQUENCE {topo_schema}.topogeo_s_1 RESTART WITH 1;

-SELECT setval(pg_get_serial_sequence('${topo_schema}.map_face', 'id'), coalesce(max(id),0)+1, false)
-FROM ${topo_schema}.map_face;
+SELECT setval(pg_get_serial_sequence('{topo_schema}.map_face', 'id'), coalesce(max(id),0)+1, false)
+FROM {topo_schema}.map_face;
 SET session_replication_role = DEFAULT;

-TRUNCATE ${topo_schema}.map_face CASCADE;
+TRUNCATE {topo_schema}.map_face CASCADE;

-ALTER SEQUENCE ${topo_schema}.map_face_id_seq RESTART WITH 1;
+ALTER SEQUENCE {topo_schema}.map_face_id_seq RESTART WITH 1;


-TRUNCATE ${data_schema}.linework CASCADE;
-TRUNCATE ${project_schema}.columns CASCADE;
-TRUNCATE ${data_schema}.polygon CASCADE;
+TRUNCATE {data_schema}.linework CASCADE;
+TRUNCATE {project_schema}.columns CASCADE;
+TRUNCATE {data_schema}.polygon CASCADE;

-ALTER SEQUENCE ${data_schema}.linework_id_seq RESTART WITH 1;
-ALTER SEQUENCE ${project_schema}.columns_id_seq RESTART WITH 1;
-ALTER SEQUENCE ${data_schema}.polygon_id_seq RESTART WITH 1;
+ALTER SEQUENCE {data_schema}.linework_id_seq RESTART WITH 1;
+ALTER SEQUENCE {project_schema}.columns_id_seq RESTART WITH 1;
+ALTER SEQUENCE {data_schema}.polygon_id_seq RESTART WITH 1;

Lines changed: 2 additions & 2 deletions
@@ -1,4 +1,4 @@
-INSERT INTO ${data_schema}.linework(geometry, type) VALUES (
+INSERT INTO {data_schema}.linework(geometry, type) VALUES (
 ST_Multi(ST_GeomFromGeoJSON(:geometry_)),
 'default'
-);
+);
Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 /*
 Deletes a line where the id is equal to the id
 */
-DELETE FROM ${data_schema}.linework WHERE id = :id_;
+DELETE FROM {data_schema}.linework WHERE id = :id_;
Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 WITH a as(
 SELECT (ST_Dump(ST_Boundary(ST_GeomFromGeoJSON(:polygon)))).geom as geom
 )
-INSERT INTO ${data_schema}.linework(geometry, type)
+INSERT INTO {data_schema}.linework(geometry, type)
 SELECT ST_Multi(a.geom),
 'default'
 FROM a;
Lines changed: 6 additions & 6 deletions
@@ -1,11 +1,11 @@
-/*
+/*
 Select id of polygon that contains point
 */

-SELECT
-id
-FROM ${topo_schema}.map_face mtm
+SELECT
+id
+FROM {topo_schema}.map_face mtm
 WHERE ST_Contains(
-mtm.geometry,
+mtm.geometry,
 ST_GeomFromGeoJSON(:point)
-);
+);
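
The query above keeps the :point bind parameter for the GeoJSON value and only swaps the schema token to {topo_schema}. A hedged usage sketch, assuming the placeholder is filled in before execution and the point is passed as a GeoJSON string; the connection URL, schema name, and coordinates are hypothetical:

import json
from sqlalchemy import create_engine, text

# Placeholder URL and schema name, for illustration only
engine = create_engine("postgresql://localhost/column_footprint")
topo_schema = "project_1_topology"

sql = """SELECT id
FROM {topo_schema}.map_face mtm
WHERE ST_Contains(
    mtm.geometry,
    ST_GeomFromGeoJSON(:point)
);"""

point = json.dumps({"type": "Point", "coordinates": [-89.4, 43.07]})

with engine.connect() as conn:
    # Fill the schema placeholder client-side, then let the driver bind :point
    row = conn.execute(text(sql.format(topo_schema=topo_schema)), {"point": point}).first()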
