2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "qwc-data-service"
-version = "v2025.07.04"
+version = "v2025.10.04"
description = "QWC Data Service"
readme = "README.md"
requires-python = ">=3.10"
6 changes: 3 additions & 3 deletions requirements.txt
@@ -254,9 +254,9 @@ referencing==0.36.2 \
# via
# jsonschema
# jsonschema-specifications
-requests==2.32.4 \
-    --hash=sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c \
-    --hash=sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422
+requests==2.32.3 \
+    --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \
+    --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6
# via qwc-data-service
rpds-py==0.22.3 \
--hash=sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518 \
5 changes: 3 additions & 2 deletions src/data_service.py
@@ -35,7 +35,7 @@ def __init__(self, tenant, logger, config):
self.attachments_service = AttachmentsService(tenant, logger)
self.db_engine = DatabaseEngine()

-    def index(self, identity, translator, dataset, bbox, crs, filterexpr, filter_geom):
+    def index(self, identity, translator, dataset, bbox, crs, filterexpr, filter_geom, filter_fields):
"""Find dataset features inside bounding box.

:param str|obj identity: User identity
@@ -46,6 +46,7 @@
:param str filterexpr: JSON serialized array of filter expressions:
[["<attr>", "<op>", "<value>"], "and|or", ["<attr>", "<op>", "<value>"]]
:param str filter_geom: JSON serialized GeoJSON geometry
+        :param list[str] filter_fields: Field names to return
"""
dataset_features_provider = self.dataset_features_provider(
identity, translator, dataset, False
@@ -88,7 +89,7 @@ def index(self, identity, translator, dataset, bbox, crs, filterexpr, filter_geom):

try:
feature_collection = dataset_features_provider.index(
-                bbox, srid, filterexpr, filter_geom
+                bbox, srid, filterexpr, filter_geom, filter_fields
)
except (DataError, ProgrammingError) as e:
self.logger.error(e)
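The new `filter_fields` argument is populated from the `fields` query parameter added in `server.py` below and threads through to the feature provider. A minimal client-side sketch of the intended use, assuming a locally running service and a hypothetical dataset `poi` with a `name` attribute:

```python
import requests

# Hypothetical deployment URL and dataset; adjust to your setup.
BASE_URL = "http://localhost:5000"

# Ask the collection endpoint to return only "name" and the geometry.
resp = requests.get(
    f"{BASE_URL}/poi/",
    params={"fields": "name,geometry", "crs": "EPSG:4326"},
)
resp.raise_for_status()
for feature in resp.json()["features"]:
    # The id is always present; unrequested properties are omitted.
    print(feature["id"], feature["properties"].get("name"))
```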
31 changes: 20 additions & 11 deletions src/dataset_features_provider.py
@@ -84,7 +84,7 @@ def deletable(self):
"""Return whether dataset can be deleted."""
return self.__deletable

-    def index(self, bbox, client_srid, filterexpr, filter_geom):
+    def index(self, bbox, client_srid, filterexpr, filter_geom, filter_fields):
"""Find features inside bounding box.

:param list[float] bbox: Bounding box as [<minx>,<miny>,<maxx>,<maxy>]
@@ -93,11 +93,16 @@ def index(self, bbox, client_srid, filterexpr, filter_geom):
:param (sql, params) filterexpr: A filter expression as a tuple
(sql_expr, bind_params)
:param str filter_geom: JSON serialized GeoJSON geometry
+        :param list[str] filter_fields: Field names to return
"""
srid = client_srid or self.srid

own_attributes, join_attributes = self.__extract_join_attributes()

+        if filter_fields:
+            own_attributes = [attr for attr in own_attributes if attr in filter_fields or attr == self.primary_key]
+            join_attributes = [attr for attr in join_attributes if attr in filter_fields]

# build query SQL

# select id and permitted attributes
@@ -146,13 +151,15 @@ def index(self, bbox, client_srid, filterexpr, filter_geom):
if where_clauses:
where_clause = "WHERE (" + ") AND (".join(where_clauses) + ")"

-        geom_sql = self.geom_column_sql(srid, with_bbox=False)
-        if self.geometry_column:
-            # select overall extent
-            geom_sql += (
-                ', ST_Extent(%s) OVER () AS _overall_bbox_' %
-                self.transform_geom_sql('"{geom}"', self.srid, srid)
-            )
+        geom_sql = ""
+        if not filter_fields or "geometry" in filter_fields:
+            geom_sql = self.geom_column_sql(srid, with_bbox=False)
+            if self.geometry_column:
+                # select overall extent
+                geom_sql += (
+                    ', ST_Extent(%s) OVER () AS _overall_bbox_' %
+                    self.transform_geom_sql('"{geom}"', self.srid, srid)
+                )

sql = sql_text(("""
SELECT {columns}%s
@@ -179,7 +186,7 @@ def index(self, bbox, client_srid, filterexpr, filter_geom):
join_attribute_values = self.__query_join_attributes(join_attributes, attribute_values)
attribute_values.update(join_attribute_values)

-                features.append(self.feature_from_query(attribute_values, srid))
+                features.append(self.feature_from_query(attribute_values, srid, filter_fields))
if '_overall_bbox_' in row:
overall_bbox = row['_overall_bbox_']

@@ -1060,14 +1067,16 @@ def transform_geom_sql(self, geom_sql, geom_srid, target_srid):

return geom_sql

-    def feature_from_query(self, row, client_srid):
+    def feature_from_query(self, row, client_srid, filter_fields=None):
"""Build GeoJSON Feature from query result row.

:param obj row: Row result from query
:param int client_srid: Client SRID or None for dataset SRID
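+        :param list[str] filter_fields: Optional field names to return, or None for all fields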
"""
props = OrderedDict()
for attr in self.attributes:
+            if filter_fields and attr not in filter_fields:
+                continue
# Omit hidden fields
if self.fields.get(attr, {}).get('constraints', {}).get('hidden', False) == True:
continue
@@ -1085,7 +1094,7 @@ def feature_from_query(self, row, client_srid):
geometry = None
crs = None
bbox = None
-        if self.geometry_column:
+        if self.geometry_column and (not filter_fields or "geometry" in filter_fields):
if row['json_geom'] is not None:
geometry = json.loads(row['json_geom'])
else:
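The narrowing logic above always retains the primary key so features stay addressable, and treats the geometry as just another requestable field. A standalone sketch of the selection rule, with hypothetical attribute names rather than the provider's actual API:

```python
def narrow_attributes(own_attrs, join_attrs, filter_fields, primary_key):
    """Keep only requested attributes; the primary key is always retained."""
    if not filter_fields:
        # No filter requested: everything is returned unchanged.
        return own_attrs, join_attrs
    own = [a for a in own_attrs if a in filter_fields or a == primary_key]
    joined = [a for a in join_attrs if a in filter_fields]
    return own, joined

# "descr" is dropped; "id" survives even though it was not requested.
print(narrow_attributes(["id", "name", "descr"], ["region"], ["name"], "id"))
# -> (['id', 'name'], [])
```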
64 changes: 58 additions & 6 deletions src/server.py
@@ -258,6 +258,7 @@ def output(self, key, obj, **kwargs):
index_parser.add_argument('crs')
index_parser.add_argument('filter')
index_parser.add_argument('filter_geom')
index_parser.add_argument('fields')

feature_multipart_parser = reqparse.RequestParser(argument_class=CaseInsensitiveArgument)
feature_multipart_parser.add_argument('feature', help='Feature', required=True, location='form')
@@ -267,6 +268,11 @@ def output(self, key, obj, **kwargs):
show_parser = reqparse.RequestParser(argument_class=CaseInsensitiveArgument)
show_parser.add_argument('crs')

+keyvals_parser = reqparse.RequestParser(argument_class=CaseInsensitiveArgument)
+keyvals_parser.add_argument('key')
+keyvals_parser.add_argument('value')
+keyvals_parser.add_argument('filter')

# attachment
get_attachment_parser = reqparse.RequestParser(argument_class=CaseInsensitiveArgument)
get_attachment_parser.add_argument('file', required=True)
@@ -292,6 +298,9 @@ class FeatureCollection(Resource):
'`[["<name>", "<op>", <value>],"and|or",["<name>","<op>",<value>]]`')
@api.param(
'filter_geom', 'GeoJSON serialized geometry, used as intersection geometry filter')
+    @api.param(
+        'fields', 'Comma separated list of field names to return'
+    )
@api.expect(index_parser)
@api.marshal_with(geojson_feature_collection, skip_none=True)
@optional_auth
@@ -308,10 +317,11 @@ def get(self, dataset):
crs = args['crs']
filterexpr = args['filter']
filter_geom = args['filter_geom']
+        filter_fields = args['fields'].split(",") if args['fields'] else None

data_service = data_service_handler()
result = data_service.index(
-            get_identity(), translator, dataset, bbox, crs, filterexpr, filter_geom
+            get_identity(), translator, dataset, bbox, crs, filterexpr, filter_geom, filter_fields
)
if 'error' not in result:
return result['feature_collection']
@@ -394,6 +404,45 @@ def get(self, dataset):
api.abort(error_code, result['error'])


+@api.route('/<path:dataset>/keyvals')
+@api.response(404, 'Dataset or feature not found or permission error')
+class KeyValues(Resource):
+    @api.doc('dataset_keyvals')
+    @api.param('key', 'Key field name')
+    @api.param('value', 'Value field name')
+    @api.param(
+        'filter', 'JSON serialized filter expression: `[["<name>", "<op>", <value>],"and|or",["<name>","<op>",<value>]]`')
+    @api.expect(keyvals_parser)
+    @optional_auth
+    def get(self, dataset):
+        app.logger.debug(f"Processing GET (dataset_keyvals) on /{dataset}/keyvals")
+        translator = Translator(app, request)
+        args = keyvals_parser.parse_args()
+        key_field_name = args['key']
+        value_field_name = args['value']
+        filterexpr = args['filter']
+
+        data_service = data_service_handler()
+
+        natsort = lambda s: [int(t) if t.isdigit() else t.lower() for t in re.split(r'(\d+)', s)]
+
+        result = data_service.index(
+            get_identity(), translator, dataset, None, None, filterexpr, None, [key_field_name, value_field_name]
+        )
+        ret = []
+        if 'feature_collection' in result:
+            entries = {}
+            for feature in result['feature_collection']['features']:
+                key = feature["id"] if key_field_name == "id" else feature['properties'][key_field_name]
+                value = str(feature['properties'][value_field_name]).strip()
+                entries[key] = value
+            ret = [{"key": kv[0], "value": kv[1]} for kv in entries.items()]
+            ret.sort(key=lambda record: natsort(record["value"]))
+        elif 'error' in result:
+            app.logger.debug(f"Failed to query relation values for {dataset}:{key_field_name}:{value_field_name}: {result['error']}")
+        return ret
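For illustration, the new endpoint might be exercised as follows; the URL, dataset, and field names are assumptions, not part of this change:

```python
import requests

# Hypothetical deployment; "id" and "name" are assumed dataset fields.
resp = requests.get(
    "http://localhost:5000/countries/keyvals",
    params={"key": "id", "value": "name"},
)
resp.raise_for_status()
# Expected shape: [{"key": 1, "value": "Austria"}, ...], de-duplicated
# by key and naturally sorted by value.
for entry in resp.json():
    print(entry["key"], entry["value"])
```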


@api.route('/<path:dataset>/<id>')
@api.response(404, 'Dataset or feature not found or permission error')
@api.param('dataset', 'Dataset ID')
@@ -646,7 +695,7 @@ def get(self, dataset, id):
except:
continue
result = data_service.index(
-                get_identity(), translator, table, None, crs, '[["%s", "=", "%s"]]' % (fk_field_name, id), None
+                get_identity(), translator, table, None, crs, '[["%s", "=", "%s"]]' % (fk_field_name, id), None, None
)
ret[table] = {
"fk": fk_field_name,
@@ -681,20 +730,23 @@ def get(self):

keyvals = args['tables'] or ""
ret = {}
+        natsort = lambda s: [int(t) if t.isdigit() else t.lower() for t in re.split(r'(\d+)', s)]
for (idx, keyval) in enumerate(keyvals.split(",")):
try:
table, key_field_name, value_field_name = keyval.split(":")
except:
continue
ret[table] = []
result = data_service.index(
-                get_identity(), translator, table, None, None, json.dumps(filterexpr[idx]) if filterexpr and len(filterexpr) > idx and filterexpr[idx] else None, None
+                get_identity(), translator, table, None, None, json.dumps(filterexpr[idx]) if filterexpr and len(filterexpr) > idx and filterexpr[idx] else None, None, [key_field_name, value_field_name]
)
if 'feature_collection' in result:
+                entries = {}
for feature in result['feature_collection']['features']:
-                    record = {"key": feature["id"] if key_field_name == "id" else feature['properties'][key_field_name], "value": str(feature['properties'][value_field_name]).strip()}
-                    ret[table].append(record)
-                natsort = lambda s: [int(t) if t.isdigit() else t.lower() for t in re.split(r'(\d+)', s)]
+                    key = feature["id"] if key_field_name == "id" else feature['properties'][key_field_name]
+                    value = str(feature['properties'][value_field_name]).strip()
+                    entries[key] = value
+                ret[table] = [{"key": kv[0], "value": kv[1]} for kv in entries.items()]
ret[table].sort(key=lambda record: natsort(record["value"]))
elif 'error' in result:
app.logger.debug(f"Failed to query relation values for {keyval}: {result['error']}")
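Both keyval code paths sort with the same natural-sort helper, which splits digit runs out of the string so that numeric parts compare as integers rather than lexically. A standalone sketch of the behavior:

```python
import re

def natsort(s):
    # Split on digit runs; numbers compare as ints, text case-insensitively.
    return [int(t) if t.isdigit() else t.lower() for t in re.split(r'(\d+)', s)]

values = ["Zone 10", "Zone 2", "zone 1"]
print(sorted(values, key=natsort))
# -> ['zone 1', 'Zone 2', 'Zone 10']; a plain sort would put 'Zone 10' first.
```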
4 changes: 2 additions & 2 deletions uv.lock
