diff --git a/pyproject.toml b/pyproject.toml index 5669932..419a90b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "qwc-data-service" -version = "v2025.07.04" +version = "v2025.10.04" description = "QWC Data Service" readme = "README.md" requires-python = ">=3.10" diff --git a/requirements.txt b/requirements.txt index 6db3f96..1285a8e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -254,9 +254,9 @@ referencing==0.36.2 \ # via # jsonschema # jsonschema-specifications -requests==2.32.3 \ --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 +requests==2.32.4 \ + --hash=sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c \ + --hash=sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422 # via qwc-data-service rpds-py==0.22.3 \ --hash=sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518 \ diff --git a/src/data_service.py b/src/data_service.py index c046764..404cacd 100644 --- a/src/data_service.py +++ b/src/data_service.py @@ -35,7 +35,7 @@ def __init__(self, tenant, logger, config): self.attachments_service = AttachmentsService(tenant, logger) self.db_engine = DatabaseEngine() - def index(self, identity, translator, dataset, bbox, crs, filterexpr, filter_geom): + def index(self, identity, translator, dataset, bbox, crs, filterexpr, filter_geom, filter_fields): """Find dataset features inside bounding box. 
:param str|obj identity: User identity @@ -46,6 +46,7 @@ def index(self, identity, translator, dataset, bbox, crs, filterexpr, filter_geo :param str filterexpr: JSON serialized array of filter expressions: [["", "", ""], "and|or", ["", "", ""]] :param str filter_geom: JSON serialized GeoJSON geometry + :param list[string] filter_fields: Field names to return """ dataset_features_provider = self.dataset_features_provider( identity, translator, dataset, False @@ -88,7 +89,7 @@ def index(self, identity, translator, dataset, bbox, crs, filterexpr, filter_geo try: feature_collection = dataset_features_provider.index( - bbox, srid, filterexpr, filter_geom + bbox, srid, filterexpr, filter_geom, filter_fields ) except (DataError, ProgrammingError) as e: self.logger.error(e) diff --git a/src/dataset_features_provider.py b/src/dataset_features_provider.py index 7b0ac4c..8bbd394 100644 --- a/src/dataset_features_provider.py +++ b/src/dataset_features_provider.py @@ -84,7 +84,7 @@ def deletable(self): """Return whether dataset can be deleted.""" return self.__deletable - def index(self, bbox, client_srid, filterexpr, filter_geom): + def index(self, bbox, client_srid, filterexpr, filter_geom, filter_fields): """Find features inside bounding box. 
:param list[float] bbox: Bounding box as [,,,] @@ -93,11 +93,16 @@ def index(self, bbox, client_srid, filterexpr, filter_geom): :param (sql, params) filterexpr: A filter expression as a tuple (sql_expr, bind_params) :param str filter_geom: JSON serialized GeoJSON geometry + :param list[string] filter_fields: Field names to return """ srid = client_srid or self.srid own_attributes, join_attributes = self.__extract_join_attributes() + if filter_fields: + own_attributes = [attr for attr in own_attributes if attr in filter_fields or attr == self.primary_key] + join_attributes = [attr for attr in join_attributes if attr in filter_fields] + # build query SQL # select id and permitted attributes @@ -146,13 +151,15 @@ def index(self, bbox, client_srid, filterexpr, filter_geom): if where_clauses: where_clause = "WHERE (" + ") AND (".join(where_clauses) + ")" - geom_sql = self.geom_column_sql(srid, with_bbox=False) - if self.geometry_column: - # select overall extent - geom_sql += ( - ', ST_Extent(%s) OVER () AS _overall_bbox_' % - self.transform_geom_sql('"{geom}"', self.srid, srid) - ) + geom_sql = "" + if not filter_fields or "geometry" in filter_fields: + geom_sql = self.geom_column_sql(srid, with_bbox=False) + if self.geometry_column: + # select overall extent + geom_sql += ( + ', ST_Extent(%s) OVER () AS _overall_bbox_' % + self.transform_geom_sql('"{geom}"', self.srid, srid) + ) sql = sql_text((""" SELECT {columns}%s @@ -179,7 +186,7 @@ def index(self, bbox, client_srid, filterexpr, filter_geom): join_attribute_values = self.__query_join_attributes(join_attributes, attribute_values) attribute_values.update(join_attribute_values) - features.append(self.feature_from_query(attribute_values, srid)) + features.append(self.feature_from_query(attribute_values, srid, filter_fields)) if '_overall_bbox_' in row: overall_bbox = row['_overall_bbox_'] @@ -1060,7 +1067,7 @@ def transform_geom_sql(self, geom_sql, geom_srid, target_srid): return geom_sql - def 
feature_from_query(self, row, client_srid): + def feature_from_query(self, row, client_srid, filter_fields=None): """Build GeoJSON Feature from query result row. :param obj row: Row result from query @@ -1068,6 +1075,8 @@ def feature_from_query(self, row, client_srid): """ props = OrderedDict() for attr in self.attributes: + if filter_fields and not attr in filter_fields: + continue # Omit hidden fields if self.fields.get(attr, {}).get('constraints', {}).get('hidden', False) == True: continue @@ -1085,7 +1094,7 @@ def feature_from_query(self, row, client_srid): geometry = None crs = None bbox = None - if self.geometry_column: + if self.geometry_column and (not filter_fields or "geometry" in filter_fields): if row['json_geom'] is not None: geometry = json.loads(row['json_geom']) else: diff --git a/src/server.py b/src/server.py index 34a51b0..22bde37 100644 --- a/src/server.py +++ b/src/server.py @@ -258,6 +258,7 @@ def output(self, key, obj, **kwargs): index_parser.add_argument('crs') index_parser.add_argument('filter') index_parser.add_argument('filter_geom') +index_parser.add_argument('fields') feature_multipart_parser = reqparse.RequestParser(argument_class=CaseInsensitiveArgument) feature_multipart_parser.add_argument('feature', help='Feature', required=True, location='form') @@ -267,6 +268,11 @@ def output(self, key, obj, **kwargs): show_parser = reqparse.RequestParser(argument_class=CaseInsensitiveArgument) show_parser.add_argument('crs') +keyvals_parser = reqparse.RequestParser(argument_class=CaseInsensitiveArgument) +keyvals_parser.add_argument('key') +keyvals_parser.add_argument('value') +keyvals_parser.add_argument('filter') + # attachment get_attachment_parser = reqparse.RequestParser(argument_class=CaseInsensitiveArgument) get_attachment_parser.add_argument('file', required=True) @@ -292,6 +298,9 @@ class FeatureCollection(Resource): '`[["", "", ],"and|or",["","",]]`') @api.param( 'filter_geom', 'GeoJSON serialized geometry, used as intersection geometry 
filter') + @api.param( + 'fields', 'Comma separated list of field names to return' + ) @api.expect(index_parser) @api.marshal_with(geojson_feature_collection, skip_none=True) @optional_auth @@ -308,10 +317,11 @@ def get(self, dataset): crs = args['crs'] filterexpr = args['filter'] filter_geom = args['filter_geom'] + filter_fields = args['fields'].split(",") if args['fields'] else None data_service = data_service_handler() result = data_service.index( - get_identity(), translator, dataset, bbox, crs, filterexpr, filter_geom + get_identity(), translator, dataset, bbox, crs, filterexpr, filter_geom, filter_fields ) if 'error' not in result: return result['feature_collection'] @@ -394,6 +404,45 @@ def get(self, dataset): api.abort(error_code, result['error']) +@api.route('/<dataset>/keyvals') +@api.response(404, 'Dataset or feature not found or permission error') +class KeyValues(Resource): + @api.doc('dataset_keyvals') + @api.param('key', 'Key field name') + @api.param('value', 'Value field name') + @api.param( + 'filter', 'JSON serialized filter expression: `[["", "", ],"and|or",["","",]]`') + @api.expect(keyvals_parser) + @optional_auth + def get(self, dataset): + app.logger.debug(f"Processing GET (dataset_keyvals) on /{dataset}/keyvals") + translator = Translator(app, request) + args = keyvals_parser.parse_args() + key_field_name = args['key'] + value_field_name = args['value'] + filterexpr = args['filter'] + + data_service = data_service_handler() + + natsort = lambda s: [int(t) if t.isdigit() else t.lower() for t in re.split(r'(\d+)', s)] + + result = data_service.index( + get_identity(), translator, dataset, None, None, filterexpr, None, [key_field_name, value_field_name] + ) + ret = [] + if 'feature_collection' in result: + entries = {} + for feature in result['feature_collection']['features']: + key = feature["id"] if key_field_name == "id" else feature['properties'][key_field_name] + value = str(feature['properties'][value_field_name]).strip() + entries[key] = value + ret = [{"key": 
kv[0], "value": kv[1]} for kv in entries.items()] + ret.sort(key=lambda record: natsort(record["value"])) + elif 'error' in result: + app.logger.debug(f"Failed to query relation values for {dataset}:{key_field_name}:{value_field_name}: {result['error']}") + return ret + + @api.route('//') @api.response(404, 'Dataset or feature not found or permission error') @api.param('dataset', 'Dataset ID') @@ -646,7 +695,7 @@ def get(self, dataset, id): except: continue result = data_service.index( - get_identity(), translator, table, None, crs, '[["%s", "=", "%s"]]' % (fk_field_name, id), None + get_identity(), translator, table, None, crs, '[["%s", "=", "%s"]]' % (fk_field_name, id), None, None ) ret[table] = { "fk": fk_field_name, @@ -681,6 +730,7 @@ def get(self): keyvals = args['tables'] or "" ret = {} + natsort = lambda s: [int(t) if t.isdigit() else t.lower() for t in re.split(r'(\d+)', s)] for (idx, keyval) in enumerate(keyvals.split(",")): try: table, key_field_name, value_field_name = keyval.split(":") @@ -688,13 +738,15 @@ def get(self): continue ret[table] = [] result = data_service.index( - get_identity(), translator, table, None, None, json.dumps(filterexpr[idx]) if filterexpr and len(filterexpr) > idx and filterexpr[idx] else None, None + get_identity(), translator, table, None, None, json.dumps(filterexpr[idx]) if filterexpr and len(filterexpr) > idx and filterexpr[idx] else None, None, [key_field_name, value_field_name] ) if 'feature_collection' in result: + entries = {} for feature in result['feature_collection']['features']: - record = {"key": feature["id"] if key_field_name == "id" else feature['properties'][key_field_name], "value": str(feature['properties'][value_field_name]).strip()} - ret[table].append(record) - natsort = lambda s: [int(t) if t.isdigit() else t.lower() for t in re.split(r'(\d+)', s)] + key = feature["id"] if key_field_name == "id" else feature['properties'][key_field_name] + value = str(feature['properties'][value_field_name]).strip() + 
entries[key] = value + ret[table] = [{"key": kv[0], "value": kv[1]} for kv in entries.items()] ret[table].sort(key=lambda record: natsort(record["value"])) elif 'error' in result: app.logger.debug(f"Failed to query relation values for {keyval}: {result['error']}") diff --git a/uv.lock b/uv.lock index 511b2dc..69c7987 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.10" [[package]] @@ -395,7 +395,7 @@ wheels = [ [[package]] name = "qwc-data-service" -version = "2025.7.4" +version = "2025.10.4" source = { virtual = "." } dependencies = [ { name = "clamd" },