Skip to content

Commit aecab71

Browse files
authored
Merge pull request #68 from djarecka/id_check
Adding an ID check to the validation
2 parents 889fafe + 1a8304f commit aecab71

File tree

5 files changed

+13
-16
lines changed

5 files changed

+13
-16
lines changed

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -26,7 +26,7 @@ repos:
2626
rev: v2.3.0
2727
hooks:
2828
- id: codespell
29-
args: [--toml, pyproject.toml]
29+
args: [--toml, pyproject.toml, --skip="CHANGELOG.md"]
3030
additional_dependencies: [tomli]
3131

3232
# Format TOML files

reproschema/jsonldutils.py

Lines changed: 8 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -99,13 +99,15 @@ def load_file(
9999
return data
100100

101101

102-
def validate_data(data):
102+
def validate_data(data, schemaname):
103103
"""Validate an expanded jsonld document against the pydantic model.
104104
105105
Parameters
106106
----------
107107
data : dict
108108
Python dictionary containing JSONLD object
109+
schemaname : str
110+
Name of the schema (name of the file) being validated
109111
110112
Returns
111113
-------
@@ -115,14 +117,16 @@ def validate_data(data):
115117
Validation errors if any returned by pydantic
116118
117119
"""
118-
# do we need it?
119-
# kwargs = {"algorithm": "URDNA2015", "format": "application/n-quads"}
120-
# normalized = jsonld.normalize(data, kwargs)
121120
obj_type = identify_model_class(data["@type"][0])
122121
data_fixed = [fixing_old_schema(data, copy_data=True)]
123122
context = read_contextfile(CONTEXTFILE_URL)
124123
data_fixed_comp = jsonld.compact(data_fixed, context)
125124
del data_fixed_comp["@context"]
125+
if obj_type.__name__ in ["Item", "Activity", "Protocol"]:
126+
if data_fixed_comp["id"].split("/")[-1] != schemaname:
127+
raise Exception(
128+
f"Document {data['@id']} does not match the schema name {schemaname}"
129+
)
126130
conforms = False
127131
v_text = ""
128132
try:

reproschema/tests/data/activities/activity1_embed.jsonld

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,7 @@
11
{
22
"@context": "../../contexts/generic",
33
"@type": "reproschema:Activity",
4-
"@id": "activity1.jsonld",
4+
"@id": "activity1_embed.jsonld",
55
"prefLabel": "Example 1",
66
"description": "Activity example 1",
77
"schemaVersion": "1.0.0-rc4",

reproschema/tests/data/protocols/protocol1_embed.jsonld

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,7 @@
11
{
22
"@context": "../../contexts/generic",
33
"@type": "reproschema:Protocol",
4-
"@id": "protocol1.jsonld",
4+
"@id": "protocol1_embed.jsonld",
55
"prefLabel": {
66
"en": "Protocol1",
77
"es": "Protocol1_es"

reproschema/validate.py

Lines changed: 2 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -92,7 +92,7 @@ def validate_dir(
9292
if stop is not None:
9393
stop_server(stop)
9494
raise ValueError(f"Empty data graph in file {name}")
95-
conforms, vtext = validate_data(data)
95+
conforms, vtext = validate_data(data, schemaname=Path(name).name)
9696
except (ValueError, json.JSONDecodeError):
9797
if stop is not None:
9898
stop_server(stop)
@@ -133,27 +133,20 @@ def validate(path):
133133
134134
"""
135135
if Path(path).is_dir():
136-
137136
lgr.info(f"Validating directory {path}")
138-
139137
stop, port = start_server()
140138
http_kwargs = {"port": port}
141139
started = True
142-
143140
conforms, _ = validate_dir(
144141
path, started=started, http_kwargs=http_kwargs, stop=stop
145142
)
146-
147143
stop_server(stop)
148-
149144
else:
150-
151145
if Path(path).name in FILES_TO_SKIP:
152146
lgr.info(f"Skipping file {path}")
153147
return True
154-
155148
data = load_file(path, started=False)
156-
conforms, vtext = validate_data(data)
149+
conforms, vtext = validate_data(data, schemaname=Path(path).name)
157150
if not conforms:
158151
lgr.critical(f"File {path} has validation errors.")
159152
raise ValueError(vtext)

0 commit comments

Comments (0)