
Commit e068554

Removing remaining database triggers
Signed-off-by: Andrea Lamparelli <[email protected]>
1 parent: 241a084

8 files changed (+279 -74 lines)

horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/DatasetServiceImpl.java (+1 -1)

@@ -169,8 +169,8 @@ WHERE ds.id IN (SELECT id FROM ids)
             LEFT JOIN label_values lv ON dataset.id = lv.dataset_id
             LEFT JOIN label ON label.id = label_id
             """;
-
     //@formatter:on
+
     @Inject
     EntityManager em;
 

horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/RunServiceImpl.java (+80 -40)

@@ -95,25 +95,34 @@ public class RunServiceImpl implements RunService {
 
     //@formatter:off
     private static final String FIND_AUTOCOMPLETE = """
-                SELECT * FROM (
-                   SELECT DISTINCT jsonb_object_keys(q) AS key
-                   FROM run, jsonb_path_query(run.data, ? ::jsonpath) q
-                   WHERE jsonb_typeof(q) = 'object') AS keys
-                WHERE keys.key LIKE CONCAT(?, '%');
-                """;
-    protected static final String FIND_RUNS_WITH_URI = """
-                SELECT id, testid
-                FROM run
-                WHERE NOT trashed
-                   AND (data->>'$schema' = ?1
-                      OR (CASE
-                         WHEN jsonb_typeof(data) = 'object' THEN ?1 IN (SELECT values.value->>'$schema' FROM jsonb_each(data) as values)
-                         WHEN jsonb_typeof(data) = 'array' THEN ?1 IN (SELECT jsonb_array_elements(data)->>'$schema')
-                         ELSE false
-                      END)
-                      OR (metadata IS NOT NULL AND ?1 IN (SELECT jsonb_array_elements(metadata)->>'$schema'))
-                   )
-                """;
+            SELECT * FROM (
+               SELECT DISTINCT jsonb_object_keys(q) AS key
+               FROM run, jsonb_path_query(run.data, ? ::jsonpath) q
+               WHERE jsonb_typeof(q) = 'object') AS keys
+            WHERE keys.key LIKE CONCAT(?, '%');
+            """;
+    private static final String FIND_RUNS_WITH_URI = """
+            SELECT id, testid
+            FROM run
+            WHERE NOT trashed
+               AND (data->>'$schema' = ?1
+                  OR (CASE
+                     WHEN jsonb_typeof(data) = 'object' THEN ?1 IN (SELECT values.value->>'$schema' FROM jsonb_each(data) as values)
+                     WHEN jsonb_typeof(data) = 'array' THEN ?1 IN (SELECT jsonb_array_elements(data)->>'$schema')
+                     ELSE false
+                  END)
+                  OR (metadata IS NOT NULL AND ?1 IN (SELECT jsonb_array_elements(metadata)->>'$schema'))
+               )
+            """;
+
+    private static final String UPDATE_DATASET_SCHEMAS = """
+            WITH uris AS (
+               SELECT jsonb_array_elements(ds.data)->>'$schema' AS uri FROM dataset ds WHERE ds.id = ?1
+            ), indexed as (
+               SELECT uri, row_number() over () - 1 as index FROM uris
+            ) INSERT INTO dataset_schemas(dataset_id, uri, index, schema_id)
+            SELECT ?1 as dataset_id, indexed.uri, indexed.index, schema.id FROM indexed JOIN schema ON schema.uri = indexed.uri;
+            """;
     //@formatter:on
     private static final String[] CONDITION_SELECT_TERMINAL = { "==", "!=", "<>", "<", "<=", ">", ">=", " " };
     private static final String CHANGE_ACCESS = "UPDATE run SET owner = ?, access = ? WHERE id = ?";
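The new UPDATE_DATASET_SCHEMAS statement takes over the work of the dropped ds_after_insert trigger: it unnests the $schema URIs from a dataset's data array, keeps their array position as index, and joins them against the schema table before inserting into dataset_schemas. A small worked example of the expected effect, using hypothetical IDs and URIs that are not taken from this commit:

    // Hypothetical example: dataset 42 has data
    //   [ { "$schema": "urn:example:schema-a", ... }, { "$schema": "urn:example:schema-b", ... } ]
    // and the schema table holds (id=7, uri='urn:example:schema-a') and (id=9, uri='urn:example:schema-b').
    int inserted = em.createNativeQuery(UPDATE_DATASET_SCHEMAS)
            .setParameter(1, 42)
            .executeUpdate();
    // inserted == 2; the rows preserve the array order through the index column:
    //   (dataset_id=42, uri='urn:example:schema-a', index=0, schema_id=7)
    //   (dataset_id=42, uri='urn:example:schema-b', index=1, schema_id=9)
    // URIs that are not registered in the schema table are silently skipped by the inner JOIN.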
@@ -188,46 +197,71 @@ void onNewOrUpdatedSchema(int schemaId) {
             log.errorf("Cannot process schema add/update: cannot load schema %d", schemaId);
             return;
         }
-        processNewOrUpdatedSchema(schema);
-    }
-
-    @Transactional
-    void processNewOrUpdatedSchema(SchemaDAO schema) {
-        // we don't have to care about races with new runs
+        clearRunAndDatasetSchemas(schemaId);
         findRunsWithUri(schema.uri, (runId, testId) -> {
             log.debugf("Recalculate Datasets for run %d - schema %d (%s) changed", runId, schema.id, schema.uri);
             onNewOrUpdatedSchemaForRun(runId, schema.id);
         });
     }
 
     void findRunsWithUri(String uri, BiConsumer<Integer, Integer> consumer) {
-        ScrollableResults<RunFromUri> results = session.createNativeQuery(FIND_RUNS_WITH_URI, Tuple.class).setParameter(1, uri)
+        try (ScrollableResults<RunFromUri> results = session.createNativeQuery(FIND_RUNS_WITH_URI, Tuple.class)
+                .setParameter(1, uri)
                 .setTupleTransformer((tuple, aliases) -> {
                     RunFromUri r = new RunFromUri();
                     r.id = (int) tuple[0];
                     r.testId = (int) tuple[1];
                     return r;
                 })
                 .setFetchSize(100)
-                .scroll(ScrollMode.FORWARD_ONLY);
-        while (results.next()) {
-            RunFromUri r = results.get();
-            consumer.accept(r.id, r.testId);
+                .scroll(ScrollMode.FORWARD_ONLY)) {
+            while (results.next()) {
+                RunFromUri r = results.get();
+                consumer.accept(r.id, r.testId);
+            }
         }
     }
 
+    /**
+     * Keep the run_schemas table up to date with the associated schemas.
+     * If `recalculate` is true, trigger the run recalculation as well.
+     * This is not required when creating a new run as the datasets will be
+     * created automatically by the process, the recalculation is required when updating
+     * the Schema
+     * @param runId id of the run
+     * @param schemaId id of the schema
+     */
     @WithRoles(extras = Roles.HORREUM_SYSTEM)
     @Transactional
     void onNewOrUpdatedSchemaForRun(int runId, int schemaId) {
-        em.createNativeQuery("SELECT update_run_schemas(?1)::text").setParameter(1, runId).getSingleResult();
-        //clear validation error tables by schemaId
+        updateRunSchemas(runId);
+
+        // clear validation error tables by schemaId
         em.createNativeQuery("DELETE FROM dataset_validationerrors WHERE schema_id = ?1")
                 .setParameter(1, schemaId).executeUpdate();
         em.createNativeQuery("DELETE FROM run_validationerrors WHERE schema_id = ?1")
                 .setParameter(1, schemaId).executeUpdate();
 
         Util.registerTxSynchronization(tm, txStatus -> mediator.queueRunRecalculation(runId));
-        // transform(runId, true);
+    }
+
+    @Transactional
+    void updateRunSchemas(int runId) {
+        em.createNativeQuery("SELECT update_run_schemas(?1)::text").setParameter(1, runId).getSingleResult();
+    }
+
+    @Transactional
+    public void updateDatasetSchemas(int datasetId) {
+        em.createNativeQuery(UPDATE_DATASET_SCHEMAS).setParameter(1, datasetId).executeUpdate();
+    }
+
+    @Transactional
+    void clearRunAndDatasetSchemas(int schemaId) {
+        // clear old run and dataset schemas associations
+        em.createNativeQuery("DELETE FROM run_schemas WHERE schemaid = ?1")
+                .setParameter(1, schemaId).executeUpdate();
+        em.createNativeQuery("DELETE FROM dataset_schemas WHERE schema_id = ?1")
+                .setParameter(1, schemaId).executeUpdate();
     }
 
     @PermitAll
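Together these helpers replace the work that the dropped before_schema_update trigger used to do inside the database. A condensed, illustrative sketch of the new application-side flow; the helper names come from this commit, but the wrapper method and its body are simplified for illustration:

    // Simplified illustration only, not the literal implementation.
    void onSchemaChanged(int schemaId) {
        SchemaDAO schema = SchemaDAO.findById(schemaId);      // load the changed schema
        if (schema == null) {
            return;                                           // nothing to propagate
        }
        clearRunAndDatasetSchemas(schemaId);                  // drop stale run_schemas / dataset_schemas rows
        findRunsWithUri(schema.uri, (runId, testId) ->        // stream runs referencing the URI (FIND_RUNS_WITH_URI)
                onNewOrUpdatedSchemaForRun(runId, schemaId)); // rebuild run_schemas, clear validation errors,
                                                              // and queue a dataset recalculation after commit
    }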
@@ -336,13 +370,13 @@ public JsonNode getMetadata(int id, String schemaUri) {
     @Override
     // TODO: it would be nicer to use @FormParams but fetchival on client side doesn't support that
     public void updateAccess(int id, String owner, Access access) {
-        Query query = em.createNativeQuery(CHANGE_ACCESS);
-        query.setParameter(1, owner);
-        query.setParameter(2, access.ordinal());
-        query.setParameter(3, id);
-        if (query.executeUpdate() != 1) {
+        int updatedRecords = RunDAO.update("owner = ?1, access = ?2 WHERE id = ?3", owner, access, id);
+        if (updatedRecords != 1) {
             throw ServiceException.serverError("Access change failed (missing permissions?)");
         }
+
+        // propagate the same change to all datasets belonging to the run
+        DatasetDAO.update("owner = ?1, access = ?2 WHERE run.id = ?3", owner, access, id);
     }
 
     @RolesAllowed(Roles.UPLOADER)
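The raw CHANGE_ACCESS query is replaced by Panache's static update helper, which executes a bulk update from a JPQL fragment and returns the number of affected rows. A minimal sketch of the same pattern; the owner, run id and the Access.PUBLIC constant below are placeholders, not values from this commit:

    // Illustrative only: the string is the JPQL fragment after "UPDATE RunDAO SET",
    // parameters are positional, and the return value is the number of updated rows.
    int updated = RunDAO.update("owner = ?1, access = ?2 WHERE id = ?3",
            "perf-team", Access.PUBLIC, 4242);
    if (updated != 1) {
        throw ServiceException.serverError("Access change failed (missing permissions?)");
    }
    // The dataset propagation above follows the same style, filtering on the run association:
    DatasetDAO.update("owner = ?1, access = ?2 WHERE run.id = ?3", "perf-team", Access.PUBLIC, 4242);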
@@ -670,6 +704,7 @@ public RunPersistence addAuthenticated(RunDAO run, TestDAO test) {
         }
         log.debugf("Upload flushed, run ID %d", run.id);
 
+        updateRunSchemas(run.id);
         mediator.newRun(RunMapper.from(run));
         List<Integer> datasetIds = transform(run.id, false);
         if (mediator.testMode())
@@ -991,6 +1026,7 @@ private void trashInternal(int id, boolean trashed) {
             run.trashed = false;
             run.persistAndFlush();
             transform(id, true);
+            updateRunSchemas(run.id);
         } else
             throw ServiceException.badRequest("Not possible to un-trash a run that's not referenced to a Test");
     }
@@ -1017,7 +1053,8 @@ public void updateDescription(int id, String description) {
             throw ServiceException.notFound("Run not found: " + id);
         }
         run.description = description;
-        run.persistAndFlush();
+        // propagate the same change to all datasets belonging to the run
+        DatasetDAO.update("description = ?1 WHERE run.id = ?2", description, run.id);
     }
 
     @RolesAllowed(Roles.TESTER)
@@ -1071,7 +1108,7 @@ public Map<Integer, String> updateSchema(int id, String path, String schemaUri)
                 .distinct()
                 .collect(
                         Collectors.toMap(
-                                tuple -> ((Integer) tuple.get("key")).intValue(),
+                                tuple -> (Integer) tuple.get("key"),
                                 tuple -> ((String) tuple.get("value"))));
 
         em.flush();
@@ -1377,6 +1414,9 @@ List<Integer> transform(int runId, boolean isRecalculation) {
      */
     private Integer createDataset(DatasetDAO ds, boolean isRecalculation) {
         ds.persistAndFlush();
+        // re-create the dataset_schemas associations
+        updateDatasetSchemas(ds.id);
+
         if (isRecalculation) {
             try {
                 Dataset.EventNew event = new Dataset.EventNew(DatasetMapper.from(ds), true);

horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/SchemaServiceImpl.java (+6 -12)

@@ -71,8 +71,8 @@
 public class SchemaServiceImpl implements SchemaService {
     private static final Logger log = Logger.getLogger(SchemaServiceImpl.class);
 
-        //@formatter:off
-        private static final String FETCH_SCHEMAS_RECURSIVE =
+    //@formatter:off
+    private static final String FETCH_SCHEMAS_RECURSIVE =
             """
             WITH RECURSIVE refs(uri) AS
             (
@@ -86,7 +86,7 @@ SELECT substring(jsonb_path_query(schema, '$.**.\"$ref\" ? (! (@ starts with \"#
             FROM schema
             INNER JOIN refs ON schema.uri = refs.uri
             """;
-        //@formatter:on
+    //@formatter:on
 
     private static final JsonSchemaFactory JSON_SCHEMA_FACTORY = new JsonSchemaFactory.Builder()
             .defaultMetaSchemaIri(JsonMetaSchema.getV4().getIri())
@@ -160,13 +160,6 @@ public Integer add(Schema schemaDTO) {
             em.flush();
             if (!Objects.equals(schema.uri, existing.uri) ||
                     Objects.equals(schema.schema, existing.schema)) {
-                //We need to delete from run_schemas and dataset_schemas as they will be recreated
-                //when we create new datasets psql will still create new entries in dataset_schemas
-                // https://github.com/Hyperfoil/Horreum/blob/master/horreum-backend/src/main/resources/db/changeLog.xml#L2522
-                em.createNativeQuery("DELETE FROM run_schemas WHERE schemaid = ?1")
-                        .setParameter(1, schema.id).executeUpdate();
-                em.createNativeQuery("DELETE FROM dataset_schemas WHERE schema_id = ?1")
-                        .setParameter(1, schema.id).executeUpdate();
                 newOrUpdatedSchema(schema);
             }
         } else {
@@ -710,7 +703,7 @@ public Integer addOrUpdateLabel(int schemaId, Label labelDTO) {
         }
         existing.name = label.name;
 
-        //When we clear extractors we should also delete label_values
+        // when we clear extractors we should also delete label_values
         em.createNativeQuery(
                 "DELETE FROM dataset_view WHERE dataset_id IN (SELECT dataset_id FROM label_values WHERE label_id = ?1)")
                 .setParameter(1, existing.id).executeUpdate();
@@ -865,7 +858,7 @@ public void importSchema(ObjectNode node) {
         }
 
         boolean newSchema = true;
-        SchemaDAO schema = null;
+        SchemaDAO schema;
         if (importSchema.id != null) {
             //first check if this schema exists
             schema = SchemaDAO.findById(importSchema.id);
@@ -917,6 +910,7 @@ public void importSchema(ObjectNode node) {
         //let's wrap flush in a try/catch, if we get any role issues at commit we can give a sane msg
         try {
             em.flush();
+            newOrUpdatedSchema(schema);
         } catch (Exception e) {
             throw ServiceException.serverError("Failed to persist Schema: " + e.getMessage());
         }

horreum-backend/src/main/resources/db/changeLog.xml (+44 -0)

@@ -4796,4 +4796,48 @@
             $$ LANGUAGE plpgsql;
         </sql>
     </changeSet>
+    <changeSet id="128" author="lampajr">
+        <validCheckSum>ANY</validCheckSum>
+        <sql>
+            -- drop triggers
+            DROP TRIGGER IF EXISTS rs_after_run_untrash ON run;
+            DROP TRIGGER IF EXISTS rs_after_run_update ON run;
+            DROP TRIGGER IF EXISTS before_schema_update ON schema;
+            DROP TRIGGER IF EXISTS ds_after_insert ON dataset;
+
+            -- drop functions
+            DROP FUNCTION rs_after_run_update;
+            DROP FUNCTION before_schema_update_func;
+            DROP FUNCTION ds_after_dataset_insert_func;
+        </sql>
+    </changeSet>
+    <changeSet id="129" author="lampajr">
+        <validCheckSum>ANY</validCheckSum>
+        <sql>
+            -- drop triggers
+            DROP TRIGGER IF EXISTS after_run_update_non_data ON run;
+            DROP TRIGGER IF EXISTS delete_run_validations ON run;

+            -- drop functions
+            DROP FUNCTION after_run_update_non_data_func;
+            DROP FUNCTION delete_run_validations;
+        </sql>
+    </changeSet>
+    <changeSet id="130" author="lampajr">
+        <validCheckSum>ANY</validCheckSum>
+        <sql>
+            -- drop triggers
+            DROP TRIGGER IF EXISTS lv_before_update ON label;
+            DROP TRIGGER IF EXISTS lv_after_update ON label;
+            DROP TRIGGER IF EXISTS recalc_labels ON label_recalc_queue;
+
+            -- drop functions
+            DROP FUNCTION lv_before_label_update_func;
+            DROP FUNCTION lv_after_label_update_func;
+            DROP FUNCTION recalc_label_values;
+
+            -- drop table as no longer used
+            DROP TABLE label_recalc_queue;
+        </sql>
+    </changeSet>
 </databaseChangeLog>
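A quick way to confirm that changeSets 128-130 left no triggers behind is to query the PostgreSQL catalog for user-level triggers on the affected tables. This is an illustrative sketch only, not part of the commit; the JDBC URL and credentials are placeholders and the PostgreSQL driver is assumed to be on the classpath:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;

    public class TriggerCheck {
        public static void main(String[] args) throws Exception {
            try (Connection c = DriverManager.getConnection(
                    "jdbc:postgresql://localhost:5432/horreum", "dbadmin", "secret")) {
                // pg_trigger lists all triggers; tgisinternal filters out the ones backing FK constraints.
                PreparedStatement ps = c.prepareStatement(
                        "SELECT cl.relname AS table_name, t.tgname AS trigger_name "
                                + "FROM pg_trigger t JOIN pg_class cl ON cl.oid = t.tgrelid "
                                + "WHERE NOT t.tgisinternal AND cl.relname IN ('run', 'dataset', 'schema', 'label')");
                try (ResultSet rs = ps.executeQuery()) {
                    while (rs.next()) {
                        // After this commit, no rows are expected for these tables.
                        System.out.println(rs.getString("table_name") + ": " + rs.getString("trigger_name"));
                    }
                }
            }
        }
    }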

horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/AlertingServiceTest.java (+1 -1)

@@ -404,7 +404,7 @@ public void testMissingRules(TestInfo info) throws InterruptedException {
         em.clear();
 
         pollMissingDataRuleResultsByDataset(thirdEvent.datasetId, 1);
-        trashRun(thirdRunId, test.id);
+        trashRun(thirdRunId, test.id, true);
         pollMissingDataRuleResultsByDataset(thirdEvent.datasetId, 0);
 
         alertingService.checkMissingDataset();

horreum-backend/src/test/java/io/hyperfoil/tools/horreum/svc/BaseServiceTest.java (+5 -13)

@@ -237,16 +237,6 @@ public static Test createExampleTest(String testName, Integer datastoreID) {
         return test;
     }
 
-    public static List<View> createExampleViews(int testId) {
-        View defaultView = new View();
-        defaultView.name = "Default";
-        defaultView.testId = testId;
-        defaultView.components = new ArrayList<>();
-        defaultView.components.add(new io.hyperfoil.tools.horreum.api.data.ViewComponent("Some column", null, "foo"));
-
-        return Collections.singletonList(defaultView);
-    }
-
     public static String getAccessToken(String userName, String... groups) {
         return Jwt.preferredUserName(userName)
                 .groups(new HashSet<>(Arrays.asList(groups)))
@@ -616,10 +606,12 @@ protected ArrayNode jsonArray(String... items) {
         return array;
     }
 
-    protected BlockingQueue<Integer> trashRun(int runId, Integer testId) throws InterruptedException {
+    protected BlockingQueue<Integer> trashRun(int runId, Integer testId, boolean trashed) throws InterruptedException {
         BlockingQueue<Integer> trashedQueue = serviceMediator.getEventQueue(AsyncEventChannels.RUN_TRASHED, testId);
-        jsonRequest().post("/api/run/" + runId + "/trash").then().statusCode(204);
-        assertEquals(runId, trashedQueue.poll(10, TimeUnit.SECONDS));
+        jsonRequest().post("/api/run/" + runId + "/trash?isTrashed=" + trashed).then().statusCode(204);
+        if (trashed) {
+            assertEquals(runId, trashedQueue.poll(10, TimeUnit.SECONDS));
+        }
         return trashedQueue;
     }
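Since the helper now takes the target state explicitly, tests can exercise un-trashing as well (AlertingServiceTest above passes true to keep its original behaviour). A hypothetical fragment, assuming a run id obtained through the existing upload helpers:

    // Hypothetical test fragment, not part of this commit.
    trashRun(runId, test.id, true);   // POST /api/run/{id}/trash?isTrashed=true and wait for the RUN_TRASHED event
    trashRun(runId, test.id, false);  // POST ...?isTrashed=false; no event is awaited when un-trashing, and the
                                      // service re-runs transform() plus updateRunSchemas() for the restored run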
