diff --git a/client/.eslintrc.js b/client/.eslintrc.js
index d496b9f13b74..af760fcaeb53 100644
--- a/client/.eslintrc.js
+++ b/client/.eslintrc.js
@@ -91,6 +91,7 @@ module.exports = {
rules: {
...baseRules,
"@typescript-eslint/no-throw-literal": "error",
+ "@typescript-eslint/ban-ts-comment": "warn",
},
parser: "@typescript-eslint/parser",
parserOptions: {
diff --git a/client/src/components/Common/models/exportRecordModel.ts b/client/src/components/Common/models/exportRecordModel.ts
index 6c9f6c3f0e3f..578072022980 100644
--- a/client/src/components/Common/models/exportRecordModel.ts
+++ b/client/src/components/Common/models/exportRecordModel.ts
@@ -6,26 +6,58 @@ type ExportObjectRequestMetadata = components["schemas"]["ExportObjectRequestMet
export type StoreExportPayload = components["schemas"]["StoreExportPayload"];
export type ObjectExportTaskResponse = components["schemas"]["ObjectExportTaskResponse"];
-export class ExportParamsModel {
+export interface ExportParams {
+ readonly modelStoreFormat: string;
+ readonly includeFiles: boolean;
+ readonly includeDeleted: boolean;
+ readonly includeHidden: boolean;
+}
+
+export interface ExportRecord {
+ readonly id: string;
+ readonly isReady: boolean;
+ readonly isPreparing: boolean;
+ readonly isUpToDate: boolean;
+ readonly hasFailed: boolean;
+ readonly date: Date;
+ readonly elapsedTime: string;
+ readonly taskUUID: string;
+ readonly importUri?: string;
+ readonly canReimport: boolean;
+ readonly stsDownloadId?: string;
+ readonly isStsDownload: boolean;
+ readonly canDownload: boolean;
+ readonly modelStoreFormat: string;
+ readonly exportParams?: ExportParams;
+ readonly duration?: number;
+ readonly canExpire: boolean;
+ readonly isPermanent: boolean;
+ readonly expirationDate?: Date;
+ readonly expirationElapsedTime?: string;
+ readonly hasExpired: boolean;
+ readonly errorMessage?: string;
+}
+
+export class ExportParamsModel implements ExportParams {
private _params: StoreExportPayload;
constructor(data: StoreExportPayload = {}) {
this._params = data;
}
get modelStoreFormat() {
- return this._params?.model_store_format;
+ return this._params?.model_store_format ?? "tgz";
}
get includeFiles() {
- return this._params?.include_files;
+ return Boolean(this._params?.include_files);
}
get includeDeleted() {
- return this._params?.include_deleted;
+ return Boolean(this._params?.include_deleted);
}
get includeHidden() {
- return this._params?.include_hidden;
+ return Boolean(this._params?.include_hidden);
}
public equals(otherExportParams?: ExportParamsModel) {
@@ -41,9 +73,9 @@ export class ExportParamsModel {
}
}
-export class ExportRecordModel {
+export class ExportRecordModel implements ExportRecord {
private _data: ObjectExportTaskResponse;
- private _expirationDate?: Date | null;
+ private _expirationDate?: Date;
private _requestMetadata?: ExportObjectRequestMetadata;
private _exportParameters?: ExportParamsModel;
@@ -56,6 +88,10 @@ export class ExportRecordModel {
: undefined;
}
+ get id() {
+ return this._data.id;
+ }
+
get isReady() {
return (this._data.ready && !this.hasExpired) ?? false;
}
@@ -109,7 +145,7 @@ export class ExportRecordModel {
}
get modelStoreFormat() {
- return this.exportParams?.modelStoreFormat;
+ return this.exportParams?.modelStoreFormat ?? "tgz";
}
get exportParams() {
@@ -125,9 +161,13 @@ export class ExportRecordModel {
return this.isStsDownload && Boolean(this.duration);
}
+ get isPermanent() {
+ return !this.canExpire;
+ }
+
get expirationDate() {
if (this._expirationDate === undefined) {
- this._expirationDate = this.duration ? new Date(this.date.getTime() + this.duration * 1000) : null;
+ this._expirationDate = this.duration ? new Date(this.date.getTime() + this.duration * 1000) : undefined;
}
return this._expirationDate;
}
@@ -135,11 +175,11 @@ export class ExportRecordModel {
get expirationElapsedTime() {
return this.canExpire && this.expirationDate
? formatDistanceToNow(this.expirationDate, { addSuffix: true })
- : null;
+ : undefined;
}
get hasExpired() {
- return this.canExpire && this.expirationDate && Date.now() > this.expirationDate.getTime();
+ return Boolean(this.canExpire && this.expirationDate && Date.now() > this.expirationDate.getTime());
}
get errorMessage() {
diff --git a/client/src/components/Common/models/testData/exportData.ts b/client/src/components/Common/models/testData/exportData.ts
index 419af7290d36..8253bff39751 100644
--- a/client/src/components/Common/models/testData/exportData.ts
+++ b/client/src/components/Common/models/testData/exportData.ts
@@ -91,10 +91,10 @@ export const FAILED_DOWNLOAD_RESPONSE: ObjectExportTaskResponse = {
};
export const FILE_SOURCE_STORE_RESPONSE: ObjectExportTaskResponse = {
- id: "FAKE_RECENT_DOWNLOAD_ID",
+ id: "FAKE_FILE_SOURCE_EXPORT_ID",
ready: true,
preparing: false,
- up_to_date: true,
+ up_to_date: false,
task_uuid: "35563335-e275-4520-80e8-885793279095",
create_time: RECENT_EXPORT_DATE,
export_metadata: {
@@ -103,6 +103,37 @@ export const FILE_SOURCE_STORE_RESPONSE: ObjectExportTaskResponse = {
},
};
+export const RECENT_FILE_SOURCE_STORE_RESPONSE: ObjectExportTaskResponse = {
+ ...FILE_SOURCE_STORE_RESPONSE,
+ id: "FAKE_RECENT_FILE_SOURCE_EXPORT_ID",
+ up_to_date: true,
+};
+
+export const FAILED_FILE_SOURCE_STORE_RESPONSE: ObjectExportTaskResponse = {
+ ...FILE_SOURCE_STORE_RESPONSE,
+ id: "FAKE_FAILED_FILE_SOURCE_EXPORT_ID",
+ export_metadata: {
+ request_data: FAKE_FILE_SOURCE_REQUEST_DATA,
+ result_data: FAILED_EXPORT_RESULT_DATA,
+ },
+};
+
+export const IN_PROGRESS_FILE_SOURCE_STORE_RESPONSE: ObjectExportTaskResponse = {
+ ...FILE_SOURCE_STORE_RESPONSE,
+ id: "FAKE_IN_PROGRESS_FILE_SOURCE_EXPORT_ID",
+ ready: false,
+ preparing: true,
+ export_metadata: {
+ request_data: FAKE_FILE_SOURCE_REQUEST_DATA,
+ result_data: undefined,
+ },
+};
+
export const EXPIRED_STS_DOWNLOAD_RECORD = new ExportRecordModel(EXPIRED_STS_DOWNLOAD_RESPONSE);
-export const FILE_SOURCE_STORE_RECORD = new ExportRecordModel(FILE_SOURCE_STORE_RESPONSE);
export const RECENT_STS_DOWNLOAD_RECORD = new ExportRecordModel(RECENT_STS_DOWNLOAD_RESPONSE);
+export const FAILED_DOWNLOAD_RECORD = new ExportRecordModel(FAILED_DOWNLOAD_RESPONSE);
+
+export const FILE_SOURCE_STORE_RECORD = new ExportRecordModel(FILE_SOURCE_STORE_RESPONSE);
+export const RECENT_FILE_SOURCE_STORE_RECORD = new ExportRecordModel(RECENT_FILE_SOURCE_STORE_RESPONSE);
+export const FAILED_FILE_SOURCE_STORE_RECORD = new ExportRecordModel(FAILED_FILE_SOURCE_STORE_RESPONSE);
+export const IN_PROGRESS_FILE_SOURCE_STORE_RECORD = new ExportRecordModel(IN_PROGRESS_FILE_SOURCE_STORE_RESPONSE);
diff --git a/client/src/components/History/Archiving/ExportRecordCard.vue b/client/src/components/History/Archiving/ExportRecordCard.vue
new file mode 100644
index 000000000000..d4efe297f3e0
--- /dev/null
+++ b/client/src/components/History/Archiving/ExportRecordCard.vue
@@ -0,0 +1,24 @@
+
+
+
+
+
+ Exported {{ props.exportRecord.elapsedTime }} on {{ props.exportRecord.date }}
+
+
+ Contains datasets:
+
+
+
+
+ Stored in: {{ props.exportRecord.importUri }}
+
+
diff --git a/client/src/components/History/Archiving/HistoryArchive.vue b/client/src/components/History/Archiving/HistoryArchive.vue
new file mode 100644
index 000000000000..78ff133caa79
--- /dev/null
+++ b/client/src/components/History/Archiving/HistoryArchive.vue
@@ -0,0 +1,245 @@
+
+
+
+ Archived Histories
+
+
+
+
+
+
+ There are no archived histories matching your current filter: {{ searchText }}
+
+
+ You do not have any archived histories. You can select the 'Archive History' option from the history
+ menu to archive a history.
+
+
+
+
+
+ {{ history.name }}
+
+
+
+
+ {{ localize("Published") }}
+
+
+ {{ history.count }} {{ localize("items") }}
+
+
+ {{ localize("Snapshot available") }}
+
+
+
+
+
+
+
+
+
+ onViewHistoryInCenterPanel(history)">
+
+ View
+
+ onRestoreHistory(history)">
+
+ Unarchive
+
+
+ onImportCopy(history)">
+
+ Import Copy
+
+
+
+
+ {{ history.annotation }}
+
+
+
+
+
+
+
+
+
+
diff --git a/client/src/components/History/Archiving/HistoryArchiveExportSelector.test.ts b/client/src/components/History/Archiving/HistoryArchiveExportSelector.test.ts
new file mode 100644
index 000000000000..7bcb299b2c87
--- /dev/null
+++ b/client/src/components/History/Archiving/HistoryArchiveExportSelector.test.ts
@@ -0,0 +1,156 @@
+import { shallowMount } from "@vue/test-utils";
+import { getLocalVue } from "tests/jest/helpers";
+import axios from "axios";
+import MockAdapter from "axios-mock-adapter";
+import flushPromises from "flush-promises";
+import { BFormCheckbox } from "bootstrap-vue";
+import HistoryArchiveExportSelector from "./HistoryArchiveExportSelector.vue";
+import type { HistorySummary } from "@/stores/historyStore";
+import { mockFetcher } from "@/schema/__mocks__";
+import {
+ FAILED_FILE_SOURCE_STORE_RESPONSE,
+ FILE_SOURCE_STORE_RESPONSE,
+ IN_PROGRESS_FILE_SOURCE_STORE_RESPONSE,
+ RECENT_FILE_SOURCE_STORE_RESPONSE,
+ RECENT_STS_DOWNLOAD_RESPONSE,
+} from "@/components/Common/models/testData/exportData";
+
+jest.mock("@/schema");
+
+const localVue = getLocalVue(true);
+
+const TEST_HISTORY_ID = "test-history-id";
+const TEST_HISTORY = {
+ id: TEST_HISTORY_ID,
+ name: "fake-history-name",
+ archived: false,
+};
+
+const GET_EXPORTS_API_ENDPOINT = "/api/histories/{history_id}/exports";
+
+const EXPORT_RECORD_BTN = "#create-export-record-btn";
+const ARCHIVE_HISTORY_BTN = "#archive-history-btn";
+const CONFIRM_DELETE_CHECKBOX = "[type='checkbox']";
+
+async function mountComponentWithHistory(history: HistorySummary) {
+ const wrapper = shallowMount(HistoryArchiveExportSelector, {
+ propsData: { history },
+ localVue,
+ stubs: {
+ // Stub with the real component to be able to use setChecked
+ BFormCheckbox,
+ },
+ });
+ await flushPromises();
+ return wrapper;
+}
+
+describe("HistoryArchiveExportSelector.vue", () => {
+ let axiosMock: MockAdapter;
+
+ beforeEach(async () => {
+ axiosMock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+ });
+
+ it("should display a button to create an export record if there is no up to date export record", async () => {
+ mockFetcher.path(GET_EXPORTS_API_ENDPOINT).method("get").mock({ data: [] });
+
+ const wrapper = await mountComponentWithHistory(TEST_HISTORY as HistorySummary);
+
+ const createExportButton = wrapper.find(EXPORT_RECORD_BTN);
+ expect(createExportButton.exists()).toBe(true);
+ });
+
+ it("should display a button to create an export record if the most recent export record is not permanent", async () => {
+ mockFetcher
+ .path(GET_EXPORTS_API_ENDPOINT)
+ .method("get")
+ .mock({ data: [RECENT_STS_DOWNLOAD_RESPONSE] });
+
+ const wrapper = await mountComponentWithHistory(TEST_HISTORY as HistorySummary);
+
+ const createExportButton = wrapper.find(EXPORT_RECORD_BTN);
+ expect(createExportButton.exists()).toBe(true);
+ });
+
+ it("should display a button to create an export record if there are permanent export records but none are up to date", async () => {
+ mockFetcher
+ .path(GET_EXPORTS_API_ENDPOINT)
+ .method("get")
+ .mock({ data: [FILE_SOURCE_STORE_RESPONSE, FAILED_FILE_SOURCE_STORE_RESPONSE] });
+
+ const wrapper = await mountComponentWithHistory(TEST_HISTORY as HistorySummary);
+
+ const createExportButton = wrapper.find(EXPORT_RECORD_BTN);
+ expect(createExportButton.exists()).toBe(true);
+ });
+
+ it("should not display a button to create an export record if there is an up to date export record", async () => {
+ mockFetcher
+ .path(GET_EXPORTS_API_ENDPOINT)
+ .method("get")
+ .mock({ data: [RECENT_FILE_SOURCE_STORE_RESPONSE] });
+
+ const wrapper = await mountComponentWithHistory(TEST_HISTORY as HistorySummary);
+
+ const createExportButton = wrapper.find(EXPORT_RECORD_BTN);
+ expect(createExportButton.exists()).toBe(false);
+ });
+
+ it("should not display a button to create an export record if a record is being created", async () => {
+ mockFetcher
+ .path(GET_EXPORTS_API_ENDPOINT)
+ .method("get")
+ .mock({ data: [IN_PROGRESS_FILE_SOURCE_STORE_RESPONSE] });
+
+ const wrapper = await mountComponentWithHistory(TEST_HISTORY as HistorySummary);
+
+ const createExportButton = wrapper.find(EXPORT_RECORD_BTN);
+ expect(createExportButton.exists()).toBe(false);
+ });
+
+ it("should disable the Archive button if there is no up to date export record", async () => {
+ mockFetcher.path(GET_EXPORTS_API_ENDPOINT).method("get").mock({ data: [] });
+
+ const wrapper = await mountComponentWithHistory(TEST_HISTORY as HistorySummary);
+
+ const archiveButton = wrapper.find(ARCHIVE_HISTORY_BTN);
+ expect(archiveButton.attributes("disabled")).toBeTruthy();
+ });
+
+ it("should disable the Archive button if the confirm delete checkbox is not checked", async () => {
+ mockFetcher
+ .path(GET_EXPORTS_API_ENDPOINT)
+ .method("get")
+ .mock({ data: [RECENT_FILE_SOURCE_STORE_RESPONSE] });
+
+ const wrapper = await mountComponentWithHistory(TEST_HISTORY as HistorySummary);
+
+ const confirmDeleteCheckbox = wrapper.find(CONFIRM_DELETE_CHECKBOX);
+ await confirmDeleteCheckbox.setChecked(false);
+ expect((confirmDeleteCheckbox.element as HTMLInputElement).checked).toBeFalsy();
+
+ const archiveButton = wrapper.find(ARCHIVE_HISTORY_BTN);
+ expect(archiveButton.attributes("disabled")).toBeTruthy();
+ });
+
+ it("should enable the Archive button if there is an up to date export record and the confirm delete checkbox is checked", async () => {
+ mockFetcher
+ .path(GET_EXPORTS_API_ENDPOINT)
+ .method("get")
+ .mock({ data: [RECENT_FILE_SOURCE_STORE_RESPONSE] });
+
+ const wrapper = await mountComponentWithHistory(TEST_HISTORY as HistorySummary);
+
+ const confirmDeleteCheckbox = wrapper.find(CONFIRM_DELETE_CHECKBOX);
+ await confirmDeleteCheckbox.setChecked(true);
+ expect((confirmDeleteCheckbox.element as HTMLInputElement).checked).toBeTruthy();
+
+ const archiveButton = wrapper.find(ARCHIVE_HISTORY_BTN);
+ expect(archiveButton.attributes("disabled")).toBeFalsy();
+ });
+});
diff --git a/client/src/components/History/Archiving/HistoryArchiveExportSelector.vue b/client/src/components/History/Archiving/HistoryArchiveExportSelector.vue
new file mode 100644
index 000000000000..dc39daafcd3a
--- /dev/null
+++ b/client/src/components/History/Archiving/HistoryArchiveExportSelector.vue
@@ -0,0 +1,200 @@
+
+
+
+
+
+ If you are not planning to use this history in the near future, you can
+ archive and delete its contents to free up disk space.
+
+
+ To be able to recreate your archived history later, you need to export it first to a permanent remote
+ location. Then you will be able to import it back to Galaxy from the remote source, as a new copy.
+
+
+
+
+
+
+ Something went wrong
+
+ {{ exportErrorMessage }}
+
+
+
+
+ There is an up-to-date export record of this history ready.
+
+
+ This export record will be associated with your archived history so you can recreate it later by
+ importing it.
+
+
+
+
+
+
+
+
+
+
+
+ There is no up-to-date export record of this history. You need to create a new export record to
+ be able to recreate the history later.
+
+
+ Use the button below to create a new export record before archiving the history.
+
+ Create export record
+
+
+
+
+ To continue, you need to confirm that you want to delete the contents of the original history before you can
+ archive it using the checkbox below. If you created an export record above, you will be able to recreate the
+ history later by importing it from the export record.
+
+
+ I am aware that the contents of the original history will be permanently deleted.
+
+
+ Remember that you cannot undo this action. Once you archive and delete the history, you can only recover it
+ by importing it as a new copy from the export record.
+
+
+ Archive (and purge) history
+
+
+
+
+
+
+
diff --git a/client/src/components/History/Archiving/HistoryArchiveSimple.vue b/client/src/components/History/Archiving/HistoryArchiveSimple.vue
new file mode 100644
index 000000000000..6d07ff0b30da
--- /dev/null
+++ b/client/src/components/History/Archiving/HistoryArchiveSimple.vue
@@ -0,0 +1,30 @@
+
+
+
+
+
+ If you want to remove the history from your active histories but keep it around for reference, you
+ can move it to the Archived Histories section, by clicking the button below.
+
+
+ This is particularly useful if you have a lot of histories and want to keep the list of
+ active histories short or if you publish a history and want to avoid accidental changes to it.
+
+
+ You can undo this action at any time, and the history will be moved back to your
+ active histories.
+
+
+
+ Archive history
+
+
+
diff --git a/client/src/components/History/Archiving/HistoryArchiveWizard.test.ts b/client/src/components/History/Archiving/HistoryArchiveWizard.test.ts
new file mode 100644
index 000000000000..54f74cb779b8
--- /dev/null
+++ b/client/src/components/History/Archiving/HistoryArchiveWizard.test.ts
@@ -0,0 +1,114 @@
+import { shallowMount } from "@vue/test-utils";
+import { getLocalVue } from "tests/jest/helpers";
+import axios from "axios";
+import MockAdapter from "axios-mock-adapter";
+import flushPromises from "flush-promises";
+import { createTestingPinia } from "@pinia/testing";
+import { setActivePinia } from "pinia";
+import { useHistoryStore, type HistorySummary } from "@/stores/historyStore";
+import HistoryArchiveWizard from "./HistoryArchiveWizard.vue";
+
+jest.mock("@/composables/config", () => ({
+ useConfig: jest.fn(() => ({
+ config: {
+ value: {
+ enable_celery_tasks: true,
+ },
+ },
+ })),
+}));
+
+const localVue = getLocalVue(true);
+
+const TEST_HISTORY_ID = "test-history-id";
+const TEST_HISTORY = {
+ id: TEST_HISTORY_ID,
+ name: "fake-history-name",
+ archived: false,
+};
+
+const ARCHIVED_TEST_HISTORY = {
+ ...TEST_HISTORY,
+ archived: true,
+};
+
+const REMOTE_FILES_API_ENDPOINT = new RegExp("/api/remote_files/plugins");
+
+async function mountComponentWithHistory(history?: HistorySummary) {
+ const pinia = createTestingPinia();
+ setActivePinia(pinia);
+ const historyStore = useHistoryStore(pinia);
+
+ // the mocking method described in the pinia docs does not work in vue2
+ // this is a work-around
+ jest.spyOn(historyStore, "getHistoryById").mockImplementation((_history_id: string) => history as HistorySummary);
+
+ const wrapper = shallowMount(HistoryArchiveWizard, {
+ propsData: { historyId: TEST_HISTORY_ID },
+ localVue,
+ });
+ await flushPromises();
+ return wrapper;
+}
+
+describe("HistoryArchiveWizard.vue", () => {
+ let axiosMock: MockAdapter;
+
+ beforeEach(async () => {
+ axiosMock = new MockAdapter(axios);
+ axiosMock.onGet(REMOTE_FILES_API_ENDPOINT).reply(200, []);
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+ });
+
+ it("should render the history name in the header", async () => {
+ const wrapper = await mountComponentWithHistory(TEST_HISTORY as HistorySummary);
+
+ const header = wrapper.find("h1");
+ expect(header.text()).toContain(TEST_HISTORY.name);
+ });
+
+ it("should render only the simple archival mode when no writeable file sources are available", async () => {
+ const wrapper = await mountComponentWithHistory(TEST_HISTORY as HistorySummary);
+
+ const optionTabs = wrapper.findAll(".archival-option-tabs");
+ expect(optionTabs.exists()).toBe(false);
+ });
+
+ it("should render both archival modes when writeable file sources and celery tasks are available", async () => {
+ axiosMock.onGet(REMOTE_FILES_API_ENDPOINT).reply(200, [
+ {
+ id: "test-posix-source",
+ type: "posix",
+ uri_root: "gxfiles://test-posix-source",
+ label: "TestSource",
+ doc: "For testing",
+ writable: true,
+ requires_roles: undefined,
+ requires_groups: undefined,
+ },
+ ]);
+ const wrapper = await mountComponentWithHistory(TEST_HISTORY as HistorySummary);
+
+ const optionTabs = wrapper.findAll(".archival-option-tabs");
+ expect(optionTabs.exists()).toBe(true);
+
+ const keepStorageOption = wrapper.find("#keep-storage-tab");
+ expect(keepStorageOption.exists()).toBe(true);
+
+ const freeStorageOption = wrapper.find("#free-storage-tab");
+ expect(freeStorageOption.exists()).toBe(true);
+ });
+
+ it("should display a success alert when the history is archived instead of the archival options", async () => {
+ const wrapper = await mountComponentWithHistory(ARCHIVED_TEST_HISTORY as HistorySummary);
+
+ const optionTabs = wrapper.findAll(".archival-option-tabs");
+ expect(optionTabs.exists()).toBe(false);
+
+ const successMessage = wrapper.find("#history-archived-alert");
+ expect(successMessage.exists()).toBe(true);
+ });
+});
diff --git a/client/src/components/History/Archiving/HistoryArchiveWizard.vue b/client/src/components/History/Archiving/HistoryArchiveWizard.vue
new file mode 100644
index 000000000000..3f7b2f905028
--- /dev/null
+++ b/client/src/components/History/Archiving/HistoryArchiveWizard.vue
@@ -0,0 +1,102 @@
+
+
+
+
+
+
+ Archive
+
+ {{ history.name }}
+
+
+
+ This history has been archived. You can access it from the
+ Archived Histories section.
+
+
+
+ Archiving a history will remove it from your active histories. You can still access it from the
+ Archived Histories section.
+
+
+
+
How do you want to archive this history?
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/client/src/components/History/Archiving/IncludedBadge.vue b/client/src/components/History/Archiving/IncludedBadge.vue
new file mode 100644
index 000000000000..118f40d4ef3c
--- /dev/null
+++ b/client/src/components/History/Archiving/IncludedBadge.vue
@@ -0,0 +1,28 @@
+
+
+
+
+
+
+ {{ props.itemName }}
+
+
diff --git a/client/src/components/History/CurrentHistory/HistoryEmpty.vue b/client/src/components/History/CurrentHistory/HistoryEmpty.vue
index dbd74b8ef7a5..addac9563877 100644
--- a/client/src/components/History/CurrentHistory/HistoryEmpty.vue
+++ b/client/src/components/History/CurrentHistory/HistoryEmpty.vue
@@ -4,7 +4,7 @@
{{ message | l }}
-
+
You can load your own data
or
get data from an external source.
@@ -18,6 +18,7 @@ import { useGlobalUploadModal } from "composables/globalUploadModal";
export default {
props: {
message: { type: String, default: "This history is empty." },
+ writable: { type: Boolean, default: true },
},
setup() {
const { openGlobalUploadModal } = useGlobalUploadModal();
diff --git a/client/src/components/History/CurrentHistory/HistoryNavigation.test.js b/client/src/components/History/CurrentHistory/HistoryNavigation.test.js
index 798e7e55b52a..ffa76b2db7b3 100644
--- a/client/src/components/History/CurrentHistory/HistoryNavigation.test.js
+++ b/client/src/components/History/CurrentHistory/HistoryNavigation.test.js
@@ -14,6 +14,7 @@ const expectedOptions = [
"Delete this History",
"Export Tool Citations",
"Export History to File",
+ "Archive History",
"Extract Workflow",
"Show Invocations",
"Share or Publish",
diff --git a/client/src/components/History/CurrentHistory/HistoryNavigation.vue b/client/src/components/History/CurrentHistory/HistoryNavigation.vue
index 28d7d0ed8a73..f6e97949f9c5 100644
--- a/client/src/components/History/CurrentHistory/HistoryNavigation.vue
+++ b/client/src/components/History/CurrentHistory/HistoryNavigation.vue
@@ -96,6 +96,15 @@
Export History to File
+
+
+ Archive History
+
+
-
+
Error in filter:
@@ -429,7 +429,7 @@ export default {
}
},
onError(error) {
- Toast.error(error);
+ Toast.error(`${error}`);
},
updateFilterVal(newFilter, newVal) {
this.filterText = FilterClass.setFilterValue(this.filterText, newFilter, newVal);
diff --git a/client/src/components/History/HistoryView.test.js b/client/src/components/History/HistoryView.test.js
index 985280af3488..69ac021af618 100644
--- a/client/src/components/History/HistoryView.test.js
+++ b/client/src/components/History/HistoryView.test.js
@@ -170,6 +170,6 @@ describe("History center panel View", () => {
expect(importButton.exists()).toBe(false);
// instead we have an alert
- expect(wrapper.find("[data-description='history is purged']").text()).toBe("This history has been purged.");
+ expect(wrapper.find("[data-description='history state info']").text()).toBe("This history has been purged.");
});
});
diff --git a/client/src/components/History/HistoryView.vue b/client/src/components/History/HistoryView.vue
index 0ec77bed6274..702afbbdcc11 100644
--- a/client/src/components/History/HistoryView.vue
+++ b/client/src/components/History/HistoryView.vue
@@ -1,15 +1,15 @@
-
- This history has been purged.
+
+ {{ historyStateInfoMessage }}
Switch to this history
@@ -33,7 +33,7 @@
@@ -72,6 +72,28 @@ export default {
history() {
return this.getHistoryById(this.id);
},
+ userOwnsHistory() {
+ return this.currentUser.id == this.history.user_id;
+ },
+ isSetAsCurrentDisabled() {
+ return this.currentHistory?.id == this.history?.id || this.history.archived || this.history.purged;
+ },
+ canEditHistory() {
+ return this.userOwnsHistory && !this.history.archived && !this.history.purged;
+ },
+ showHistoryStateInfo() {
+ return this.history.archived || this.history.purged;
+ },
+ historyStateInfoMessage() {
+ if (this.history.archived && this.history.purged) {
+ return "This history has been archived and purged.";
+ } else if (this.history.archived) {
+ return "This history has been archived.";
+ } else if (this.history.purged) {
+ return "This history has been purged.";
+ }
+ return "";
+ },
},
created() {
this.loadHistoryById(this.id);
diff --git a/client/src/entry/analysis/menu.js b/client/src/entry/analysis/menu.js
index df04a2f45645..1f75398d3560 100644
--- a/client/src/entry/analysis/menu.js
+++ b/client/src/entry/analysis/menu.js
@@ -215,6 +215,10 @@ export function fetchMenu(options = {}) {
url: "/histories/list_shared",
hidden: Galaxy.config.single_user,
},
+ {
+ title: _l("Archived Histories"),
+ url: "/histories/archived",
+ },
{
title: _l("Pages"),
url: "/pages/list",
diff --git a/client/src/entry/analysis/router.js b/client/src/entry/analysis/router.js
index 7ea3dacd94e4..44a57245ba23 100644
--- a/client/src/entry/analysis/router.js
+++ b/client/src/entry/analysis/router.js
@@ -68,6 +68,8 @@ import { CloudAuth } from "components/User/CloudAuth";
import { ExternalIdentities } from "components/User/ExternalIdentities";
import { HistoryExport } from "components/HistoryExport/index";
import HistoryExportTasks from "components/History/Export/HistoryExport";
+import HistoryArchiveWizard from "@/components/History/Archiving/HistoryArchiveWizard.vue";
+import HistoryArchive from "@/components/History/Archiving/HistoryArchive.vue";
Vue.use(VueRouter);
@@ -268,6 +270,10 @@ export function getRouter(Galaxy) {
component: HistoryPublishedList,
props: true,
},
+ {
+ path: "histories/archived",
+ component: HistoryArchive,
+ },
{
path: "histories/:actionId",
component: GridHistory,
@@ -281,6 +287,11 @@ export function getRouter(Galaxy) {
},
props: true,
},
+ {
+ path: "histories/:historyId/archive",
+ component: HistoryArchiveWizard,
+ props: true,
+ },
{
path: "histories/:historyId/invocations",
component: HistoryInvocations,
diff --git a/client/src/schema/schema.ts b/client/src/schema/schema.ts
index 0a2bbb007aa9..9e6e84c4bca9 100644
--- a/client/src/schema/schema.ts
+++ b/client/src/schema/schema.ts
@@ -410,6 +410,15 @@ export interface paths {
*/
post: operations["create_api_histories_post"];
};
+ "/api/histories/archived": {
+ /**
+ * Get a list of all archived histories for the current user.
+ * @description Get a list of all archived histories for the current user.
+ *
+ * Archived histories are histories that are not part of the active histories of the user, but they can be accessed using this endpoint.
+ */
+ get: operations["get_archived_histories_api_histories_archived_get"];
+ };
"/api/histories/count": {
/** Returns number of histories for the current user. */
get: operations["count_api_histories_count_get"];
@@ -450,6 +459,39 @@ export interface paths {
/** Marks the history with the given ID as deleted. */
delete: operations["delete_api_histories__history_id__delete"];
};
+ "/api/histories/{history_id}/archive": {
+ /**
+ * Archive a history.
+ * @description Marks the given history as 'archived' and returns the history.
+ *
+ * Archiving a history will remove it from the list of active histories of the user but it will still be
+ * accessible via the `/api/histories/{id}` or the `/api/histories/archived` endpoints.
+ *
+ * Associating an export record:
+ *
+ * - Optionally, an export record (containing information about a recent snapshot of the history) can be associated with the
+ * archived history by providing an `archive_export_id` in the payload. The export record must belong to the history and
+ * must be in the ready state.
+ * - When associating an export record, the history can be purged after it has been archived using the `purge_history` flag.
+ *
+ * If the history is already archived, this endpoint will return a 409 Conflict error, indicating that the history is already archived.
+ * If the history was not purged after it was archived, you can restore it using the `/api/histories/{id}/archive/restore` endpoint.
+ */
+ post: operations["archive_history_api_histories__history_id__archive_post"];
+ };
+ "/api/histories/{history_id}/archive/restore": {
+ /**
+ * Restore an archived history.
+ * @description Restores an archived history and returns it.
+ *
+ * Restoring an archived history will add it back to the list of active histories of the user (unless it was purged).
+ *
+ * **Warning**: Please note that histories that are associated with an archive export might be purged after export, so un-archiving them
+ * will not restore the datasets that were in the history before it was archived. You will need to import back the archive export
+ * record to restore the history and its datasets as a new copy. See `/api/histories/from_store_async` for more information.
+ */
+ put: operations["restore_archived_history_api_histories__history_id__archive_restore_put"];
+ };
"/api/histories/{history_id}/citations": {
/** Return all the citations for the tools used to produce the datasets in the history. */
get: operations["citations_api_histories__history_id__citations_get"];
@@ -1588,6 +1630,240 @@ export interface components {
*/
link: string;
};
+ /**
+ * ArchiveHistoryRequestPayload
+ * @description Base model definition with common configuration used by all derived models.
+ */
+ ArchiveHistoryRequestPayload: {
+ /**
+ * Export Record ID
+     * @description The encoded ID of the export record to associate with this history archival. This is used to be able to recover the history from the export record.
+ * @example 0123456789ABCDEF
+ */
+ archive_export_id?: string;
+ /**
+ * Purge History
+ * @description Whether to purge the history after archiving it. It requires an `archive_export_id` to be set.
+ * @default false
+ */
+ purge_history?: boolean;
+ };
+ /**
+ * ArchivedHistoryDetailed
+ * @description History detailed information.
+ */
+ ArchivedHistoryDetailed: {
+ /**
+ * Annotation
+ * @description An annotation to provide details or to help understand the purpose and usage of this item.
+ */
+ annotation: string;
+ /**
+ * Archived
+ * @description Whether this item has been archived and is no longer active.
+ */
+ archived: boolean;
+ /**
+ * Contents URL
+ * @description The relative URL to access the contents of this History.
+ */
+ contents_url: string;
+ /**
+ * Count
+ * @description The number of items in the history.
+ */
+ count: number;
+ /**
+ * Create Time
+ * Format: date-time
+ * @description The time and date this item was created.
+ */
+ create_time: string;
+ /**
+ * Deleted
+ * @description Whether this item is marked as deleted.
+ */
+ deleted: boolean;
+ /**
+ * Export Record Data
+ * @description The export record data associated with this archived history. Used to recover the history.
+ */
+ export_record_data?: components["schemas"]["ExportRecordData"];
+ /**
+ * Genome Build
+ * @description TODO
+ * @default ?
+ */
+ genome_build?: string;
+ /**
+ * ID
+ * @description The encoded ID of this entity.
+ * @example 0123456789ABCDEF
+ */
+ id: string;
+ /**
+ * Importable
+ * @description Whether this History can be imported by other users with a shared link.
+ */
+ importable: boolean;
+ /**
+ * Model class
+ * @description The name of the database model class.
+ * @default History
+ * @enum {string}
+ */
+ model_class: "History";
+ /**
+ * Name
+ * @description The name of the history.
+ */
+ name: string;
+ /**
+ * Preferred Object Store ID
+ * @description The ID of the object store that should be used to store new datasets in this history.
+ */
+ preferred_object_store_id?: string;
+ /**
+ * Published
+ * @description Whether this resource is currently publicly available to all users.
+ */
+ published: boolean;
+ /**
+ * Purged
+ * @description Whether this item has been permanently removed.
+ */
+ purged: boolean;
+ /**
+ * Size
+ * @description The total size of the contents of this history in bytes.
+ */
+ size: number;
+ /**
+ * Slug
+ * @description Part of the URL to uniquely identify this History by link in a readable way.
+ */
+ slug?: string;
+ /**
+ * State
+ * @description The current state of the History based on the states of the datasets it contains.
+ */
+ state: components["schemas"]["DatasetState"];
+ /**
+ * State Counts
+ * @description A dictionary keyed to possible dataset states and valued with the number of datasets in this history that have those states.
+ */
+ state_details: {
+ [key: string]: number | undefined;
+ };
+ /**
+ * State IDs
+ * @description A dictionary keyed to possible dataset states and valued with lists containing the ids of each HDA in that state.
+ */
+ state_ids: {
+ [key: string]: string[] | undefined;
+ };
+ tags: components["schemas"]["TagCollection"];
+ /**
+ * Update Time
+ * Format: date-time
+ * @description The last time and date this item was updated.
+ */
+ update_time: string;
+ /**
+ * URL
+ * @deprecated
+ * @description The relative URL to access this item.
+ */
+ url: string;
+ /**
+ * User ID
+ * @description The encoded ID of the user that owns this History.
+ * @example 0123456789ABCDEF
+ */
+ user_id: string;
+ /**
+ * Username and slug
+ * @description The relative URL in the form of /u/{username}/h/{slug}
+ */
+ username_and_slug?: string;
+ };
+ /**
+ * ArchivedHistorySummary
+ * @description History summary information.
+ */
+ ArchivedHistorySummary: {
+ /**
+ * Annotation
+ * @description An annotation to provide details or to help understand the purpose and usage of this item.
+ */
+ annotation: string;
+ /**
+ * Archived
+ * @description Whether this item has been archived and is no longer active.
+ */
+ archived: boolean;
+ /**
+ * Count
+ * @description The number of items in the history.
+ */
+ count: number;
+ /**
+ * Deleted
+ * @description Whether this item is marked as deleted.
+ */
+ deleted: boolean;
+ /**
+ * Export Record Data
+ * @description The export record data associated with this archived history. Used to recover the history.
+ */
+ export_record_data?: components["schemas"]["ExportRecordData"];
+ /**
+ * ID
+ * @description The encoded ID of this entity.
+ * @example 0123456789ABCDEF
+ */
+ id: string;
+ /**
+ * Model class
+ * @description The name of the database model class.
+ * @default History
+ * @enum {string}
+ */
+ model_class: "History";
+ /**
+ * Name
+ * @description The name of the history.
+ */
+ name: string;
+ /**
+ * Preferred Object Store ID
+ * @description The ID of the object store that should be used to store new datasets in this history.
+ */
+ preferred_object_store_id?: string;
+ /**
+ * Published
+ * @description Whether this resource is currently publicly available to all users.
+ */
+ published: boolean;
+ /**
+ * Purged
+ * @description Whether this item has been permanently removed.
+ */
+ purged: boolean;
+ tags: components["schemas"]["TagCollection"];
+ /**
+ * Update Time
+ * Format: date-time
+ * @description The last time and date this item was updated.
+ */
+ update_time: string;
+ /**
+ * URL
+ * @deprecated
+ * @description The relative URL to access this item.
+ */
+ url: string;
+ };
/**
* AsyncFile
* @description Base model definition with common configuration used by all derived models.
@@ -3332,6 +3608,40 @@ export interface components {
* @enum {string}
*/
ExportObjectType: "history" | "invocation";
+ /**
+ * ExportRecordData
+ * @description Data of an export record associated with a history that was archived.
+ */
+ ExportRecordData: {
+ /**
+ * Include deleted
+ * @description Include file contents for deleted datasets (if include_files is True).
+ * @default false
+ */
+ include_deleted?: boolean;
+ /**
+ * Include Files
+ * @description include materialized files in export when available
+ * @default true
+ */
+ include_files?: boolean;
+ /**
+ * Include hidden
+ * @description Include file contents for hidden datasets (if include_files is True).
+ * @default false
+ */
+ include_hidden?: boolean;
+ /**
+ * @description format of model store to export
+ * @default tar.gz
+ */
+ model_store_format?: components["schemas"]["ModelStoreFormat"];
+ /**
+ * Target URI
+     * @description Galaxy Files URI to write model store content to.
+ */
+ target_uri: string;
+ };
/**
* ExportTaskListResponse
* @description Base model definition with common configuration used by all derived models.
@@ -4750,6 +5060,11 @@ export interface components {
* @description An annotation to provide details or to help understand the purpose and usage of this item.
*/
annotation: string;
+ /**
+ * Archived
+ * @description Whether this item has been archived and is no longer active.
+ */
+ archived: boolean;
/**
* Contents URL
* @description The relative URL to access the contents of this History.
@@ -4879,6 +5194,11 @@ export interface components {
* @description An annotation to provide details or to help understand the purpose and usage of this item.
*/
annotation: string;
+ /**
+ * Archived
+ * @description Whether this item has been archived and is no longer active.
+ */
+ archived: boolean;
/**
* Count
* @description The number of items in the history.
@@ -8949,10 +9269,7 @@ export interface operations {
*/
/** @description Starts at the beginning skip the first ( offset - 1 ) items and begin returning at the Nth item */
/** @description The maximum number of items to return. */
- /**
- * @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values.
- * @example name-dsc,create_time
- */
+ /** @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values. */
query?: {
history_id?: string;
view?: string;
@@ -10575,10 +10892,7 @@ export interface operations {
*/
/** @description Starts at the beginning skip the first ( offset - 1 ) items and begin returning at the Nth item */
/** @description The maximum number of items to return. */
- /**
- * @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values.
- * @example name-dsc,create_time
- */
+ /** @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values. */
/** @description View to be passed to the serializer */
/** @description Comma-separated list of keys to be passed to the serializer */
query?: {
@@ -10657,6 +10971,60 @@ export interface operations {
};
};
};
+ get_archived_histories_api_histories_archived_get: {
+ /**
+ * Get a list of all archived histories for the current user.
+ * @description Get a list of all archived histories for the current user.
+ *
+     * Archived histories are histories that are not part of the active histories of the user but they can be accessed using this endpoint.
+ */
+ parameters?: {
+ /** @description View to be passed to the serializer */
+ /** @description Comma-separated list of keys to be passed to the serializer */
+ /**
+ * @description Generally a property name to filter by followed by an (often optional) hyphen and operator string.
+ * @example create_time-gt
+ */
+ /**
+ * @description The value to filter by.
+ * @example 2015-01-29
+ */
+ /** @description Starts at the beginning skip the first ( offset - 1 ) items and begin returning at the Nth item */
+ /** @description The maximum number of items to return. */
+ /** @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values. */
+ query?: {
+ view?: string;
+ keys?: string;
+ q?: string[];
+ qv?: string[];
+ offset?: number;
+ limit?: number;
+ order?: string;
+ };
+ /** @description The user ID that will be used to effectively make this API call. Only admins and designated users can make API calls on behalf of other users. */
+ header?: {
+ "run-as"?: string;
+ };
+ };
+ responses: {
+ /** @description Successful Response */
+ 200: {
+ content: {
+ "application/json": (
+ | components["schemas"]["ArchivedHistorySummary"]
+ | components["schemas"]["ArchivedHistoryDetailed"]
+                        | Record<string, never>
+ )[];
+ };
+ };
+ /** @description Validation Error */
+ 422: {
+ content: {
+ "application/json": components["schemas"]["HTTPValidationError"];
+ };
+ };
+ };
+ };
count_api_histories_count_get: {
/** Returns number of histories for the current user. */
parameters?: {
@@ -10694,10 +11062,7 @@ export interface operations {
*/
/** @description Starts at the beginning skip the first ( offset - 1 ) items and begin returning at the Nth item */
/** @description The maximum number of items to return. */
- /**
- * @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values.
- * @example name-dsc,create_time
- */
+ /** @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values. */
/** @description View to be passed to the serializer */
/** @description Comma-separated list of keys to be passed to the serializer */
query?: {
@@ -10880,10 +11245,7 @@ export interface operations {
*/
/** @description Starts at the beginning skip the first ( offset - 1 ) items and begin returning at the Nth item */
/** @description The maximum number of items to return. */
- /**
- * @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values.
- * @example name-dsc,create_time
- */
+ /** @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values. */
/** @description View to be passed to the serializer */
/** @description Comma-separated list of keys to be passed to the serializer */
query?: {
@@ -10932,10 +11294,7 @@ export interface operations {
*/
/** @description Starts at the beginning skip the first ( offset - 1 ) items and begin returning at the Nth item */
/** @description The maximum number of items to return. */
- /**
- * @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values.
- * @example name-dsc,create_time
- */
+ /** @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values. */
/** @description View to be passed to the serializer */
/** @description Comma-separated list of keys to be passed to the serializer */
query?: {
@@ -11090,6 +11449,100 @@ export interface operations {
};
};
};
+ archive_history_api_histories__history_id__archive_post: {
+ /**
+ * Archive a history.
+ * @description Marks the given history as 'archived' and returns the history.
+ *
+ * Archiving a history will remove it from the list of active histories of the user but it will still be
+ * accessible via the `/api/histories/{id}` or the `/api/histories/archived` endpoints.
+ *
+ * Associating an export record:
+ *
+ * - Optionally, an export record (containing information about a recent snapshot of the history) can be associated with the
+ * archived history by providing an `archive_export_id` in the payload. The export record must belong to the history and
+ * must be in the ready state.
+ * - When associating an export record, the history can be purged after it has been archived using the `purge_history` flag.
+ *
+ * If the history is already archived, this endpoint will return a 409 Conflict error, indicating that the history is already archived.
+ * If the history was not purged after it was archived, you can restore it using the `/api/histories/{id}/archive/restore` endpoint.
+ */
+ parameters: {
+ /** @description The user ID that will be used to effectively make this API call. Only admins and designated users can make API calls on behalf of other users. */
+ header?: {
+ "run-as"?: string;
+ };
+ /** @description The encoded database identifier of the History. */
+ path: {
+ history_id: string;
+ };
+ };
+ requestBody?: {
+ content: {
+ "application/json": components["schemas"]["ArchiveHistoryRequestPayload"];
+ };
+ };
+ responses: {
+ /** @description Successful Response */
+ 200: {
+ content: {
+ "application/json":
+ | components["schemas"]["ArchivedHistorySummary"]
+ | components["schemas"]["ArchivedHistoryDetailed"]
+                    | Record<string, never>;
+ };
+ };
+ /** @description Validation Error */
+ 422: {
+ content: {
+ "application/json": components["schemas"]["HTTPValidationError"];
+ };
+ };
+ };
+ };
+ restore_archived_history_api_histories__history_id__archive_restore_put: {
+ /**
+ * Restore an archived history.
+ * @description Restores an archived history and returns it.
+ *
+ * Restoring an archived history will add it back to the list of active histories of the user (unless it was purged).
+ *
+ * **Warning**: Please note that histories that are associated with an archive export might be purged after export, so un-archiving them
+ * will not restore the datasets that were in the history before it was archived. You will need to import back the archive export
+ * record to restore the history and its datasets as a new copy. See `/api/histories/from_store_async` for more information.
+ */
+ parameters: {
+ /** @description If true, the history will be un-archived even if it has an associated archive export record and was purged. */
+ query?: {
+ force?: boolean;
+ };
+ /** @description The user ID that will be used to effectively make this API call. Only admins and designated users can make API calls on behalf of other users. */
+ header?: {
+ "run-as"?: string;
+ };
+ /** @description The encoded database identifier of the History. */
+ path: {
+ history_id: string;
+ };
+ };
+ responses: {
+ /** @description Successful Response */
+ 200: {
+ content: {
+ "application/json":
+ | components["schemas"]["HistorySummary"]
+ | components["schemas"]["HistoryDetailed"]
+                    | Record<string, never>;
+ };
+ };
+ /** @description Validation Error */
+ 422: {
+ content: {
+ "application/json": components["schemas"]["HTTPValidationError"];
+ };
+ };
+ };
+ };
citations_api_histories__history_id__citations_get: {
/** Return all the citations for the tools used to produce the datasets in the history. */
parameters: {
@@ -11165,10 +11618,7 @@ export interface operations {
*/
/** @description Starts at the beginning skip the first ( offset - 1 ) items and begin returning at the Nth item */
/** @description The maximum number of items to return. */
- /**
- * @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values.
- * @example name-dsc,create_time
- */
+ /** @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values. */
query?: {
v?: string;
details?: string;
@@ -11333,10 +11783,7 @@ export interface operations {
*/
/** @description Starts at the beginning skip the first ( offset - 1 ) items and begin returning at the Nth item */
/** @description The maximum number of items to return. */
- /**
- * @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values.
- * @example name-dsc,create_time
- */
+ /** @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values. */
query?: {
filename?: string;
dry_run?: boolean;
@@ -11389,10 +11836,7 @@ export interface operations {
*/
/** @description Starts at the beginning skip the first ( offset - 1 ) items and begin returning at the Nth item */
/** @description The maximum number of items to return. */
- /**
- * @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values.
- * @example name-dsc,create_time
- */
+ /** @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values. */
query?: {
dry_run?: boolean;
q?: string[];
@@ -11988,10 +12432,7 @@ export interface operations {
*/
/** @description Starts at the beginning skip the first ( offset - 1 ) items and begin returning at the Nth item */
/** @description The maximum number of items to return. */
- /**
- * @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values.
- * @example name-dsc,create_time
- */
+ /** @description String containing one of the valid ordering attributes followed (optionally) by '-asc' or '-dsc' for ascending and descending order respectively. Orders can be stacked as a comma-separated list of values. */
query?: {
v?: string;
details?: string;
diff --git a/client/src/stores/historyStore.ts b/client/src/stores/historyStore.ts
index cf2ab2b2abdb..a6a8f412b064 100644
--- a/client/src/stores/historyStore.ts
+++ b/client/src/stores/historyStore.ts
@@ -15,6 +15,7 @@ import {
setCurrentHistoryOnServer,
updateHistoryFields,
} from "@/stores/services/history.services";
+import * as ArchiveServices from "@/stores/services/historyArchive.services";
import { useUserLocalStorage } from "@/composables/userLocalStorage";
export type HistorySummary = components["schemas"]["HistorySummary"];
@@ -32,7 +33,9 @@ export const useHistoryStore = defineStore("historyStore", () => {
const storedHistories = ref<{ [key: string]: HistorySummary }>({});
const histories = computed(() => {
- return Object.values(storedHistories.value).sort(sortByObjectProp("name"));
+ return Object.values(storedHistories.value)
+ .filter((h) => !h.archived)
+ .sort(sortByObjectProp("name"));
});
const getFirstHistoryId = computed(() => {
@@ -252,6 +255,28 @@ export const useHistoryStore = defineStore("historyStore", () => {
setHistory(securedHistory as HistorySummary);
}
+ async function archiveHistoryById(historyId: string, archiveExportId?: string, purgeHistory = false) {
+ const history = await ArchiveServices.archiveHistoryById(historyId, archiveExportId, purgeHistory);
+ setHistory(history as HistorySummary);
+ if (!history.archived) {
+ return;
+ }
+ // If the current history is archived, we need to switch to another one as it is
+ // no longer part of the active histories.
+ const nextHistoryId = getNextAvailableHistoryId([historyId]);
+ if (nextHistoryId) {
+ return setCurrentHistory(nextHistoryId);
+ } else {
+ return createNewHistory();
+ }
+ }
+
+ async function unarchiveHistoryById(historyId: string, force?: boolean) {
+ const history = await ArchiveServices.unarchiveHistoryById(historyId, force);
+ setHistory(history as HistorySummary);
+ return history;
+ }
+
async function updateHistory({ id, ...update }: HistorySummary) {
const savedHistory = await updateHistoryFields(id, update);
setHistory(savedHistory as HistorySummary);
@@ -282,6 +307,8 @@ export const useHistoryStore = defineStore("historyStore", () => {
loadHistoryById,
secureHistory,
updateHistory,
+ archiveHistoryById,
+ unarchiveHistoryById,
historiesLoading,
historiesOffset,
totalHistoryCount,
diff --git a/client/src/stores/services/historyArchive.services.ts b/client/src/stores/services/historyArchive.services.ts
new file mode 100644
index 000000000000..0c11f6e73bc9
--- /dev/null
+++ b/client/src/stores/services/historyArchive.services.ts
@@ -0,0 +1,130 @@
+import { fetcher, type components } from "@/schema";
+import type { FetchArgType } from "openapi-typescript-fetch";
+
+export type ArchivedHistorySummary = components["schemas"]["ArchivedHistorySummary"];
+export type ArchivedHistoryDetailed = components["schemas"]["ArchivedHistoryDetailed"];
+export type AsyncTaskResultSummary = components["schemas"]["AsyncTaskResultSummary"];
+
+type GetArchivedHistoriesParams = FetchArgType<typeof getArchivedHistories>;
+type SerializationOptions = Pick<GetArchivedHistoriesParams, "view" | "keys">;
+
+interface FilterOptions {
+ query?: string;
+}
+
+interface PaginationOptions {
+ currentPage?: number;
+ pageSize?: number;
+}
+
+interface SortingOptions {
+ sortBy?: string;
+ sortDesc?: boolean;
+}
+
+interface GetArchivedHistoriesOptions extends FilterOptions, PaginationOptions, SortingOptions, SerializationOptions {}
+
+interface ArchivedHistoriesResult {
+ histories: ArchivedHistorySummary[] | ArchivedHistoryDetailed[];
+ totalMatches: number;
+}
+
+const DEFAULT_PAGE_SIZE = 10;
+
+const getArchivedHistories = fetcher.path("/api/histories/archived").method("get").create();
+
+/**
+ * Get a list of archived histories.
+ */
+export async function fetchArchivedHistories(
+ options: GetArchivedHistoriesOptions = {}
+): Promise<ArchivedHistoriesResult> {
+ const params = optionsToApiParams(options);
+ const { data, headers } = await getArchivedHistories(params);
+ const totalMatches = parseInt(headers.get("total_matches") ?? "0");
+ if (params.view === "detailed") {
+ return {
+ histories: data as ArchivedHistoryDetailed[],
+ totalMatches,
+ };
+ }
+ return {
+ histories: data as ArchivedHistorySummary[],
+ totalMatches,
+ };
+}
+
+const archiveHistory = fetcher.path("/api/histories/{history_id}/archive").method("post").create();
+
+/**
+ * Archive a history.
+ * @param historyId The history to archive
+ * @param archiveExportId The optional archive export record to associate. This can be used to restore a snapshot copy of the history in the future.
+ * @param purgeHistory Whether to purge the history after archiving. Can only be used in combination with an archive export record.
+ * @returns The archived history summary.
+ */
+export async function archiveHistoryById(
+ historyId: string,
+ archiveExportId?: string,
+ purgeHistory?: boolean
+): Promise<ArchivedHistorySummary> {
+ const { data } = await archiveHistory({
+ history_id: historyId,
+ archive_export_id: archiveExportId,
+ purge_history: purgeHistory,
+ });
+ return data as ArchivedHistorySummary;
+}
+
+const unarchiveHistory = fetcher
+ .path("/api/histories/{history_id}/archive/restore")
+ .method("put")
+ // @ts-ignore: workaround for optional query parameters in PUT. More info here https://github.com/ajaishankar/openapi-typescript-fetch/pull/55
+ .create({ force: undefined });
+
+/**
+ * Unarchive/restore a history.
+ * @param historyId The history to unarchive.
+ * @param force Whether to force un-archiving for purged histories.
+ * @returns The restored history summary.
+ */
+export async function unarchiveHistoryById(historyId: string, force?: boolean): Promise<ArchivedHistorySummary> {
+ const { data } = await unarchiveHistory({ history_id: historyId, force });
+ return data as ArchivedHistorySummary;
+}
+
+const reimportHistoryFromStore = fetcher.path("/api/histories/from_store_async").method("post").create();
+
+/**
+ * Reimport an archived history as a new copy from the associated export record.
+ *
+ * @param archivedHistory The archived history to reimport. It must have an associated export record.
+ * @returns The async task result summary to track the reimport progress.
+ */
+export async function reimportHistoryFromExportRecordAsync(
+ archivedHistory: ArchivedHistorySummary
+): Promise<AsyncTaskResultSummary> {
+ if (!archivedHistory.export_record_data) {
+ throw new Error("The archived history does not have an associated export record.");
+ }
+ const { data } = await reimportHistoryFromStore({
+ model_store_format: archivedHistory.export_record_data.model_store_format,
+ store_content_uri: archivedHistory.export_record_data.target_uri,
+ });
+ return data as AsyncTaskResultSummary;
+}
+
+function optionsToApiParams(options: GetArchivedHistoriesOptions): GetArchivedHistoriesParams {
+ const params: GetArchivedHistoriesParams = {};
+ if (options.query) {
+ params.q = ["name-contains"];
+ params.qv = [options.query];
+ }
+ const pageSize = options.pageSize ?? DEFAULT_PAGE_SIZE;
+ params.offset = (options.currentPage ? options.currentPage - 1 : 0) * pageSize;
+ params.limit = pageSize;
+ params.order = options.sortBy ? `${options.sortBy}${options.sortDesc ? "-dsc" : "-asc"}` : undefined;
+ params.view = options.view;
+ params.keys = options.keys;
+ return params;
+}
diff --git a/client/src/utils/simple-error.ts b/client/src/utils/simple-error.ts
index 3c764aa5f5ca..05680d279ab9 100644
--- a/client/src/utils/simple-error.ts
+++ b/client/src/utils/simple-error.ts
@@ -4,6 +4,8 @@ export function errorMessageAsString(e: any, defaultMessage = "Request failed.")
let message = defaultMessage;
if (e && e.response && e.response.data && e.response.data.err_msg) {
message = e.response.data.err_msg;
+ } else if (e && e.data && e.data.err_msg) {
+ message = e.data.err_msg;
} else if (e && e.response) {
message = `${e.response.statusText} (${e.response.status})`;
} else if (e instanceof Error) {
diff --git a/lib/galaxy/managers/export_tracker.py b/lib/galaxy/managers/export_tracker.py
index 9a929631309b..29ec780e0261 100644
--- a/lib/galaxy/managers/export_tracker.py
+++ b/lib/galaxy/managers/export_tracker.py
@@ -48,6 +48,14 @@ def set_export_association_metadata(self, export_association_id: int, export_met
with transaction(self.session):
self.session.commit()
+ def get_export_association(self, export_association_id: int) -> StoreExportAssociation:
+ try:
+ stmt = select(StoreExportAssociation).where(StoreExportAssociation.id == export_association_id)
+ export_association: StoreExportAssociation = self.session.execute(stmt).scalars().one()
+ except NoResultFound:
+ raise ObjectNotFound("Cannot get export association. Reason: Export association not found")
+ return export_association
+
def get_object_exports(
self, object_id: int, object_type: ExportObjectType, limit: Optional[int] = None, offset: Optional[int] = None
) -> List[StoreExportAssociation]:
diff --git a/lib/galaxy/managers/histories.py b/lib/galaxy/managers/histories.py
index 88fc2db0cdbf..ff72ee9f7b16 100644
--- a/lib/galaxy/managers/histories.py
+++ b/lib/galaxy/managers/histories.py
@@ -368,6 +368,41 @@ def make_members_public(self, trans, item):
else:
log.warning(f"User without permissions tried to make dataset with id: {dataset.id} public")
+ def archive_history(self, history: model.History, archive_export_id: Optional[int]):
+ """Marks the history with the given id as archived and optionally associates it with the given archive export record.
+
+ **Important**: The caller is responsible for passing a valid `archive_export_id` that belongs to the given history.
+ """
+ history.archived = True
+ history.archive_export_id = archive_export_id
+ with transaction(self.session()):
+ self.session().commit()
+
+ return history
+
+ def restore_archived_history(self, history: model.History, force: bool = False):
+ """Marks the history with the given id as not archived anymore.
+
+ Only un-archives the history if it is not associated with an archive export record. You can force the un-archiving
+ in this case by passing `force=True`.
+
+ Please note that histories that are associated with an archive export are usually purged after export, so un-archiving them
+ will not restore the datasets that were in the history before it was archived. You will need to import the archive export
+ record to restore the history and its datasets as a new copy.
+ """
+ if history.archive_export_id is not None and history.purged and not force:
+ raise glx_exceptions.RequestParameterInvalidException(
+ "Cannot restore an archived (and purged) history that is associated with an archive export record. "
+ "Please try importing it back as a new copy from the associated archive export record instead. "
+ "You can still force the un-archiving of the purged history by setting the 'force' parameter."
+ )
+
+ history.archived = False
+ with transaction(self.session()):
+ self.session().commit()
+
+ return history
+
class HistoryStorageCleanerManager(StorageCleanerManager):
def __init__(self, history_manager: HistoryManager):
@@ -460,20 +495,23 @@ def get_task_exports(self, trans, history_id: int, limit: Optional[int] = None,
)
return [self._serialize_task_export(export, history) for export in export_associations]
+ def get_task_export_by_id(self, store_export_id: int) -> model.StoreExportAssociation:
+ return self.export_tracker.get_export_association(store_export_id)
+
def create_export_association(self, history_id: int) -> model.StoreExportAssociation:
return self.export_tracker.create_export_association(object_id=history_id, object_type=self.export_object_type)
+ def get_record_metadata(self, export: model.StoreExportAssociation) -> Optional[ExportObjectMetadata]:
+ json_metadata = export.export_metadata
+ export_metadata = ExportObjectMetadata.parse_raw(json_metadata) if json_metadata else None
+ return export_metadata
+
def _serialize_task_export(self, export: model.StoreExportAssociation, history: model.History):
task_uuid = export.task_uuid
export_date = export.create_time
history_has_changed = history.update_time > export_date
- json_metadata = export.export_metadata
- export_metadata = ExportObjectMetadata.parse_raw(json_metadata) if json_metadata else None
- is_ready = (
- export_metadata is not None
- and export_metadata.result_data is not None
- and export_metadata.result_data.success
- )
+ export_metadata = self.get_record_metadata(export)
+ is_ready = export_metadata is not None and export_metadata.is_ready()
is_export_up_to_date = is_ready and not history_has_changed
return {
"id": export.id,
@@ -559,6 +597,7 @@ def __init__(
"name",
"deleted",
"purged",
+ "archived",
"count",
"url",
# TODO: why these?
diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py
index bb5fe2bfc703..39ad3f72eddd 100644
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2822,6 +2822,8 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable
slug = Column(TEXT)
published = Column(Boolean, index=True, default=False)
preferred_object_store_id = Column(String(255), nullable=True)
+ archived = Column(Boolean, index=True, default=False, server_default=false())
+ archive_export_id = Column(Integer, ForeignKey("store_export_association.id"), nullable=True, default=None)
datasets = relationship(
"HistoryDatasetAssociation", back_populates="history", cascade_backrefs=False, order_by=lambda: asc(HistoryDatasetAssociation.hid) # type: ignore[has-type]
@@ -2915,6 +2917,7 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable
"genome_build",
"deleted",
"purged",
+ "archived",
"update_time",
"published",
"importable",
diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/e0561d5fc8c7_add_archived_columns_to_history.py b/lib/galaxy/model/migrations/alembic/versions_gxy/e0561d5fc8c7_add_archived_columns_to_history.py
new file mode 100644
index 000000000000..9de6fae98ca9
--- /dev/null
+++ b/lib/galaxy/model/migrations/alembic/versions_gxy/e0561d5fc8c7_add_archived_columns_to_history.py
@@ -0,0 +1,77 @@
+"""add archived columns to history
+
+Revision ID: e0561d5fc8c7
+Revises: 2d749563e1fe
+Create Date: 2023-05-03 11:57:58.710098
+
+"""
+import sqlalchemy as sa
+
+from galaxy.model.database_object_names import (
+ build_foreign_key_name,
+ build_index_name,
+)
+from galaxy.model.migrations.util import (
+ add_column,
+ create_foreign_key,
+ create_index,
+ drop_column,
+ drop_constraint,
+ drop_index,
+ transaction,
+)
+
+# revision identifiers, used by Alembic.
+revision = "e0561d5fc8c7"
+down_revision = "2d749563e1fe"
+branch_labels = None
+depends_on = None
+
+table_name = "history"
+
+archived_column_name = "archived"
+archived_column_index_name = build_index_name(table_name, archived_column_name)
+
+ref_table_name = "store_export_association"
+archived_export_id_column_name = "archive_export_id"
+archived_export_id_column_fk_name = build_foreign_key_name(table_name, archived_export_id_column_name)
+
+
+def upgrade():
+ with transaction():
+ _add_column_archived()
+ _add_column_archive_export_id()
+
+
+def downgrade():
+ with transaction():
+ _drop_column_archived()
+ _drop_column_archive_export_id()
+
+
+def _add_column_archived():
+ add_column(
+ table_name,
+ sa.Column(archived_column_name, sa.Boolean(), default=False, server_default=sa.false()),
+ )
+ create_index(archived_column_index_name, table_name, [archived_column_name])
+
+
+def _add_column_archive_export_id():
+ add_column(
+ table_name,
+ sa.Column(archived_export_id_column_name, sa.Integer, nullable=True, default=None),
+ )
+ create_foreign_key(
+ archived_export_id_column_fk_name, table_name, ref_table_name, [archived_export_id_column_name], ["id"]
+ )
+
+
+def _drop_column_archived():
+ drop_index(archived_column_index_name, table_name)
+ drop_column(table_name, archived_column_name)
+
+
+def _drop_column_archive_export_id():
+ drop_constraint(archived_export_id_column_fk_name, table_name)
+ drop_column(table_name, archived_export_id_column_name)
diff --git a/lib/galaxy/schema/schema.py b/lib/galaxy/schema/schema.py
index 9943adf93553..3ea6bcd95072 100644
--- a/lib/galaxy/schema/schema.py
+++ b/lib/galaxy/schema/schema.py
@@ -1038,6 +1038,11 @@ class HistorySummary(HistoryBase):
title="Purged",
description="Whether this item has been permanently removed.",
)
+ archived: bool = Field(
+ ...,
+ title="Archived",
+ description="Whether this item has been archived and is no longer active.",
+ )
url: RelativeUrl = RelativeUrlField
published: bool = Field(
...,
@@ -1537,6 +1542,14 @@ class ExportObjectMetadata(Model):
request_data: ExportObjectRequestMetadata
result_data: Optional[ExportObjectResultMetadata]
+ def is_short_term(self):
+ """Whether the export is a short term export."""
+ return isinstance(self.request_data.payload, ShortTermStoreExportPayload)
+
+ def is_ready(self):
+ """Whether the export has finished and it's ready to be used."""
+ return self.result_data is not None and self.result_data.success
+
class ObjectExportTaskResponse(ObjectExportResponseBase):
task_uuid: UUID4 = Field(
@@ -1557,6 +1570,55 @@ class ExportTaskListResponse(Model):
__accept_type__ = "application/vnd.galaxy.task.export+json"
+class ArchiveHistoryRequestPayload(Model):
+ archive_export_id: Optional[DecodedDatabaseIdField] = Field(
+ default=None,
+ title="Export Record ID",
+ description=(
+            "The encoded ID of the export record to associate with this history archival. "
+ "This is used to be able to recover the history from the export record."
+ ),
+ )
+ purge_history: bool = Field(
+ default=False,
+ title="Purge History",
+ description="Whether to purge the history after archiving it. It requires an `archive_export_id` to be set.",
+ )
+
+
+class ExportRecordData(WriteStoreToPayload):
+ """Data of an export record associated with a history that was archived."""
+
+ # Initially this is just a WriteStoreToPayload, but we may want to add more data to
+ # this in the future to support more complex export scenarios or target destinations.
+ pass
+
+
+class ExportAssociationData(Model):
+ export_record_data: Optional[ExportRecordData] = Field(
+ default=None,
+ title="Export Record Data",
+ description="The export record data associated with this archived history. Used to recover the history.",
+ )
+
+
+class ArchivedHistorySummary(HistorySummary, ExportAssociationData):
+ pass
+
+
+class ArchivedHistoryDetailed(HistoryDetailed, ExportAssociationData):
+ pass
+
+
+AnyArchivedHistoryView = Union[
+ ArchivedHistorySummary,
+ ArchivedHistoryDetailed,
+ # Any will cover those cases in which only specific `keys` are requested
+ # otherwise the validation will fail because the required fields are not returned
+ Any,
+]
+
+
class LabelValuePair(Model):
"""Generic Label/Value pair model."""
diff --git a/lib/galaxy/webapps/galaxy/api/common.py b/lib/galaxy/webapps/galaxy/api/common.py
index 7764a7aa4aae..eb312879abcc 100644
--- a/lib/galaxy/webapps/galaxy/api/common.py
+++ b/lib/galaxy/webapps/galaxy/api/common.py
@@ -60,6 +60,16 @@
description="The maximum number of items to return.",
)
+OrderQueryParam: Optional[str] = Query(
+ default=None,
+ title="Order",
+ description=(
+ "String containing one of the valid ordering attributes followed (optionally) "
+ "by '-asc' or '-dsc' for ascending and descending order respectively. "
+ "Orders can be stacked as a comma-separated list of values."
+ ),
+)
+
def parse_serialization_params(
view: Optional[str] = None,
@@ -99,16 +109,7 @@ def get_filter_query_params(
qv: Optional[List[str]] = FilterValueQueryParam,
offset: Optional[int] = OffsetQueryParam,
limit: Optional[int] = LimitQueryParam,
- order: Optional[str] = Query(
- default=None,
- title="Order",
- description=(
- "String containing one of the valid ordering attributes followed (optionally) "
- "by '-asc' or '-dsc' for ascending and descending order respectively. "
- "Orders can be stacked as a comma-separated list of values."
- ),
- example="name-dsc,create_time",
- ),
+ order: Optional[str] = OrderQueryParam,
) -> FilterQueryParams:
"""
This function is meant to be used as a Dependency.
diff --git a/lib/galaxy/webapps/galaxy/api/histories.py b/lib/galaxy/webapps/galaxy/api/histories.py
index 7985ea9822f6..edeb029c4622 100644
--- a/lib/galaxy/webapps/galaxy/api/histories.py
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -33,7 +33,9 @@
)
from galaxy.schema.fields import DecodedDatabaseIdField
from galaxy.schema.schema import (
+ AnyArchivedHistoryView,
AnyHistoryView,
+ ArchiveHistoryRequestPayload,
AsyncFile,
AsyncTaskResultSummary,
CreateHistoryFromStore,
@@ -180,6 +182,27 @@ def shared_with_me(
) -> List[AnyHistoryView]:
return self.service.shared_with_me(trans, serialization_params, filter_query_params)
+ @router.get(
+ "/api/histories/archived",
+ summary="Get a list of all archived histories for the current user.",
+ )
+ def get_archived_histories(
+ self,
+ response: Response,
+ trans: ProvidesHistoryContext = DependsOnTrans,
+ serialization_params: SerializationParams = Depends(query_serialization_params),
+ filter_query_params: FilterQueryParams = Depends(get_filter_query_params),
+ ) -> List[AnyArchivedHistoryView]:
+ """Get a list of all archived histories for the current user.
+
+        Archived histories are not part of the active histories of the user, but they can still be accessed using this endpoint.
+ """
+ archived_histories, total_matches = self.service.get_archived_histories(
+ trans, serialization_params, filter_query_params, include_total_matches=True
+ )
+ response.headers["total_matches"] = str(total_matches)
+ return archived_histories
+
@router.get(
"/api/histories/most_recently_used",
summary="Returns the most recently used history of the user.",
@@ -457,6 +480,56 @@ def get_custom_builds_metadata(
) -> CustomBuildsMetadataResponse:
return self.service.get_custom_builds_metadata(trans, history_id)
+ @router.post(
+ "/api/histories/{history_id}/archive",
+ summary="Archive a history.",
+ )
+ def archive_history(
+ self,
+ trans: ProvidesHistoryContext = DependsOnTrans,
+ history_id: DecodedDatabaseIdField = HistoryIDPathParam,
+ payload: Optional[ArchiveHistoryRequestPayload] = Body(default=None),
+ ) -> AnyArchivedHistoryView:
+ """Marks the given history as 'archived' and returns the history.
+
+ Archiving a history will remove it from the list of active histories of the user but it will still be
+ accessible via the `/api/histories/{id}` or the `/api/histories/archived` endpoints.
+
+ Associating an export record:
+
+ - Optionally, an export record (containing information about a recent snapshot of the history) can be associated with the
+ archived history by providing an `archive_export_id` in the payload. The export record must belong to the history and
+ must be in the ready state.
+ - When associating an export record, the history can be purged after it has been archived using the `purge_history` flag.
+
+ If the history is already archived, this endpoint will return a 409 Conflict error, indicating that the history is already archived.
+ If the history was not purged after it was archived, you can restore it using the `/api/histories/{id}/archive/restore` endpoint.
+ """
+ return self.service.archive_history(trans, history_id, payload)
+
+ @router.put(
+ "/api/histories/{history_id}/archive/restore",
+ summary="Restore an archived history.",
+ )
+ def restore_archived_history(
+ self,
+ trans: ProvidesHistoryContext = DependsOnTrans,
+ history_id: DecodedDatabaseIdField = HistoryIDPathParam,
+ force: Optional[bool] = Query(
+ default=None,
+ description="If true, the history will be un-archived even if it has an associated archive export record and was purged.",
+ ),
+ ) -> AnyHistoryView:
+ """Restores an archived history and returns it.
+
+ Restoring an archived history will add it back to the list of active histories of the user (unless it was purged).
+
+ **Warning**: Please note that histories that are associated with an archive export might be purged after export, so un-archiving them
+ will not restore the datasets that were in the history before it was archived. You will need to import back the archive export
+ record to restore the history and its datasets as a new copy. See `/api/histories/from_store_async` for more information.
+ """
+ return self.service.restore_archived_history(trans, history_id, force)
+
@router.get(
"/api/histories/{history_id}/sharing",
summary="Get the current sharing status of the given item.",
diff --git a/lib/galaxy/webapps/galaxy/buildapp.py b/lib/galaxy/webapps/galaxy/buildapp.py
index 38088273017f..db71235de962 100644
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -240,7 +240,9 @@ def app_pair(global_conf, load_app_kwds=None, wsgi_preflight=True, **kwargs):
webapp.add_client_route("/histories/list")
webapp.add_client_route("/histories/import")
webapp.add_client_route("/histories/{history_id}/export")
+ webapp.add_client_route("/histories/{history_id}/archive")
webapp.add_client_route("/histories/{history_id}/invocations")
+ webapp.add_client_route("/histories/archived")
webapp.add_client_route("/histories/list_published")
webapp.add_client_route("/histories/list_shared")
webapp.add_client_route("/histories/rename")
diff --git a/lib/galaxy/webapps/galaxy/controllers/history.py b/lib/galaxy/webapps/galaxy/controllers/history.py
index fe7eaac31b3c..00411d5c160b 100644
--- a/lib/galaxy/webapps/galaxy/controllers/history.py
+++ b/lib/galaxy/webapps/galaxy/controllers/history.py
@@ -171,7 +171,7 @@ def get_current_item(self, trans, **kwargs):
return trans.get_history()
def apply_query_filter(self, trans, query, **kwargs):
- return query.filter_by(user=trans.user, importing=False)
+ return query.filter_by(user=trans.user, importing=False, archived=False)
class SharedHistoryListGrid(grids.Grid):
diff --git a/lib/galaxy/webapps/galaxy/services/histories.py b/lib/galaxy/webapps/galaxy/services/histories.py
index f10d3b00eeb5..70ce87d3b768 100644
--- a/lib/galaxy/webapps/galaxy/services/histories.py
+++ b/lib/galaxy/webapps/galaxy/services/histories.py
@@ -48,7 +48,9 @@
)
from galaxy.schema.fields import DecodedDatabaseIdField
from galaxy.schema.schema import (
+ AnyArchivedHistoryView,
AnyHistoryView,
+ ArchiveHistoryRequestPayload,
AsyncFile,
AsyncTaskResultSummary,
CreateHistoryFromStore,
@@ -156,7 +158,9 @@ def index(
raise glx_exceptions.AdminRequiredException(message)
else:
filters += [model.History.user == current_user]
- # and any sent in from the query string
+ # exclude archived histories
+ filters += [model.History.archived == false()]
+ # and apply any other filters
filters += self.filters.parse_filters(filter_params)
order_by = self._build_order_by(filter_query_params.order)
@@ -670,3 +674,122 @@ def _serialize_history(
def _build_order_by(self, order: Optional[str]):
return self.build_order_by(self.manager, order or DEFAULT_ORDER_BY)
+
+ def archive_history(
+ self,
+ trans: ProvidesHistoryContext,
+ history_id: DecodedDatabaseIdField,
+ payload: Optional[ArchiveHistoryRequestPayload] = None,
+ ) -> AnyArchivedHistoryView:
+ """Marks the history with the given id as archived and optionally associates it with the given archive export record in the payload.
+
+ Archived histories are not part of the active histories of the user, so they won't be shown to the user by default.
+ """
+ if trans.anonymous:
+ raise glx_exceptions.AuthenticationRequired("Only registered users can archive histories.")
+
+ history = self.manager.get_owned(history_id, trans.user)
+ if history.archived:
+ raise glx_exceptions.Conflict("History is already archived.")
+
+ archive_export_id = payload.archive_export_id if payload else None
+ if archive_export_id:
+ export_record = self.history_export_manager.get_task_export_by_id(archive_export_id)
+ self._ensure_export_record_can_be_associated_with_history_archival(history_id, export_record)
+ # After this point, the export record is valid and can be associated with the history archival
+ purge_history = payload.purge_history if payload else False
+ if purge_history:
+ if archive_export_id is None:
+ raise glx_exceptions.RequestParameterMissingException(
+ "Cannot purge history without an export record. A valid archive_export_id is required."
+ )
+ self.manager.purge(history)
+ history = self.manager.archive_history(history, archive_export_id=archive_export_id)
+ return self._serialize_archived_history(trans, history)
+
+ def _ensure_export_record_can_be_associated_with_history_archival(
+ self, history_id: int, export_record: model.StoreExportAssociation
+ ):
+ if export_record.object_id != history_id or export_record.object_type != "history":
+ raise glx_exceptions.RequestParameterInvalidException(
+ "The given archive export record does not belong to this history."
+ )
+ export_metadata = self.history_export_manager.get_record_metadata(export_record)
+ if export_metadata is None:
+ log.error(
+ f"Trying to archive history [{history_id}] with an export record. "
+ f"But the given archive export record [{export_record.id}] does not have the required metadata."
+ )
+ raise glx_exceptions.RequestParameterInvalidException(
+ "The given archive export record does not have the required metadata."
+ )
+ if not export_metadata.is_ready():
+ raise glx_exceptions.RequestParameterInvalidException(
+ "The given archive export record must be ready before it can be used to archive a history. "
+ "Please wait for the export to finish and try again."
+ )
+ if export_metadata.is_short_term():
+ raise glx_exceptions.RequestParameterInvalidException(
+                "The given archive export record is temporary; only persistent sources can be used to archive a history."
+ )
+ # TODO: should we also ensure the export was requested to include files with `include_files`, `include_hidden`, etc.?
+
+ def restore_archived_history(
+ self,
+ trans: ProvidesHistoryContext,
+ history_id: DecodedDatabaseIdField,
+ force: Optional[bool] = False,
+ ) -> AnyHistoryView:
+ if trans.anonymous:
+ raise glx_exceptions.AuthenticationRequired("Only registered users can access archived histories.")
+
+ history = self.manager.get_owned(history_id, trans.user)
+ history = self.manager.restore_archived_history(history, force=force or False)
+ return self._serialize_archived_history(trans, history)
+
+ def get_archived_histories(
+ self,
+ trans: ProvidesHistoryContext,
+ serialization_params: SerializationParams,
+ filter_query_params: FilterQueryParams,
+ include_total_matches: bool = False,
+ ) -> Tuple[List[AnyArchivedHistoryView], Optional[int]]:
+ if trans.anonymous:
+ raise glx_exceptions.AuthenticationRequired("Only registered users can have or access archived histories.")
+
+ filters = self.filters.parse_query_filters(filter_query_params)
+ filters += [
+ model.History.user == trans.user,
+ model.History.archived == true(),
+ ]
+ total_matches = self.manager.count(filters=filters) if include_total_matches else None
+ order_by = self._build_order_by(filter_query_params.order)
+ histories = self.manager.list(
+ filters=filters, order_by=order_by, limit=filter_query_params.limit, offset=filter_query_params.offset
+ )
+
+ histories = [self._serialize_archived_history(trans, history, serialization_params) for history in histories]
+ return histories, total_matches
+
+ def _serialize_archived_history(
+ self,
+ trans: ProvidesHistoryContext,
+ history: model.History,
+ serialization_params: Optional[SerializationParams] = None,
+ ):
+ if serialization_params is None:
+ serialization_params = SerializationParams(default_view="summary")
+ archived_history = self.serializer.serialize_to_view(
+ history, user=trans.user, trans=trans, **serialization_params.dict()
+ )
+ export_record_data = self._get_export_record_data(history)
+ archived_history["export_record_data"] = export_record_data.dict() if export_record_data else None
+ return archived_history
+
+ def _get_export_record_data(self, history: model.History) -> Optional[WriteStoreToPayload]:
+ if history.archive_export_id:
+ export_record = self.history_export_manager.get_task_export_by_id(history.archive_export_id)
+ export_metadata = self.history_export_manager.get_record_metadata(export_record)
+ if export_metadata and isinstance(export_metadata.request_data.payload, WriteStoreToPayload):
+ return export_metadata.request_data.payload
+ return None
diff --git a/lib/galaxy_test/api/test_histories.py b/lib/galaxy_test/api/test_histories.py
index 9dce9596c8fc..2f30a3431562 100644
--- a/lib/galaxy_test/api/test_histories.py
+++ b/lib/galaxy_test/api/test_histories.py
@@ -805,3 +805,112 @@ def _update_permissions(self, history_id: str, dataset_id: str, payload):
update_url = self._api_url(url, **{"use_admin_key": True})
update_response = put(update_url, json=payload)
return update_response
+
+
+class TestArchivingHistoriesWithoutExportRecord(ApiTestCase, BaseHistories):
+ def setUp(self):
+ super().setUp()
+ self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
+
+ def test_archive(self):
+ history_id = self.dataset_populator.new_history()
+
+ history_details = self._show(history_id)
+ assert history_details["archived"] is False
+
+ archive_response = self.dataset_populator.archive_history(history_id)
+ self._assert_status_code_is(archive_response, 200)
+ assert archive_response.json()["archived"] is True
+
+ history_details = self._show(history_id)
+ assert history_details["archived"] is True
+
+ def test_other_users_cannot_archive_history(self):
+ history_id = self.dataset_populator.new_history()
+
+ with self._different_user():
+ archive_response = self.dataset_populator.archive_history(history_id)
+ self._assert_status_code_is(archive_response, 403)
+
+ def test_restore(self):
+ history_id = self.dataset_populator.new_history()
+
+ archive_response = self.dataset_populator.archive_history(history_id)
+ self._assert_status_code_is(archive_response, 200)
+ assert archive_response.json()["archived"] is True
+
+ restore_response = self.dataset_populator.restore_archived_history(history_id)
+ self._assert_status_code_is(restore_response, 200)
+ assert restore_response.json()["archived"] is False
+
+ def test_other_users_cannot_restore_history(self):
+ history_id = self.dataset_populator.new_history()
+
+ archive_response = self.dataset_populator.archive_history(history_id)
+ self._assert_status_code_is(archive_response, 200)
+ assert archive_response.json()["archived"] is True
+
+ with self._different_user():
+ restore_response = self.dataset_populator.restore_archived_history(history_id)
+ self._assert_status_code_is(restore_response, 403)
+
+ def test_archived_histories_index(self):
+ with self._different_user("archived_histories_index_user@bx.psu.edu"):
+ history_id = self.dataset_populator.new_history()
+
+ archive_response = self.dataset_populator.archive_history(history_id)
+ self._assert_status_code_is(archive_response, 200)
+ assert archive_response.json()["archived"] is True
+
+ archived_histories = self.dataset_populator.get_archived_histories()
+ assert len(archived_histories) == 1
+ assert archived_histories[0]["id"] == history_id
+
+ def test_archived_histories_filtering_and_sorting(self):
+ with self._different_user("archived_histories_filtering_user@bx.psu.edu"):
+ num_histories = 2
+ history_ids = []
+ for i in range(num_histories):
+ history_id = self.dataset_populator.new_history(name=f"History {i}")
+ archive_response = self.dataset_populator.archive_history(history_id)
+ self._assert_status_code_is(archive_response, 200)
+ assert archive_response.json()["archived"] is True
+ history_ids.append(history_id)
+
+ # Filter by name
+ archived_histories = self.dataset_populator.get_archived_histories(query="q=name-contains&qv=history")
+ assert len(archived_histories) == num_histories
+
+ archived_histories = self.dataset_populator.get_archived_histories(query="q=name-contains&qv=History 1")
+ assert len(archived_histories) == 1
+
+ # Order by name
+ archived_histories = self.dataset_populator.get_archived_histories(query="order=name-dsc")
+ assert len(archived_histories) == num_histories
+ assert archived_histories[0]["name"] == "History 1"
+ assert archived_histories[1]["name"] == "History 0"
+
+ archived_histories = self.dataset_populator.get_archived_histories(query="order=name-asc")
+ assert len(archived_histories) == num_histories
+ assert archived_histories[0]["name"] == "History 0"
+ assert archived_histories[1]["name"] == "History 1"
+
+ def test_archiving_an_archived_history_conflicts(self):
+ history_id = self.dataset_populator.new_history()
+
+ archive_response = self.dataset_populator.archive_history(history_id)
+ self._assert_status_code_is(archive_response, 200)
+ assert archive_response.json()["archived"] is True
+
+ archive_response = self.dataset_populator.archive_history(history_id)
+ self._assert_status_code_is(archive_response, 409)
+
+ def test_archived_histories_are_not_listed_by_default(self):
+ history_id = self.dataset_populator.new_history()
+ archive_response = self.dataset_populator.archive_history(history_id)
+ self._assert_status_code_is(archive_response, 200)
+ assert archive_response.json()["archived"] is True
+
+ histories = self.dataset_populator.get_histories()
+ for history in histories:
+ assert history["id"] != history_id
diff --git a/lib/galaxy_test/base/populators.py b/lib/galaxy_test/base/populators.py
index 6d0fcd7103b4..72ee71034b12 100644
--- a/lib/galaxy_test/base/populators.py
+++ b/lib/galaxy_test/base/populators.py
@@ -1480,6 +1480,36 @@ def get_history_export_tasks(self, history_id: str):
api_asserts.assert_status_code_is_ok(response)
return response.json()
+ def wait_for_export_task_on_record(self, export_record):
+ if export_record["preparing"]:
+ assert export_record["task_uuid"]
+ self.wait_on_task_id(export_record["task_uuid"])
+
+ def archive_history(
+ self, history_id: str, export_record_id: Optional[str] = None, purge_history: Optional[bool] = False
+ ) -> Response:
+ payload = (
+ {
+ "archive_export_id": export_record_id,
+ "purge_history": purge_history,
+ }
+ if export_record_id is not None or purge_history is not None
+ else None
+ )
+ archive_response = self._post(f"histories/{history_id}/archive", data=payload, json=True)
+ return archive_response
+
+ def restore_archived_history(self, history_id: str, force: Optional[bool] = None) -> Response:
+ restore_response = self._put(f"histories/{history_id}/archive/restore{f'?force={force}' if force else ''}")
+ return restore_response
+
+ def get_archived_histories(self, query: Optional[str] = None) -> List[Dict[str, Any]]:
+ if query:
+ query = f"?{query}"
+ index_response = self._get(f"histories/archived{query if query else ''}")
+ index_response.raise_for_status()
+ return index_response.json()
+
class GalaxyInteractorHttpMixin:
galaxy_interactor: ApiTestInteractor
diff --git a/test/integration/test_history_archiving.py b/test/integration/test_history_archiving.py
new file mode 100644
index 000000000000..a25b56cf5aff
--- /dev/null
+++ b/test/integration/test_history_archiving.py
@@ -0,0 +1,242 @@
+from typing import Optional
+from uuid import uuid4
+
+from galaxy.schema.schema import ModelStoreFormat
+from galaxy_test.base.api import UsesCeleryTasks
+from galaxy_test.base.populators import DatasetPopulator
+from galaxy_test.driver.integration_setup import PosixFileSourceSetup
+from galaxy_test.driver.integration_util import IntegrationTestCase
+
+
+class TestHistoryArchivingWithExportRecord(IntegrationTestCase, UsesCeleryTasks, PosixFileSourceSetup):  # End-to-end tests: archive/restore/reimport histories backed by permanent export records.
+    dataset_populator: DatasetPopulator
+    task_based = True  # NOTE(review): presumably makes exports run through Celery tasks (see UsesCeleryTasks) — confirm
+
+    @classmethod
+    def handle_galaxy_config_kwds(cls, config):  # Wire up both the posix file source and Celery task configuration.
+        PosixFileSourceSetup.handle_galaxy_config_kwds(config, cls)
+        UsesCeleryTasks.handle_galaxy_config_kwds(config)
+
+    def setUp(self):
+        super().setUp()
+        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
+
+    def test_archive_history_with_export_record_purges_history(self):  # Archiving with an export record and purge_history=True deletes, purges and archives the history.
+        history_name = f"for_archiving_{uuid4()}"  # unique name so _get_archived_history_with_name matches exactly one history
+        history_id = self.dataset_populator.setup_history_for_export_testing(history_name)
+        history = self._get(f"histories/{history_id}").json()
+        assert history["deleted"] is False
+        assert history["purged"] is False
+        assert history["archived"] is False
+
+        target_uri = f"gxfiles://posix_test/history_{history_id}"
+        export_record = self._export_history_to_permanent_storage(history_id, target_uri=target_uri)
+        archive_response = self.dataset_populator.archive_history(
+            history_id,
+            export_record_id=export_record["id"],
+            purge_history=True,
+        )
+        self._assert_status_code_is_ok(archive_response)
+
+        archived_history = self._get_archived_history_with_name(history_name)
+        assert archived_history["deleted"] is True
+        assert archived_history["purged"] is True
+        assert archived_history["archived"] is True
+        assert archived_history["export_record_data"] is not None  # the archive keeps a pointer to the export record used
+        assert archived_history["export_record_data"]["target_uri"] == target_uri
+
+    def test_archive_history_does_not_purge_history_with_export_record_but_purge_history_false(self):  # purge_history=False archives but leaves the history neither deleted nor purged.
+        history_name = f"for_archiving_{uuid4()}"
+        history_id = self.dataset_populator.setup_history_for_export_testing(history_name)
+        history = self._get(f"histories/{history_id}").json()
+        assert history["deleted"] is False
+        assert history["purged"] is False
+        assert history["archived"] is False
+
+        target_uri = f"gxfiles://posix_test/history_{history_id}"
+        export_record = self._export_history_to_permanent_storage(history_id, target_uri=target_uri)
+        archive_response = self.dataset_populator.archive_history(
+            history_id,
+            export_record_id=export_record["id"],
+            purge_history=False,
+        )
+        self._assert_status_code_is_ok(archive_response)
+
+        archived_history = self._get_archived_history_with_name(history_name)
+        assert archived_history["deleted"] is False
+        assert archived_history["purged"] is False
+        assert archived_history["archived"] is True
+        assert archived_history["export_record_data"] is not None
+        assert archived_history["export_record_data"]["target_uri"] == target_uri
+
+    def test_archive_history_does_not_purge_history_without_export_record(self):  # Purging on archive requires an export record; the API rejects with 400 otherwise.
+        history_name = f"for_archiving_{uuid4()}"
+        history_id = self.dataset_populator.setup_history_for_export_testing(history_name)
+        history = self._get(f"histories/{history_id}").json()
+        assert history["deleted"] is False
+        assert history["purged"] is False
+        assert history["archived"] is False
+
+        archive_response = self.dataset_populator.archive_history(history_id, purge_history=True)
+        self._assert_status_code_is(archive_response, 400)
+        assert "Cannot purge history without an export record" in archive_response.json()["err_msg"]
+
+    def test_archive_history_with_invalid_export_record_fails(self):  # Malformed ids, records of another history, and temporal (short-term) records are all rejected with 400.
+        history_name = f"for_archiving_failure_{uuid4()}"
+        history_id = self.dataset_populator.setup_history_for_export_testing(history_name)
+        history = self._get(f"histories/{history_id}").json()
+        assert history["archived"] is False
+
+        archive_response = self.dataset_populator.archive_history(history_id, export_record_id="invalid")
+        self._assert_status_code_is(archive_response, 400)
+        assert "Invalid id" in archive_response.json()["err_msg"]
+
+        # Only export records belonging to the history can be used to archive the history.
+        other_history_id = self.dataset_populator.new_history(name=f"other_{uuid4()}")
+        target_uri = f"gxfiles://posix_test/history_{other_history_id}"
+        other_export_record = self._export_history_to_permanent_storage(other_history_id, target_uri=target_uri)
+        archive_response = self.dataset_populator.archive_history(
+            history_id, export_record_id=other_export_record["id"]
+        )
+        self._assert_status_code_is(archive_response, 400)
+        assert "The given archive export record does not belong to this history" in archive_response.json()["err_msg"]
+
+        # Only permanent export records can be used to archive the history.
+        export_record = self._export_history_to_short_term_storage(history_id)
+        archive_response = self.dataset_populator.archive_history(history_id, export_record_id=export_record["id"])
+        self._assert_status_code_is(archive_response, 400)
+        assert "The given archive export record is temporal" in archive_response.json()["err_msg"]
+
+        history = self._get(f"histories/{history_id}").json()
+        assert history["archived"] is False  # none of the failed attempts should have archived the history
+
+    def test_restore_archived_history_with_export_record_and_purged(self):  # Restoring a purged+archived history needs force=True and does not un-purge it.
+        history_name = f"for_restoring_{uuid4()}"
+        history_id = self.dataset_populator.setup_history_for_export_testing(history_name)
+
+        target_uri = f"gxfiles://posix_test/history_{history_id}"
+        export_record = self._export_history_to_permanent_storage(history_id, target_uri=target_uri)
+        archive_response = self.dataset_populator.archive_history(
+            history_id,
+            export_record_id=export_record["id"],
+            purge_history=True,
+        )
+        self._assert_status_code_is_ok(archive_response)
+
+        # Trying to restore an archived (and purged) history with an export record should fail by default
+        archived_history = self._get_archived_history_with_name(history_name)
+        restore_response = self.dataset_populator.restore_archived_history(archived_history["id"])
+        self._assert_status_code_is(restore_response, 400)
+        assert (
+            "Cannot restore an archived (and purged) history that is associated with an archive export record"
+            in restore_response.json()["err_msg"]
+        )
+
+        # Trying to restore an archived (and purged) history with an export record should succeed if the force flag is set
+        restore_response = self.dataset_populator.restore_archived_history(archived_history["id"], force=True)  # NOTE(review): response status never asserted — consider _assert_status_code_is_ok
+        restored_history = self._get(f"histories/{history_id}").json()
+        assert restored_history["archived"] is False
+        # But of course, restoring the history this way will not change the fact that the history is still purged
+        assert restored_history["deleted"] is True
+        assert restored_history["purged"] is True
+
+    def test_restore_archived_history_with_export_record_and_not_purged(self):  # Non-purged archived histories restore fully without the force flag.
+        history_name = f"for_restoring_{uuid4()}"
+        history_id = self.dataset_populator.setup_history_for_export_testing(history_name)
+
+        target_uri = f"gxfiles://posix_test/history_{history_id}"
+        export_record = self._export_history_to_permanent_storage(history_id, target_uri=target_uri)
+        archive_response = self.dataset_populator.archive_history(
+            history_id,
+            export_record_id=export_record["id"],
+            purge_history=False,
+        )
+        self._assert_status_code_is_ok(archive_response)
+        archived_history = self._get_archived_history_with_name(history_name)
+        assert archived_history["archived"] is True
+        assert archived_history["export_record_data"] is not None
+        assert archived_history["export_record_data"]["target_uri"] == target_uri
+
+        # Trying to restore an archived (non-purged) history with an export record should succeed without the force flag
+        restore_response = self.dataset_populator.restore_archived_history(archived_history["id"])
+        self._assert_status_code_is_ok(restore_response)
+        restored_history = self._get(f"histories/{history_id}").json()
+        assert restored_history["archived"] is False
+        assert restored_history["deleted"] is False
+        assert restored_history["purged"] is False
+
+    def test_reimport_history_copy_from_archive_export_record(self):  # After archive+purge, the stored export can be re-imported as a fresh copy of the history.
+        history_name = f"for_reimporting_{uuid4()}"
+        history_id = self.dataset_populator.setup_history_for_export_testing(history_name)
+
+        model_store_format = ModelStoreFormat.ROCRATE_ZIP
+        target_uri = f"gxfiles://posix_test/history_{history_id}"
+        export_record = self._export_history_to_permanent_storage(
+            history_id, target_uri=target_uri, model_store_format=model_store_format
+        )
+        archive_response = self.dataset_populator.archive_history(
+            history_id,
+            export_record_id=export_record["id"],
+            purge_history=True,
+        )
+        self._assert_status_code_is_ok(archive_response)
+        archived_history = self._get_archived_history_with_name(history_name)
+        assert archived_history["purged"] is True
+        assert archived_history["archived"] is True
+        assert archived_history["export_record_data"] is not None
+        assert archived_history["export_record_data"]["target_uri"] == target_uri
+
+        # Re-importing the history from the export record data should succeed
+        self.dataset_populator.import_history_from_uri_async(
+            target_uri=target_uri, model_store_format=model_store_format
+        )
+        last_history = self._get("histories?limit=1").json()  # assumes the freshest history in the index is the new import — TODO confirm ordering
+        assert len(last_history) == 1
+        imported_history = last_history[0]
+        imported_history_id = imported_history["id"]
+        assert imported_history_id != history_id  # the import is a copy, not a restore of the purged original
+        assert imported_history["name"] == history_name
+        assert imported_history["deleted"] is False
+        assert imported_history["purged"] is False
+        self.dataset_populator.wait_for_history(imported_history_id)
+        history_contents = self.dataset_populator.get_history_contents(imported_history_id)
+        assert len(history_contents) == 2  # presumably setup_history_for_export_testing creates one ok and one deleted dataset — confirm against that helper
+        for dataset in history_contents:
+            if dataset["deleted"] is True:
+                assert dataset["state"] == "discarded"
+                assert dataset["purged"] is True
+            else:
+                assert dataset["state"] == "ok"
+                assert dataset["purged"] is False
+
+    def _get_archived_history_with_name(self, history_name: str):  # Fetch exactly one archived history by (unique) name via the archived index endpoint.
+        archived_histories = self.dataset_populator.get_archived_histories(query=f"q=name-eq&qv={history_name}")
+        assert len(archived_histories) == 1
+        archived_history = archived_histories[0]
+        return archived_history
+
+    def _export_history_to_permanent_storage(
+        self,
+        history_id: str,
+        target_uri: Optional[str] = None,
+        model_store_format: ModelStoreFormat = ModelStoreFormat.ROCRATE_ZIP,
+    ):  # Export to the posix file source (permanent storage), wait for the single record, and return it.
+        target_uri = (
+            f"gxfiles://posix_test/history_{history_id}.{model_store_format}" if target_uri is None else target_uri
+        )
+        self.dataset_populator.export_history_to_uri_async(history_id, target_uri, model_store_format)
+        export_records = self.dataset_populator.get_history_export_tasks(history_id)
+        assert len(export_records) == 1
+        last_record = export_records[0]
+        self.dataset_populator.wait_for_export_task_on_record(last_record)
+        assert last_record["ready"] is True  # NOTE(review): asserts the snapshot fetched before waiting; record is not re-fetched — confirm this is intended
+        return last_record
+
+    def _export_history_to_short_term_storage(self, history_id):  # Export via short-term (temporal) storage; such records cannot back an archive.
+        self.dataset_populator.download_history_to_store(history_id)
+        export_records = self.dataset_populator.get_history_export_tasks(history_id)
+        assert len(export_records) == 1
+        last_record = export_records[0]
+        self.dataset_populator.wait_for_export_task_on_record(last_record)
+        assert last_record["ready"] is True  # NOTE(review): same pre-wait-snapshot caveat as _export_history_to_permanent_storage
+        return last_record
diff --git a/test/integration/test_history_import_export.py b/test/integration/test_history_import_export.py
index eb9848609cef..d1537cac8600 100644
--- a/test/integration/test_history_import_export.py
+++ b/test/integration/test_history_import_export.py
@@ -135,7 +135,7 @@ def test_export_tracking(self):
export_records = self.dataset_populator.get_history_export_tasks(history_id)
assert len(export_records) == 1
last_record = export_records[0]
- self._wait_for_export_task_on_record(last_record)
+ self.dataset_populator.wait_for_export_task_on_record(last_record)
assert last_record["ready"] is True
# Check metadata
@@ -153,7 +153,7 @@ def test_export_tracking(self):
export_records = self.dataset_populator.get_history_export_tasks(history_id)
assert len(export_records) == 2
last_record = export_records[0]
- self._wait_for_export_task_on_record(last_record)
+ self.dataset_populator.wait_for_export_task_on_record(last_record)
assert last_record["ready"] is True
# Check metadata
@@ -196,7 +196,7 @@ def test_export_history_with_discarded_dataset_to_rocrate(self):
export_records = self.dataset_populator.get_history_export_tasks(history_id)
assert len(export_records) == 1
last_record = export_records[0]
- self._wait_for_export_task_on_record(last_record)
+ self.dataset_populator.wait_for_export_task_on_record(last_record)
assert last_record["ready"] is True
def test_export_missing_dataset_fails(self):
@@ -215,11 +215,6 @@ def test_export_missing_dataset_fails(self):
self._assert_status_code_is(result_response, 500)
assert "Cannot export history dataset" in result_response.json()["err_msg"]
- def _wait_for_export_task_on_record(self, record):
- if record["preparing"]:
- assert record["task_uuid"]
- self.dataset_populator.wait_on_task_id(record["task_uuid"])
-
class TestImportExportHistoryContentsViaTasksIntegration(IntegrationTestCase, UsesCeleryTasks):
dataset_populator: DatasetPopulator