diff --git a/.gitignore b/.gitignore index 5e64b96..c555cfb 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,5 @@ .settings/ .gradle/ .idea/ +/build.gradle +/~ diff --git a/build.gradle b/build.gradle index e8a5713..4267bcc 100644 --- a/build.gradle +++ b/build.gradle @@ -85,4 +85,4 @@ task listJars { doLast { configurations.compile.each { File file -> println file.name } } -} +} \ No newline at end of file diff --git a/src/integration-test/java/com/researchspace/dataverse/http/AbstractIntegrationTest.java b/src/integration-test/java/com/researchspace/dataverse/http/AbstractIntegrationTest.java index a82b64a..f28b8f3 100644 --- a/src/integration-test/java/com/researchspace/dataverse/http/AbstractIntegrationTest.java +++ b/src/integration-test/java/com/researchspace/dataverse/http/AbstractIntegrationTest.java @@ -1,23 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.http; - -import com.researchspace.dataverse.api.v1.*; -import com.researchspace.dataverse.spring.config.DataverseSpringConfig; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.Validate; -import org.junit.Test; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit4.AbstractJUnit4SpringContextTests; - -import java.net.URL; - -import static org.junit.Assert.assertTrue; /**
 Copyright 2016 ResearchSpace
 
@@ -33,60 +13,83 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 
-*/ + */ +package com.researchspace.dataverse.http; + +import java.net.URL; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.Validate; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit4.AbstractJUnit4SpringContextTests; + +import com.researchspace.dataverse.api.v1.DatasetOperations; +import com.researchspace.dataverse.api.v1.DataverseAPI; +import com.researchspace.dataverse.api.v1.DataverseConfig; +import com.researchspace.dataverse.api.v1.DataverseOperations; +import com.researchspace.dataverse.api.v1.InfoOperations; +import com.researchspace.dataverse.api.v1.MetadataOperations; +import com.researchspace.dataverse.api.v1.SearchOperations; +import com.researchspace.dataverse.spring.config.DataverseSpringConfig; + +/** + * Integration tests. 
+ */ @TestPropertySource(locations = "classpath:/test.properties") @ContextConfiguration(classes = { DataverseSpringConfig.class }) -@Slf4j public class AbstractIntegrationTest extends AbstractJUnit4SpringContextTests { - DatasetOperations datasetOps; - DataverseOperations dataverseOps; - MetadataOperations metadataOPs; - InfoOperations infoOps; - SearchOperations searchOps; - @Autowired - DataverseAPI dataverseAPI; - @Value("#{systemProperties['dataverseAlias']}") - protected String dataverseAlias; - @Value("#{systemProperties['dataverseApiKey']}") - protected String apiKey; - @Value("#{systemProperties['dataverseServerURL']}") - protected String serverURL; + DatasetOperations datasetOps; + DataverseOperations dataverseOps; + MetadataOperations metadataOPs; + InfoOperations infoOps; + SearchOperations searchOps; + @Autowired + DataverseAPI dataverseAPI; + @Value("#{systemProperties['dataverseAlias']}") + protected String dataverseAlias; + @Value("#{systemProperties['dataverseApiKey']}") + protected String apiKey; + @Value("#{systemProperties['dataverseServerURL']}") + protected String serverURL; - // defaults from test.properties - @Value("${dataverseAlias}") - protected String dataverseAliasdefault; - @Value("${dataverseServerURL}") - protected String serverURLDevault; + // defaults from test.properties + @Value("${dataverseAlias}") + protected String dataverseAliasdefault; + @Value("${dataverseServerURL}") + protected String serverURLDevault; - protected static final String ERROR_MSG = "ERROR"; + protected static final String ERROR_MSG = "ERROR"; - public void setUp() throws Exception { - validateServerCredentials(); - // log.info("serverURL: [{}], apiKey: [{}], dataverseId=[{}]", serverURL, apiKey, dataverseAlias); - URL uri = new URL(serverURL); - DataverseConfig cfg = new DataverseConfig(uri, apiKey, dataverseAlias); - dataverseAPI.configure(cfg); - datasetOps = dataverseAPI.getDatasetOperations(); - dataverseOps = dataverseAPI.getDataverseOperations(); - 
metadataOPs = dataverseAPI.getMetadataOperations(); - infoOps = dataverseAPI.getInfoOperations(); - searchOps = dataverseAPI.getSearchOperations(); - } + public void setUp() throws Exception { + validateServerCredentials(); + final URL uri = new URL(serverURL); + final DataverseConfig cfg = new DataverseConfig(uri, apiKey, dataverseAlias); + dataverseAPI.configure(cfg); + datasetOps = dataverseAPI.getDatasetOperations(); + dataverseOps = dataverseAPI.getDataverseOperations(); + metadataOPs = dataverseAPI.getMetadataOperations(); + infoOps = dataverseAPI.getInfoOperations(); + searchOps = dataverseAPI.getSearchOperations(); + } - private void validateServerCredentials() { - Validate.notEmpty(apiKey, "ApiKey must be set via command line -DdataverseApiKey option"); - dataverseAlias = (StringUtils.isEmpty(dataverseAlias)) ? dataverseAliasdefault : dataverseAlias; - serverURL = (StringUtils.isEmpty(serverURL)) ? serverURLDevault : serverURL; - Validate.notEmpty(dataverseAlias, - "Dataverse alias must be set via command line -DdataverseAlias option or in test.properties"); - Validate.notEmpty(serverURL, - "Dataverse server URL must be set via command line -DdataverseServerURL option or in test.properties)"); - } + private void validateServerCredentials() { + Validate.notEmpty(apiKey, "ApiKey must be set via command line -DdataverseApiKey option"); + dataverseAlias = StringUtils.isEmpty(dataverseAlias) ? dataverseAliasdefault : dataverseAlias; + serverURL = StringUtils.isEmpty(serverURL) ? 
serverURLDevault : serverURL; + Validate.notEmpty(dataverseAlias, + "Dataverse alias must be set via command line -DdataverseAlias option or in test.properties"); + Validate.notEmpty(serverURL, + "Dataverse server URL must be set via command line -DdataverseServerURL option or in test.properties)"); + } - @Test - public void test() { - assertTrue(1 == 1);// stop complaints about no methods - } + @Test + public void test() { + // stop complaints about no methods + } } diff --git a/src/integration-test/java/com/researchspace/dataverse/http/DatasetOperationsTest.java b/src/integration-test/java/com/researchspace/dataverse/http/DatasetOperationsTest.java index bfbb43b..0cd7a78 100644 --- a/src/integration-test/java/com/researchspace/dataverse/http/DatasetOperationsTest.java +++ b/src/integration-test/java/com/researchspace/dataverse/http/DatasetOperationsTest.java @@ -1,28 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.http; - -import com.researchspace.dataverse.entities.*; -import com.researchspace.dataverse.entities.facade.DatasetFacade; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang.RandomStringUtils; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; - -import java.io.ByteArrayInputStream; -import java.io.File; -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URISyntaxException; -import java.util.Arrays; -import java.util.List; - -import static com.researchspace.dataverse.entities.facade.DatasetTestFactory.createFacade; -import static org.junit.Assert.*; - - /**
 Copyright 2016 ResearchSpace
 
@@ -38,113 +13,159 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 
-*/ + */ +package com.researchspace.dataverse.http; + +import static com.researchspace.dataverse.entities.facade.DatasetTestFactory.createFacade; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URISyntaxException; +import java.util.Arrays; +import java.util.List; +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang.RandomStringUtils; +import org.junit.Before; +import org.junit.Test; + +import com.researchspace.dataverse.entities.Dataset; +import com.researchspace.dataverse.entities.DatasetFile; +import com.researchspace.dataverse.entities.DatasetFileList; +import com.researchspace.dataverse.entities.DatasetVersion; +import com.researchspace.dataverse.entities.DataverseObject; +import com.researchspace.dataverse.entities.DataversePost; +import com.researchspace.dataverse.entities.DataverseResponse; +import com.researchspace.dataverse.entities.Identifier; +import com.researchspace.dataverse.entities.PublishedDataset; +import com.researchspace.dataverse.entities.Version; +import com.researchspace.dataverse.entities.facade.DatasetFacade; +import com.researchspace.springrest.ext.RestClientException; + +/** + * Dataset operations tests. 
+ */ public class DatasetOperationsTest extends AbstractIntegrationTest { - - @Before - public void setup() throws Exception { - super.setUp(); - } - - File exampleDatasetJson = new File("src/integration-test/resources/dataset-create-new-all-default-fields.json"); - @Test - public void testListDatasets() { - List results = dataverseOps.getDataverseContents(dataverseAlias); - assertTrue(results.size() > 0); - - } - - //TODO figure out why data is invalid - @Test - @Ignore("this test fails with message: Error parsing Json: incorrect multiple for field collectionMode") - public void testPostSampleDataset() throws IOException, InterruptedException, URISyntaxException { - String toPost = FileUtils.readFileToString(exampleDatasetJson); - Identifier datasetId = dataverseOps.createDataset(toPost, dataverseAlias); - assertNotNull(datasetId.getId()); - } - - @Test - public void uploadFileToDataSetWithNativeApiBytes() throws IOException, URISyntaxException { - //arrange - Identifier datasetId = createADataset(); - assertNotNull(datasetId.getId()); - FileUploadMetadata meta = getUploadMetadata(); - - //act - DatasetFileList datasetFileList = datasetOps.uploadNativeFile(new byte[]{1, 2, 3, 4, 5}, meta, datasetId, "myFileName.dat"); - - //assert - assertNotNull(datasetFileList); - assertEquals(1, datasetFileList.getFiles().size()); - assertTrue(datasetFileList.getFiles().get(0).getCategories().contains("Data")); - assertTrue(datasetFileList.getFiles().get(0).getDescription().equals(("My description."))); - assertEquals(5 ,datasetFileList.getFiles().get(0).getDataFile().getFilesize()); - } - - @Test - public void uploadFileToDataSetWithNativeApiInputStream() throws IOException, URISyntaxException { - // arrange - Identifier datasetId = createADataset(); - assertNotNull(datasetId.getId()); - FileUploadMetadata meta = getUploadMetadata(); - - //act - DatasetFileList datasetFileList = datasetOps.uploadNativeFile(new ByteArrayInputStream(new byte[]{1, 2, 3, 4, 5,6}), 6, meta, 
datasetId, "myFileName.dat"); - - //assert - assertNotNull(datasetFileList); - assertEquals(1, datasetFileList.getFiles().size()); - DatasetFile uploadedFile = datasetFileList.getFiles().get(0); - assertTrue(uploadedFile.getCategories().contains("Data")); - assertTrue(uploadedFile.getDescription().equals(("My description."))); - assertEquals(6 ,uploadedFile.getDataFile().getFilesize()); - } - - private Identifier createADataset() throws MalformedURLException, URISyntaxException { - DatasetFacade facade = createFacade(); - Identifier datasetId = dataverseOps.createDataset(facade, dataverseAlias); - return datasetId; - } - - private FileUploadMetadata getUploadMetadata() { - return FileUploadMetadata.builder().description("My description.").categories(Arrays.asList(new String[]{"Data"})) - .directoryLabel("test/x").build(); - } - - @Test - public void testPostGetDeleteDataset() throws IOException, InterruptedException, URISyntaxException { - DatasetFacade facade = createFacade(); - //create a new, unpublished Dataverse - String newAlias = RandomStringUtils.randomAlphabetic(10); - DataversePost toCreate = DataverseOperationsTest.createADataverse(newAlias); - DataversePost newDV = dataverseOps.createNewDataverse(dataverseAlias, toCreate).getData(); - - // create Dataset in child dataverse - Identifier datasetId = dataverseOps.createDataset(facade, newDV.getAlias()); - assertNotNull(datasetId.getId()); - assertNotNull(datasetId.getPersistentId()); - Dataset ds = datasetOps.getDataset(datasetId); - String doiId = ds.getDoiId().get(); - datasetOps.uploadFile(doiId, getTestFile()); - - //publishing will fail, as parent DV is not published - DataverseResponse response = datasetOps.publishDataset (datasetId, Version.MAJOR); - assertNull(response.getData()); - assertNotNull(response.getMessage()); - - facade.setTitle("Updated title2"); - datasetOps.updateDataset(facade, datasetId); - List versions = datasetOps.getDatasetVersions(datasetId); - assertEquals(1, versions.size()); 
// published and draft - assertEquals("DRAFT", versions.get(0).getVersionState()); - String msg = datasetOps.deleteDataset(datasetId).getMessage(); - dataverseOps.deleteDataverse(newAlias); - assertNotNull(msg); - - } - - private File getTestFile() { - return new File("src/integration-test/resources/ResizablePng.zip"); - } + + @Before + public void setup() throws Exception { + super.setUp(); + } + + File exampleDatasetJson = new File("src/integration-test/resources/dataset-create-new-all-default-fields.json"); + + @Test + public void testListDatasets() { + final List results = dataverseOps.getDataverseContents(dataverseAlias); + assertTrue(results.size() > 0); + } + + @Test + public void testPostSampleDataset() throws IOException { + final String toPost = FileUtils.readFileToString(exampleDatasetJson); + final Identifier datasetId = dataverseOps.createDataset(toPost, dataverseAlias); + assertNotNull(datasetId.getId()); + } + + @Test + public void uploadFileToDataSetWithNativeApiBytes() throws IOException, URISyntaxException { + //arrange + final Identifier datasetId = createADataset(); + assertNotNull(datasetId.getId()); + final FileUploadMetadata meta = getUploadMetadata(); + + //act + final DatasetFileList datasetFileList = datasetOps.uploadNativeFile(new byte[]{1, 2, 3, 4, 5}, meta, datasetId, "myFileName.dat"); + + //assert + assertNotNull(datasetFileList); + assertEquals(1, datasetFileList.getFiles().size()); + assertTrue(datasetFileList.getFiles().get(0).getCategories().contains("Data")); + assertTrue(datasetFileList.getFiles().get(0).getDescription().equals("My description.")); + assertEquals(5 ,datasetFileList.getFiles().get(0).getDataFile().getFilesize()); + } + + @Test + public void uploadFileToDataSetWithNativeApiInputStream() throws IOException, URISyntaxException { + // arrange + final Identifier datasetId = createADataset(); + assertNotNull(datasetId.getId()); + final FileUploadMetadata meta = getUploadMetadata(); + + //act + final DatasetFileList 
datasetFileList = datasetOps.uploadNativeFile(new ByteArrayInputStream(new byte[]{1, 2, 3, 4, 5,6}), 6, meta, datasetId, "myFileName.dat"); + + //assert + assertNotNull(datasetFileList); + assertEquals(1, datasetFileList.getFiles().size()); + final DatasetFile uploadedFile = datasetFileList.getFiles().get(0); + assertTrue(uploadedFile.getCategories().contains("Data")); + assertTrue(uploadedFile.getDescription().equals("My description.")); + assertEquals(6 ,uploadedFile.getDataFile().getFilesize()); + } + + private Identifier createADataset() throws MalformedURLException, URISyntaxException { + final DatasetFacade facade = createFacade(); + final Identifier datasetId = dataverseOps.createDataset(facade, dataverseAlias); + return datasetId; + } + + private FileUploadMetadata getUploadMetadata() { + return FileUploadMetadata.builder().description("My description.").categories(Arrays.asList("Data")) + .directoryLabel("test/x").build(); + } + + @Test + public void testPostGetDeleteDataset() throws IOException, InterruptedException, URISyntaxException { + final DatasetFacade facade = createFacade(); + // create a new, unpublished Dataverse + final String newAlias = RandomStringUtils.randomAlphabetic(10); + final DataversePost toCreate = DataverseOperationsTest.createADataverse(newAlias); + final DataversePost newDV = dataverseOps.createNewDataverse(dataverseAlias, toCreate).getData(); + + // create Dataset in child dataverse + final Identifier datasetId = dataverseOps.createDataset(facade, newDV.getAlias()); + assertNotNull(datasetId.getId()); + assertNotNull(datasetId.getPersistentId()); + final Dataset ds = datasetOps.getDataset(datasetId); + String doiId = null; + if (ds.getDoiId().isPresent()) { + doiId = ds.getDoiId().get(); + } + assertNotNull(doiId); + datasetOps.uploadFile(doiId, getTestFile()); + + DataverseResponse response = null; + RestClientException exception = null; + // publishing will fail, as parent DV is not published + try { + response = 
datasetOps.publishDataset(datasetId, Version.MAJOR); + } catch (final RestClientException e) { + exception = e; + } + assertNull(response); + assertNotNull(exception); + assertEquals("403", exception.getCode().toString()); + + facade.setTitle("Updated title2"); + datasetOps.updateDataset(facade, datasetId); + final List versions = datasetOps.getDatasetVersions(datasetId); + assertEquals(1, versions.size()); // published and draft + assertEquals("DRAFT", versions.get(0).getVersionState()); + final String msg = datasetOps.deleteDataset(datasetId).getMessage(); + dataverseOps.deleteDataverse(newAlias); + assertNotNull(msg); + + } + + private File getTestFile() { + return new File("src/integration-test/resources/ResizablePng.zip"); + } + } diff --git a/src/integration-test/java/com/researchspace/dataverse/http/DataverseOperationsTest.java b/src/integration-test/java/com/researchspace/dataverse/http/DataverseOperationsTest.java index 64e7e52..69e2c59 100644 --- a/src/integration-test/java/com/researchspace/dataverse/http/DataverseOperationsTest.java +++ b/src/integration-test/java/com/researchspace/dataverse/http/DataverseOperationsTest.java @@ -1,18 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.http; - -import com.researchspace.dataverse.entities.*; -import org.apache.commons.lang.RandomStringUtils; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.util.Arrays; - -import static org.junit.Assert.*; /**
 Copyright 2016 ResearchSpace
 
@@ -28,69 +13,126 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 
-*/ + */ +package com.researchspace.dataverse.http; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.util.Arrays; + +import org.apache.commons.lang.RandomStringUtils; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import com.researchspace.dataverse.entities.DataverseContacts; +import com.researchspace.dataverse.entities.DataverseGet; +import com.researchspace.dataverse.entities.DataversePost; +import com.researchspace.dataverse.entities.DataverseResponse; +import com.researchspace.dataverse.entities.DvMessage; +import com.researchspace.springrest.ext.RestClientException; + +/** + * Dataverse operations tests. + */ public class DataverseOperationsTest extends AbstractIntegrationTest { - - - @Before - public void setup() throws Exception { - super.setUp(); - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void createPublishAndDeleteNewDataverse(){ - String dvName = RandomStringUtils.randomAlphabetic(10); - DataversePost dv = createADataverse(dvName); - DataverseResponse success = dataverseOps.createNewDataverse(dataverseAlias, dv); - assertNotNull(success.getData()); - assertNotNull(success.getData().getId()); - - dataverseOps.publishDataverse(dvName); - - DataverseResponse deleted = dataverseOps.deleteDataverse(dvName); - assertTrue(deleted.getStatus().equals("OK")); - assertNotNull(deleted.getData()); - } - - static DataversePost createADataverse(String dvName) { - DataversePost dv = new DataversePost(); - dv.setAlias(dvName); - dv.setName("Test Instance " + dvName); - dv.setDataverseContacts(Arrays.asList(new DataverseContacts("a@b.com"))); - return dv; - } - - @Test - public void deleteUnknownDataverseHandled () { - DataverseResponse deleted = dataverseOps.deleteDataverse("ra"); - assertTrue(deleted.getStatus().equals(ERROR_MSG)); - 
assertNull(deleted.getData()); - } - - @Test(expected=IllegalArgumentException.class) - public void createDataverseValidation () { - String dvName = RandomStringUtils.randomAlphabetic(10); - DataversePost dv = createADataverse(dvName); - dv.setAlias(""); - dataverseOps.createNewDataverse("rspace", dv); - } - @Test - public void createDataverseValidationContactRequired () { - String dvName = RandomStringUtils.randomAlphabetic(10); - DataversePost dv = createADataverse(dvName); - dv.setDataverseContacts(null); - Assert.assertThrows(NullPointerException.class, ()->dataverseOps.createNewDataverse("rspace", dv)); - } - - @Test - public void testGetDataverseById() { - DataverseGet dv = dataverseOps.getDataverseById(dataverseAlias); - assertNotNull(dv.getId()); - assertTrue(dv.getContactEmails().size() > 0); - } + /** + * Not permitted error. + */ + private static final String UNAUTHORIZED = "is not permitted to perform requested action"; + + /** + * Not permitted error code. + */ + private static final Integer UNAUTHORIZED_CODE = 401; + + /** + * Not published error. + */ + private static final String NOT_PUBLISHED = "may not be published because its host dataverse"; + + /** + * Not published part 2 error. 
+ */ + private static final String NOT_PUBLISHED_2 = "has not been published"; + + @Before + public void setup() throws Exception { + super.setUp(); + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void createPublishAndDeleteNewDataverse(){ + final String dvName = RandomStringUtils.randomAlphabetic(10); + final DataversePost dv = createADataverse(dvName); + final DataverseResponse success = dataverseOps.createNewDataverse(dataverseAlias, dv); + assertNotNull(success.getData()); + assertNotNull(success.getData().getId()); + try { + + dataverseOps.publishDataverse(dvName); + } catch (final RestClientException e) { + assertTrue("[" + e.getLocalizedMessage() + "] should contain [" + + NOT_PUBLISHED + "] & [" + NOT_PUBLISHED_2 + "]", + e.getLocalizedMessage().contains(NOT_PUBLISHED) + && e.getLocalizedMessage().contains(NOT_PUBLISHED_2)); + } + + final DataverseResponse deleted = dataverseOps.deleteDataverse(dvName); + assertTrue(deleted.getStatus().equals("OK")); + assertNotNull(deleted.getData()); + } + + static DataversePost createADataverse(final String dvName) { + final DataversePost dv = new DataversePost(); + dv.setAlias(dvName); + dv.setName("Test Instance " + dvName); + dv.setDataverseContacts(Arrays.asList(new DataverseContacts("a@b.com"))); + return dv; + } + + @Test + public void deleteUnknownDataverseHandled() { + RestClientException exception = null; + try { + dataverseOps.deleteDataverse("ra"); + } catch (final RestClientException e) { + exception = e; + assertEquals(UNAUTHORIZED_CODE, e.getCode()); + assertTrue("[" + e.getLocalizedMessage() + "] should contain [" + UNAUTHORIZED + "]", + e.getLocalizedMessage().contains(UNAUTHORIZED)); + } + assertNotNull(exception); + } + + @Test(expected=IllegalArgumentException.class) + public void createDataverseValidation() { + final String dvName = RandomStringUtils.randomAlphabetic(10); + final DataversePost dv = createADataverse(dvName); + dv.setAlias(""); + 
dataverseOps.createNewDataverse("rspace", dv); + } + + @Test(expected=NullPointerException.class) + public void createDataverseValidationContactRequired() { + final String dvName = RandomStringUtils.randomAlphabetic(10); + final DataversePost dv = createADataverse(dvName); + dv.setDataverseContacts(null); + dataverseOps.createNewDataverse("rspace", dv); + } + + @Test + public void testGetDataverseById() { + final DataverseGet dv = dataverseOps.getDataverseById(dataverseAlias); + assertNotNull(dv.getId()); + assertFalse(dv.getContactEmails().isEmpty()); + } } diff --git a/src/integration-test/java/com/researchspace/dataverse/http/InfoOperationsTest.java b/src/integration-test/java/com/researchspace/dataverse/http/InfoOperationsTest.java index da19d5f..082f742 100644 --- a/src/integration-test/java/com/researchspace/dataverse/http/InfoOperationsTest.java +++ b/src/integration-test/java/com/researchspace/dataverse/http/InfoOperationsTest.java @@ -1,18 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.http; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import org.apache.commons.lang.StringUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.researchspace.dataverse.entities.DataverseResponse; -import com.researchspace.dataverse.entities.DvMessage; /**
 Copyright 2016 ResearchSpace
 
@@ -28,26 +13,41 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 
-*/ + */ +package com.researchspace.dataverse.http; + +import static org.junit.Assert.assertTrue; + +import org.apache.commons.lang.StringUtils; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import com.researchspace.dataverse.entities.DvMessage; + +/** + * Info operations tests. + */ public class InfoOperationsTest extends AbstractIntegrationTest { - private static final String PUBLISH_TEXT = "By default datasets are published with the"; + private static final String PUBLISH_TEXT = "By default datasets are published with the"; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + } - @Before - public void setUp() throws Exception { - super.setUp(); - } + @After + public void tearDown() throws Exception { + } - @After - public void tearDown() throws Exception { - } + @Test + public void testGetDatasetPublishPopupCustomText() { + final DvMessage originalText = infoOps.getDatasetPublishPopupCustomText(); + assertTrue("Message text was empty", !StringUtils.isEmpty(originalText.getMessage())); + assertTrue(originalText.getMessage().contains(PUBLISH_TEXT)); - @Test - public void testGetDatasetPublishPopupCustomText() { - DvMessage originalText = infoOps.getDatasetPublishPopupCustomText(); - assertTrue("Message text was empty", !StringUtils.isEmpty(originalText.getMessage())); - assertTrue(originalText.getMessage().contains(PUBLISH_TEXT)); - - } + } } diff --git a/src/integration-test/java/com/researchspace/dataverse/http/MetadataOperationsTest.java b/src/integration-test/java/com/researchspace/dataverse/http/MetadataOperationsTest.java index 4511018..e6d488b 100644 --- a/src/integration-test/java/com/researchspace/dataverse/http/MetadataOperationsTest.java +++ b/src/integration-test/java/com/researchspace/dataverse/http/MetadataOperationsTest.java @@ -1,18 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.http; - -import com.researchspace.dataverse.entities.DVField; -import 
com.researchspace.dataverse.entities.MetadataBlock; -import lombok.extern.slf4j.Slf4j; -import org.junit.Before; -import org.junit.Test; -import org.springframework.web.client.RestClientException; - -import java.io.IOException; - -import static org.junit.Assert.assertTrue; /**
 Copyright 2016 ResearchSpace
 
@@ -28,33 +13,52 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 
-*/ -@Slf4j + */ +package com.researchspace.dataverse.http; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; + +import org.junit.Before; +import org.junit.Test; + +import com.researchspace.dataverse.entities.DVField; +import com.researchspace.dataverse.entities.MetadataBlock; +import com.researchspace.springrest.ext.RestClientException; + + +/** + * Metadata operations tests. + */ public class MetadataOperationsTest extends AbstractIntegrationTest { - private static final int MIN_BLOCK_SIZE = 6; - - @Before - public void setup() throws Exception { - super.setUp(); - } - - @Test - public void testGetMetdataBlockInfo() throws IOException { - // metadat block size is variable - assertTrue( metadataOPs.getMetadataBlockInfo().size() >= MIN_BLOCK_SIZE); - } - - @Test(expected = RestClientException.class) - public void testGetMetdataByIdBlockInfoWithInvalidId() throws IOException { - metadataOPs.getMetadataById("abcde"); - } - - @Test - public void testGetMetdataByIdBlockInfo() throws IOException { - MetadataBlock block = metadataOPs.getMetadataById("biomedical"); - for (DVField fld : block.getFields().values()) { - // log.info(fld.toString()); - } - } + private static final int MIN_BLOCK_SIZE = 6; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + } + + @Test + public void testGetMetdataBlockInfo() { + // metadat block size is variable + assertTrue(metadataOPs.getMetadataBlockInfo().size() >= MIN_BLOCK_SIZE); + } + + @Test(expected = RestClientException.class) + public void testGetMetdataByIdBlockInfoWithInvalidId() { + metadataOPs.getMetadataById("abcde"); + } + + @Test + public void testGetMetdataByIdBlockInfo() throws IOException { + final MetadataBlock block = metadataOPs.getMetadataById("biomedical"); + for (final DVField fld : block.getFields().values()) { + assertNotNull(fld); + // log.info(fld.toString()); + } + } } diff --git 
a/src/integration-test/java/com/researchspace/dataverse/http/SearchOperationsIntegrationTest.java b/src/integration-test/java/com/researchspace/dataverse/http/SearchOperationsIntegrationTest.java index 12d14ff..67be798 100644 --- a/src/integration-test/java/com/researchspace/dataverse/http/SearchOperationsIntegrationTest.java +++ b/src/integration-test/java/com/researchspace/dataverse/http/SearchOperationsIntegrationTest.java @@ -1,5 +1,18 @@ -/* - * +/**
+Copyright 2016 ResearchSpace
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
*/ package com.researchspace.dataverse.http; @@ -15,60 +28,44 @@ import com.researchspace.dataverse.search.entities.SearchResults; import com.researchspace.dataverse.search.entities.SortBy; import com.researchspace.dataverse.search.entities.SortOrder; -/** - * /**
-Copyright 2016 ResearchSpace
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
+/** * The results of these tests will vary depnding on the Dataverse instance used for testing. * @author rspace - * */ public class SearchOperationsIntegrationTest extends AbstractIntegrationTest { - private static final String FILE_SEARCH_TERM = "documentSchema.xsd"; - private static final String NEW_TEXT = "Do you want to publish"; + private static final String FILE_SEARCH_TERM = "documentSchema.xsd"; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + } - @Before - public void setUp() throws Exception { - super.setUp(); - } + @After + public void tearDown() throws Exception { + } - @After - public void tearDown() throws Exception { - } + @Test + public void testBasicSearchByTermOnly() { + final SearchConfig cfg = searchOps.builder().q(FILE_SEARCH_TERM).build(); + final DataverseResponse> results = searchOps.search(cfg); + assertNotNull(results.getMessage(), results.getData()); + } - @Test - public void testBasicSearchByTermOnly() { - SearchConfig cfg = searchOps.builder().q(FILE_SEARCH_TERM).build(); - DataverseResponse> results = searchOps.search(cfg); - assertNotNull(results.getMessage(), results.getData()); - } - - @Test - public void testComplexSearch() { - SearchConfig cfg = searchOps.builder().q(FILE_SEARCH_TERM) - .sortBy(SortBy.date) - .sortOrder(SortOrder.asc) - .showFacets(true) - .showRelevance(true) - .start(1) - .perPage(3) - .build(); - DataverseResponse> results = searchOps.search(cfg); - assertNotNull(results.getMessage(), results.getData()); - } + @Test + public void testComplexSearch() { + final SearchConfig cfg = searchOps.builder().q(FILE_SEARCH_TERM) + .sortBy(SortBy.date) + .sortOrder(SortOrder.asc) + .showFacets(true) + .showRelevance(true) + .start(1) + .perPage(3) + .build(); + final DataverseResponse> results = searchOps.search(cfg); + assertNotNull(results.getMessage(), results.getData()); + } } diff --git a/src/integration-test/resources/dataset-builder-test.json 
b/src/integration-test/resources/dataset-builder-test.json new file mode 100644 index 0000000..7d8559e --- /dev/null +++ b/src/integration-test/resources/dataset-builder-test.json @@ -0,0 +1,368 @@ +{ + "datasetVersion" : { + "id" : null, + "versionState" : null, + "productionDate" : null, + "termsOfUse" : null, + "lastUpdateTime" : null, + "createTime" : null, + "metadataBlocks" : { + "geospatial" : { + "displayName" : "Geospatial Metadata", + "fields" : [ ] + }, + "citation" : { + "displayName" : "Citation Metadata", + "fields" : [ { + "typeName" : "title", + "typeClass" : "primitive", + "multiple" : false, + "value" : "title1" + }, { + "typeName" : "author", + "typeClass" : "compound", + "multiple" : true, + "value" : [ { + "authorName" : { + "typeName" : "authorName", + "typeClass" : "primitive", + "multiple" : false, + "value" : "Fred Blogs" + }, + "authorIdentifier" : { + "typeName" : "authorIdentifier", + "typeClass" : "primitive", + "multiple" : false, + "value" : "1234-5678" + }, + "authorIdentifierScheme" : { + "typeName" : "authorIdentifierScheme", + "typeClass" : "controlledVocabulary", + "multiple" : false, + "value" : "ORCID" + }, + "authorAffiliation" : { + "typeName" : "authorAffiliation", + "typeClass" : "primitive", + "multiple" : false, + "value" : "RSpace" + } + }, { + "authorName" : { + "typeName" : "authorName", + "typeClass" : "primitive", + "multiple" : false, + "value" : "John Smith" + }, + "authorIdentifier" : { + "typeName" : "authorIdentifier", + "typeClass" : "primitive", + "multiple" : false, + "value" : "1234-5678" + }, + "authorIdentifierScheme" : { + "typeName" : "authorIdentifierScheme", + "typeClass" : "controlledVocabulary", + "multiple" : false, + "value" : "ISNI" + }, + "authorAffiliation" : { + "typeName" : "authorAffiliation", + "typeClass" : "primitive", + "multiple" : false, + "value" : "Dataverse" + } + } ] + }, { + "typeName" : "dsDescription", + "typeClass" : "compound", + "multiple" : true, + "value" : [ { + 
"dsDescriptionDate" : { + "typeName" : "dsDescriptionDate", + "typeClass" : "primitive", + "multiple" : false, + "value" : "1970-01-01" + }, + "dsDescriptionValue" : { + "typeName" : "dsDescriptionValue", + "typeClass" : "primitive", + "multiple" : false, + "value" : "some desc" + } + } ] + }, { + "typeName" : "keyword", + "typeClass" : "compound", + "multiple" : true, + "value" : [ { + "keywordVocabulary" : { + "typeName" : "keywordVocabulary", + "typeClass" : "primitive", + "multiple" : false, + "value" : "keywordVocab" + }, + "keywordVocabularyURI" : { + "typeName" : "keywordVocabularyURI", + "typeClass" : "primitive", + "multiple" : false, + "value" : "http://vocab.com" + }, + "keywordValue" : { + "typeName" : "keywordValue", + "typeClass" : "primitive", + "multiple" : false, + "value" : "key1" + } + }, { + "keywordVocabulary" : { + "typeName" : "keywordVocabulary", + "typeClass" : "primitive", + "multiple" : false, + "value" : "keywordVocab" + }, + "keywordVocabularyURI" : { + "typeName" : "keywordVocabularyURI", + "typeClass" : "primitive", + "multiple" : false, + "value" : "http://vocab.com" + }, + "keywordValue" : { + "typeName" : "keywordValue", + "typeClass" : "primitive", + "multiple" : false, + "value" : "key2" + } + } ] + }, { + "typeName" : "topicClassification", + "typeClass" : "compound", + "multiple" : true, + "value" : [ { + "topicClassVocab" : { + "typeName" : "topicClassVocab", + "typeClass" : "primitive", + "multiple" : false, + "value" : "a topic vocab" + }, + "topicClassVocabURI" : { + "typeName" : "topicClassVocabURI", + "typeClass" : "primitive", + "multiple" : false, + "value" : "http://www.vocab.org" + }, + "topicClassValue" : { + "typeName" : "topicClassValue", + "typeClass" : "primitive", + "multiple" : false, + "value" : "topic1" + } + } ] + }, { + "typeName" : "publication", + "typeClass" : "compound", + "multiple" : true, + "value" : [ { + "publicationIDType" : { + "typeName" : "publicationIDType", + "typeClass" : 
"controlledVocabulary", + "multiple" : false, + "value" : "ean13" + }, + "publicationIDNumber" : { + "typeName" : "publicationIDNumber", + "typeClass" : "primitive", + "multiple" : false, + "value" : "12435" + }, + "publicationCitation" : { + "typeName" : "publicationCitation", + "typeClass" : "primitive", + "multiple" : false, + "value" : "citation" + }, + "publicationURL" : { + "typeName" : "publicationURL", + "typeClass" : "primitive", + "multiple" : false, + "value" : "http://pubmed.com/1234" + } + } ] + }, { + "typeName" : "language", + "typeClass" : "controlledVocabulary", + "multiple" : true, + "value" : [ "English" ] + }, { + "typeName" : "notesText", + "typeClass" : "primitive", + "multiple" : false, + "value" : "Some note" + }, { + "typeName" : "producer", + "typeClass" : "compound", + "multiple" : true, + "value" : [ { + "producerAffiliation" : { + "typeName" : "producerAffiliation", + "typeClass" : "primitive", + "multiple" : false, + "value" : "UoE" + }, + "producerAbbreviation" : { + "typeName" : "producerAbbreviation", + "typeClass" : "primitive", + "multiple" : false, + "value" : "abbr" + }, + "producerName" : { + "typeName" : "producerName", + "typeClass" : "primitive", + "multiple" : false, + "value" : "a producer" + }, + "producerLogoURL" : { + "typeName" : "producerLogoURL", + "typeClass" : "primitive", + "multiple" : false, + "value" : "http://pubmed.logo.com/1234" + }, + "producerURL" : { + "typeName" : "producerURL", + "typeClass" : "primitive", + "multiple" : false, + "value" : "http://pubmed.com/1234" + } + } ] + }, { + "typeName" : "productionDate", + "typeClass" : "primitive", + "multiple" : false, + "value" : "1970-01-01" + }, { + "typeName" : "productionPlace", + "typeClass" : "primitive", + "multiple" : true, + "value" : [ "Edinburgh, UK" ] + }, { + "typeName" : "contributor", + "typeClass" : "compound", + "multiple" : true, + "value" : [ { + "contributorType" : { + "typeName" : "contributorType", + "typeClass" : 
"controlledVocabulary", + "multiple" : false, + "value" : "Project Leader" + }, + "contributorName" : { + "typeName" : "contributorName", + "typeClass" : "primitive", + "multiple" : false, + "value" : "Fred" + } + }, { + "contributorType" : { + "typeName" : "contributorType", + "typeClass" : "controlledVocabulary", + "multiple" : false, + "value" : "Project Leader" + }, + "contributorName" : { + "typeName" : "contributorName", + "typeClass" : "primitive", + "multiple" : false, + "value" : "Tim" + } + } ] + }, { + "typeName" : "subject", + "typeClass" : "controlledVocabulary", + "multiple" : true, + "value" : [ "Chemistry" ] + }, { + "typeName" : "datasetContact", + "typeClass" : "compound", + "multiple" : true, + "value" : [ { + "datasetContactEmail" : { + "typeName" : "datasetContactEmail", + "typeClass" : "primitive", + "multiple" : false, + "value" : "contact@email.com" + }, + "datasetContactAffiliation" : { + "typeName" : "datasetContactAffiliation", + "typeClass" : "primitive", + "multiple" : false, + "value" : "Some place" + }, + "datasetContactName" : { + "typeName" : "datasetContactName", + "typeClass" : "primitive", + "multiple" : false, + "value" : "Sarah Contact" + } + }, { + "datasetContactEmail" : { + "typeName" : "datasetContactEmail", + "typeClass" : "primitive", + "multiple" : false, + "value" : "contact2@email.com" + }, + "datasetContactAffiliation" : { + "typeName" : "datasetContactAffiliation", + "typeClass" : "primitive", + "multiple" : false, + "value" : "Another place" + }, + "datasetContactName" : { + "typeName" : "datasetContactName", + "typeClass" : "primitive", + "multiple" : false, + "value" : "Brian Contact2" + } + } ] + }, { + "typeName" : "depositor", + "typeClass" : "primitive", + "multiple" : false, + "value" : "A depositor" + }, { + "typeName" : "subtitle", + "typeClass" : "primitive", + "multiple" : false, + "value" : " A subtitle" + }, { + "typeName" : "alternativeTitle", + "typeClass" : "primitive", + "multiple" : false, + 
"value" : "altTitle" + }, { + "typeName" : "alternativeURL", + "typeClass" : "primitive", + "multiple" : false, + "value" : "http://www.myrepo.com" + }, { + "typeName" : "kindOfData", + "typeClass" : "primitive", + "multiple" : true, + "value" : [ "Dataset", "Collection" ] + }, { + "typeName" : "timePeriodCovered", + "typeClass" : "compound", + "multiple" : true, + "value" : [ ] + } ] + } + }, + "versionNumber" : 0, + "versionMinorNumber" : 0 + }, + "latestVersion" : null, + "id" : null, + "identifier" : null, + "protocol" : null, + "authority" : null, + "persistentUrl" : null +} \ No newline at end of file diff --git a/src/integration-test/resources/dataset-create-new-all-default-fields.json b/src/integration-test/resources/dataset-create-new-all-default-fields.json index 0bc7fb6..b9a6b70 100644 --- a/src/integration-test/resources/dataset-create-new-all-default-fields.json +++ b/src/integration-test/resources/dataset-create-new-all-default-fields.json @@ -464,9 +464,9 @@ }, { "typeName": "productionPlace", - "multiple": false, + "multiple": true, "typeClass": "primitive", - "value": "ProductionPlace" + "value": ["ProductionPlace"] }, { "typeName": "contributor", @@ -702,8 +702,8 @@ "multiple": true, "typeClass": "primitive", "value": [ - "KindOfData1", - "KindOfData2" + "Dataset", + "Collection" ] }, { @@ -909,13 +909,13 @@ "typeName": "northLongitude", "multiple": false, "typeClass": "primitive", - "value": "30" + "value": "40" }, "southLongitude": { "typeName": "southLongitude", "multiple": false, "typeClass": "primitive", - "value": "40" + "value": "30" } }, { @@ -935,13 +935,13 @@ "typeName": "northLongitude", "multiple": false, "typeClass": "primitive", - "value": "70" + "value": "80" }, "southLongitude": { "typeName": "southLongitude", "multiple": false, "typeClass": "primitive", - "value": "80" + "value": "70" } } ] @@ -1026,9 +1026,9 @@ }, { "typeName": "collectionMode", - "multiple": false, + "multiple": true, "typeClass": "primitive", - "value": 
"CollectionMode" + "value": ["CollectionMode"] }, { "typeName": "researchInstrument", diff --git a/src/main/java/com/researchspace/dataverse/api/v1/DatasetOperations.java b/src/main/java/com/researchspace/dataverse/api/v1/DatasetOperations.java index 48dabee..3c1fb81 100644 --- a/src/main/java/com/researchspace/dataverse/api/v1/DatasetOperations.java +++ b/src/main/java/com/researchspace/dataverse/api/v1/DatasetOperations.java @@ -1,15 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.api.v1; - -import com.researchspace.dataverse.entities.*; -import com.researchspace.dataverse.entities.facade.DatasetFacade; -import com.researchspace.dataverse.http.FileUploadMetadata; - -import java.io.File; -import java.io.InputStream; -import java.util.List; /**
   Copyright 2016 ResearchSpace
@@ -26,86 +14,135 @@
    See the License for the specific language governing permissions and
    limitations under the License.
  
- - * @author richard - * + */ +package com.researchspace.dataverse.api.v1; + +import java.io.File; +import java.io.InputStream; +import java.net.URISyntaxException; +import java.util.List; + +import com.researchspace.dataverse.entities.Dataset; +import com.researchspace.dataverse.entities.DatasetFileList; +import com.researchspace.dataverse.entities.DatasetVersion; +import com.researchspace.dataverse.entities.DataverseResponse; +import com.researchspace.dataverse.entities.DvMessage; +import com.researchspace.dataverse.entities.Identifier; +import com.researchspace.dataverse.entities.PublishedDataset; +import com.researchspace.dataverse.entities.Version; +import com.researchspace.dataverse.entities.facade.DatasetFacade; +import com.researchspace.dataverse.http.FileUploadMetadata; +import com.researchspace.springrest.ext.RestClientException; +import com.researchspace.springrest.ext.SWORDException; + +/** * Operations on Datasets - * - * + * @author richard */ public interface DatasetOperations { - /** - * @param facade The {@link DatasetFacade} containing the updated DataSet data. - * @param id The {@link DataSetMetadataBlock} identifier - * @return A {@link DatasetVersion} - */ - DatasetVersion updateDataset(DatasetFacade facade, Identifier id); - - /** - * Retrieves a {@link Dataset} based on its Id. - * @param dsIdentifier - * @return - */ - Dataset getDataset(Identifier dsIdentifier); - - /** - * Gets list of metadata about different Dataset versions. - * @param dsIdentifier - * @return - */ - List getDatasetVersions(Identifier dsIdentifier); - - /** - * Upload a file to a dataset using Dataverse's native API (not Sword) - * @param metadata Metadata to attach to the file upload - * @param dsIdentifier The persistent identifier of the dataset - * @param data bytes of data to upload - * @param fileName The name of the file to be created on Dataverse - * @return DatasetFileList information about the uploaded file. 
- */ + /** + * @param facade The {@link DatasetFacade} containing the updated DataSet data. + * @param id The {@link DataSetMetadataBlock} identifier + * @return A {@link DatasetVersion} + */ + DatasetVersion updateDataset(DatasetFacade facade, Identifier id) throws RestClientException; + + /** + * @param dataset The {@link Dataset} containing the updated DataSet data. + * @param id The {@link DataSetMetadataBlock} identifier + * @return A {@link DatasetVersion} + * @throws RestClientException + */ + DatasetVersion updateDataset(Dataset dataset, Identifier id) throws RestClientException; + + + /** + * Retrieves a {@link Dataset} based on its Id. + * @param dsIdentifier + * @return a {@link Dataset} from dataverse + * @throws RestClientException + */ + Dataset getDataset(Identifier dsIdentifier) throws RestClientException; + + /** + * Gets list of metadata about different {@link Dataset} versions. + * @param dsIdentifier + * @return a list of {@link DatasetVersion} + * @throws RestClientException + */ + List getDatasetVersions(Identifier dsIdentifier) throws RestClientException; + + /** + * Uploads a file to a dataset + * @param doi The DOI of the Dataset + * @param file The file to add to the DataSet + * @throws SWORDException + */ + void uploadFile(String doi, File file) throws SWORDException; + + /** + * List files ids present in a dataset. + * @param doi + * @return a List of files urls + * @throws SWORDException + */ + List listFilesUrls(String doi) throws SWORDException; + + /** + * Deletes a {@link Dataset}. + * @param dsIdentifier + * @return message from dataverse + * @throws RestClientException + */ + DvMessage deleteDataset(Identifier dsIdentifier) throws RestClientException; + + /** + * Publishes a DataSet, if the parent dataverse is published. + * @param dsIdentifier + * @param version Major/Minor + * @return A {@link DataverseResponse} with an error message if Dataset could not be published. 
+ */ + DataverseResponse publishDataset(Identifier dsIdentifier, Version version); + + /** + * Delete a file with it's name and dataset {@link Identifier}. + * @param fileName filename in the dataverse + * @param dsIdentifier dataset identifier + * @throws SWORDException + * @throws URISyntaxException + */ + void deleteFile(final String fileName, final Identifier dsIdentifier) throws URISyntaxException, SWORDException; + + /** + * Upload a file to a dataset using Dataverse's native API (not Sword) + * @param metadata Metadata to attach to the file upload + * @param dsIdentifier The persistent identifier of the dataset + * @param data bytes of data to upload + * @param fileName The name of the file to be created on Dataverse + * @return DatasetFileList information about the uploaded file. + */ DatasetFileList uploadNativeFile( byte[] data, FileUploadMetadata metadata, Identifier dsIdentifier, String fileName); - /** - * Upload a file to a dataset using Dataverse's native API (not Sword). - * @param metadata Metadata to attach to the file upload - * @param contentLength The length of the stream - * @param dsIdentifier The persistent identifier of the dataset - * @param data bytes of data to upload - * @param fileName The name of the file to be created on Dataverse - * @return DatasetFileList information about the uploaded file. - */ - DatasetFileList uploadNativeFile(InputStream data, long contentLength, FileUploadMetadata metadata, - Identifier dsIdentifier, String fileName); + /** + * Upload a file to a dataset using Dataverse's native API (not Sword). + * @param metadata Metadata to attach to the file upload + * @param contentLength The length of the stream + * @param dsIdentifier The persistent identifier of the dataset + * @param data bytes of data to upload + * @param fileName The name of the file to be created on Dataverse + * @return DatasetFileList information about the uploaded file. 
+ */ + DatasetFileList uploadNativeFile(InputStream data, long contentLength, FileUploadMetadata metadata, + Identifier dsIdentifier, String fileName); /** - * Uploads a file to a dataset - * @param doi The DOI of the Dataset - * @param file The file to add to the DataSet - */ - void uploadFile(String doi, File file); - - /** - * Uploads a file using a data stream. - * - * @param doi Identifier of the dataset that we are sending the data to. - * @param inputStream Stream of data to upload as a file in Dataverse. - * @param filename Contents of the field "name" that will appear as in Dataverse. - */ - void uploadFile(String doi, InputStream inputStream, String filename); - - /** - * Deletes a {@link Dataset} - * @param dsIdentifier - * @return - */ - DvMessage deleteDataset(Identifier dsIdentifier); - - /** - * Publishes a DataSet, if the parent dataverse is published. - * @param dsIdentifier - * @param version Major/Minor - * @return A {@link DataverseResponse} with an error message if Dataset could not be published. - */ - DataverseResponse publishDataset(Identifier dsIdentifier, Version version); + * Uploads a file using a data stream. + * + * @param doi Identifier of the dataset that we are sending the data to. + * @param inputStream Stream of data to upload as a file in Dataverse. + * @param filename Contents of the field "name" that will appear as in Dataverse. + */ + void uploadFile(String doi, InputStream inputStream, String filename); + } diff --git a/src/main/java/com/researchspace/dataverse/api/v1/DataverseAPI.java b/src/main/java/com/researchspace/dataverse/api/v1/DataverseAPI.java index 7efbe18..ed8e6d0 100644 --- a/src/main/java/com/researchspace/dataverse/api/v1/DataverseAPI.java +++ b/src/main/java/com/researchspace/dataverse/api/v1/DataverseAPI.java @@ -1,10 +1,5 @@ -/* - * - */ -package com.researchspace.dataverse.api.v1; - /** - *
+
   Copyright 2016 ResearchSpace
 
    Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,43 +14,57 @@
    See the License for the specific language governing permissions and
    limitations under the License.
  
- Top-level entry point into the Dataverse Level1 API + */ +package com.researchspace.dataverse.api.v1; + +/** + * Top-level entry point into the Dataverse Level1 API * @author rspace * */ public interface DataverseAPI { - /** - * Gets class for performing DatasetOperations - * @return - */ - DatasetOperations getDatasetOperations (); - - /** - * Gets class for performing Metadata Operations - * @return - */ - MetadataOperations getMetadataOperations (); - /** - * Gets class for performing Dataverse Operations - * @return - */ - DataverseOperations getDataverseOperations (); - - /** - * Configures the connection settings. - * @param config - */ - void configure (DataverseConfig config); - - InfoOperations getInfoOperations(); - - /** - * Accesses the Search API - * @return - */ - SearchOperations getSearchOperations(); + * Gets class for performing DatasetOperations + * @return DatasetOperations + */ + DatasetOperations getDatasetOperations(); + + /** + * Gets class for performing Metadata Operations + * @return MetadataOperations + */ + MetadataOperations getMetadataOperations(); + + /** + * Gets class for performing Dataverse Operations + * @return DataverseOperations + */ + DataverseOperations getDataverseOperations(); + + /** + * Configures the connection settings. + * @param config + */ + void configure(DataverseConfig config); + + /** + * Get class for performing Info Operations. + * @return InfoOperations + */ + InfoOperations getInfoOperations(); + + /** + * Get class for performing Search Operations. + * @return SearchOperations + */ + SearchOperations getSearchOperations(); + + /** + * Get class for performing Users Operations. 
+ * @return UsersOperations + */ + UsersOperations getUsersOperations(); } \ No newline at end of file diff --git a/src/main/java/com/researchspace/dataverse/api/v1/DataverseConfig.java b/src/main/java/com/researchspace/dataverse/api/v1/DataverseConfig.java index 357116e..8bb44ff 100644 --- a/src/main/java/com/researchspace/dataverse/api/v1/DataverseConfig.java +++ b/src/main/java/com/researchspace/dataverse/api/v1/DataverseConfig.java @@ -1,16 +1,5 @@ -/* - * - */ -package com.researchspace.dataverse.api.v1; - -import java.net.URL; - -import lombok.AllArgsConstructor; -import lombok.EqualsAndHashCode; -import lombok.Getter; -import lombok.ToString; /** -
+
   Copyright 2016 ResearchSpace
 
    Licensed under the Apache License, Version 2.0 (the "License");
@@ -25,22 +14,34 @@
    See the License for the specific language governing permissions and
    limitations under the License.
  
+ */ +package com.researchspace.dataverse.api.v1; + +import java.net.URL; + +import lombok.AllArgsConstructor; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.ToString; +/** * Configures the server, apikey and root dataverse alias * @author rspace - * */ @EqualsAndHashCode(of={"serverURL"}) @AllArgsConstructor @ToString() public class DataverseConfig { - private @Getter URL serverURL; - - /** - * The API Key provided from Dataverse account - * @return the apiKey - */ - private @Getter String apiKey; - private @Getter String repositoryName; + private @Getter + final URL serverURL; + + /** + * The API Key provided from Dataverse account + * @return the apiKey + */ + private @Getter + final String apiKey; + private @Getter + final String repositoryName; } diff --git a/src/main/java/com/researchspace/dataverse/api/v1/DataverseOperations.java b/src/main/java/com/researchspace/dataverse/api/v1/DataverseOperations.java index 6c4a31d..ea5ee5c 100644 --- a/src/main/java/com/researchspace/dataverse/api/v1/DataverseOperations.java +++ b/src/main/java/com/researchspace/dataverse/api/v1/DataverseOperations.java @@ -1,20 +1,5 @@ -/* - * - */ -package com.researchspace.dataverse.api.v1; - -import java.io.IOException; -import java.util.List; - -import com.researchspace.dataverse.entities.DataversePost; -import com.researchspace.dataverse.entities.DataverseGet; -import com.researchspace.dataverse.entities.DataverseObject; -import com.researchspace.dataverse.entities.DataverseResponse; -import com.researchspace.dataverse.entities.DvMessage; -import com.researchspace.dataverse.entities.Identifier; -import com.researchspace.dataverse.entities.facade.DatasetFacade; -/** - *
+/**
+
   Copyright 2016 ResearchSpace
 
    Licensed under the Apache License, Version 2.0 (the "License");
@@ -29,62 +14,95 @@
    See the License for the specific language governing permissions and
    limitations under the License.
  
+ */ +package com.researchspace.dataverse.api.v1; -*/ +import java.io.IOException; +import java.util.List; + +import com.researchspace.dataverse.entities.DataverseGet; +import com.researchspace.dataverse.entities.DataverseObject; +import com.researchspace.dataverse.entities.DataversePost; +import com.researchspace.dataverse.entities.DataverseResponse; +import com.researchspace.dataverse.entities.DvMessage; +import com.researchspace.dataverse.entities.Identifier; +import com.researchspace.dataverse.entities.facade.DatasetFacade; +import com.researchspace.springrest.ext.RestClientException; +import com.researchspace.springrest.ext.SWORDException; +/** + * Operations on the Dataverse. + */ public interface DataverseOperations { - - - /** - * Create a new dataverse. The supplied {@link DataversePost} must contain as a minimum: - *
    - *
  • alias - *
  • Name - *
  • At least one contact email - *
- * @param parentDataverseAlias The owning Dataverse - * @throws IllegalArgumentException if any mandatory fields are null - */ - public DataverseResponse createNewDataverse (String parentDataverseAlias, DataversePost toCreate); - - /** - * Creates a new Dataset within the specified dataverse. - * @param facade - * @param dataverseAlias - * @return The id of the created dataset - * @throws IOException - */ - Identifier createDataset(DatasetFacade facade, String dataverseAlias); - - - Identifier createDataset(String dataSetJson, String dataverseAlias); - + + + /** + * Create a new dataverse. The supplied {@link DataversePost} must contain as a minimum: + *
    + *
  • alias + *
  • Name + *
  • At least one contact email + *
+ * @param parentDataverseAlias The owning Dataverse + * @throws IllegalArgumentException if any mandatory fields are null + * @throws RestClientException + */ + DataverseResponse createNewDataverse (String parentDataverseAlias, DataversePost toCreate) + throws RestClientException; + + /** + * Creates a new Dataset within the specified dataverse. + * @param facade + * @param dataverseAlias + * @return The id of the created dataset + * @throws IOException + * @throws RestClientException + */ + Identifier createDataset(DatasetFacade facade, String dataverseAlias) + throws RestClientException; + + + /** + * Create a dataset from JSON within the specified dataverse. + * @param dataSetJson representation of the dataset as expected by dataverse for a dataset creation + * @param dataverseAlias alias + * @return The id of the created dataset + * @throws RestClientException + */ + Identifier createDataset(String dataSetJson, String dataverseAlias) throws RestClientException; + /** * Gets an overview of the contents of the specified Dataverse * @param dataverseAlias - * @return + * @return + */ + List getDataverseContents(String dataverseAlias) throws RestClientException; + + /** + * Returns complete information on the dataverse + * @param dataverseAlias + * @return data representation of the dataverse + */ + DataverseGet getDataverseById(String dataverseAlias) throws RestClientException; + + /** + * Deletes a dataverse + * @param dataverseAlias numeric or unique identifier of the dataverse + * @return A DataverseResponse. If deleted successfully, getData will contain a message. + * If status is error ( e.g. couldn't be deleted) the DataverseResponse will contain a message + */ + DataverseResponse deleteDataverse(String dataverseAlias); + + /** + * Publishes the specified Dataverse. 
+ * @param dataverseAlias numeric or unique identifier of the dataverse + * @return The updated {@link DataversePost} object + */ + DataverseResponse publishDataverse(String dataverseAlias); + + /** + * Delete a file in a dataset. + * @param fileId */ - List getDataverseContents(String dataverseAlias); - - /** - * Returns complete information on the dataverse - * @param dataverseAlias - * @return - */ - DataverseGet getDataverseById(String dataverseAlias); - - /** - * Deletes a dataverse - * @param dataverseAlias numeric or unique identifier of the dataverse - * @return A DataverseResponse. If deleted successfully, getData will contain a message. - * If status is error ( e.g. couldn't be deleted) the DataverseResponse will contain a message - */ - DataverseResponse deleteDataverse(String dataverseAlias); - - /** - * Publishes the specified Datavers - * @param dataverseAlias numeric or unique identifier of the dataverse - * @return The updated {@link DataversePost} object - */ - DataverseResponse publishDataverse(String dataverseAlias); + void deleteFile(String fileId) throws SWORDException; } diff --git a/src/main/java/com/researchspace/dataverse/api/v1/InfoOperations.java b/src/main/java/com/researchspace/dataverse/api/v1/InfoOperations.java index cc3c813..8b8550b 100644 --- a/src/main/java/com/researchspace/dataverse/api/v1/InfoOperations.java +++ b/src/main/java/com/researchspace/dataverse/api/v1/InfoOperations.java @@ -1,44 +1,40 @@ -/* - * +/** +
+  Copyright 2016 ResearchSpace
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+ 
*/ package com.researchspace.dataverse.api.v1; import com.researchspace.dataverse.entities.DataverseResponse; import com.researchspace.dataverse.entities.DvMessage; -/** + +/** + * Wrapper for InfoOperations: *
- * Copyright 2016 ResearchSpace
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License. 
-
-

-* -*Wrapper for InfoOperations: -*

-*GET http://$SERVER/api/info/settings/:DatasetPublishPopupCustomText
-*and
-*url -X PUT  -d "publish" https://demo.dataverse.org/api/admin/settings/:DatasetPublishPopupCustomText
-*
-*and -* -*/ + * GET http://$SERVER/api/info/settings/:DatasetPublishPopupCustomText + * and + * url -X PUT -d "publish" https://demo.dataverse.org/api/admin/settings/:DatasetPublishPopupCustomText + *
+ */ public interface InfoOperations { - - DvMessage getDatasetPublishPopupCustomText () ; - - /** - * Deprecated, does not work for client calls from non-Localhost URLs from Dataverse 4.8 onwards - */ - DataverseResponse setDatasetPublishPopupCustomText (String text) ; + + DvMessage getDatasetPublishPopupCustomText () ; + + /** + * Deprecated, does not work for client calls from non-Localhost URLs from Dataverse 4.8 onwards + */ + DataverseResponse setDatasetPublishPopupCustomText (String text) ; } diff --git a/src/main/java/com/researchspace/dataverse/api/v1/MetadataOperations.java b/src/main/java/com/researchspace/dataverse/api/v1/MetadataOperations.java index 28cb080..f2ea35c 100644 --- a/src/main/java/com/researchspace/dataverse/api/v1/MetadataOperations.java +++ b/src/main/java/com/researchspace/dataverse/api/v1/MetadataOperations.java @@ -1,5 +1,19 @@ -/* - * +/** +
+  Copyright 2016 ResearchSpace
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+ 
*/ package com.researchspace.dataverse.api.v1; @@ -7,38 +21,25 @@ import com.researchspace.dataverse.entities.MetadataBlock; -/**
-Copyright 2016 ResearchSpace
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
+/**
+ * Operations on metadata blocks.
+ */
+public interface MetadataOperations {
 
-     http://www.apache.org/licenses/LICENSE-2.0
+    /**
+     * Return data about the block whose identifier is passed.
+     *  identifier can either be the block’s id, or its name:
+     *  

+ * Possible bug Doesn't actually work for numeric ids, only name + * @param name The MetadataBlock name + * @return + */ + MetadataBlock getMetadataById(String name); - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -

-*/ -public interface MetadataOperations { - - /** - * Return data about the block whose identifier is passed. - * identifier can either be the block’s id, or its name: - *

- * Possible bug Doesn't actually work for numeric ids, only name - * @param name The MetadataBlock name - * @return - */ - MetadataBlock getMetadataById(String name); - - /** - * Lists brief info about all metadata blocks registered in the system: - * @return a {@link List} of {@link MetadataBlock} - */ - List getMetadataBlockInfo(); + /** + * Lists brief info about all metadata blocks registered in the system: + * @return a {@link List} of {@link MetadataBlock} + */ + List getMetadataBlockInfo(); } diff --git a/src/main/java/com/researchspace/dataverse/api/v1/SearchOperations.java b/src/main/java/com/researchspace/dataverse/api/v1/SearchOperations.java index 6c30c43..634356d 100644 --- a/src/main/java/com/researchspace/dataverse/api/v1/SearchOperations.java +++ b/src/main/java/com/researchspace/dataverse/api/v1/SearchOperations.java @@ -1,19 +1,5 @@ -/* - * - */ -package com.researchspace.dataverse.api.v1; - -import com.researchspace.dataverse.entities.DataverseResponse; -import com.researchspace.dataverse.search.entities.DatasetItem; -import com.researchspace.dataverse.search.entities.DataverseItem; -import com.researchspace.dataverse.search.entities.FileSearchHit; -import com.researchspace.dataverse.search.entities.Item; -import com.researchspace.dataverse.search.entities.SearchConfig; -import com.researchspace.dataverse.search.entities.SearchConfig.SearchConfigBuilder; -import com.researchspace.dataverse.search.entities.SearchResults; - /** - *

+
   Copyright 2016 ResearchSpace
 
    Licensed under the Apache License, Version 2.0 (the "License");
@@ -28,43 +14,55 @@
    See the License for the specific language governing permissions and
    limitations under the License.
  
- Top-level entry point into the Dataverse Level1 Search API + */ +package com.researchspace.dataverse.api.v1; + +import com.researchspace.dataverse.entities.DataverseResponse; +import com.researchspace.dataverse.search.entities.DatasetItem; +import com.researchspace.dataverse.search.entities.DataverseItem; +import com.researchspace.dataverse.search.entities.FileSearchHit; +import com.researchspace.dataverse.search.entities.Item; +import com.researchspace.dataverse.search.entities.SearchConfig; +import com.researchspace.dataverse.search.entities.SearchConfig.SearchConfigBuilder; +import com.researchspace.dataverse.search.entities.SearchResults; + +/** + * Top-level entry point into the Dataverse Level1 Search API * @author rspace - * */ public interface SearchOperations { - /** - * Gets a new instance of a SearchConfigBuilder to configure a search query - * @return - */ - SearchConfigBuilder builder(); + /** + * Gets a new instance of a SearchConfigBuilder to configure a search query + * @return + */ + SearchConfigBuilder builder(); - /** - * Perform a search - * @param cfg A {@link SearchConfig} object generated from a {@link SearchConfigBuilder} - * @return - */ - DataverseResponse> search(SearchConfig cfg); + /** + * Perform a search + * @param cfg A {@link SearchConfig} object generated from a {@link SearchConfigBuilder} + * @return + */ + DataverseResponse> search(SearchConfig cfg); - /** - * A search restricted to Dataverse files that returns a typed list of {@link FileSearchHit}. - * @param A {@link SearchConfig} configured to search by SearchType.file only - * @throws IllegalArgumentException if search config is not set to return files only. - */ - DataverseResponse> searchFiles(SearchConfig cfg); + /** + * A search restricted to Dataverse files that returns a typed list of {@link FileSearchHit}. 
+ * @param A {@link SearchConfig} configured to search by SearchType.file only + * @throws IllegalArgumentException if search config is not set to return files only. + */ + DataverseResponse> searchFiles(SearchConfig cfg); - /** - * A search restricted to Dataverses that returns a typed list of {@link DataverseItem}. - * @param A {@link SearchConfig} configured to search by SearchType.dataverse only - * @throws IllegalArgumentException if search config is not set to return dataverses only. - */ - DataverseResponse> searchDataverses(SearchConfig cfg); + /** + * A search restricted to Dataverses that returns a typed list of {@link DataverseItem}. + * @param A {@link SearchConfig} configured to search by SearchType.dataverse only + * @throws IllegalArgumentException if search config is not set to return dataverses only. + */ + DataverseResponse> searchDataverses(SearchConfig cfg); - /** - * A search restricted to Dataverses that returns a typed list of {@link DatasetItem}. - * @param A {@link SearchConfig} configured to search by SearchType.dataset only - * @throws IllegalArgumentException if search config is not set to return datasets only. - */ - DataverseResponse> searchDatasets(SearchConfig cfg); + /** + * A search restricted to Dataverses that returns a typed list of {@link DatasetItem}. + * @param A {@link SearchConfig} configured to search by SearchType.dataset only + * @throws IllegalArgumentException if search config is not set to return datasets only. + */ + DataverseResponse> searchDatasets(SearchConfig cfg); } diff --git a/src/main/java/com/researchspace/dataverse/api/v1/UsersOperations.java b/src/main/java/com/researchspace/dataverse/api/v1/UsersOperations.java new file mode 100644 index 0000000..6da682f --- /dev/null +++ b/src/main/java/com/researchspace/dataverse/api/v1/UsersOperations.java @@ -0,0 +1,46 @@ +/** +
+  Copyright 2016 ResearchSpace
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+ 
+ */ +package com.researchspace.dataverse.api.v1; + +import java.text.ParseException; +import java.time.LocalDateTime; + +import com.researchspace.springrest.ext.RestClientException; + +/** + * Operations on Users (tokens). + */ +public interface UsersOperations { + + /** + * Get token expiration date. + * @return java.time.LocalDateTime token expiration. + * @throws ParseException + * @throws RestClientException + */ + LocalDateTime getTokenExpirationDate() throws ParseException; + + /** + * Get token expiration text returned by dataverse. + * @return String as the form of . + * @throws ParseException + * @throws RestClientException + */ + String getTokenExpiration() throws ParseException; + +} diff --git a/src/main/java/com/researchspace/dataverse/entities/DataSetMetadataBlock.java b/src/main/java/com/researchspace/dataverse/entities/DataSetMetadataBlock.java deleted file mode 100644 index 56ef47c..0000000 --- a/src/main/java/com/researchspace/dataverse/entities/DataSetMetadataBlock.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * - */ -package com.researchspace.dataverse.entities; - -import lombok.Data; -/**
-Copyright 2016 ResearchSpace
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
-*/ -@Data -public class DataSetMetadataBlock { - - private Citation citation; - -} diff --git a/src/main/java/com/researchspace/dataverse/entities/Dataset.java b/src/main/java/com/researchspace/dataverse/entities/Dataset.java index abdd52f..1dedb18 100644 --- a/src/main/java/com/researchspace/dataverse/entities/Dataset.java +++ b/src/main/java/com/researchspace/dataverse/entities/Dataset.java @@ -1,11 +1,13 @@ /* - * + * */ package com.researchspace.dataverse.entities; import java.net.URL; import java.util.Optional; +import com.fasterxml.jackson.annotation.JsonIgnore; + import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; @@ -24,25 +26,29 @@ See the License for the specific language governing permissions and limitations under the License.
-*/ + */ @Data @AllArgsConstructor() @NoArgsConstructor() public class Dataset { - private DatasetVersion datasetVersion, latestVersion; - private Long id; - private String identifier, protocol, authority; - private URL persistentUrl; - - /** - * Getter for the DOI String used to identify a dataset for SWORD upload - * @return an {@link Optional}. Will be null if persistentURL is not set. - */ - public Optional getDoiId (){ - if(persistentUrl == null) { - return Optional.empty(); - } else { - return Optional.of(getPersistentUrl().getPath().substring(1)); - } - } + private DatasetVersion datasetVersion; + private DatasetVersion latestVersion; + private Long id; + private String identifier; + private String protocol; + private String authority; + private URL persistentUrl; + + /** + * Getter for the DOI String used to identify a dataset for SWORD upload + * @return an {@link Optional}. Will be null if persistentURL is not set. + */ + @JsonIgnore + public Optional getDoiId (){ + if(persistentUrl == null) { + return Optional.empty(); + } else { + return Optional.of(getPersistentUrl().getPath().substring(1)); + } + } } diff --git a/src/main/java/com/researchspace/dataverse/entities/DatasetMetadataBlock.java b/src/main/java/com/researchspace/dataverse/entities/DatasetMetadataBlock.java new file mode 100644 index 0000000..f9f2bb4 --- /dev/null +++ b/src/main/java/com/researchspace/dataverse/entities/DatasetMetadataBlock.java @@ -0,0 +1,34 @@ +package com.researchspace.dataverse.entities; + +import java.util.List; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; +/**
+Copyright 2016 ResearchSpace
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+ */ +/** + * Dataset metadata block structure. + * @author ltromel + */ +@Data +@AllArgsConstructor +@NoArgsConstructor +public class DatasetMetadataBlock { + private String displayName; + private List fields; +} diff --git a/src/main/java/com/researchspace/dataverse/entities/DatasetVersion.java b/src/main/java/com/researchspace/dataverse/entities/DatasetVersion.java index dc3acfc..0fe29f0 100644 --- a/src/main/java/com/researchspace/dataverse/entities/DatasetVersion.java +++ b/src/main/java/com/researchspace/dataverse/entities/DatasetVersion.java @@ -1,9 +1,10 @@ /* - * + * */ package com.researchspace.dataverse.entities; import java.util.Date; +import java.util.Map; import lombok.Data; /**
@@ -21,15 +22,16 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 
-*/ + */ @Data public class DatasetVersion { - private Long id; - private String versionState; - private String productionDate; - private Date lastUpdateTime; - private Date createTime; - private DataSetMetadataBlock metadataBlocks; - private int versionNumber, versionMinorNumber; - + private Long id; + private String versionState; + private String productionDate; + private String termsOfUse; + private Date lastUpdateTime; + private Date createTime; + private Map metadataBlocks; + private int versionNumber; + private int versionMinorNumber; } diff --git a/src/main/java/com/researchspace/dataverse/entities/DataverseGet.java b/src/main/java/com/researchspace/dataverse/entities/DataverseGet.java index 755a027..616be96 100644 --- a/src/main/java/com/researchspace/dataverse/entities/DataverseGet.java +++ b/src/main/java/com/researchspace/dataverse/entities/DataverseGet.java @@ -5,8 +5,8 @@ import java.util.Map; import com.fasterxml.jackson.annotation.JsonFormat; - import com.fasterxml.jackson.annotation.JsonIgnore; + import lombok.Data; /**
 Copyright 2016 ResearchSpace
@@ -23,41 +23,42 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 
-*/ + */ @Data @JsonFormat public class DataverseGet { - private String id; - private String alias; - private String name; - private String affiliation; - private String permissionRoot; - private String description; - private String ownerId; - private Date creationDate; - - /** - * This can be a list of Strings or objects [displayOrder, contactEmail]. - * Parsing as Object means that deserialisation won't fail - */ - private List dataverseContacts = new ArrayList<>(); - - /** - * Gets list of contact emails independent of underlying representation - * @return - */ - @JsonIgnore - public List getContactEmails(){ - List rc = new ArrayList<>(); - for (Object o: dataverseContacts) { - if (o instanceof String) { - rc.add((String) o); - } else { - Map object = (Map)o; - rc.add(object.getOrDefault("contactEmail", "")); - } - } - return rc; - } + private String id; + private String alias; + private String name; + private String affiliation; + private String permissionRoot; + private String description; + private String ownerId; + private Date creationDate; + + /** + * This can be a list of Strings or objects [displayOrder, contactEmail]. 
+ * Parsing as Object means that deserialisation won't fail + */ + private List dataverseContacts = new ArrayList<>(); + + /** + * Gets list of contact emails independent of underlying representation + * @return + */ + @JsonIgnore + public List getContactEmails(){ + final List rc = new ArrayList<>(); + for (final Object o: dataverseContacts) { + if (o instanceof String) { + rc.add((String) o); + } else { + @SuppressWarnings("unchecked") + final Map object = (Map) o; + rc.add(object.getOrDefault("contactEmail", "")); + } + } + return rc; + } } diff --git a/src/main/java/com/researchspace/dataverse/entities/CitationField.java b/src/main/java/com/researchspace/dataverse/entities/Field.java similarity index 81% rename from src/main/java/com/researchspace/dataverse/entities/CitationField.java rename to src/main/java/com/researchspace/dataverse/entities/Field.java index 1bf5af5..608332f 100644 --- a/src/main/java/com/researchspace/dataverse/entities/CitationField.java +++ b/src/main/java/com/researchspace/dataverse/entities/Field.java @@ -1,6 +1,3 @@ -/* - * - */ package com.researchspace.dataverse.entities; import lombok.AllArgsConstructor; @@ -22,12 +19,17 @@ See the License for the specific language governing permissions and limitations under the License. -*/ + */ +/** + * Metadata field structure. 
+ * @author ltromel + */ @Data @AllArgsConstructor @NoArgsConstructor -public class CitationField { - private String typeName, typeClass; - private boolean multiple; - private Object value; +public class Field { + + private String typeName, typeClass; + private boolean multiple; + private Object value; } diff --git a/src/main/java/com/researchspace/dataverse/entities/Identifier.java b/src/main/java/com/researchspace/dataverse/entities/Identifier.java index 2a2b2c8..b29345c 100644 --- a/src/main/java/com/researchspace/dataverse/entities/Identifier.java +++ b/src/main/java/com/researchspace/dataverse/entities/Identifier.java @@ -1,5 +1,5 @@ /* - * + * */ package com.researchspace.dataverse.entities; @@ -22,12 +22,20 @@ See the License for the specific language governing permissions and limitations under the License. -*/ + */ @Data @AllArgsConstructor @NoArgsConstructor public class Identifier { - private Long id; - private String persistentId; + private Long id; + private String persistentId; + + /** + * One argument constructor (compatibility). + * @param datasetId + */ + public Identifier(final Long datasetId) { + id = datasetId; + } } diff --git a/src/main/java/com/researchspace/dataverse/entities/facade/ContributorType.java b/src/main/java/com/researchspace/dataverse/entities/facade/ContributorType.java index 3941f48..8b56e99 100644 --- a/src/main/java/com/researchspace/dataverse/entities/facade/ContributorType.java +++ b/src/main/java/com/researchspace/dataverse/entities/facade/ContributorType.java @@ -1,9 +1,4 @@ -/* - * - */ -package com.researchspace.dataverse.entities.facade; -/** - * /**
+/** 
 Copyright 2016 ResearchSpace
 
  Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,11 +12,11 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
- +
*/ +package com.researchspace.dataverse.entities.facade; +/** * Listing of Contributor Types for Dataset 'Contributor' Citation field. * @author rspace - * */ public enum ContributorType { DataCollector("Data Collector"), @@ -41,13 +36,13 @@ public enum ContributorType { Supervisor("Supervisor"), WorkPackageLeader("Work Package Leader"), Other("Other"); - - private String displayName; - private ContributorType (String displayName) { - this.displayName = displayName; - } - public String getDisplayName() { - return displayName; - } + + private String displayName; + ContributorType (final String displayName) { + this.displayName = displayName; + } + public String getDisplayName() { + return displayName; + } } diff --git a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetAuthor.java b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetAuthor.java index 5ab7c2c..3fc02ea 100644 --- a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetAuthor.java +++ b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetAuthor.java @@ -1,12 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.entities.facade; - -import lombok.Builder; -import lombok.Data; -import lombok.NonNull; - /**
 Copyright 2016 ResearchSpace
 
@@ -22,11 +13,20 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 
-*/ + */ +package com.researchspace.dataverse.entities.facade; + +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; + +/** + * Dataset author. + */ @Data @Builder -public class DatasetAuthor { - private @NonNull String authorName; - private String authorAffiliation, authorIdentifierScheme, authorIdentifier; +public class DatasetAuthor { + private @NonNull String authorName; + private String authorAffiliation, authorIdentifierScheme, authorIdentifier; } diff --git a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetBuilder.java b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetBuilder.java index 52d6ae5..b83be6d 100644 --- a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetBuilder.java +++ b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetBuilder.java @@ -1,5 +1,18 @@ -/* - * +/**
+Copyright 2016 ResearchSpace
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
*/ package com.researchspace.dataverse.entities.facade; @@ -13,360 +26,410 @@ import java.util.List; import java.util.Map; -import com.researchspace.dataverse.entities.Citation; -import com.researchspace.dataverse.entities.CitationField; import com.researchspace.dataverse.entities.CitationType; -import com.researchspace.dataverse.entities.DataSetMetadataBlock; import com.researchspace.dataverse.entities.Dataset; +import com.researchspace.dataverse.entities.DatasetMetadataBlock; import com.researchspace.dataverse.entities.DatasetVersion; +import com.researchspace.dataverse.entities.Field; /** - * /**
-Copyright 2016 ResearchSpace
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
* Converts POJO Java facade into underlying JSON object representation - * * @author rspace - * */ public class DatasetBuilder { - private static final String CONTRIBUTOR = "contributor"; - private static final String CONTRIBUTOR_TYPE = "contributorType"; - private static final String CONTRIBUTOR_NAME = "contributorName"; - private static final String PRODUCER_LOGO_URL = "producerLogoURL"; - private static final String PRODUCER_URL = "producerURL"; - private static final String PRODUCER_AFFILIATION = "producerAffiliation"; - private static final String PRODUCER_ABBREVIATION = "producerAbbreviation"; - private static final String PRODUCER_NAME = "producerName"; - private static final String PUBLICATION_URL = "publicationURL"; - private static final String PUBLICATION_ID = "publicationIDNumber"; - private static final String PUBLICATION_ID_TYPE = "publicationIDType"; - private static final String PUBLICATION_CITATION= "publicationCitation"; - - private static final String KEYWORD_VOCABULARY_URI = "keywordVocabularyURI"; - private static final String KEYWORD_VOCABULARY = "keywordVocabulary"; - private static final String KEYWORD_VALUE = "keywordValue"; - private static final String TOPIC_VOCABULARY_URI = "topicClassVocabURI"; - private static final String TOPIC_VOCABULARY = "topicClassVocab"; - private static final String TOPIC_VALUE = "topicClassValue"; - private static final String DATASET_DESC_VALUE = "dsDescriptionValue"; - private static final String DATASET_DESC_DATE = "dsDescriptionDate"; - private static final String DATASET_CONTACT_EMAIL = "datasetContactEmail"; - private static final String DATASET_CONTACT_NAME = "datasetContactName"; - private static final String DATASET_CONTACT_AFFILIATION = "datasetContactAffiliation"; - private static final String AUTHOR_IDENTIFIER_SCHEME = "authorIdentifierScheme"; - private static final String AUTHOR_IDENTIFIER = "authorIdentifier"; - private static final String AUTHOR_AFFILIATION = "authorAffiliation"; - private 
static final String AUTHOR_NAME = "authorName"; - - public Dataset build(DatasetFacade facade) { - DatasetVersion dv = new DatasetVersion(); - DataSetMetadataBlock blocks = new DataSetMetadataBlock(); - Citation citation = new Citation(); - - List fields = createFields(facade, citation); - - dv.setMetadataBlocks(blocks); - blocks.setCitation(citation); - citation.setFields(fields); - Dataset toSubmit = new Dataset(); - toSubmit.setDatasetVersion(dv); - return toSubmit; - } - - private List createFields(DatasetFacade facade, Citation citation) { - List fields = new ArrayList<>(); - //mandatory fields - addTitle(facade, fields); - addAuthors(facade, fields); - addDescription(facade, fields); - addKeywords(facade, fields); - addTopicClassifications(facade, fields); - addPublications(facade, fields); - addLanguages(facade, fields); - addNotes(facade, fields); - addProducers(facade, fields); - addProductionDate(facade, fields); - addProductionPlace(facade, fields); - addContributor(facade, fields); - addSubject(facade, fields); - addContacts(facade, fields); - - //optional fields - addDepositor(facade, fields); - addSubTitle(facade, fields); - addAlternativeTitle(facade, fields); - addAlternativeURL(facade, fields); - return fields; - } - - - - - - private void addProductionDate(DatasetFacade facade, List fields) { - if( facade.getProductionDate() != null) { - CitationField prodDate = createPrimitiveSingleField("productionDate", isoDate(facade.getProductionDate())); - fields.add(prodDate); - } - } - - private void addProductionPlace(DatasetFacade facade, List fields) { - if(!isEmpty(facade.getProductionPlace())){ - CitationField prodPlace = createPrimitiveSingleField("productionPlace", facade.getProductionPlace()); - fields.add(prodPlace); - } - } - - - private void addLanguages(DatasetFacade facade, List fields) { - if (!facade.getLanguages().isEmpty()) { - CitationField field = createControlledVocabField("language", true, facade.getLanguages()); - fields.add(field); - 
} - } - - private void addNotes(DatasetFacade facade, List fields) { - if(!isEmpty(facade.getNote())){ - CitationField altUrl = createPrimitiveSingleField("notesText", facade.getNote()); - fields.add(altUrl); - } - } - - private void addAlternativeURL(DatasetFacade facade, List fields) { - if (facade.getAlternativeURL() != null) { - CitationField altUrl = createPrimitiveSingleField("alternativeURL", facade.getAlternativeURL().toString()); - fields.add(altUrl); - } - } - - private void addAlternativeTitle(DatasetFacade facade, List fields) { - if (!isEmpty(facade.getAlternativeTitle())) { - CitationField title = createPrimitiveSingleField("alternativeTitle", facade.getAlternativeTitle()); - fields.add(title); - } - } - - private void addSubTitle(DatasetFacade facade, List fields) { - if (!isEmpty(facade.getSubtitle())) { - CitationField subtitle = createPrimitiveSingleField("subtitle", facade.getSubtitle()); - fields.add(subtitle); - } - } - - private void addSubject(DatasetFacade facade, List fields) { - CitationField subject = createControlledVocabField("subject", true, Arrays.asList(facade.getSubject())); - fields.add(subject); - } - - private void addDescription(DatasetFacade facade, List fields) { - List descs = facade.getDescriptions(); - List> descList = new ArrayList<>(); - for (DatasetDescription desc: descs) { - Map map2 = new HashMap<>(); - CitationField descF = createPrimitiveSingleField(DATASET_DESC_VALUE, desc.getDescription()); - map2.put(DATASET_DESC_VALUE, descF); - if(desc.getDate() != null) { - CitationField dateF = createPrimitiveSingleField(DATASET_DESC_DATE, isoDate(desc.getDate())); - map2.put(DATASET_DESC_DATE, dateF); - } - descList.add(map2); - } - - CitationField desc = createCompoundField("dsDescription", true, descList); - fields.add(desc); - } - - private void addTopicClassifications(DatasetFacade facade, List fields) { - List topics = facade.getTopicClassifications(); - List> topicsList = new ArrayList<>(); - for 
(DatasetTopicClassification topic: topics) { - Map map = new HashMap<>(); - addOptionalPrimitiveField(topic.getTopicClassValue(), map, TOPIC_VALUE); - addOptionalPrimitiveField(topic.getTopicClassVocab(), map, TOPIC_VOCABULARY); - addOptionalPrimitiveField(topic.getTopicClassVocabURI().toString(), map, TOPIC_VOCABULARY_URI); - topicsList.add(map); - } - CitationField topicClassifn = createCompoundField("topicClassification", true, topicsList); - fields.add(topicClassifn); - } - - private void addProducers(DatasetFacade facade, List fields) { - List topics = facade.getProducers(); - List> topicsList = new ArrayList<>(); - for (DatasetProducer topic: topics) { - Map map = new HashMap<>(); - addOptionalPrimitiveField(topic.getName(), map, PRODUCER_NAME); - addOptionalPrimitiveField(topic.getAbbreviation(), map, PRODUCER_ABBREVIATION); - addOptionalPrimitiveField(topic.getAffiliation(), map, PRODUCER_AFFILIATION); - addOptionalPrimitiveField(topic.getUrl().toString(), map, PRODUCER_URL); - addOptionalPrimitiveField(topic.getLogoURL().toString(), map, PRODUCER_LOGO_URL); - topicsList.add(map); - } - CitationField topicClassifn = createCompoundField("producer", true, topicsList); - fields.add(topicClassifn); - } - - private void addContributor(DatasetFacade facade, List fields) { - List contribs = facade.getContributors(); - List> contribList = new ArrayList<>(); - for (DatasetContributor contrib: contribs) { - Map map = new HashMap<>(); - addOptionalPrimitiveField(contrib.getName(), map, CONTRIBUTOR_NAME); - if(contrib.getType()!=null) { - CitationField cf =createControlledVocabField(CONTRIBUTOR_TYPE, false, - asList(new String[] { contrib.getType().getDisplayName() })); - map.put(CONTRIBUTOR_TYPE, cf); - } - contribList.add(map); - } - CitationField topicClassifn = createCompoundField(CONTRIBUTOR, true, contribList); - fields.add(topicClassifn); - } - - private void addPublications(DatasetFacade facade, List fields) { - List publications = facade.getPublications(); - 
List> list = new ArrayList<>(); - for (DatasetPublication publication: publications) { - Map map = new HashMap<>(); - addOptionalPrimitiveField(publication.getPublicationCitation(), map, PUBLICATION_CITATION); - addOptionalPrimitiveField(publication.getPublicationIdNumber(), map, PUBLICATION_ID); - addOptionalPrimitiveField(publication.getPublicationURL().toString(), map, PUBLICATION_URL); - if (publication.getPublicationIDType()!= null) { - CitationField scheme = createControlledVocabField(PUBLICATION_ID_TYPE, false, - asList(new String[] { publication.getPublicationIDType().name() })); - map.put(PUBLICATION_ID_TYPE, scheme); - } - list.add(map); - } - CitationField publication = createCompoundField("publication", true, list); - fields.add(publication); - } - - private void addKeywords(DatasetFacade facade, List fields) { - List keywords = facade.getKeywords(); - List> keysList = new ArrayList<>(); - for (DatasetKeyword keyword: keywords) { - Map map2 = new HashMap<>(); - CitationField descF = createPrimitiveSingleField(KEYWORD_VALUE, keyword.getValue()); - map2.put(KEYWORD_VALUE, descF); - addOptionalPrimitiveField(keyword.getVocabulary(), map2, KEYWORD_VOCABULARY); - addOptionalPrimitiveField(keyword.getVocabularyURI().toString(), map2, KEYWORD_VOCABULARY_URI); - keysList.add(map2); - } - - CitationField desc = createCompoundField("keyword", true, keysList); - fields.add(desc); - - } - - private String isoDate(Date date) { - return String.format("%tF", date); - } - - private void addContacts (DatasetFacade facade, List fields) { - List contacts = facade.getContacts(); - List> contactsList = new ArrayList<>(); - for (DatasetContact contact : contacts) { - Map map2 = new HashMap<>(); - CitationField email = createPrimitiveSingleField(DATASET_CONTACT_EMAIL, contact.getDatasetContactEmail()); - map2.put(DATASET_CONTACT_EMAIL, email); - addOptionalPrimitiveField(contact.getDatasetContactName(), map2, DATASET_CONTACT_NAME); - 
addOptionalPrimitiveField(contact.getDatasetContactAffiliation(), map2, DATASET_CONTACT_AFFILIATION); - contactsList.add(map2); - } - CitationField contact = createCompoundField("datasetContact", true, contactsList); - fields.add(contact); - } - - private void addAuthors (DatasetFacade facade, List fields) { - List authors = facade.getAuthors(); - List> authorsMap = new ArrayList<>(); - for (DatasetAuthor author : authors) { - Map map = new HashMap<>(); - CitationField authorName = createPrimitiveSingleField(AUTHOR_NAME, author.getAuthorName()); - map.put(AUTHOR_NAME, authorName); - addOptionalPrimitiveField(author.getAuthorAffiliation(), map, AUTHOR_AFFILIATION); - addOptionalPrimitiveField(author.getAuthorIdentifier(), map, AUTHOR_IDENTIFIER); - - if (!isEmpty(author.getAuthorIdentifierScheme())) { - CitationField scheme = createControlledVocabField(AUTHOR_IDENTIFIER_SCHEME, false, - asList(new String[] { author.getAuthorIdentifierScheme() })); - map.put(AUTHOR_IDENTIFIER_SCHEME, scheme); - } - authorsMap.add(map); - } - CitationField toAdd = createCompoundField("author", true, authorsMap); - fields.add(toAdd); - } - - private void addOptionalPrimitiveField(String value, Map map, String field) { - if (!isEmpty(value)) { - CitationField affil = createPrimitiveSingleField(field, value); - map.put(field, affil); - } - } - - private void addTitle(DatasetFacade facade, List fields) { - CitationField title = createPrimitiveSingleField("title", facade.getTitle()); - fields.add(title); - } - - private void addDepositor(DatasetFacade facade, List fields) { - CitationField deposit = createPrimitiveSingleField("depositor", facade.getDepositor()); - fields.add(deposit); - } - - public CitationField createPrimitiveSingleField(String name, String value) { - CitationField cf = new CitationField(name, CitationType.PRIMITIVE.toString(), false, value); - return cf; - } - - public CitationField createPrimitiveMultipleField(String name, String... 
value) { - CitationField cf = new CitationField(name, CitationType.PRIMITIVE.toString(), true, value); - return cf; - } - - public CitationField createCompoundField(String name, boolean isMultiple, List> values) { - CitationField cf = null; - checkArgs(isMultiple, values); - if (isMultiple) { - cf = new CitationField(name, CitationType.COMPOUND.toString(), isMultiple, values); - } else { - cf = new CitationField(name, CitationType.COMPOUND.toString(), isMultiple, values.get(0)); - } - return cf; - } - - private void checkArgs(boolean isMultiple, List values) { - if (!isMultiple && values.size() > 1) { - throw new IllegalArgumentException( - String.format("Field is not multiple but %d arguments were supplied", values.size())); - } - } - - public CitationField createControlledVocabField(String name, boolean isMultiple, List values) { - CitationField cf = null; - checkArgs(isMultiple, values); - if (isMultiple) { - cf = new CitationField(name, CitationType.CONTROLLEDVOCABULARY.toString(), isMultiple, values); - } else { - cf = new CitationField(name, CitationType.CONTROLLEDVOCABULARY.toString(), isMultiple, values.get(0)); - } - return cf; - } + private static final String CONTRIBUTOR = "contributor"; + private static final String CONTRIBUTOR_TYPE = "contributorType"; + private static final String CONTRIBUTOR_NAME = "contributorName"; + private static final String PRODUCER_LOGO_URL = "producerLogoURL"; + private static final String PRODUCER_URL = "producerURL"; + private static final String PRODUCER_AFFILIATION = "producerAffiliation"; + private static final String PRODUCER_ABBREVIATION = "producerAbbreviation"; + private static final String PRODUCER_NAME = "producerName"; + private static final String PUBLICATION_URL = "publicationURL"; + private static final String PUBLICATION_ID = "publicationIDNumber"; + private static final String PUBLICATION_ID_TYPE = "publicationIDType"; + private static final String PUBLICATION_CITATION= "publicationCitation"; + private static 
final String KEYWORD_VOCABULARY_URI = "keywordVocabularyURI"; + private static final String KEYWORD_VOCABULARY = "keywordVocabulary"; + private static final String KEYWORD_VALUE = "keywordValue"; + private static final String TOPIC_VOCABULARY_URI = "topicClassVocabURI"; + private static final String TOPIC_VOCABULARY = "topicClassVocab"; + private static final String TOPIC_VALUE = "topicClassValue"; + private static final String DATASET_DESC_VALUE = "dsDescriptionValue"; + private static final String DATASET_DESC_DATE = "dsDescriptionDate"; + private static final String DATASET_CONTACT_EMAIL = "datasetContactEmail"; + private static final String DATASET_CONTACT_NAME = "datasetContactName"; + private static final String DATASET_CONTACT_AFFILIATION = "datasetContactAffiliation"; + private static final String AUTHOR_IDENTIFIER_SCHEME = "authorIdentifierScheme"; + private static final String AUTHOR_IDENTIFIER = "authorIdentifier"; + private static final String AUTHOR_AFFILIATION = "authorAffiliation"; + private static final String AUTHOR_NAME = "authorName"; + private static final String KIND_OF_DATA = "kindOfData"; + private static final String TIME_PERIOD_COVERED = "timePeriodCovered"; + private static final String TIME_PERIOD_COVERED_START = "timePeriodCoveredStart"; + private static final String TIME_PERIOD_COVERED_END = "timePeriodCoveredEnd"; + + // Note : North and south latitude are named "Longitude" because dataverse expect this in the .json. + // It's obviously an error, in the meantime, they'll be named like this waiting for a patch. 
+ private static final String WEST_LONGITUDE = "westLongitude"; + private static final String EAST_LONGITUDE = "eastLongitude"; + private static final String NORTH_LONGITUDE = "northLongitude"; + private static final String SOUTH_LONGITUDE = "southLongitude"; + private static final String GEOGRAPHIC_BOUNDING_BOX = "geographicBoundingBox"; + + public Dataset build(final DatasetFacade facade) { + final Map metadataBlocks = new HashMap<>(); + metadataBlocks.put("citation", new DatasetMetadataBlock("Citation Metadata", createCitationFields(facade))); + metadataBlocks.put("geospatial", new DatasetMetadataBlock("Geospatial Metadata", createFields(facade))); + if (facade.getTermsOfUse() != null) { + return build(metadataBlocks, facade.getTermsOfUse()); + } + return build(metadataBlocks); + } + + public Dataset build(final Map metadataBlocks) { + final DatasetVersion dv = new DatasetVersion(); + final Dataset toSubmit = new Dataset(); + toSubmit.setDatasetVersion(dv); + dv.setMetadataBlocks(metadataBlocks); + return toSubmit; + } + + public Dataset build(final Map metadataBlocks, final String termsOfUse) { + final Dataset dataset = build(metadataBlocks); + dataset.getDatasetVersion().setTermsOfUse(termsOfUse); + return dataset; + } + + private List createFields(final DatasetFacade facade) { + final List fields = new ArrayList<>(); + addGeographicBoundingBox(facade, fields); + return fields; + } + + private List createCitationFields(final DatasetFacade facade) { + final List fields = new ArrayList<>(); + //mandatory fields + addTitle(facade, fields); + addAuthors(facade, fields); + addDescription(facade, fields); + addKeywords(facade, fields); + addTopicClassifications(facade, fields); + addPublications(facade, fields); + addLanguages(facade, fields); + addNotes(facade, fields); + addProducers(facade, fields); + addProductionDate(facade, fields); + addProductionPlace(facade, fields); + addContributor(facade, fields); + addSubject(facade, fields); + addContacts(facade, 
fields); + + //optional fields + addDepositor(facade, fields); + addSubTitle(facade, fields); + addAlternativeTitle(facade, fields); + addAlternativeURL(facade, fields); + addKindOfData(facade, fields); + addTimePeriodCovered(facade, fields); + return fields; + } + + private void addProductionDate(final DatasetFacade facade, final List fields) { + if( facade.getProductionDate() != null) { + final Field prodDate = createPrimitiveSingleField("productionDate", isoDate(facade.getProductionDate())); + fields.add(prodDate); + } + } + + private void addProductionPlace(final DatasetFacade facade, final List fields) { + if(!facade.getProductionPlaces().isEmpty()){ + final Field prodPlace = createPrimitiveMultipleField("productionPlace", + facade.getProductionPlaces().toArray(new String[0])); + fields.add(prodPlace); + } + } + + + private void addLanguages(final DatasetFacade facade, final List fields) { + if (!facade.getLanguages().isEmpty()) { + final Field field = createControlledVocabField("language", true, facade.getLanguages()); + fields.add(field); + } + } + + private void addNotes(final DatasetFacade facade, final List fields) { + if(!isEmpty(facade.getNote())){ + final Field altUrl = createPrimitiveSingleField("notesText", facade.getNote()); + fields.add(altUrl); + } + } + + private void addAlternativeURL(final DatasetFacade facade, final List fields) { + if (facade.getAlternativeURL() != null) { + final Field altUrl = createPrimitiveSingleField("alternativeURL", facade.getAlternativeURL().toString()); + fields.add(altUrl); + } + } + + private void addKindOfData(final DatasetFacade facade, final List fields) { + if (facade.getKindsOfData() != null) { + final Field kindOfData = createPrimitiveMultipleField(KIND_OF_DATA, + facade.getKindsOfData().toArray(new String[0])); + fields.add(kindOfData); + + } + } + + private void addAlternativeTitle(final DatasetFacade facade, final List fields) { + if (!isEmpty(facade.getAlternativeTitle())) { + final Field title = 
createPrimitiveSingleField("alternativeTitle", facade.getAlternativeTitle()); + fields.add(title); + } + } + + private void addSubTitle(final DatasetFacade facade, final List fields) { + if (!isEmpty(facade.getSubtitle())) { + final Field subtitle = createPrimitiveSingleField("subtitle", facade.getSubtitle()); + fields.add(subtitle); + } + } + + private void addSubject(final DatasetFacade facade, final List fields) { + final Field subject = createControlledVocabField("subject", true, Arrays.asList(facade.getSubject())); + fields.add(subject); + } + + private void addDescription(final DatasetFacade facade, final List fields) { + final List descs = facade.getDescriptions(); + final List> descList = new ArrayList<>(); + for (final DatasetDescription desc: descs) { + final Map map2 = new HashMap<>(); + final Field descF = createPrimitiveSingleField(DATASET_DESC_VALUE, desc.getDescription()); + map2.put(DATASET_DESC_VALUE, descF); + if(desc.getDate() != null) { + final Field dateF = createPrimitiveSingleField(DATASET_DESC_DATE, isoDate(desc.getDate())); + map2.put(DATASET_DESC_DATE, dateF); + } + descList.add(map2); + } + + final Field desc = createCompoundField("dsDescription", true, descList); + fields.add(desc); + } + + private void addTopicClassifications(final DatasetFacade facade, final List fields) { + final List topics = facade.getTopicClassifications(); + final List> topicsList = new ArrayList<>(); + for (final DatasetTopicClassification topic: topics) { + final Map map = new HashMap<>(); + addOptionalPrimitiveField(topic.getTopicClassValue(), map, TOPIC_VALUE); + addOptionalPrimitiveField(topic.getTopicClassVocab(), map, TOPIC_VOCABULARY); + addOptionalPrimitiveField(topic.getTopicClassVocabURI().toString(), map, TOPIC_VOCABULARY_URI); + topicsList.add(map); + } + final Field topicClassifn = createCompoundField("topicClassification", true, topicsList); + fields.add(topicClassifn); + } + + private void addProducers(final DatasetFacade facade, final List fields) 
{ + final List topics = facade.getProducers(); + final List> topicsList = new ArrayList<>(); + for (final DatasetProducer topic: topics) { + final Map map = new HashMap<>(); + addOptionalPrimitiveField(topic.getName(), map, PRODUCER_NAME); + addOptionalPrimitiveField(topic.getAbbreviation(), map, PRODUCER_ABBREVIATION); + addOptionalPrimitiveField(topic.getAffiliation(), map, PRODUCER_AFFILIATION); + addOptionalPrimitiveField(topic.getUrl().toString(), map, PRODUCER_URL); + addOptionalPrimitiveField(topic.getLogoURL().toString(), map, PRODUCER_LOGO_URL); + topicsList.add(map); + } + final Field topicClassifn = createCompoundField("producer", true, topicsList); + fields.add(topicClassifn); + } + + private void addContributor(final DatasetFacade facade, final List fields) { + final List contribs = facade.getContributors(); + final List> contribList = new ArrayList<>(); + for (final DatasetContributor contrib: contribs) { + final Map map = new HashMap<>(); + addOptionalPrimitiveField(contrib.getName(), map, CONTRIBUTOR_NAME); + if(contrib.getType()!=null) { + final Field cf =createControlledVocabField(CONTRIBUTOR_TYPE, false, + asList(contrib.getType().getDisplayName())); + map.put(CONTRIBUTOR_TYPE, cf); + } + contribList.add(map); + } + final Field topicClassifn = createCompoundField(CONTRIBUTOR, true, contribList); + fields.add(topicClassifn); + } + + private void addPublications(final DatasetFacade facade, final List fields) { + final List publications = facade.getPublications(); + final List> list = new ArrayList<>(); + for (final DatasetPublication publication: publications) { + final Map map = new HashMap<>(); + addOptionalPrimitiveField(publication.getPublicationCitation(), map, PUBLICATION_CITATION); + addOptionalPrimitiveField(publication.getPublicationIdNumber(), map, PUBLICATION_ID); + addOptionalPrimitiveField(publication.getPublicationURL().toString(), map, PUBLICATION_URL); + if (publication.getPublicationIDType()!= null) { + final Field scheme = 
createControlledVocabField(PUBLICATION_ID_TYPE, false, + asList(publication.getPublicationIDType().name())); + map.put(PUBLICATION_ID_TYPE, scheme); + } + list.add(map); + } + final Field publication = createCompoundField("publication", true, list); + fields.add(publication); + } + + private void addKeywords(final DatasetFacade facade, final List fields) { + final List keywords = facade.getKeywords(); + final List> keysList = new ArrayList<>(); + for (final DatasetKeyword keyword: keywords) { + final Map map2 = new HashMap<>(); + final Field descF = createPrimitiveSingleField(KEYWORD_VALUE, keyword.getValue()); + map2.put(KEYWORD_VALUE, descF); + addOptionalPrimitiveField(keyword.getVocabulary(), map2, KEYWORD_VOCABULARY); + addOptionalPrimitiveField(keyword.getVocabularyURI().toString(), map2, KEYWORD_VOCABULARY_URI); + keysList.add(map2); + } + + final Field desc = createCompoundField("keyword", true, keysList); + fields.add(desc); + + } + + private String isoDate(final Date date) { + return String.format("%tF", date); + } + + private void addContacts (final DatasetFacade facade, final List fields) { + final List contacts = facade.getContacts(); + final List> contactsList = new ArrayList<>(); + for (final DatasetContact contact : contacts) { + final Map map2 = new HashMap<>(); + final Field email = createPrimitiveSingleField(DATASET_CONTACT_EMAIL, contact.getDatasetContactEmail()); + map2.put(DATASET_CONTACT_EMAIL, email); + addOptionalPrimitiveField(contact.getDatasetContactName(), map2, DATASET_CONTACT_NAME); + addOptionalPrimitiveField(contact.getDatasetContactAffiliation(), map2, DATASET_CONTACT_AFFILIATION); + contactsList.add(map2); + } + final Field contact = createCompoundField("datasetContact", true, contactsList); + fields.add(contact); + } + + private void addAuthors (final DatasetFacade facade, final List fields) { + final List authors = facade.getAuthors(); + final List> authorsMap = new ArrayList<>(); + for (final DatasetAuthor author : authors) { + 
final Map map = new HashMap<>(); + final Field authorName = createPrimitiveSingleField(AUTHOR_NAME, author.getAuthorName()); + map.put(AUTHOR_NAME, authorName); + addOptionalPrimitiveField(author.getAuthorAffiliation(), map, AUTHOR_AFFILIATION); + addOptionalPrimitiveField(author.getAuthorIdentifier(), map, AUTHOR_IDENTIFIER); + + if (!isEmpty(author.getAuthorIdentifierScheme())) { + final Field scheme = createControlledVocabField(AUTHOR_IDENTIFIER_SCHEME, false, + asList(author.getAuthorIdentifierScheme())); + map.put(AUTHOR_IDENTIFIER_SCHEME, scheme); + } + authorsMap.add(map); + } + final Field toAdd = createCompoundField("author", true, authorsMap); + fields.add(toAdd); + } + + private void addTimePeriodCovered (final DatasetFacade facade, final List fields) { + final List timePeriodCovered = facade.getTimePeriodsCovered(); + final List> timePeriodMap = new ArrayList<>(); + for (final DatasetTimePeriodCovered period : timePeriodCovered) { + final Map map = new HashMap<>(); + final Field start = createPrimitiveSingleField(TIME_PERIOD_COVERED_START, isoDate(period.getTimePeriodCoveredStart())); + map.put(TIME_PERIOD_COVERED_START, start); + final Field end = createPrimitiveSingleField(TIME_PERIOD_COVERED_END, isoDate(period.getTimePeriodCoveredEnd())); + map.put(TIME_PERIOD_COVERED_END, end); + timePeriodMap.add(map); + } + final Field toAdd = createCompoundField(TIME_PERIOD_COVERED, true, timePeriodMap); + fields.add(toAdd); + } + + private void addGeographicBoundingBox (final DatasetFacade facade, final List fields) { + final List geographicBoundingBoxes = facade.getGeographicBoundingBoxes(); + if (geographicBoundingBoxes != null && !geographicBoundingBoxes.isEmpty()) { + final List> geographicBoundariesMap = new ArrayList<>(); + for (final DatasetGeographicBoundingBox geographicBoundingBox : geographicBoundingBoxes) { + final Map map = new HashMap<>(); + final Field west = createPrimitiveSingleField(WEST_LONGITUDE, 
geographicBoundingBox.getWestLongitude().toString()); + map.put(WEST_LONGITUDE, west); + final Field east = createPrimitiveSingleField(EAST_LONGITUDE, geographicBoundingBox.getEastLongitude().toString()); + map.put(EAST_LONGITUDE, east); + final Field north = createPrimitiveSingleField(NORTH_LONGITUDE, geographicBoundingBox.getNorthLongitude().toString()); + map.put(NORTH_LONGITUDE, north); + final Field south = createPrimitiveSingleField(SOUTH_LONGITUDE, geographicBoundingBox.getSouthLongitude().toString()); + map.put(SOUTH_LONGITUDE, south); + geographicBoundariesMap.add(map); + } + final Field toAdd = createCompoundField(GEOGRAPHIC_BOUNDING_BOX, true, geographicBoundariesMap); + fields.add(toAdd); + } + } + + private void addOptionalPrimitiveField(final String value, final Map map, final String field) { + if (!isEmpty(value)) { + final Field affil = createPrimitiveSingleField(field, value); + map.put(field, affil); + } + } + + private void addTitle(final DatasetFacade facade, final List fields) { + final Field title = createPrimitiveSingleField("title", facade.getTitle()); + fields.add(title); + } + + private void addDepositor(final DatasetFacade facade, final List fields) { + final Field deposit = createPrimitiveSingleField("depositor", facade.getDepositor()); + fields.add(deposit); + } + + public Field createPrimitiveSingleField(final String name, final String value) { + return new Field(name, CitationType.PRIMITIVE.toString(), false, value); + } + + public Field createPrimitiveMultipleField(final String name, final String... 
value) { + return new Field(name, CitationType.PRIMITIVE.toString(), true, value); + } + + public Field createCompoundField(final String name, final boolean isMultiple, final List> values) { + Field cf = null; + checkArgs(isMultiple, values); + if (isMultiple) { + cf = new Field(name, CitationType.COMPOUND.toString(), isMultiple, values); + } else { + cf = new Field(name, CitationType.COMPOUND.toString(), isMultiple, values.get(0)); + } + return cf; + } + + private void checkArgs(final boolean isMultiple, final List values) { + if (!isMultiple && values.size() > 1) { + throw new IllegalArgumentException( + String.format("Field is not multiple but %d arguments were supplied", values.size())); + } + } + + public Field createControlledVocabField(final String name, final boolean isMultiple, final List values) { + Field cf = null; + checkArgs(isMultiple, values); + if (isMultiple) { + cf = new Field(name, CitationType.CONTROLLEDVOCABULARY.toString(), isMultiple, values); + } else { + cf = new Field(name, CitationType.CONTROLLEDVOCABULARY.toString(), isMultiple, values.get(0)); + } + return cf; + } } diff --git a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetContact.java b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetContact.java index 740ddd0..857056e 100644 --- a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetContact.java +++ b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetContact.java @@ -1,12 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.entities.facade; - -import lombok.Builder; -import lombok.Data; -import lombok.NonNull; - /**
 Copyright 2016 ResearchSpace
 
@@ -22,11 +13,20 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 
-*/ + */ +package com.researchspace.dataverse.entities.facade; + +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; + +/** + * Dataset contact. + */ @Data @Builder -public class DatasetContact { - private @NonNull String datasetContactEmail; - private String datasetContactAffiliation, datasetContactName; +public class DatasetContact { + private @NonNull String datasetContactEmail; + private String datasetContactAffiliation, datasetContactName; } diff --git a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetContributor.java b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetContributor.java index c1be315..09d9c7c 100644 --- a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetContributor.java +++ b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetContributor.java @@ -1,11 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.entities.facade; - -import lombok.Builder; -import lombok.Data; - /**
 Copyright 2016 ResearchSpace
 
@@ -21,12 +13,20 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 
-*/ + */ +package com.researchspace.dataverse.entities.facade; + +import lombok.Builder; +import lombok.Data; + +/** + * Dataset contributor. + */ @Data @Builder public class DatasetContributor { - - private String name; - private ContributorType type; + + private String name; + private ContributorType type; } diff --git a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetDescription.java b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetDescription.java index a4bed88..9b9b468 100644 --- a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetDescription.java +++ b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetDescription.java @@ -1,16 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.entities.facade; - -import java.util.Date; - - - -import lombok.Builder; -import lombok.Data; -import lombok.NonNull; - /**
 Copyright 2016 ResearchSpace
 
@@ -26,12 +13,24 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 
-*/ + */ +package com.researchspace.dataverse.entities.facade; + +import java.util.Date; + +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; + +/** + * Dataset description. + */ @Data @Builder public class DatasetDescription { - - private @NonNull String description; - private Date date; + + private @NonNull String description; + private Date date; + private String language; } diff --git a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetFacade.java b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetFacade.java index baae94c..2037cf1 100644 --- a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetFacade.java +++ b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetFacade.java @@ -1,16 +1,4 @@ -/* - * - */ -package com.researchspace.dataverse.entities.facade; - -import lombok.*; - -import java.net.URL; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -/** - * /**
+/** 
 Copyright 2016 ResearchSpace
 
  Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,57 +12,79 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
- * Simple POJO to set info for Dataset. +
*/ +package com.researchspace.dataverse.entities.facade; + +import java.net.URL; +import java.util.Date; +import java.util.List; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; +import lombok.Singular; + +/** + * Simple POJO to set info for Dataset. * @author rspace - * */ @Data @Builder @AllArgsConstructor public class DatasetFacade { - - private @NonNull String title; - private @NonNull @Singular List authors; - private @NonNull @Singular List contacts; - private @NonNull String subject; - private @NonNull @Singular List descriptions; - private String depositor, subtitle, alternativeTitle; - private URL alternativeURL; - private @Singular List keywords; - private @Singular List topicClassifications; - private @Singular List publications; - private @Singular List producers; - private String note; - private List languages = new ArrayList<>(); - private Date productionDate; - private String productionPlace; - private @Singular List contributors; - - /** - * Returns a copy if the internally stored Date - * @return - */ - public Date getProductionDate (){ - if(productionDate != null){ - return new Date(productionDate.getTime()); - } else { - return null; - } - - } - /** - * Sets this object's date as a copy of the parameter Date. 
- * @param date - */ - public void setProductionDate(Date date) { - this.productionDate = new Date (date.getTime()); - } - /* - * For testing - */ - DatasetFacade() { - // TODO Auto-generated constructor stub - } + + // License + private String termsOfUse; + + // Citation metadata + private @NonNull String title; + private @NonNull @Singular List authors; + private @NonNull @Singular List contacts; + private @NonNull String subject; + private @NonNull @Singular List descriptions; + private String depositor, subtitle, alternativeTitle; + private URL alternativeURL; + private @Singular List keywords; + private @Singular List topicClassifications; + private @Singular List publications; + private @Singular List producers; + private String note; + private List languages; + private Date productionDate; + private @Singular("productionPlace") List productionPlaces; + private @Singular List contributors; + private @Singular("kindOfData") List kindsOfData; + private @Singular("timePeriodCovered") List timePeriodsCovered; + + // Geospatial metadata + private @Singular List geographicBoundingBoxes; + + /** + * Returns a copy if the internally stored Date + * @return + */ + public Date getProductionDate () { + if(productionDate != null){ + return new Date(productionDate.getTime()); + } else { + return null; + } + } + + /** + * Sets this object's date as a copy of the parameter Date. + * @param date + */ + public void setProductionDate(final Date date) { + productionDate = new Date(date.getTime()); + } + + /** + * For testing + */ + DatasetFacade() { + // TODO Auto-generated constructor stub + } } diff --git a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetGeographicBoundingBox.java b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetGeographicBoundingBox.java new file mode 100644 index 0000000..f55d9bb --- /dev/null +++ b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetGeographicBoundingBox.java @@ -0,0 +1,54 @@ +/**
+Copyright 2016 ResearchSpace
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
*/ +package com.researchspace.dataverse.entities.facade; + +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; + +/** + * Dataset geographic limits for 'geographicBoundingBox' field. + * @author ltromel + */ +@Data +@Builder +public class DatasetGeographicBoundingBox { + + // Note : North and south latitude are named "Longitude" because dataverse expect this in the .json. + // It's obviously an error, in the meantime, they'll be named like this waiting for a patch. + + /** + * Expected decimal degrees, at least two decimals. + */ + private @NonNull Double westLongitude; + + /** + * Expected decimal degrees, at least two decimals. + */ + private @NonNull Double eastLongitude; + + /** + * Expected decimal degrees, at least two decimals. + */ + private @NonNull Double northLongitude; + + /** + * Expected decimal degrees, at least two decimals. + */ + private @NonNull Double southLongitude; + + +} diff --git a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetKeyword.java b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetKeyword.java index d402f31..2f66785 100644 --- a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetKeyword.java +++ b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetKeyword.java @@ -1,14 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.entities.facade; - -import java.net.URI; - -import lombok.Builder; -import lombok.Data; -import lombok.NonNull; - /**
 Copyright 2016 ResearchSpace
 
@@ -23,15 +12,25 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.entities.facade; + +import java.net.URI; + +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; + +/** + * Dataset keyword. + */ @Builder @Data public class DatasetKeyword { - - private @NonNull String value; - private String vocabulary; - private URI vocabularyURI; - + + private @NonNull String value; + private String vocabulary; + private URI vocabularyURI; + } diff --git a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetProducer.java b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetProducer.java index 9654d3f..58aa354 100644 --- a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetProducer.java +++ b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetProducer.java @@ -1,12 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.entities.facade; - -import java.net.URL; - -import lombok.Builder; -import lombok.Data; /**
 Copyright 2016 ResearchSpace
 
@@ -21,13 +12,22 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.entities.facade; + +import java.net.URL; + +import lombok.Builder; +import lombok.Data; + +/** + * Dataset producer. + */ @Data @Builder public class DatasetProducer { - - private String name, affiliation, abbreviation; - private URL url, logoURL; + + private String name, affiliation, abbreviation; + private URL url, logoURL; } diff --git a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetPublication.java b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetPublication.java index cb49545..54c5bb0 100644 --- a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetPublication.java +++ b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetPublication.java @@ -1,12 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.entities.facade; - -import java.net.URL; - -import lombok.Builder; -import lombok.Data; /**
 Copyright 2016 ResearchSpace
 
@@ -21,14 +12,22 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.entities.facade; + +import java.net.URL; + +import lombok.Builder; +import lombok.Data; +/** + * Dataset publication + */ @Data @Builder public class DatasetPublication { - - private String publicationCitation, publicationIdNumber; - private PublicationIDType publicationIDType; - private URL publicationURL; + + private String publicationCitation, publicationIdNumber; + private PublicationIDType publicationIDType; + private URL publicationURL; } diff --git a/src/main/java/com/researchspace/dataverse/entities/Citation.java b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetTimePeriodCovered.java similarity index 64% rename from src/main/java/com/researchspace/dataverse/entities/Citation.java rename to src/main/java/com/researchspace/dataverse/entities/facade/DatasetTimePeriodCovered.java index e613d1a..de969b8 100644 --- a/src/main/java/com/researchspace/dataverse/entities/Citation.java +++ b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetTimePeriodCovered.java @@ -1,11 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.entities; - -import java.util.List; - -import lombok.Data; /**
 Copyright 2016 ResearchSpace
 
@@ -20,10 +12,23 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.entities.facade; + +import java.util.Date; + +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; + +/** + * Dataset time bounds for 'timePeriodCovered' field. + * @author ltromel + */ @Data -public class Citation { - private String displayName = "Citation Metadata"; - private List fields; +@Builder +public class DatasetTimePeriodCovered { + + private @NonNull Date timePeriodCoveredStart, timePeriodCoveredEnd; + } diff --git a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetTopicClassification.java b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetTopicClassification.java index f546489..8531124 100644 --- a/src/main/java/com/researchspace/dataverse/entities/facade/DatasetTopicClassification.java +++ b/src/main/java/com/researchspace/dataverse/entities/facade/DatasetTopicClassification.java @@ -1,12 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.entities.facade; - -import java.net.URI; - -import lombok.Builder; -import lombok.Data; /**
 Copyright 2016 ResearchSpace
 
@@ -21,13 +12,23 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.entities.facade; + +import java.net.URI; + +import lombok.Builder; +import lombok.Data; + +/** + * Representation of one Dataset's topic classification for 'topicClassification' field. + * @author ltromel + */ @Data @Builder public class DatasetTopicClassification { - - private String topicClassValue, topicClassVocab; - private URI topicClassVocabURI; + + private String topicClassValue, topicClassVocab; + private URI topicClassVocabURI; } diff --git a/src/main/java/com/researchspace/dataverse/entities/facade/PublicationIDType.java b/src/main/java/com/researchspace/dataverse/entities/facade/PublicationIDType.java index 4338775..0218a27 100644 --- a/src/main/java/com/researchspace/dataverse/entities/facade/PublicationIDType.java +++ b/src/main/java/com/researchspace/dataverse/entities/facade/PublicationIDType.java @@ -1,8 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.entities.facade; - /**
 Copyright 2016 ResearchSpace
 
@@ -17,10 +12,14 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.entities.facade; + +/** + * Accepted types list for 'publicationIDType' field. + */ public enum PublicationIDType { - - ark, arXiv, bibcode, doi, ean13, eissn, handle, isbn, issn, istc, lissn, lsid, pmid, purl, upc, url, urn + + ark, arXiv, bibcode, doi, ean13, eissn, handle, isbn, issn, istc, lissn, lsid, pmid, purl, upc, url, urn } diff --git a/src/main/java/com/researchspace/dataverse/http/AbstractOpsImplV1.java b/src/main/java/com/researchspace/dataverse/http/AbstractOpsImplV1.java index f0710e4..50b9d48 100644 --- a/src/main/java/com/researchspace/dataverse/http/AbstractOpsImplV1.java +++ b/src/main/java/com/researchspace/dataverse/http/AbstractOpsImplV1.java @@ -1,22 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.http; - -import com.researchspace.dataverse.api.v1.DataverseConfig; -import com.researchspace.dataverse.entities.DataverseResponse; -import com.researchspace.springrest.ext.LoggingResponseErrorHandler; -import com.researchspace.springrest.ext.RestUtil; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang.StringUtils; -import org.springframework.http.HttpHeaders; -import org.springframework.http.MediaType; -import org.springframework.http.ResponseEntity; -import org.springframework.web.client.RestClientException; -import org.springframework.web.client.RestTemplate; - -import java.util.Arrays; - /**
 Copyright 2016 ResearchSpace
 
@@ -31,80 +12,122 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.http; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.util.Arrays; +import java.util.stream.Collectors; + +import org.apache.commons.lang.StringUtils; +import org.springframework.http.HttpHeaders; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.http.client.ClientHttpResponse; +import org.springframework.web.client.ResponseErrorHandler; +import org.springframework.web.client.RestTemplate; + +import com.researchspace.dataverse.api.v1.DataverseConfig; +import com.researchspace.dataverse.entities.DataverseResponse; +import com.researchspace.springrest.ext.RestClientException; +import com.researchspace.springrest.ext.RestUtil; + +import lombok.extern.slf4j.Slf4j; + +/** + * Abstract extension class for v1 operation classes. + */ @Slf4j public abstract class AbstractOpsImplV1 { - - String apiKey = ""; - String serverURL = ""; - String serverAPIURL = serverURL +"/api"; - String serverAPIv1URL = serverAPIURL +"/v1"; - protected RestTemplate template; - - public AbstractOpsImplV1() { - super(); - this.template = createTemplate(); - } - - protected void setTemplate(RestTemplate template) { - this.template = template; - } - - public final static String apiHeader = "X-Dataverse-key"; - public void setApiKey(String apiKey) { - this.apiKey = apiKey; - } - - public void setServerURL(String serverURL) { - this.serverURL = serverURL; - this.serverAPIURL = serverURL + "/api"; - this.serverAPIv1URL = this.serverAPIURL +"/v1"; - } - - public void configure(DataverseConfig config) { - setApiKey(config.getApiKey()); - setServerURL(config.getServerURL().toString()); - } - - void handleError(ResponseEntity> resp) { - log.debug("{}", resp.getBody()); - if (RestUtil.isError(resp.getStatusCode())) { - String msg = String.format("Error code returned %d with message [%s]", resp.getStatusCodeValue(), - 
resp.getBody().getMessage()); - log.error(msg); - throw new RestClientException(msg); - } - } - - RestTemplate createTemplate() { - RestTemplate template = new RestTemplate(); - template.setErrorHandler(new LoggingResponseErrorHandler()); - return template; - } - - String createV1Url(String ... pathComponents) { - String url = serverAPIv1URL + "/" + StringUtils.join(pathComponents, "/") ; - log.info("URL is {}", url); - return url; - } - - String createAdminUrl(String ... pathComponents) { - String url = serverAPIURL + "/" + StringUtils.join(pathComponents, "/") ; - log.info("URL is {}", url); - return url; - } - + + String apiKey = ""; + String serverURL = ""; + String serverAPIURL = serverURL +"/api"; + String serverAPIv1URL = serverAPIURL +"/v1"; + protected RestTemplate template; + + protected AbstractOpsImplV1() { + createTemplate(); + } + + public class CustomErrorHandler implements ResponseErrorHandler { + + @Override + public boolean hasError(final ClientHttpResponse response) throws IOException { + return RestUtil.isError(response.getStatusCode()); + } + + @Override + public void handleError(final ClientHttpResponse response) throws IOException, RestClientException { + final BufferedReader in = new BufferedReader(new InputStreamReader(response.getBody())); + final String body = in.lines().collect(Collectors.joining("\n")); + log.error("Error code returned {} with message {}", + response.getStatusCode().value(), + body); + throw new RestClientException(response.getStatusCode().value(), body); + } + } + + protected void setTemplate(final RestTemplate template) { + this.template = template; + } + + public static final String API_HEADER = "X-Dataverse-key"; + + public void setApiKey(final String apiKey) { + this.apiKey = apiKey; + } + + public void setServerURL(final String serverURL) { + this.serverURL = serverURL; + serverAPIURL = serverURL + "/api"; + serverAPIv1URL = serverAPIURL +"/v1"; + } + + public void configure(final DataverseConfig config) { + 
setApiKey(config.getApiKey()); + setServerURL(config.getServerURL().toString()); + } + + void handleError(final ResponseEntity> resp) throws RestClientException { + log.debug(resp.getBody().toString()); + if (RestUtil.isError(resp.getStatusCode())) { + log.info(resp.getBody().toString()); + log.error(String.format("Error code returned %d with message [%s]", resp.getStatusCodeValue(), + resp.getBody().getMessage())); + throw new RestClientException(resp.getStatusCodeValue(), resp.getBody().getMessage()); + } + } + + void createTemplate() { + template = new RestTemplate(); + template.setErrorHandler(new CustomErrorHandler()); + } + + String createV1Url(final String ... pathComponents) { + final String url = serverAPIv1URL + "/" + StringUtils.join(pathComponents, "/") ; + log.debug("URL is {}", url); + return url; + } + + String createAdminUrl(final String ... pathComponents) { + final String url = serverAPIURL + "/" + StringUtils.join(pathComponents, "/") ; + log.debug("URL is {}", url); + return url; + } + HttpHeaders addAPIKeyToHeader() { - HttpHeaders headers = new HttpHeaders(); - headers.setContentType(MediaType.APPLICATION_JSON); - headers.setAccept(Arrays.asList(MediaType.APPLICATION_JSON)); - addApiKey(headers); - return headers; - } - - void addApiKey(HttpHeaders headers) { - headers.add(apiHeader, apiKey); - } + final HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.setAccept(Arrays.asList(MediaType.APPLICATION_JSON)); + addApiKey(headers); + return headers; + } + + void addApiKey(final HttpHeaders headers) { + headers.add(API_HEADER, apiKey); + } } diff --git a/src/main/java/com/researchspace/dataverse/http/DataverseAPIImpl.java b/src/main/java/com/researchspace/dataverse/http/DataverseAPIImpl.java index 8a9b700..367cb10 100644 --- a/src/main/java/com/researchspace/dataverse/http/DataverseAPIImpl.java +++ b/src/main/java/com/researchspace/dataverse/http/DataverseAPIImpl.java @@ -1,6 +1,18 @@ -/* - * - 
*/ +/**
+Copyright 2016 ResearchSpace
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
*/ package com.researchspace.dataverse.http; import com.researchspace.dataverse.api.v1.DatasetOperations; @@ -10,53 +22,50 @@ import com.researchspace.dataverse.api.v1.InfoOperations; import com.researchspace.dataverse.api.v1.MetadataOperations; import com.researchspace.dataverse.api.v1.SearchOperations; -/** Copyright 2016 ResearchSpace - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -*/ -public class DataverseAPIImpl implements DataverseAPI { - private DataverseOperationsImplV1 dvOperationsImpl = new DataverseOperationsImplV1(); - private SearchOperationsImplV1 searchOperationsImpl = new SearchOperationsImplV1(); - - @Override - public DatasetOperations getDatasetOperations() { - return dvOperationsImpl; - } - - @Override - public MetadataOperations getMetadataOperations() { - return dvOperationsImpl; - } - - @Override - public DataverseOperations getDataverseOperations() { - return dvOperationsImpl; - } - - @Override - public void configure(DataverseConfig config) { - dvOperationsImpl.configure(config); - searchOperationsImpl.configure(config); - } - - @Override - public InfoOperations getInfoOperations() { - return dvOperationsImpl; - } - - @Override - public SearchOperations getSearchOperations() { - return searchOperationsImpl; - } +import com.researchspace.dataverse.api.v1.UsersOperations; + +/** + * Dataverse API v1 implementation. 
+ */ +public class DataverseAPIImpl implements DataverseAPI { + + private final DataverseOperationsImplV1 dvOperationsImpl = new DataverseOperationsImplV1(); + + private final SearchOperationsImplV1 searchOperationsImpl = new SearchOperationsImplV1(); + + @Override + public DatasetOperations getDatasetOperations() { + return dvOperationsImpl; + } + + @Override + public MetadataOperations getMetadataOperations() { + return dvOperationsImpl; + } + + @Override + public DataverseOperations getDataverseOperations() { + return dvOperationsImpl; + } + + @Override + public void configure(final DataverseConfig config) { + dvOperationsImpl.configure(config); + searchOperationsImpl.configure(config); + } + + @Override + public InfoOperations getInfoOperations() { + return dvOperationsImpl; + } + + @Override + public SearchOperations getSearchOperations() { + return searchOperationsImpl; + } + + @Override + public UsersOperations getUsersOperations() { + return dvOperationsImpl; + } } diff --git a/src/main/java/com/researchspace/dataverse/http/DataverseOperationsImplV1.java b/src/main/java/com/researchspace/dataverse/http/DataverseOperationsImplV1.java index a5fd29e..59ca9ac 100644 --- a/src/main/java/com/researchspace/dataverse/http/DataverseOperationsImplV1.java +++ b/src/main/java/com/researchspace/dataverse/http/DataverseOperationsImplV1.java @@ -1,20 +1,37 @@ -/* - * - */ +/**
+Copyright 2016 ResearchSpace
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
*/ package com.researchspace.dataverse.http; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.researchspace.dataverse.api.v1.DatasetOperations; -import com.researchspace.dataverse.api.v1.DataverseOperations; -import com.researchspace.dataverse.api.v1.InfoOperations; -import com.researchspace.dataverse.api.v1.MetadataOperations; -import com.researchspace.dataverse.entities.*; -import com.researchspace.dataverse.entities.facade.DatasetBuilder; -import com.researchspace.dataverse.entities.facade.DatasetFacade; -import com.researchspace.dataverse.sword.FileUploader; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.StringUtils; +import static org.apache.commons.lang3.StringUtils.isEmpty; +import static org.apache.commons.lang3.Validate.isTrue; +import static org.apache.commons.lang3.Validate.noNullElements; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.text.ParseException; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeParseException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.abdera.model.Entry; import org.springframework.core.ParameterizedTypeReference; import org.springframework.core.io.AbstractResource; import org.springframework.core.io.ByteArrayResource; @@ -24,393 +41,508 @@ import org.springframework.http.HttpMethod; import org.springframework.http.ResponseEntity; import org.springframework.util.MultiValueMap; -import org.springframework.web.client.RestClientException; +import org.swordapp.client.CollectionEntries; import org.swordapp.client.ProtocolViolationException; import org.swordapp.client.SWORDClientException; import org.swordapp.client.SWORDError; -import java.io.*; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.List; - -import 
static org.apache.commons.lang3.StringUtils.isEmpty; -import static org.apache.commons.lang3.Validate.isTrue; -import static org.apache.commons.lang3.Validate.noNullElements; -/** Copyright 2016 ResearchSpace - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.researchspace.dataverse.api.v1.DatasetOperations; +import com.researchspace.dataverse.api.v1.DataverseOperations; +import com.researchspace.dataverse.api.v1.InfoOperations; +import com.researchspace.dataverse.api.v1.MetadataOperations; +import com.researchspace.dataverse.api.v1.UsersOperations; +import com.researchspace.dataverse.entities.Dataset; +import com.researchspace.dataverse.entities.DatasetFileList; +import com.researchspace.dataverse.entities.DatasetVersion; +import com.researchspace.dataverse.entities.DataverseGet; +import com.researchspace.dataverse.entities.DataverseObject; +import com.researchspace.dataverse.entities.DataversePost; +import com.researchspace.dataverse.entities.DataverseResponse; +import com.researchspace.dataverse.entities.DvMessage; +import com.researchspace.dataverse.entities.Identifier; +import com.researchspace.dataverse.entities.MetadataBlock; +import com.researchspace.dataverse.entities.PublishedDataset; +import com.researchspace.dataverse.entities.Version; +import com.researchspace.dataverse.entities.facade.DatasetBuilder; +import com.researchspace.dataverse.entities.facade.DatasetFacade; +import com.researchspace.dataverse.sword.SwordAPI; +import com.researchspace.springrest.ext.RestClientException; +import com.researchspace.springrest.ext.SWORDException; -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" 
BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. +import lombok.extern.slf4j.Slf4j; -*/ +/** + * Operation classes implementation. + */ @Slf4j -public class DataverseOperationsImplV1 extends AbstractOpsImplV1 implements DatasetOperations, MetadataOperations, InfoOperations, DataverseOperations { - - - - - /* (non-Javadoc) - * @see com.researchspace.dataverse.http.DataverseAPI#getDataverseById(java.lang.String) - */ - @Override - public DataverseGet getDataverseById(String dataverseAlias) { - String url = createV1Url("dataverses" , dataverseAlias); - log.debug(url); - - HttpEntity entity = createHttpEntity(""); - ParameterizedTypeReference> type = new ParameterizedTypeReference>() { - }; - ParameterizedTypeReference type2 = new ParameterizedTypeReference() { - }; - ResponseEntity resp2 = template.exchange(url, HttpMethod.GET, entity, type2); - String what = resp2.getBody(); - ResponseEntity> resp = template.exchange(url, HttpMethod.GET, entity, type); - log.debug(resp.getBody().toString()); - return resp.getBody().getData(); - } - - @Override - public DataverseResponse deleteDataverse(String dataverseAlias) { - String url = createV1Url("dataverses", dataverseAlias); - log.debug(url); - - HttpEntity entity = createHttpEntity(""); - ParameterizedTypeReference> type = - new ParameterizedTypeReference>() {}; - ResponseEntity> resp = template.exchange(url, HttpMethod.DELETE, entity, type); - log.debug(resp.getBody().toString()); - return resp.getBody(); - - } - - @Override - public DataverseResponse createNewDataverse(String parentDv, DataversePost toCreate) { - isTrue(!isEmpty(toCreate.getAlias()), "Alias must be specified"); - isTrue(!isEmpty(toCreate.getName()), "Name must be specified"); - noNullElements(toCreate.getDataverseContacts(), "At least 1 email contact must be provided"); - isTrue(!isEmpty(toCreate.getAlias()), "Alias must be 
specified"); - String url = createV1Url("dataverses", parentDv); - - - String json = marshalDataset(toCreate); - HttpEntity entity = createHttpEntity(json); - ParameterizedTypeReference> type = new ParameterizedTypeReference>() { - }; - ResponseEntity> resp = template.exchange(url, HttpMethod.POST, entity, type); - log.debug(resp.getBody().toString()); - handleError(resp); - return resp.getBody(); - - } - - /* (non-Javadoc) - * @see com.researchspace.dataverse.http.DataverseAPI#createDataset(com.researchspace.dataverse.entities.facade.DatasetFacade, java.lang.String) - */ - @Override - public Identifier createDataset(DatasetFacade facade, String dataverseAlias) { - String url = createV1Url("dataverses", dataverseAlias,"datasets"); - - String json = getJsonFromFacade(facade); - HttpEntity entity = createHttpEntity(json); - ParameterizedTypeReference> type = new ParameterizedTypeReference>() { - }; - - ResponseEntity> resp = template.exchange(url, HttpMethod.POST, entity, type); - handleError(resp); - return resp.getBody().getData(); - } - - @Override - public Identifier createDataset(String dataSetJson, String dataverseAlias) { - String url = createV1Url("dataverses", dataverseAlias,"datasets"); - HttpEntity entity = createHttpEntity(dataSetJson); - ParameterizedTypeReference> type = new ParameterizedTypeReference>() { - }; - - ResponseEntity> resp = template.exchange(url, HttpMethod.POST, entity, type); - handleError(resp); - return resp.getBody().getData(); - } - - - /* (non-Javadoc) - * @see com.researchspace.dataverse.http.DataverseAPI#updateDataset(com.researchspace.dataverse.entities.facade.DatasetFacade, com.researchspace.dataverse.entities.Identifier) - */ - @Override - public DatasetVersion updateDataset(DatasetFacade facade, Identifier id) { - String url = createV1Url("datasets", id.getId() +"", "versions",":draft"); - - Dataset ds = new DatasetBuilder().build(facade); - String json = marshalDataset(ds.getDatasetVersion()); - - - HttpEntity entity = 
createHttpEntity(json); - ParameterizedTypeReference> type = new ParameterizedTypeReference>() { - }; - ResponseEntity> resp = template.exchange(url, HttpMethod.PUT, entity,type); - - handleError(resp); - return resp.getBody().getData(); - - } - - /* (non-Javadoc) - * @see com.researchspace.dataverse.http.DataverseAPI#getDataset(com.researchspace.dataverse.entities.Identifier) - */ - @Override - public Dataset getDataset(Identifier dsIdentifier) { - String url = createV1Url("datasets", dsIdentifier.getId() +""); - - HttpEntity entity = createHttpEntity(""); - ParameterizedTypeReference> type = new ParameterizedTypeReference>() { - }; - ResponseEntity> resp = template.exchange(url, HttpMethod.GET, entity, type); - handleError(resp); - return resp.getBody().getData(); - } - - /* (non-Javadoc) - * @see com.researchspace.dataverse.http.DataverseAPI#getDatasetVersions(com.researchspace.dataverse.entities.Identifier) - */ - @Override - public List getDatasetVersions (Identifier dsIdentifier) { - String url = createV1Url("datasets", dsIdentifier.getId() +"", "versions"); - - HttpEntity entity = createHttpEntity(""); - ParameterizedTypeReference>> type = new ParameterizedTypeReference>>() { - }; - ResponseEntity>> resp = template.exchange(url, HttpMethod.GET, entity, type); - log.debug("{}", resp.getBody()); - handleError(resp); - return resp.getBody().getData(); - } - - @Override - public DatasetFileList uploadNativeFile( byte[] data, FileUploadMetadata metadata, Identifier dsIdentifier, String fileName){ - ByteArrayResource resource = new ByteArrayResource(data){ - @Override - public String getFilename(){ - return fileName; - } - }; - return getDatasetFileList(metadata, dsIdentifier, resource); - } - @Override - public DatasetFileList uploadNativeFile(InputStream data, long contentLength, FileUploadMetadata metadata, Identifier dsIdentifier, String fileName) { - InputStreamResource resource = new InputStreamResource(data) { - @Override - public String getFilename(){ - 
return fileName; - } - - @Override - public long contentLength() throws IOException { - return contentLength; - } - }; - return getDatasetFileList(metadata, dsIdentifier, resource); - - } - - private DatasetFileList getDatasetFileList(FileUploadMetadata metadata, Identifier dsIdentifier, AbstractResource resource) { - String url = createV1Url("datasets", ":persistentId", "add") + "?persistentId=" + dsIdentifier.getPersistentId(); - ParameterizedTypeReference> type = - new ParameterizedTypeReference>() {}; - HttpEntity> entity = new NativeFileUploader().createFileUploadEntity(metadata, apiKey, resource); - ResponseEntity> resp = template.exchange(url, HttpMethod.POST, entity, type); - log.debug("{}", resp.getBody()); - handleError(resp); - return resp.getBody().getData(); - } - - /* (non-Javadoc) - * @see com.researchspace.dataverse.http.DataverseAPI#uploadFile(java.lang.String, java.io.File) - */ - @Override - public void uploadFile (String doi, File file) { - try { - this.uploadFile(doi, new FileInputStream(file), file.getName()); - } catch (FileNotFoundException e) { - e.printStackTrace(); - } - } - - @Override - public void uploadFile(String doi, InputStream file, String filename) { - FileUploader uploader = new FileUploader(); - try { - uploader.deposit(file, filename, apiKey, new URI(serverURL), doi); - } catch (IOException | SWORDClientException | ProtocolViolationException | URISyntaxException e) { - log.error("Couldn't upload file {} with doi {} : {}", filename, doi.toString(), e.getMessage()); - throw new RestClientException(e.getMessage()); - } catch (SWORDError error) { - if (!StringUtils.isEmpty(error.getErrorBody())) { - log.error("SwordError: {}", error.getErrorBody()); - throw new RestClientException(error.getErrorBody()); - } - } - - } - - /* (non-Javadoc) - * @see com.researchspace.dataverse.http.DataverseAPI#deleteDataset(com.researchspace.dataverse.entities.Identifier) - */ - @Override - public DvMessage deleteDataset(Identifier dsIdentifier) { - 
String url = createV1Url("datasets", dsIdentifier.getId() +"" ); - - HttpEntity entity = createHttpEntity(""); - ParameterizedTypeReference> type = new ParameterizedTypeReference>() { - }; - ResponseEntity> resp = template.exchange(url, HttpMethod.DELETE, entity, type); - handleError(resp); - return resp.getBody().getData(); - } - - private HttpEntity createHttpEntity(String body) { - HttpHeaders headers = addAPIKeyToHeader(); - HttpEntity entity = new HttpEntity(body, headers); - return entity; - } - - private String getJsonFromFacade(DatasetFacade facade) { - Dataset dataset = new DatasetBuilder().build(facade); - return marshalDataset(dataset); - } - - private String marshalDataset(Object object) { - ObjectMapper mapper = new ObjectMapper(); - String json = ""; - try { - json = mapper.writeValueAsString(object); - } catch (JsonProcessingException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - return json; - } - - /* (non-Javadoc) - * @see com.researchspace.dataverse.http.DataverseAPI#getDataverseContents(java.lang.String) - */ - @Override - public List getDataverseContents(String dataverseAlias) { - String url = createV1Url("dataverses", dataverseAlias, "contents" ); - - HttpEntity entity = createHttpEntity(""); - ParameterizedTypeReference>> type = new ParameterizedTypeReference>>() { - }; - ParameterizedTypeReference type2 = new ParameterizedTypeReference() { - }; - ResponseEntity resp2 = template.exchange(url, HttpMethod.GET, entity, - type2); - String respB = resp2.getBody(); - ResponseEntity>> resp = template.exchange(url, HttpMethod.GET, entity, - type); - handleError(resp); - DataverseResponse> dv = resp.getBody(); - log.debug("Response is status {} with number elements {}", resp.getBody().getStatus(), - resp.getBody().getData().size()); - if (!dv.getData().isEmpty()) { - log.debug("First entry is {}", dv.getData().get(0)); - } - return dv.getData(); - } - - /* (non-Javadoc) - * @see 
com.researchspace.dataverse.http.DataverseAPI#getMetadataBLockInfo() - */ - @Override - public List getMetadataBlockInfo() { - String url = createV1Url("metadatablocks" ); - - HttpHeaders headers = addAPIKeyToHeader(); - HttpEntity entity = new HttpEntity("parameters", headers); - ParameterizedTypeReference>> type = new ParameterizedTypeReference>>() { - }; - ResponseEntity>> resp = template.exchange(url, HttpMethod.GET, entity, - type); - handleError(resp); - return resp.getBody().getData(); - } - - /* (non-Javadoc) - * @see com.researchspace.dataverse.http.DataverseAPI#getMetadataById(java.lang.String) - */ - @Override - public MetadataBlock getMetadataById(String name) { - String url = createV1Url("metadatablocks", name ); - - HttpHeaders headers = addAPIKeyToHeader(); - HttpEntity entity = new HttpEntity("parameters", headers); - ParameterizedTypeReference> type = new ParameterizedTypeReference>() { - }; - ResponseEntity> resp = template.exchange(url, HttpMethod.GET, entity, type); - handleError(resp); - - return resp.getBody().getData(); - } - - - - /* (non-Javadoc) - * @see com.researchspace.dataverse.http.DataverseAPI#publishDataset(com.researchspace.dataverse.entities.Identifier, com.researchspace.dataverse.entities.Version) - */ - @Override - public DataverseResponse publishDataset(Identifier dsIdentifier, Version version) { - String url = createV1Url("datasets", dsIdentifier.getId() + "", "actions", ":publish") + "?type=" - + version.name().toLowerCase(); - - HttpEntity entity = createHttpEntity(""); - ParameterizedTypeReference> type = new ParameterizedTypeReference>() { - }; - ResponseEntity> resp = template.exchange(url, HttpMethod.POST, entity, type); - log.debug(resp.getBody().toString()); - return resp.getBody(); - - } - - @Override - public DataverseResponse publishDataverse(String dvName) { - String url = createV1Url("dataverses", dvName, "actions", ":publish"); - HttpEntity entity = createHttpEntity(""); - ParameterizedTypeReference> type = new 
ParameterizedTypeReference>() { - }; - ResponseEntity> resp = template.exchange(url, HttpMethod.POST, entity, type); - log.debug(resp.getBody().toString()); - return resp.getBody(); - - } - - @Override - public DvMessage getDatasetPublishPopupCustomText() { - String url = createAdminUrl("info", "settings", ":DatasetPublishPopupCustomText"); - HttpEntity entity = createHttpEntity(""); - ParameterizedTypeReference> type = new ParameterizedTypeReference>() { - }; - ResponseEntity> resp = template.exchange(url, HttpMethod.GET, entity, type); - log.debug(resp.getBody().toString()); - return resp.getBody().getData(); - - } - - @Override - public DataverseResponse setDatasetPublishPopupCustomText(String text) { - String url = createAdminUrl("admin", "settings", ":DatasetPublishPopupCustomText"); - HttpEntity entity = createHttpEntity(text); - ParameterizedTypeReference> type = new ParameterizedTypeReference>() { - }; - ResponseEntity> resp = template.exchange(url, HttpMethod.PUT, entity, type); - log.debug(resp.getBody().toString()); - return resp.getBody(); - } +public class DataverseOperationsImplV1 extends AbstractOpsImplV1 +implements DatasetOperations, MetadataOperations, InfoOperations, +DataverseOperations, UsersOperations { + + /* (non-Javadoc) + * @see com.researchspace.dataverse.http.DataverseAPI#getDataverseById(java.lang.String) + */ + @Override + public DataverseGet getDataverseById(final String dataverseAlias) { + final String url = createV1Url("dataverses" , dataverseAlias); + log.debug(url); + final HttpEntity entity = createHttpEntity(""); + final ParameterizedTypeReference> type = + new ParameterizedTypeReference>() { + }; + final ResponseEntity> resp = template.exchange(url, HttpMethod.GET, entity, type); + log.debug(resp.getBody().toString()); + return resp.getBody().getData(); + } + + @Override + public DataverseResponse deleteDataverse(final String dataverseAlias) { + final String url = createV1Url("dataverses", dataverseAlias); + log.debug(url); + + 
final HttpEntity entity = createHttpEntity(""); + final ParameterizedTypeReference> type = + new ParameterizedTypeReference>() {}; + final ResponseEntity> resp = template.exchange(url, HttpMethod.DELETE, entity, type); + log.debug(resp.getBody().toString()); + return resp.getBody(); + + } + + @Override + public DataverseResponse createNewDataverse(final String parentDv, final DataversePost toCreate) + throws RestClientException { + isTrue(!isEmpty(toCreate.getAlias()), "Alias must be specified"); + isTrue(!isEmpty(toCreate.getName()), "Name must be specified"); + noNullElements(toCreate.getDataverseContacts(), "At least 1 email contact must be provided"); + isTrue(!isEmpty(toCreate.getAlias()), "Alias must be specified"); + final String url = createV1Url("dataverses", parentDv); + final String json = marshalDataset(toCreate); + final HttpEntity entity = createHttpEntity(json); + final ParameterizedTypeReference> type = + new ParameterizedTypeReference>() { + }; + final ResponseEntity> resp = + template.exchange(url, HttpMethod.POST, entity, type); + log.debug(resp.getBody().toString()); + handleError(resp); + return resp.getBody(); + + } + + /* (non-Javadoc) + * @see com.researchspace.dataverse.http.DataverseAPI#createDataset(com.researchspace.dataverse.entities.facade.DatasetFacade, java.lang.String) + */ + @Override + public Identifier createDataset(final DatasetFacade facade, final String dataverseAlias) throws RestClientException { + final String url = createV1Url("dataverses", dataverseAlias, "datasets"); + final String json = getJsonFromFacade(facade); + log.debug(json); + final HttpEntity entity = createHttpEntity(json); + final ParameterizedTypeReference> type = + new ParameterizedTypeReference>() { + }; + final ResponseEntity> resp = template.exchange(url, HttpMethod.POST, entity, type); + handleError(resp); + return resp.getBody().getData(); + } + + @Override + public Identifier createDataset(final String dataSetJson, final String dataverseAlias) throws 
RestClientException { + final String url = createV1Url("dataverses", dataverseAlias, "datasets"); + final HttpEntity entity = createHttpEntity(dataSetJson); + final ParameterizedTypeReference> type = + new ParameterizedTypeReference>() { + }; + final ResponseEntity> resp = template.exchange(url, HttpMethod.POST, entity, type); + handleError(resp); + return resp.getBody().getData(); + } + + + /* (non-Javadoc) + * @see com.researchspace.dataverse.http.DataverseAPI#updateDataset(com.researchspace.dataverse.entities.facade.DatasetFacade, com.researchspace.dataverse.entities.Identifier) + */ + @Override + public DatasetVersion updateDataset(final DatasetFacade facade, final Identifier id) throws RestClientException { + final String url = createV1Url("datasets", id.getId() + "", "versions", ":draft"); + final Dataset ds = new DatasetBuilder().build(facade); + final String json = marshalDataset(ds.getDatasetVersion()); + final HttpEntity entity = createHttpEntity(json); + final ParameterizedTypeReference> type = + new ParameterizedTypeReference>() { + }; + final ResponseEntity> resp = template.exchange(url, HttpMethod.PUT, entity,type); + handleError(resp); + return resp.getBody().getData(); + + } + + /* (non-Javadoc) + * @see com.researchspace.dataverse.http.DataverseAPI#updateDataset(com.researchspace.dataverse.entities.facade.DatasetFacade, com.researchspace.dataverse.entities.Identifier) + */ + @Override + public DatasetVersion updateDataset(final Dataset dataset, final Identifier id) throws RestClientException { + final String url = createV1Url("datasets", id.getId() + "", "versions", ":draft"); + final String json = marshalDataset(dataset.getDatasetVersion()); + final HttpEntity entity = createHttpEntity(json); + final ParameterizedTypeReference> type = + new ParameterizedTypeReference>() { + }; + final ResponseEntity> resp = template.exchange(url, HttpMethod.PUT, entity,type); + handleError(resp); + return resp.getBody().getData(); + + } + + /* (non-Javadoc) + * 
@see com.researchspace.dataverse.http.DataverseAPI#getDataset(com.researchspace.dataverse.entities.Identifier) + */ + @Override + public Dataset getDataset(final Identifier dsIdentifier) throws RestClientException { + final String url = createV1Url("datasets", dsIdentifier.getId() + ""); + final HttpEntity entity = createHttpEntity(""); + final ParameterizedTypeReference> type = + new ParameterizedTypeReference>() { + }; + final ResponseEntity> resp = template.exchange(url, HttpMethod.GET, entity, type); + handleError(resp); + return resp.getBody().getData(); + } + + /* (non-Javadoc) + * @see com.researchspace.dataverse.http.DataverseAPI#getDatasetVersions(com.researchspace.dataverse.entities.Identifier) + */ + @Override + public List getDatasetVersions (final Identifier dsIdentifier) throws RestClientException { + final String url = createV1Url("datasets", dsIdentifier.getId() + "", "versions"); + final HttpEntity entity = createHttpEntity(""); + final ParameterizedTypeReference>> type = + new ParameterizedTypeReference>>() {}; + final ResponseEntity>> resp = template.exchange(url, HttpMethod.GET, entity, type); + log.debug("{}", resp.getBody()); + handleError(resp); + return resp.getBody().getData(); + } + + /* (non-Javadoc) + * @see com.researchspace.dataverse.http.DataverseAPI#uploadFile(java.lang.String, java.io.File) + */ + @Override + public void uploadFile (final String doi, final File file) throws SWORDException{ + final SwordAPI uploader = new SwordAPI(); + try { + uploader.deposit(file, apiKey, new URI(serverURL), doi); + } catch (IOException | SWORDClientException | ProtocolViolationException | URISyntaxException e) { + final String msg = String.format("Couldn't upload file %s with doi %s : %s", + file.getName(), doi, e.getMessage()); + log.error(msg); + throw new SWORDException(msg); + } catch (final SWORDError error) { + if (!isEmpty(error.getErrorBody())) { + final String msg = String.format("SwordError: %s", error.getErrorBody()); + log.error(msg); 
+ throw new SWORDException(msg); + } + } + } + + /* (non-Javadoc) + * @see com.researchspace.dataverse.http.DataverseAPI#uploadFile(java.lang.String, java.io.File) + */ + @Override + public List listFilesUrls (final String doi) throws SWORDException { + final SwordAPI swordApi = new SwordAPI(); + final List fileNames = new ArrayList<>(); + try { + final CollectionEntries entries = swordApi.getEntries(apiKey, new URI(serverURL), doi); + for (final Entry entry : entries.getEntries()) { + fileNames.add(entry.getIdElement().getText()); + } + return fileNames; + } catch (SWORDClientException | ProtocolViolationException | URISyntaxException e) { + final String msg = String.format("Couldn't download file names with doi %s : %s", doi, e.getMessage()); + log.error(msg); + throw new SWORDException(msg); + } + } + + /* (non-Javadoc) + * @see com.researchspace.dataverse.http.DataverseAPI#deleteDataset(com.researchspace.dataverse.entities.Identifier) + */ + @Override + public DvMessage deleteDataset(final Identifier dsIdentifier) throws RestClientException { + final String url = createV1Url("datasets", dsIdentifier.getId() +"" ); + + final HttpEntity entity = createHttpEntity(""); + final ParameterizedTypeReference> type = + new ParameterizedTypeReference>() {}; + final ResponseEntity> resp = template.exchange(url, HttpMethod.DELETE, entity, type); + handleError(resp); + return resp.getBody().getData(); + } + + private HttpEntity createHttpEntity(final String body) { + final HttpHeaders headers = addAPIKeyToHeader(); + return new HttpEntity<>(body, headers); + } + + private String getJsonFromFacade(final DatasetFacade facade) { + final Dataset dataset = new DatasetBuilder().build(facade); + return marshalDataset(dataset); + } + + private String marshalDataset(final Object object) { + final ObjectMapper mapper = new ObjectMapper(); + String json = ""; + try { + json = mapper.writeValueAsString(object); + } catch (final JsonProcessingException e) { + // TODO Auto-generated catch 
block + e.printStackTrace(); + } + return json; + } + + /* (non-Javadoc) + * @see com.researchspace.dataverse.http.DataverseAPI#getDataverseContents(java.lang.String) + */ + @Override + public List getDataverseContents(final String dataverseAlias) throws RestClientException { + final String url = createV1Url("dataverses", dataverseAlias, "contents" ); + final HttpEntity entity = createHttpEntity(""); + final ParameterizedTypeReference>> type = + new ParameterizedTypeReference>>() { + }; + final ResponseEntity>> resp = + template.exchange(url, HttpMethod.GET, entity, type); + handleError(resp); + final DataverseResponse> dv = resp.getBody(); + log.debug("Response is status {} with number elements {}", resp.getBody().getStatus(), + resp.getBody().getData().size()); + if (!dv.getData().isEmpty()) { + log.debug("First entry is {}", dv.getData().get(0)); + } + return dv.getData(); + } + + /* (non-Javadoc) + * @see com.researchspace.dataverse.http.DataverseAPI#getMetadataBLockInfo() + */ + @Override + public List getMetadataBlockInfo() throws RestClientException { + final String url = createV1Url("metadatablocks"); + final HttpHeaders headers = addAPIKeyToHeader(); + final HttpEntity entity = new HttpEntity<>("parameters", headers); + final ParameterizedTypeReference>> type = + new ParameterizedTypeReference>>() { + }; + final ResponseEntity>> resp = template.exchange(url, HttpMethod.GET, entity, + type); + handleError(resp); + return resp.getBody().getData(); + } + + /* (non-Javadoc) + * @see com.researchspace.dataverse.http.DataverseAPI#getMetadataById(java.lang.String) + */ + @Override + public MetadataBlock getMetadataById(final String name) throws RestClientException { + final String url = createV1Url("metadatablocks", name ); + + final HttpHeaders headers = addAPIKeyToHeader(); + final HttpEntity entity = new HttpEntity<>("parameters", headers); + final ParameterizedTypeReference> type = new ParameterizedTypeReference>() { + }; + final ResponseEntity> resp = 
template.exchange(url, HttpMethod.GET, entity, type); + handleError(resp); + + return resp.getBody().getData(); + } + + + + /* (non-Javadoc) + * @see com.researchspace.dataverse.http.DataverseAPI#publishDataset(com.researchspace.dataverse.entities.Identifier, com.researchspace.dataverse.entities.Version) + */ + @Override + public DataverseResponse publishDataset(final Identifier dsIdentifier, final Version version) { + final String url = createV1Url("datasets", dsIdentifier.getId() + "", "actions", ":publish") + "?type=" + + version.name().toLowerCase(); + + final HttpEntity entity = createHttpEntity(""); + final ParameterizedTypeReference> type = new ParameterizedTypeReference>() { + }; + try { + final ResponseEntity> resp = template.exchange(url, HttpMethod.POST, entity, type); + log.debug(resp.getBody().toString()); + return resp.getBody(); + } catch (final RestClientException e) { + throw new RestClientException(e.getCode(), e.getMessage()); + } + + } + + @Override + public DataverseResponse publishDataverse(final String dvName) throws RestClientException { + final String url = createV1Url("dataverses", dvName, "actions", ":publish"); + final HttpEntity entity = createHttpEntity(""); + final ParameterizedTypeReference> type = new ParameterizedTypeReference>() { + }; + final ResponseEntity> resp = template.exchange(url, HttpMethod.POST, entity, type); + log.debug(resp.getBody().toString()); + handleError(resp); + return resp.getBody(); + } + + @Override + public DvMessage getDatasetPublishPopupCustomText() { + final String url = createAdminUrl("info", "settings", ":DatasetPublishPopupCustomText"); + final HttpEntity entity = createHttpEntity(""); + final ParameterizedTypeReference> type = new ParameterizedTypeReference>() { + }; + final ResponseEntity> resp = template.exchange(url, HttpMethod.GET, entity, type); + log.debug(resp.getBody().toString()); + return resp.getBody().getData(); + + } + + @Override + public DataverseResponse 
setDatasetPublishPopupCustomText(final String text) { + final String url = createAdminUrl("admin", "settings", ":DatasetPublishPopupCustomText"); + final HttpEntity entity = createHttpEntity(text); + final ParameterizedTypeReference> type = new ParameterizedTypeReference>() { + }; + final ResponseEntity> resp = template.exchange(url, HttpMethod.PUT, entity, type); + log.debug(resp.getBody().toString()); + return resp.getBody(); + } + + + + @Override + public void deleteFile(final String fileId) throws SWORDException { + final SwordAPI swordApi = new SwordAPI(); + try { + swordApi.deleteFile(apiKey, fileId); + } catch (SWORDClientException | ProtocolViolationException e) { + final String msg = String.format("Couldn't delete file named %s : %s", fileId, e.getMessage()); + log.error(msg); + throw new SWORDException(msg); + } catch (final SWORDError error) { + if (!isEmpty(error.getErrorBody())) { + final String msg = String.format("SwordError: %s", error.getErrorBody()); + log.error(msg); + throw new SWORDException(msg); + } + } + } + + @Override + public void deleteFile(final String fileName, final Identifier dsIdentifier) + throws URISyntaxException, SWORDException { + final SwordAPI swordApi = new SwordAPI(); + try { + swordApi.deleteFile(apiKey, new URI(serverURL), dsIdentifier, fileName); + } catch (SWORDClientException | ProtocolViolationException e) { + log.error("Couldn't delete file named {} : {}", fileName, e.getMessage()); + log.error("cause : {}", e.getCause()); + throw new SWORDException(e.getMessage()); + } catch (final SWORDError error) { + if (!isEmpty(error.getErrorBody())) { + log.error("SwordError: {}", error.getErrorBody()); + throw new SWORDException(error.getErrorBody()); + } + } catch (final URISyntaxException error) { + log.error("URI Malformed: {}", serverURL); + throw error; + } + } + + @Override + public LocalDateTime getTokenExpirationDate() throws ParseException, RestClientException { + final String url = createAdminUrl("users", "token"); 
+ final HttpEntity entity = createHttpEntity(""); + final ParameterizedTypeReference> type = + new ParameterizedTypeReference>() { + }; + final ResponseEntity> resp = template.exchange(url, HttpMethod.GET, entity, type); + // Split the String given as + final String date = resp.getBody().getData().getMessage().split(" expires on ")[1]; + LocalDateTime time; + try { + time = LocalDateTime.parse(date, DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS")); + } catch (final DateTimeParseException e) { + time = LocalDateTime.parse(date, DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SS")); + } + return time; + } + + @Override + public String getTokenExpiration() throws ParseException, RestClientException { + final String url = createAdminUrl("users", "token"); + final HttpEntity entity = createHttpEntity(""); + final ParameterizedTypeReference> type = + new ParameterizedTypeReference>() { + }; + final ResponseEntity> resp = template.exchange(url, HttpMethod.GET, entity, type); + return resp.getBody().getData().getMessage(); + } + + @Override + public DatasetFileList uploadNativeFile( final byte[] data, final FileUploadMetadata metadata, final Identifier dsIdentifier, final String fileName){ + final ByteArrayResource resource = new ByteArrayResource(data){ + @Override + public String getFilename(){ + return fileName; + } + }; + return getDatasetFileList(metadata, dsIdentifier, resource); + } + @Override + public DatasetFileList uploadNativeFile(final InputStream data, final long contentLength, final FileUploadMetadata metadata, final Identifier dsIdentifier, final String fileName) { + final InputStreamResource resource = new InputStreamResource(data) { + @Override + public String getFilename(){ + return fileName; + } + + @Override + public long contentLength() throws IOException { + return contentLength; + } + }; + return getDatasetFileList(metadata, dsIdentifier, resource); + + } + + private DatasetFileList getDatasetFileList(final FileUploadMetadata metadata, final 
Identifier dsIdentifier, final AbstractResource resource) { + final String url = createV1Url("datasets", ":persistentId", "add") + "?persistentId=" + dsIdentifier.getPersistentId(); + final ParameterizedTypeReference> type = + new ParameterizedTypeReference>() {}; + final HttpEntity> entity = new NativeFileUploader().createFileUploadEntity(metadata, apiKey, resource); + final ResponseEntity> resp = template.exchange(url, HttpMethod.POST, entity, type); + log.debug("{}", resp.getBody()); + handleError(resp); + return resp.getBody().getData(); + } + + @Override + public void uploadFile(final String doi, final InputStream file, final String filename) { + final SwordAPI uploader = new SwordAPI(); + try { + uploader.deposit(file, filename, apiKey, new URI(serverURL), doi); + } catch (IOException | SWORDClientException | ProtocolViolationException | URISyntaxException e) { + log.error("Couldn't upload file {} with doi {} : {}", filename, doi, e.getMessage()); + throw new SWORDException("Couldn't upload file.", e); + } catch (final SWORDError e) { + if (!isEmpty(e.getErrorBody())) { + log.error("SwordError: {}", e.getErrorBody()); + throw new SWORDException("SwordError", e); + } + } + + } } diff --git a/src/main/java/com/researchspace/dataverse/http/NativeFileUploader.java b/src/main/java/com/researchspace/dataverse/http/NativeFileUploader.java index 8dee915..85fba53 100644 --- a/src/main/java/com/researchspace/dataverse/http/NativeFileUploader.java +++ b/src/main/java/com/researchspace/dataverse/http/NativeFileUploader.java @@ -12,25 +12,25 @@ */ public class NativeFileUploader { - public HttpEntity> createFileUploadEntity(FileUploadMetadata metadata, String apiKey, AbstractResource resource){ + public HttpEntity> createFileUploadEntity(final FileUploadMetadata metadata, final String apiKey, final AbstractResource resource){ - MultiValueMap multipartRequest = new LinkedMultiValueMap<>(); + final MultiValueMap multipartRequest = new LinkedMultiValueMap<>(); - HttpHeaders 
requestHeaders = new HttpHeaders(); - requestHeaders.add(AbstractOpsImplV1.apiHeader, apiKey); + final HttpHeaders requestHeaders = new HttpHeaders(); + requestHeaders.add(AbstractOpsImplV1.API_HEADER, apiKey); requestHeaders.setContentType(MediaType.MULTIPART_FORM_DATA);//Main request's headers - HttpHeaders requestHeadersAttachment = new HttpHeaders(); + final HttpHeaders requestHeadersAttachment = new HttpHeaders(); - HttpEntity attachmentPart = new HttpEntity<>(resource,requestHeadersAttachment); + final HttpEntity attachmentPart = new HttpEntity<>(resource, requestHeadersAttachment); multipartRequest.set("file",attachmentPart); - HttpHeaders requestHeadersJSON = new HttpHeaders(); + final HttpHeaders requestHeadersJSON = new HttpHeaders(); requestHeadersJSON.setContentType(MediaType.APPLICATION_JSON); - HttpEntity requestEntityJSON = new HttpEntity<>(metadata, requestHeadersJSON); + final HttpEntity requestEntityJSON = new HttpEntity<>(metadata, requestHeadersJSON); multipartRequest.set("jsonData",requestEntityJSON); - HttpEntity> requestEntity = new HttpEntity<>(multipartRequest,requestHeaders);//final request + final HttpEntity> requestEntity = new HttpEntity<>(multipartRequest,requestHeaders);//final request return requestEntity; } diff --git a/src/main/java/com/researchspace/dataverse/http/SearchOperationsImplV1.java b/src/main/java/com/researchspace/dataverse/http/SearchOperationsImplV1.java index 3c64e27..553a4a8 100644 --- a/src/main/java/com/researchspace/dataverse/http/SearchOperationsImplV1.java +++ b/src/main/java/com/researchspace/dataverse/http/SearchOperationsImplV1.java @@ -1,6 +1,18 @@ -/* - * - */ +/**
+Copyright 2016 ResearchSpace
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
*/ package com.researchspace.dataverse.http; import org.springframework.core.ParameterizedTypeReference; @@ -21,96 +33,84 @@ import com.researchspace.dataverse.search.entities.SearchType; import lombok.extern.slf4j.Slf4j; -/**
-Copyright 2016 ResearchSpace
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
 
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
-*/ +/** + * Search operations implementation class. + */ @Slf4j public class SearchOperationsImplV1 extends AbstractOpsImplV1 implements SearchOperations { - private SearchURLBuilder urlBuilder = new SearchURLBuilder(); - - @Override - public SearchConfigBuilder builder() { - return SearchConfig.builder(); - } - - @Override - public DataverseResponse> search(SearchConfig cfg) { - String url = createV1Url("search"); - url = urlBuilder.buildSearchUrl(url, cfg); - HttpHeaders headers = addAPIKeyToHeader(); - ParameterizedTypeReference>> type = new ParameterizedTypeReference>>() { - }; - ResponseEntity>> resp = template.exchange(url, HttpMethod.GET, createHttpEntity(headers), type); - log.debug(resp.getBody().getData().toString()); - return resp.getBody(); - } - - private HttpEntity createHttpEntity(HttpHeaders headers) { - HttpEntity entity = new HttpEntity("", headers); - return entity; - } - - @Override - public DataverseResponse> searchFiles(SearchConfig cfg) { - validateSrchConfig(cfg, SearchType.file); - String url = createV1Url("search"); - url = urlBuilder.buildSearchUrl(url, cfg); - HttpHeaders headers = addAPIKeyToHeader(); - ParameterizedTypeReference>> type = - new ParameterizedTypeReference>>() { - }; - ResponseEntity>> resp = template.exchange(url, HttpMethod.GET, createHttpEntity(headers), type); - log.debug(resp.getBody().getData().toString()); - return resp.getBody(); - } - - @Override - public DataverseResponse> searchDatasets(SearchConfig cfg) { - validateSrchConfig(cfg, SearchType.dataset); - String url = createV1Url("search"); - url = urlBuilder.buildSearchUrl(url, cfg); - HttpHeaders headers = addAPIKeyToHeader(); - ParameterizedTypeReference>> type = - new ParameterizedTypeReference>>() { - }; - ResponseEntity>> resp = template.exchange(url, HttpMethod.GET, createHttpEntity(headers), type); - log.debug(resp.getBody().getData().toString()); - return resp.getBody(); - } - - @Override - public DataverseResponse> searchDataverses(SearchConfig cfg) { 
- validateSrchConfig(cfg, SearchType.dataverse); - String url = createV1Url("search"); - url = urlBuilder.buildSearchUrl(url, cfg); - HttpHeaders headers = addAPIKeyToHeader(); - ParameterizedTypeReference>> type = - new ParameterizedTypeReference>>() { - }; - ResponseEntity>> resp = template.exchange(url, HttpMethod.GET, createHttpEntity(headers), type); - log.debug(resp.getBody().getData().toString()); - return resp.getBody(); - } - - private void validateSrchConfig(SearchConfig cfg, SearchType expected) { - if(cfg.getType().size() != 1 || !cfg.getType().contains(expected)) { - throw new IllegalArgumentException(String.format("Search must be configured to search only %ss", expected.name())); - } - } - - + private final SearchURLBuilder urlBuilder = new SearchURLBuilder(); + + @Override + public SearchConfigBuilder builder() { + return SearchConfig.builder(); + } + + @Override + public DataverseResponse> search(final SearchConfig cfg) { + String url = createV1Url("search"); + url = urlBuilder.buildSearchUrl(url, cfg); + final HttpHeaders headers = addAPIKeyToHeader(); + final ParameterizedTypeReference>> type = new ParameterizedTypeReference>>() { + }; + final ResponseEntity>> resp = template.exchange(url, HttpMethod.GET, createHttpEntity(headers), type); + log.debug(resp.getBody().getData().toString()); + return resp.getBody(); + } + + private HttpEntity createHttpEntity(final HttpHeaders headers) { + final HttpEntity entity = new HttpEntity("", headers); + return entity; + } + + @Override + public DataverseResponse> searchFiles(final SearchConfig cfg) { + validateSrchConfig(cfg, SearchType.file); + String url = createV1Url("search"); + url = urlBuilder.buildSearchUrl(url, cfg); + final HttpHeaders headers = addAPIKeyToHeader(); + final ParameterizedTypeReference>> type = + new ParameterizedTypeReference>>() { + }; + final ResponseEntity>> resp = template.exchange(url, HttpMethod.GET, createHttpEntity(headers), type); + 
log.debug(resp.getBody().getData().toString()); + return resp.getBody(); + } + + @Override + public DataverseResponse> searchDatasets(final SearchConfig cfg) { + validateSrchConfig(cfg, SearchType.dataset); + String url = createV1Url("search"); + url = urlBuilder.buildSearchUrl(url, cfg); + final HttpHeaders headers = addAPIKeyToHeader(); + final ParameterizedTypeReference>> type = + new ParameterizedTypeReference>>() { + }; + final ResponseEntity>> resp = template.exchange(url, HttpMethod.GET, createHttpEntity(headers), type); + log.debug(resp.getBody().getData().toString()); + return resp.getBody(); + } + + @Override + public DataverseResponse> searchDataverses(final SearchConfig cfg) { + validateSrchConfig(cfg, SearchType.dataverse); + String url = createV1Url("search"); + url = urlBuilder.buildSearchUrl(url, cfg); + final HttpHeaders headers = addAPIKeyToHeader(); + final ParameterizedTypeReference>> type = + new ParameterizedTypeReference>>() { + }; + final ResponseEntity>> resp = template.exchange(url, HttpMethod.GET, createHttpEntity(headers), type); + log.debug(resp.getBody().getData().toString()); + return resp.getBody(); + } + + private void validateSrchConfig(final SearchConfig cfg, final SearchType expected) { + if(cfg.getType().size() != 1 || !cfg.getType().contains(expected)) { + throw new IllegalArgumentException(String.format("Search must be configured to search only %ss", expected.name())); + } + } + + } diff --git a/src/main/java/com/researchspace/dataverse/http/SearchURLBuilder.java b/src/main/java/com/researchspace/dataverse/http/SearchURLBuilder.java index 952b824..ac2296d 100644 --- a/src/main/java/com/researchspace/dataverse/http/SearchURLBuilder.java +++ b/src/main/java/com/researchspace/dataverse/http/SearchURLBuilder.java @@ -1,16 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.http; - -import static org.apache.commons.lang.StringUtils.isEmpty; - -import java.util.EnumSet; - -import 
org.springframework.web.util.UriComponentsBuilder; - -import com.researchspace.dataverse.search.entities.SearchConfig; -import com.researchspace.dataverse.search.entities.SearchType; /**
 Copyright 2016 ResearchSpace
 
@@ -25,42 +12,55 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.http; + +import static org.apache.commons.lang.StringUtils.isEmpty; + +import java.util.EnumSet; + +import org.springframework.web.util.UriComponentsBuilder; + +import com.researchspace.dataverse.search.entities.SearchConfig; +import com.researchspace.dataverse.search.entities.SearchType; + +/** + * URL builder for SearchOperationsImplV1. + */ class SearchURLBuilder { - String buildSearchUrl(String path, SearchConfig cfg) { + String buildSearchUrl(String path, final SearchConfig cfg) { - UriComponentsBuilder urlBuilder = UriComponentsBuilder.fromUriString(path).queryParam("q", cfg.getQ()); - EnumSet types = cfg.getType(); - if (types != null && !types.isEmpty()) { - urlBuilder.queryParam("type", types.toArray()); - } - if (!isEmpty(cfg.getSubtree())) { - urlBuilder.queryParam("subtree", cfg.getSubtree()); - } - if (!isEmpty(cfg.getFilterQuery())) { - urlBuilder.queryParam("fq", cfg.getFilterQuery()); - } - if (cfg.getSortBy() != null) { - urlBuilder.queryParam("sort", cfg.getSortBy()); - } - if (cfg.getSortOrder() != null) { - urlBuilder.queryParam("order", cfg.getSortOrder()); - } - if (cfg.getPerPage() != 0) { - urlBuilder.queryParam("per_page", cfg.getPerPage()); - } - if (cfg.getStart() != 0) { - urlBuilder.queryParam("start", cfg.getStart()); - } - if (cfg.isShowFacets()) { - urlBuilder.queryParam("show_facets", true); - } - if (cfg.isShowRelevance()) { - urlBuilder.queryParam("show_relevance", true); - } - path = urlBuilder.build(true).toUriString(); - return path; - } + final UriComponentsBuilder urlBuilder = UriComponentsBuilder.fromUriString(path).queryParam("q", cfg.getQ()); + final EnumSet types = cfg.getType(); + if (types != null && !types.isEmpty()) { + urlBuilder.queryParam("type", types.toArray()); + } + if (!isEmpty(cfg.getSubtree())) { + urlBuilder.queryParam("subtree", cfg.getSubtree()); + } + if (!isEmpty(cfg.getFilterQuery())) { + urlBuilder.queryParam("fq", cfg.getFilterQuery()); + } + if 
(cfg.getSortBy() != null) { + urlBuilder.queryParam("sort", cfg.getSortBy()); + } + if (cfg.getSortOrder() != null) { + urlBuilder.queryParam("order", cfg.getSortOrder()); + } + if (cfg.getPerPage() != 0) { + urlBuilder.queryParam("per_page", cfg.getPerPage()); + } + if (cfg.getStart() != 0) { + urlBuilder.queryParam("start", cfg.getStart()); + } + if (cfg.isShowFacets()) { + urlBuilder.queryParam("show_facets", true); + } + if (cfg.isShowRelevance()) { + urlBuilder.queryParam("show_relevance", true); + } + path = urlBuilder.build(true).toUriString(); + return path; + } } diff --git a/src/main/java/com/researchspace/dataverse/search/entities/DatasetItem.java b/src/main/java/com/researchspace/dataverse/search/entities/DatasetItem.java index 2923f15..5428b15 100644 --- a/src/main/java/com/researchspace/dataverse/search/entities/DatasetItem.java +++ b/src/main/java/com/researchspace/dataverse/search/entities/DatasetItem.java @@ -1,14 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.search.entities; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import lombok.Data; -import lombok.EqualsAndHashCode; -import lombok.ToString; - /**
 Copyright 2016 ResearchSpace
 
@@ -23,19 +12,27 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.search.entities; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + @Data @EqualsAndHashCode(callSuper = true) @ToString(callSuper = true) public class DatasetItem extends Item { - private @JsonProperty("global_id") String globalId; - private @JsonProperty("published_at") String publishedAt; - private String description, citation, citationHtml; + private @JsonProperty("global_id") String globalId; + private @JsonProperty("published_at") String publishedAt; + private String description, citation, citationHtml; - public String getType() { - return "dataset"; - } + @Override + public String getType() { + return "dataset"; + } } diff --git a/src/main/java/com/researchspace/dataverse/search/entities/DataverseItem.java b/src/main/java/com/researchspace/dataverse/search/entities/DataverseItem.java index 8c57a26..49d89e4 100644 --- a/src/main/java/com/researchspace/dataverse/search/entities/DataverseItem.java +++ b/src/main/java/com/researchspace/dataverse/search/entities/DataverseItem.java @@ -1,13 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.search.entities; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import lombok.Data; -import lombok.EqualsAndHashCode; -import lombok.ToString; /**
 Copyright 2016 ResearchSpace
 
@@ -22,19 +12,27 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.search.entities; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + @Data @EqualsAndHashCode(callSuper = true) @ToString(callSuper = true) public class DataverseItem extends Item { - String identifier; - @JsonProperty("published_at") - String publishedAt; - - public String getType() { - return "dataverse"; - } + String identifier; + @JsonProperty("published_at") + String publishedAt; + + @Override + public String getType() { + return "dataverse"; + } } diff --git a/src/main/java/com/researchspace/dataverse/search/entities/FileSearchHit.java b/src/main/java/com/researchspace/dataverse/search/entities/FileSearchHit.java index b748a34..b2256c3 100644 --- a/src/main/java/com/researchspace/dataverse/search/entities/FileSearchHit.java +++ b/src/main/java/com/researchspace/dataverse/search/entities/FileSearchHit.java @@ -1,13 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.search.entities; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import lombok.Data; -import lombok.EqualsAndHashCode; -import lombok.ToString; /**
 Copyright 2016 ResearchSpace
 
@@ -22,23 +12,31 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.search.entities; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.ToString; + @EqualsAndHashCode(callSuper = true) @ToString(callSuper = true) @Data public class FileSearchHit extends Item { - private @JsonProperty("file_id") String fileId; - private @JsonProperty("dataset_citation") String datasetCitation; - private @JsonProperty("file_content_type") String fileContentType; - private String description, md5; - private @JsonProperty("size_in_bytes") int size; - @JsonProperty("published_at") - private String publishedAt; - - public String getType() { - return "file"; - } + private @JsonProperty("file_id") String fileId; + private @JsonProperty("dataset_citation") String datasetCitation; + private @JsonProperty("file_content_type") String fileContentType; + private String description, md5; + private @JsonProperty("size_in_bytes") int size; + @JsonProperty("published_at") + private String publishedAt; + + @Override + public String getType() { + return "file"; + } } diff --git a/src/main/java/com/researchspace/dataverse/search/entities/Item.java b/src/main/java/com/researchspace/dataverse/search/entities/Item.java index 74c4403..2700ac1 100644 --- a/src/main/java/com/researchspace/dataverse/search/entities/Item.java +++ b/src/main/java/com/researchspace/dataverse/search/entities/Item.java @@ -1,17 +1,4 @@ -/* - * - */ -package com.researchspace.dataverse.search.entities; - -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonSubTypes; -import com.fasterxml.jackson.annotation.JsonSubTypes.Type; -import com.fasterxml.jackson.annotation.JsonTypeInfo; - -import lombok.Data; - -/** - * /**
+/** 
 Copyright 2016 ResearchSpace
 
  Licensed under the Apache License, Version 2.0 (the "License");
@@ -25,25 +12,33 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
+
*/ +package com.researchspace.dataverse.search.entities; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonSubTypes.Type; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +import lombok.Data; + +/** * Base class of SearchHits returned in the items field of a search - * result. - * + * result. Used to deserialise Json into correct subclasses using value of 'type' + * property. * @author rspace - * */ -// used to deserialise Json into correct subclasses using value of 'type' -// property @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") @JsonSubTypes({ @Type(value = DataverseItem.class, name = "dataverse"), - @Type(value = DatasetItem.class, name = "dataset"), - @Type(value = FileSearchHit.class, name = "file"), }) + @Type(value = DatasetItem.class, name = "dataset"), + @Type(value = FileSearchHit.class, name = "file"), }) @Data public abstract class Item { - private String name, type, url; - @JsonProperty(value = "image_url") - private String imageUrl; + private String name, type, url; + + @JsonProperty(value = "image_url") + private String imageUrl; } diff --git a/src/main/java/com/researchspace/dataverse/search/entities/SearchConfig.java b/src/main/java/com/researchspace/dataverse/search/entities/SearchConfig.java index d3861a7..a773702 100644 --- a/src/main/java/com/researchspace/dataverse/search/entities/SearchConfig.java +++ b/src/main/java/com/researchspace/dataverse/search/entities/SearchConfig.java @@ -1,18 +1,4 @@ -/* - * - */ -package com.researchspace.dataverse.search.entities; - -import java.util.EnumSet; - -import org.apache.commons.lang3.Validate; - -import lombok.Builder; -import lombok.NonNull; -import lombok.Value; - -/** - * /**
+/** 
 Copyright 2016 ResearchSpace
 
  Licensed under the Apache License, Version 2.0 (the "License");
@@ -26,66 +12,77 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
+
*/ +package com.researchspace.dataverse.search.entities; + + +import java.util.EnumSet; + +import org.apache.commons.lang3.Validate; + +import lombok.Builder; +import lombok.NonNull; +import lombok.Value; + +/** * Read-only search configuration object.
* Use the builder() method to return a new SearchConfigBuilder to * build a search configuration in a Fluent style. - * * @author rspace - * */ @Builder(toBuilder = true) @Value public class SearchConfig { - /** - * Builder for configuring search via fluent API - * - * @author rspace - * - */ - public static class SearchConfigBuilder { - /** - * Sets results per page. Maximum is 1000 - * - * @param perPage - * if > 1000, will set to 1000 - * @return - * @throws IllegalArgumentException - * if perPage <= 0 - */ - public SearchConfigBuilder perPage(int perPage) { - Validate.isTrue(perPage > 0, "Cannot have negative results per page"); - if (perPage > MAX_RESULTS_PER_PAGE) { - perPage = MAX_RESULTS_PER_PAGE; - } - this.perPage = perPage; - return this; - } - /** - * Sets results per page. Maximum is 1000 - * - * @param perPage - * if > 1000, will set to 1000 - * @return - * @throws IllegalArgumentException - * if perPage <= 0 - */ - public SearchConfigBuilder start(int start) { - Validate.isTrue(start > 0, "Cannot have negative starting point"); - this.start = start; - return this; - } + /** + * Builder for configuring search via fluent API + * @author rspace + */ + public static class SearchConfigBuilder { + /** + * Sets results per page. Maximum is 1000 + * + * @param perPage + * if > 1000, will set to 1000 + * @return + * @throws IllegalArgumentException + * if perPage <= 0 + */ + public SearchConfigBuilder perPage(int perPage) { + Validate.isTrue(perPage > 0, "Cannot have negative results per page"); + if (perPage > MAX_RESULTS_PER_PAGE) { + perPage = MAX_RESULTS_PER_PAGE; + } + this.perPage = perPage; + return this; + } + + /** + * Sets results per page. 
Maximum is 1000 + * + * @param perPage + * if > 1000, will set to 1000 + * @return + * @throws IllegalArgumentException + * if perPage <= 0 + */ + public SearchConfigBuilder start(final int start) { + Validate.isTrue(start > 0, "Cannot have negative starting point"); + this.start = start; + return this; + } - } + } - public static final int MAX_RESULTS_PER_PAGE = 1000; - private EnumSet type; - private @NonNull String q; - private String subtree, filterQuery; - private SortBy sortBy; - private SortOrder sortOrder; - private int perPage, start; - private boolean showRelevance, showFacets; + public static final int MAX_RESULTS_PER_PAGE = 1000; + private EnumSet type; + private @NonNull String q; + private String subtree; + private String filterQuery; + private SortBy sortBy; + private SortOrder sortOrder; + private int perPage, start; + private boolean showRelevance; + private boolean showFacets; } diff --git a/src/main/java/com/researchspace/dataverse/search/entities/SearchResults.java b/src/main/java/com/researchspace/dataverse/search/entities/SearchResults.java index ac99f2b..cbff847 100644 --- a/src/main/java/com/researchspace/dataverse/search/entities/SearchResults.java +++ b/src/main/java/com/researchspace/dataverse/search/entities/SearchResults.java @@ -1,17 +1,4 @@ -/* - * - */ -package com.researchspace.dataverse.search.entities; - -import java.util.List; -import java.util.stream.Collectors; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import lombok.Data; - -/** - * /**
+/** 
 Copyright 2016 ResearchSpace
 
  Licensed under the Apache License, Version 2.0 (the "License");
@@ -25,36 +12,43 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
+
*/ +package com.researchspace.dataverse.search.entities; + +import java.util.List; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import lombok.Data; + +/** * Encapsulates search results. - *

- * Items' subclass can be identified from the 'type' value. - * + * Items subclass can be identified from the 'type' value. * @author rspace - * */ @Data public class SearchResults { - String q; - @JsonProperty(value = "total_count") - private int totalCount; - @JsonProperty(value = "count_in_response") - private int countInResponse; - int start; - - List spellingAlternatives; - List items; - - /** - * Filters a list of SearchHits by their type. - * @param type - * @return - */ - public List filterByType (SearchType type) { - return items.stream().filter((i)-> - i.getType().equalsIgnoreCase(type.name())) - .collect(Collectors.toList()); - } + String q; + @JsonProperty(value = "total_count") + private int totalCount; + @JsonProperty(value = "count_in_response") + private int countInResponse; + int start; + + List spellingAlternatives; + List items; + + /** + * Filters a list of SearchHits by their type. + * @param type + * @return + */ + public List filterByType (final SearchType type) { + return items.stream().filter((i)-> + i.getType().equalsIgnoreCase(type.name())) + .collect(Collectors.toList()); + } } diff --git a/src/main/java/com/researchspace/dataverse/search/entities/SearchType.java b/src/main/java/com/researchspace/dataverse/search/entities/SearchType.java index 353a4c8..b3b303d 100644 --- a/src/main/java/com/researchspace/dataverse/search/entities/SearchType.java +++ b/src/main/java/com/researchspace/dataverse/search/entities/SearchType.java @@ -1,10 +1,4 @@ -/* - * - */ -package com.researchspace.dataverse.search.entities; - -/** - * /**
+/** 
 Copyright 2016 ResearchSpace
 
  Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,14 +12,14 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
+
*/ +package com.researchspace.dataverse.search.entities; +/** * A value for the 'type' parameter of a search - * * @author rspace - * */ public enum SearchType { - dataverse, dataset, file; + dataverse, dataset, file; } diff --git a/src/main/java/com/researchspace/dataverse/search/entities/SortBy.java b/src/main/java/com/researchspace/dataverse/search/entities/SortBy.java index 8da4267..a72b321 100644 --- a/src/main/java/com/researchspace/dataverse/search/entities/SortBy.java +++ b/src/main/java/com/researchspace/dataverse/search/entities/SortBy.java @@ -1,10 +1,4 @@ -/* - * - */ -package com.researchspace.dataverse.search.entities; - -/** - * /**
+/** 
 Copyright 2016 ResearchSpace
 
  Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,13 +12,13 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
+
*/ +package com.researchspace.dataverse.search.entities; +/** * Sorting category - * * @author rspace - * */ public enum SortBy { - name, date + name, date } diff --git a/src/main/java/com/researchspace/dataverse/search/entities/SortOrder.java b/src/main/java/com/researchspace/dataverse/search/entities/SortOrder.java index bdefd8a..1a87b39 100644 --- a/src/main/java/com/researchspace/dataverse/search/entities/SortOrder.java +++ b/src/main/java/com/researchspace/dataverse/search/entities/SortOrder.java @@ -1,11 +1,4 @@ -/* - * - */ -package com.researchspace.dataverse.search.entities; - -/** - * - * /**
+/** 
 Copyright 2016 ResearchSpace
 
  Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,13 +12,14 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
+
*/ +package com.researchspace.dataverse.search.entities; + +/** * Order by asc or desc - * * @author rspace - * */ public enum SortOrder { - asc, desc; + asc, desc; } diff --git a/src/main/java/com/researchspace/dataverse/spring/config/DataverseSpringConfig.java b/src/main/java/com/researchspace/dataverse/spring/config/DataverseSpringConfig.java index 9bf0a94..f152829 100644 --- a/src/main/java/com/researchspace/dataverse/spring/config/DataverseSpringConfig.java +++ b/src/main/java/com/researchspace/dataverse/spring/config/DataverseSpringConfig.java @@ -1,17 +1,4 @@ -/* - * - */ -package com.researchspace.dataverse.spring.config; - -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Scope; - -import com.researchspace.dataverse.api.v1.DataverseAPI; -import com.researchspace.dataverse.http.DataverseAPIImpl; - -/** - * /**
+/** 
 Copyright 2016 ResearchSpace
 
  Licensed under the Apache License, Version 2.0 (the "License");
@@ -25,18 +12,27 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
+
*/ +package com.researchspace.dataverse.spring.config; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Scope; + +import com.researchspace.dataverse.api.v1.DataverseAPI; +import com.researchspace.dataverse.http.DataverseAPIImpl; + +/** * Wires up classes and produces Beans for this component. * @author rspace - * */ @Configuration public class DataverseSpringConfig { - - @Bean - @Scope(value="prototype") - DataverseAPI dataverseAPI(){ - return new DataverseAPIImpl(); - } - + + @Bean + @Scope(value="prototype") + DataverseAPI dataverseAPI(){ + return new DataverseAPIImpl(); + } + } diff --git a/src/main/java/com/researchspace/dataverse/spring/config/PackageOrganization.java b/src/main/java/com/researchspace/dataverse/spring/config/PackageOrganization.java index cebd459..fc91dd3 100644 --- a/src/main/java/com/researchspace/dataverse/spring/config/PackageOrganization.java +++ b/src/main/java/com/researchspace/dataverse/spring/config/PackageOrganization.java @@ -1,28 +1,26 @@ -/* - * - */ -package com.researchspace.dataverse.spring.config; -/** - * /** Copyright 2016 ResearchSpace +/**
+Copyright 2016 ResearchSpace
 
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
 
-    http://www.apache.org/licenses/LICENSE-2.0
+     http://www.apache.org/licenses/LICENSE-2.0
 
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License. 
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
*/ +package com.researchspace.dataverse.spring.config; -* +/** * Plant UML documentation for package dependencies for future refactoring into separate Java binding module. @startuml skinparam componentStyle uml2 title Dataverse integration package relations - + [external Sword lib] #green [spring-rest] #green [c.r.d.api1] -up-> [c.r.d.entities / facade] diff --git a/src/main/java/com/researchspace/dataverse/sword/FileUploader.java b/src/main/java/com/researchspace/dataverse/sword/FileUploader.java deleted file mode 100644 index 4293ffd..0000000 --- a/src/main/java/com/researchspace/dataverse/sword/FileUploader.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * - */ -package com.researchspace.dataverse.sword; - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.net.URI; - -import org.swordapp.client.AuthCredentials; -import org.swordapp.client.Deposit; -import org.swordapp.client.DepositReceipt; -import org.swordapp.client.ProtocolViolationException; -import org.swordapp.client.SWORDClient; -import org.swordapp.client.SWORDClientException; -import org.swordapp.client.SWORDError; - -import lombok.extern.slf4j.Slf4j; -/** - * Uploads using SWORD client library - * Copyright 2016 ResearchSpace - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -* This is an internal package for calling SWORD-API. 
- * @author rspace - * - */ -@Slf4j -public class FileUploader { - - private static final String APPLICATION_ZIP = "application/zip"; - private static final String ZIP_PACKAGING = "http://purl.org/net/sword/package/SimpleZip"; - - /** - - * @param file - * @param apiKey - * @param dataverseServer Server root e.g. "https://apitest.dataverse.org" - * @param doi FRagment e.g. "10.5072/FK2/MGADL1" - * @return - * @throws IOException - * @throws SWORDClientException - * @throws SWORDError - * @throws ProtocolViolationException - */ - public DepositReceipt deposit(File file, String apiKey, URI dataverseServer, String doi) - throws IOException, SWORDClientException, SWORDError, ProtocolViolationException { - return this.deposit(new FileInputStream(file), file.getName(), apiKey, dataverseServer, doi); - } - - - /** - * Creates a deposit object to upload a file into a dataverse instance using the SWORD library client. - * - * @param is Data coming as a stream. - * @param filename Name of the file to upload. - * @param apiKey Key used to authenticate actions into the goal dataverse instance. - * @param dataverseServer URL of the dataverse instance to attack. - * @param doi To identify the dataset that is the goal of the file upload. - * @return Information of the result of the upload via a {@code DepositReceipt} instance. - * @throws IOException Thrown when a IO error occurs, which is a general error. - * @throws SWORDClientException Thrown when an exception happens inside the SWORD client. - * @throws SWORDError Thrown when an exception happens inside the SWORD client. - * @throws ProtocolViolationException Thrown for unknown reasons. 
- */ - public DepositReceipt deposit(InputStream is, String filename, String apiKey, URI dataverseServer, String doi) - throws IOException, SWORDClientException, SWORDError, ProtocolViolationException { - SWORDClient cli = new SWORDClient(); - Deposit dep = new Deposit(); - dep.setFilename(filename); - dep.setFile(is); - dep.setMimeType(APPLICATION_ZIP); - dep.setPackaging(ZIP_PACKAGING); - - AuthCredentials cred = new AuthCredentials(apiKey, ""); - - String depositURI = dataverseServer.toString() + "/dvn/api/data-deposit/v1.1/swordv2/edit-media/study/doi:" - + doi; - DepositReceipt rct = cli.deposit(depositURI, dep, cred); - log.info("Deposit received with status {}" ,rct.getStatusCode()); - return rct; - } - -} diff --git a/src/main/java/com/researchspace/dataverse/sword/SwordAPI.java b/src/main/java/com/researchspace/dataverse/sword/SwordAPI.java new file mode 100644 index 0000000..7a9c7ff --- /dev/null +++ b/src/main/java/com/researchspace/dataverse/sword/SwordAPI.java @@ -0,0 +1,124 @@ +/**
+Copyright 2016 ResearchSpace
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
*/ +package com.researchspace.dataverse.sword; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; + +import org.swordapp.client.AuthCredentials; +import org.swordapp.client.CollectionEntries; +import org.swordapp.client.Deposit; +import org.swordapp.client.DepositReceipt; +import org.swordapp.client.ProtocolViolationException; +import org.swordapp.client.SWORDClient; +import org.swordapp.client.SWORDClientException; +import org.swordapp.client.SWORDError; + +import com.researchspace.dataverse.entities.Identifier; + +import lombok.extern.slf4j.Slf4j; + +/** + * Operations using SWORD client library + * This is an internal package for calling SWORD-API. + * @author rspace + */ +@Slf4j +public class SwordAPI { + + private static final String SWORD_FRAGMENT = "/dvn/api/data-deposit/v1.1/swordv2/"; + + private static final String APPLICATION_ZIP = "application/zip"; + + private static final String ZIP_PACKAGING = "http://purl.org/net/sword/package/SimpleZip"; + + /** + + * @param file + * @param apiKey + * @param dataverseServer Server root e.g. "https://apitest.dataverse.org" + * @param doi FRagment e.g. "10.5072/FK2/MGADL1" + * @return + * @throws IOException + * @throws SWORDClientException + * @throws SWORDError + * @throws ProtocolViolationException + */ + public DepositReceipt deposit(final File file, final String apiKey, final URI dataverseServer, final String doi) + throws IOException, SWORDClientException, SWORDError, ProtocolViolationException { + return this.deposit(new FileInputStream(file), file.getName(), apiKey, dataverseServer, doi); + } + + /** + * Creates a deposit object to upload a file into a dataverse instance using the SWORD library client. + * + * @param is Data coming as a stream. + * @param filename Name of the file to upload. + * @param apiKey Key used to authenticate actions into the goal dataverse instance. 
+ * @param dataverseServer URL of the dataverse instance to attack. + * @param doi To identify the dataset that is the goal of the file upload. + * @return Information of the result of the upload via a {@code DepositReceipt} instance. + * @throws IOException Thrown when a IO error occurs, which is a general error. + * @throws SWORDClientException Thrown when an exception happens inside the SWORD client. + * @throws SWORDError Thrown when an exception happens inside the SWORD client. + * @throws ProtocolViolationException Thrown for unknown reasons. + */ + public DepositReceipt deposit(final InputStream is, final String filename, final String apiKey, final URI dataverseServer, final String doi) + throws IOException, SWORDClientException, SWORDError, ProtocolViolationException { + final SWORDClient cli = new SWORDClient(); + final Deposit dep = new Deposit(); + dep.setFilename(filename); + dep.setFile(is); + dep.setMimeType(APPLICATION_ZIP); + dep.setPackaging(ZIP_PACKAGING); + + final AuthCredentials cred = new AuthCredentials(apiKey, ""); + + final String depositURI = dataverseServer.toString() + "/dvn/api/data-deposit/v1.1/swordv2/edit-media/study/doi:" + + doi; + final DepositReceipt rct = cli.deposit(depositURI, dep, cred); + log.info("Deposit received with status {}" ,rct.getStatusCode()); + return rct; + } + + public CollectionEntries getEntries(final String apiKey, final URI dataverseServer, final String doi) + throws SWORDClientException, ProtocolViolationException { + final String depositURI = dataverseServer.toString() + SWORD_FRAGMENT +"statement/study/doi:" + + doi; + final SWORDClient cli = new SWORDClient(); + final AuthCredentials cred = new AuthCredentials(apiKey, ""); + return cli.listCollection(depositURI, cred); + } + + public void deleteFile(final String apiKey, final String fileId) + throws SWORDClientException, SWORDError, ProtocolViolationException { + final SWORDClient cli = new SWORDClient(); + final AuthCredentials cred = new 
AuthCredentials(apiKey, ""); + cli.delete(fileId, cred); + } + + public void deleteFile(final String apiKey, final URI dataverseServer, + final Identifier datasetIdentifier, final String fileName) + throws SWORDClientException, SWORDError, ProtocolViolationException { + final String fileId = dataverseServer.toString() + SWORD_FRAGMENT + "edit-media/file/" + + datasetIdentifier.getId() + "/" + fileName; + deleteFile(apiKey, fileId); + } + +} diff --git a/src/main/java/com/researchspace/springrest/ext/LoggingResponseErrorHandler.java b/src/main/java/com/researchspace/springrest/ext/LoggingResponseErrorHandler.java index 728ed5a..930e9d9 100644 --- a/src/main/java/com/researchspace/springrest/ext/LoggingResponseErrorHandler.java +++ b/src/main/java/com/researchspace/springrest/ext/LoggingResponseErrorHandler.java @@ -1,17 +1,4 @@ -/* - * - */ -package com.researchspace.springrest.ext; - - -import java.io.IOException; - -import org.springframework.http.client.ClientHttpResponse; -import org.springframework.web.client.ResponseErrorHandler; - -import lombok.extern.slf4j.Slf4j; -/** - * /**
+/** 
 Copyright 2016 ResearchSpace
 
  Licensed under the Apache License, Version 2.0 (the "License");
@@ -25,24 +12,32 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
+
*/ +package com.researchspace.springrest.ext; + + +import java.io.IOException; + +import org.springframework.http.client.ClientHttpResponse; +import org.springframework.web.client.ResponseErrorHandler; +import lombok.extern.slf4j.Slf4j; + +/** * Logs a 400 response instead of throwing exception, so we can capture the error message from Json - * * @author rspace * @see http://springinpractice.com/2013/10/07/handling-json-error-object-responses-with-springs-resttemplate/ - * */ @Slf4j public class LoggingResponseErrorHandler implements ResponseErrorHandler { @Override - public void handleError(ClientHttpResponse response) throws IOException { + public void handleError(final ClientHttpResponse response) throws IOException { log.error("Response error: {} {}", response.getStatusCode(), response.getStatusText()); } @Override - public boolean hasError(ClientHttpResponse response) throws IOException { + public boolean hasError(final ClientHttpResponse response) throws IOException { return RestUtil.isError(response.getStatusCode()); } } diff --git a/src/main/java/com/researchspace/springrest/ext/RestClientException.java b/src/main/java/com/researchspace/springrest/ext/RestClientException.java new file mode 100644 index 0000000..f0ef493 --- /dev/null +++ b/src/main/java/com/researchspace/springrest/ext/RestClientException.java @@ -0,0 +1,50 @@ +/**
+Copyright 2016 ResearchSpace
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
*/ +package com.researchspace.springrest.ext; + +import org.springframework.web.client.RestTemplate; + +/** + * Local class for exceptions thrown by {@link RestTemplate} whenever it encounters + * client-side HTTP errors. + */ +public class RestClientException extends RuntimeException { + + /** + * Serial. + */ + private static final long serialVersionUID = -4790742590602186386L; + + /** + * Error code. + */ + private final Integer code; + + /** + * Construct a new instance of {@code RestClientException} with the given message. + * @param errorCode error code + * @param msg the message + */ + public RestClientException(final Integer errorCode, final String msg) { + super(msg); + code = errorCode; + } + + public Integer getCode() { + return code; + } + +} diff --git a/src/main/java/com/researchspace/springrest/ext/RestUtil.java b/src/main/java/com/researchspace/springrest/ext/RestUtil.java index 59d16f5..28a1df5 100644 --- a/src/main/java/com/researchspace/springrest/ext/RestUtil.java +++ b/src/main/java/com/researchspace/springrest/ext/RestUtil.java @@ -1,9 +1,3 @@ -/* - * - */ -package com.researchspace.springrest.ext; - -import org.springframework.http.HttpStatus; /**
 Copyright 2016 ResearchSpace
 
@@ -18,13 +12,19 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.springrest.ext; + +import org.springframework.http.HttpStatus; + +/** + * Rest client Utils. + */ public class RestUtil { - public static boolean isError(HttpStatus status) { - HttpStatus.Series series = status.series(); - return (HttpStatus.Series.CLIENT_ERROR.equals(series) - || HttpStatus.Series.SERVER_ERROR.equals(series)); + public static boolean isError(final HttpStatus status) { + final HttpStatus.Series series = status.series(); + return HttpStatus.Series.CLIENT_ERROR.equals(series) + || HttpStatus.Series.SERVER_ERROR.equals(series); } } diff --git a/src/main/java/com/researchspace/springrest/ext/SWORDException.java b/src/main/java/com/researchspace/springrest/ext/SWORDException.java new file mode 100644 index 0000000..cac57ff --- /dev/null +++ b/src/main/java/com/researchspace/springrest/ext/SWORDException.java @@ -0,0 +1,46 @@ +/**
+Copyright 2016 ResearchSpace
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
*/ +package com.researchspace.springrest.ext; + +import org.swordapp.client.SWORDClient; + +/** + * Class for exceptions thrown by {@link SWORDClient} whenever it encounters an error. + */ +public class SWORDException extends RuntimeException { + + /** + * Serial. + */ + private static final long serialVersionUID = 6151353497066402179L; + + /** + * Construct a new instance of {@code RestClientException} with the given message. + * @param msg the message + */ + public SWORDException(final String msg) { + super(msg); + } + + /** + * Construct a new instance of {@code RestClientException} with the given message. + * @param msg the message + */ + public SWORDException(final String msg, final Exception e) { + super(msg, e.getCause()); + } + +} diff --git a/src/test/java/com/researchspace/dataverse/entities/DatasetTest.java b/src/test/java/com/researchspace/dataverse/entities/DatasetTest.java index a1f374c..e861a37 100644 --- a/src/test/java/com/researchspace/dataverse/entities/DatasetTest.java +++ b/src/test/java/com/researchspace/dataverse/entities/DatasetTest.java @@ -1,17 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.entities; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; - -import java.net.MalformedURLException; -import java.net.URL; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; /**
 Copyright 2016 ResearchSpace
 
@@ -26,27 +12,39 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.entities; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + +import java.net.MalformedURLException; +import java.net.URL; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + + public class DatasetTest { - private static final String EXPECTED_DOI_ID = "10.5072/FK2/TGM6D5"; - private static final String EXAMPLE_DOI_URL = "http://dx.doi.org/10.5072/FK2/TGM6D5"; + private static final String EXPECTED_DOI_ID = "10.5072/FK2/TGM6D5"; + private static final String EXAMPLE_DOI_URL = "http://dx.doi.org/10.5072/FK2/TGM6D5"; - @Before - public void setUp() throws Exception { - } + @Before + public void setUp() throws Exception { + } - @After - public void tearDown() throws Exception { - } + @After + public void tearDown() throws Exception { + } - @Test - public void GetDoiId() throws MalformedURLException { - Dataset ds = new Dataset(); - assertFalse(ds.getDoiId().isPresent()); - ds.setPersistentUrl(new URL(EXAMPLE_DOI_URL)); - assertEquals(EXPECTED_DOI_ID, ds.getDoiId().get()); - } + @Test + public void GetDoiId() throws MalformedURLException { + final Dataset ds = new Dataset(); + assertFalse(ds.getDoiId().isPresent()); + ds.setPersistentUrl(new URL(EXAMPLE_DOI_URL)); + assertEquals(EXPECTED_DOI_ID, ds.getDoiId().get()); + } } diff --git a/src/test/java/com/researchspace/dataverse/entities/facade/DatasetAuthorTest.java b/src/test/java/com/researchspace/dataverse/entities/facade/DatasetAuthorTest.java index ec74f8e..f491325 100644 --- a/src/test/java/com/researchspace/dataverse/entities/facade/DatasetAuthorTest.java +++ b/src/test/java/com/researchspace/dataverse/entities/facade/DatasetAuthorTest.java @@ -1,13 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.entities.facade; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.researchspace.dataverse.entities.facade.DatasetAuthor; /**
 Copyright 2016 ResearchSpace
 
@@ -22,21 +12,26 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.entities.facade; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + public class DatasetAuthorTest { - @Before - public void setUp() throws Exception { - } + @Before + public void setUp() throws Exception { + } - @After - public void tearDown() throws Exception { - } + @After + public void tearDown() throws Exception { + } - @Test(expected = NullPointerException.class) - public void testNameRequired() { - DatasetAuthor.builder().authorAffiliation("somewhere").build(); - } + @Test(expected = NullPointerException.class) + public void testNameRequired() { + DatasetAuthor.builder().authorAffiliation("somewhere").build(); + } } diff --git a/src/test/java/com/researchspace/dataverse/entities/facade/DatasetBuilderTest.java b/src/test/java/com/researchspace/dataverse/entities/facade/DatasetBuilderTest.java index c39de52..33dcd2d 100644 --- a/src/test/java/com/researchspace/dataverse/entities/facade/DatasetBuilderTest.java +++ b/src/test/java/com/researchspace/dataverse/entities/facade/DatasetBuilderTest.java @@ -1,23 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.entities.facade; - -import static com.researchspace.dataverse.entities.facade.DatasetTestFactory.*; - -import java.net.MalformedURLException; -import java.net.URISyntaxException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.ObjectWriter; -import com.researchspace.dataverse.entities.Dataset; -import com.researchspace.dataverse.entities.facade.DatasetBuilder; -import com.researchspace.dataverse.entities.facade.DatasetFacade; /**
 Copyright 2016 ResearchSpace
 
@@ -32,26 +12,48 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.entities.facade; + +import static com.researchspace.dataverse.entities.facade.DatasetTestFactory.createFacade; +import static org.junit.Assert.assertEquals; + +import java.io.File; +import java.io.IOException; +import java.net.URISyntaxException; + +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.StringUtils; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectWriter; +import com.researchspace.dataverse.entities.Dataset; + public class DatasetBuilderTest { - DatasetBuilder builder; - @Before - public void setUp() throws Exception { - builder = new DatasetBuilder(); - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void test() throws JsonProcessingException, MalformedURLException, URISyntaxException { - DatasetFacade facade = createFacade(); - ObjectWriter mapper = new ObjectMapper().writerWithDefaultPrettyPrinter(); - Dataset dversion = builder.build(facade); - String json = mapper.writeValueAsString(dversion); - System.out.println(json); - } + DatasetBuilder builder; + @Before + public void setUp() throws Exception { + builder = new DatasetBuilder(); + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void test() throws URISyntaxException, IOException { + final DatasetFacade facade = createFacade(); + final ObjectWriter mapper = new ObjectMapper().writerWithDefaultPrettyPrinter(); + final Dataset dversion = builder.build(facade); + final String json = mapper.writeValueAsString(dversion); + assertEquals(StringUtils.difference("There are differences between output and expected : " + + FileUtils.readFileToString(new File("src/integration-test/resources/dataset-builder-test.json")), + json), + FileUtils.readFileToString(new File("src/integration-test/resources/dataset-builder-test.json")), + json); + } } diff --git 
a/src/test/java/com/researchspace/dataverse/entities/facade/DatasetFacadeTest.java b/src/test/java/com/researchspace/dataverse/entities/facade/DatasetFacadeTest.java index b7e7a06..a581f95 100644 --- a/src/test/java/com/researchspace/dataverse/entities/facade/DatasetFacadeTest.java +++ b/src/test/java/com/researchspace/dataverse/entities/facade/DatasetFacadeTest.java @@ -1,18 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.entities.facade; - -import static org.junit.Assert.*; - -import java.util.Date; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.util.Calendar; -import com.researchspace.dataverse.entities.facade.DatasetFacade; /**
 Copyright 2016 ResearchSpace
 
@@ -27,40 +12,52 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.entities.facade; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import java.util.Calendar; +import java.util.Date; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + + public class DatasetFacadeTest { - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test(expected = NullPointerException.class) - public void testAuthorRequired() { - DatasetFacade.builder().contact(DatasetTestFactory.buildAContact()).build(); - } - - @Test(expected = NullPointerException.class) - public void testContactRequired() { - DatasetFacade.builder().author(DatasetTestFactory.buildAnAuthor()).build(); - } - - @Test - public void testProductionDate() { - DatasetFacade facade = new DatasetFacade() ; - assertNull(facade.getProductionDate()); - Calendar cal = Calendar.getInstance(); - Date now = cal.getTime(); - long nowMillis = now.getTime(); - facade.setProductionDate(now); - now.setTime(10000L); - - assertEquals(nowMillis, facade.getProductionDate().getTime()); - - } + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test(expected = NullPointerException.class) + public void testAuthorRequired() { + DatasetFacade.builder().contact(DatasetTestFactory.buildAContact()).build(); + } + + @Test(expected = NullPointerException.class) + public void testContactRequired() { + DatasetFacade.builder().author(DatasetTestFactory.buildAnAuthor()).build(); + } + + @Test + public void testProductionDate() { + final DatasetFacade facade = new DatasetFacade() ; + assertNull(facade.getProductionDate()); + final Calendar cal = Calendar.getInstance(); + final Date now = cal.getTime(); + final long nowMillis = now.getTime(); + facade.setProductionDate(now); + now.setTime(10000L); + + assertEquals(nowMillis, facade.getProductionDate().getTime()); + + } } diff --git 
a/src/test/java/com/researchspace/dataverse/entities/facade/DatasetTestFactory.java b/src/test/java/com/researchspace/dataverse/entities/facade/DatasetTestFactory.java index af45915..26079f9 100644 --- a/src/test/java/com/researchspace/dataverse/entities/facade/DatasetTestFactory.java +++ b/src/test/java/com/researchspace/dataverse/entities/facade/DatasetTestFactory.java @@ -1,16 +1,4 @@ -/* - * - */ -package com.researchspace.dataverse.entities.facade; - -import java.net.MalformedURLException; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.util.Arrays; -import java.util.Date; -/** - * /**
+/** 
 Copyright 2016 ResearchSpace
 
  Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,110 +12,124 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
+
*/ +package com.researchspace.dataverse.entities.facade; + +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.time.Instant; +import java.util.Arrays; +import java.util.Date; + +/** * Factory methods for returning valid Dataset objects and metadata * @author rspace - * */ public class DatasetTestFactory { - /** - * Creates a complex DataSet object - * @return - * @throws MalformedURLException - * @throws URISyntaxException - */ - public static DatasetFacade createFacade() throws MalformedURLException, URISyntaxException { - return DatasetFacade.builder() - .author(buildAnAuthor()).author(buildAnotherAuthor()) - .title("title1") - .contact(buildAContact()).contact(buildAnotherContact()) - .description(buildADesc()) - .keyword(buildAKeyword("key1")).keyword(buildAKeyword("key2")) - .topicClassification(buildATopicClassification("topic1")) - .publication(buildAPublication()) - .producer(buildAProducer()) - .productionDate(new Date()) - .productionPlace("Edinburgh, UK") - .contributor(buildAContributor("Fred")).contributor(buildAContributor("Tim")) - .subject("Chemistry") - .depositor("A depositor") - .subtitle(" A subtitle") - .alternativeTitle("altTitle") - .alternativeURL(new URL("https://www.myrepo.com")) - .note("Some note") - .languages(Arrays.asList(new String []{"English", "French"})) - .build(); - } - - private static DatasetContributor buildAContributor(String name) { - return DatasetContributor.builder() - .name(name) - .type(ContributorType.ProjectLeader) - .build(); - } - - private static DatasetProducer buildAProducer() throws MalformedURLException { - return DatasetProducer.builder() - .name("a producer") - .abbreviation("abbr") - .affiliation("UoE") - .logoURL(new URL("https://pubmed.logo.com/1234")) - .url(new URL("https://pubmed.com/1234")) - .build(); - } - - private static DatasetPublication buildAPublication() throws MalformedURLException { - return 
DatasetPublication.builder() - .publicationCitation("citation") - .publicationIdNumber("12435") - .publicationIDType(PublicationIDType.ean13) - .publicationURL(new URL("https://pubmed.com/1234")) - .build(); - } - - private static DatasetTopicClassification buildATopicClassification(String value) throws URISyntaxException { - return DatasetTopicClassification.builder().topicClassValue(value) - .topicClassVocab("a topic vocab").topicClassVocabURI(new URI("https://www.vocab.org")) - .build(); - } - - private static DatasetKeyword buildAKeyword(String key) throws URISyntaxException { - return DatasetKeyword.builder().value(key).vocabulary("keywordVocab") - .vocabularyURI(new URI("https://vocab.com")).build(); - } - - private static DatasetDescription buildADesc() { - return DatasetDescription.builder() - .date(new Date()).description("some desc") - .build(); - } - - static DatasetContact buildAContact() { - return DatasetContact.builder() - .datasetContactAffiliation("Some place").datasetContactEmail("contact@email.com").datasetContactName("Sarah Contact") - .build(); - } - - static DatasetContact buildAnotherContact() { - return DatasetContact.builder() - .datasetContactAffiliation("Another place") - .datasetContactEmail("contact2@email.com") - .datasetContactName("Brian Contact2") - .build(); - } - - static DatasetAuthor buildAnotherAuthor() { - return DatasetAuthor.builder().authorName("John Smith") - .authorAffiliation("Dataverse") - .authorIdentifierScheme("ISNI") - .authorIdentifier("1234-5678").build(); - } - - static DatasetAuthor buildAnAuthor() { - return DatasetAuthor.builder().authorName("Fred Blogs") - .authorAffiliation("RSpace") - .authorIdentifierScheme("ORCID") - .authorIdentifier("1234-5678").build(); - } + + private static final Date SOME_DATE = Date.from(Instant.ofEpochMilli(1)); + + /** + * Creates a complex DataSet object + * @return + * @throws MalformedURLException + * @throws URISyntaxException + */ + public static DatasetFacade createFacade() 
throws MalformedURLException, URISyntaxException { + return DatasetFacade.builder() + .author(buildAnAuthor()).author(buildAnotherAuthor()) + .title("title1") + .contact(buildAContact()).contact(buildAnotherContact()) + .description(buildADesc()) + .keyword(buildAKeyword("key1")).keyword(buildAKeyword("key2")) + .topicClassification(buildATopicClassification("topic1")) + .publication(buildAPublication()) + .producer(buildAProducer()) + .productionDate(SOME_DATE) + .productionPlace("Edinburgh, UK") + .contributor(buildAContributor("Fred")).contributor(buildAContributor("Tim")) + .subject("Chemistry") + .depositor("A depositor") + .subtitle(" A subtitle") + .alternativeTitle("altTitle") + .alternativeURL(new URL("http://www.myrepo.com")) + .note("Some note") + .kindsOfData(Arrays.asList("Dataset", "Collection")) + .languages(Arrays.asList("English")) + .build(); + } + + private static DatasetContributor buildAContributor(final String name) { + return DatasetContributor.builder() + .name(name) + .type(ContributorType.ProjectLeader) + .build(); + } + + private static DatasetProducer buildAProducer() throws MalformedURLException { + return DatasetProducer.builder() + .name("a producer") + .abbreviation("abbr") + .affiliation("UoE") + .logoURL(new URL("http://pubmed.logo.com/1234")) + .url(new URL("http://pubmed.com/1234")) + .build(); + } + + private static DatasetPublication buildAPublication() throws MalformedURLException { + return DatasetPublication.builder() + .publicationCitation("citation") + .publicationIdNumber("12435") + .publicationIDType(PublicationIDType.ean13) + .publicationURL(new URL("http://pubmed.com/1234")) + .build(); + } + + private static DatasetTopicClassification buildATopicClassification(final String value) throws URISyntaxException { + return DatasetTopicClassification.builder().topicClassValue(value) + .topicClassVocab("a topic vocab").topicClassVocabURI(new URI("http://www.vocab.org")) + .build(); + } + + private static DatasetKeyword 
buildAKeyword(final String key) throws URISyntaxException { + return DatasetKeyword.builder().value(key).vocabulary("keywordVocab") + .vocabularyURI(new URI("http://vocab.com")).build(); + } + + private static DatasetDescription buildADesc() { + return DatasetDescription.builder() + .date(SOME_DATE).description("some desc") + .build(); + } + + static DatasetContact buildAContact() { + return DatasetContact.builder() + .datasetContactAffiliation("Some place").datasetContactEmail("contact@email.com").datasetContactName("Sarah Contact") + .build(); + } + + static DatasetContact buildAnotherContact() { + return DatasetContact.builder() + .datasetContactAffiliation("Another place") + .datasetContactEmail("contact2@email.com") + .datasetContactName("Brian Contact2") + .build(); + } + + static DatasetAuthor buildAnotherAuthor() { + return DatasetAuthor.builder().authorName("John Smith") + .authorAffiliation("Dataverse") + .authorIdentifierScheme("ISNI") + .authorIdentifier("1234-5678").build(); + } + + static DatasetAuthor buildAnAuthor() { + return DatasetAuthor.builder().authorName("Fred Blogs") + .authorAffiliation("RSpace") + .authorIdentifierScheme("ORCID") + .authorIdentifier("1234-5678").build(); + } } diff --git a/src/test/java/com/researchspace/dataverse/http/DatasetFilePostMockServerTest.java b/src/test/java/com/researchspace/dataverse/http/DatasetFilePostMockServerTest.java index bf3ae1c..5a908b7 100644 --- a/src/test/java/com/researchspace/dataverse/http/DatasetFilePostMockServerTest.java +++ b/src/test/java/com/researchspace/dataverse/http/DatasetFilePostMockServerTest.java @@ -1,13 +1,16 @@ /* - * + * */ package com.researchspace.dataverse.http; -import com.researchspace.dataverse.api.v1.DataverseConfig; -import com.researchspace.dataverse.entities.DatasetFileList; -import com.researchspace.dataverse.entities.Identifier; -import com.researchspace.dataverse.search.entities.SearchConfig; -import com.researchspace.dataverse.testutils.TestFileUtils; +import 
static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.springframework.http.MediaType.APPLICATION_JSON; +import static org.springframework.test.web.client.match.MockRestRequestMatchers.method; + +import java.net.MalformedURLException; +import java.net.URL; + import org.junit.Test; import org.springframework.http.HttpMethod; import org.springframework.test.web.client.ExpectedCount; @@ -16,13 +19,10 @@ import org.springframework.test.web.client.response.MockRestResponseCreators; import org.springframework.web.client.RestTemplate; -import java.net.MalformedURLException; -import java.net.URL; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.springframework.http.MediaType.APPLICATION_JSON; -import static org.springframework.test.web.client.match.MockRestRequestMatchers.method; +import com.researchspace.dataverse.api.v1.DataverseConfig; +import com.researchspace.dataverse.entities.DatasetFileList; +import com.researchspace.dataverse.entities.Identifier; +import com.researchspace.dataverse.testutils.TestFileUtils; /**
 Copyright 2016 ResearchSpace
@@ -39,59 +39,58 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 
-*/ + */ public class DatasetFilePostMockServerTest { - @Test - public void testNativeFilePost() throws MalformedURLException { - RestTemplate template = new RestTemplate(); - DataverseOperationsImplV1 tss = setupDataverseOps(template); - final String persistentid = "doi://dsfh.dsdsd.sds"; - setUpServerResponse(template, "http://anyDataverse.com/api/v1/datasets/:persistentId/add?persistentId="+persistentid, - getDataSetFileUploadResults() ); - - DataverseConfig cfg = new DataverseConfig(new URL("http://anyDataverse.com"), "any", "alias"); - tss.configure(cfg); - Identifier id = new Identifier(); - id.setId(1234L); - id.setPersistentId(persistentid); - DatasetFileList resp = tss.uploadNativeFile(new byte []{}, FileUploadMetadata.builder().build(), id, "any"); - assertNotNull(resp.getFiles()); - assertEquals(1, resp.getFiles().size()); - } - - private void setUpServerResponse(RestTemplate template, String url, String response) { - MockRestServiceServer server = MockRestServiceServer.bindTo(template).build(); - server.expect(ExpectedCount.once(), MockRestRequestMatchers.requestTo(url)) - .andExpect(method(HttpMethod.POST)) - .andRespond(MockRestResponseCreators.withSuccess(response, - APPLICATION_JSON)); - } - - DataverseOperationsImplV1 setUpDataset (SearchConfig srchCfg, String url, GetJson expectedJsonGetter) throws MalformedURLException { - RestTemplate template = new RestTemplate(); - DataverseOperationsImplV1 tss = setupDataverseOps(template); - setUpServerResponse(template, url, expectedJsonGetter.getJson() ); - DataverseConfig cfg = new DataverseConfig(new URL("http://anyDataverse.com"), "any", "alias"); - tss.configure(cfg); - return tss; - } - - private DataverseOperationsImplV1 setupDataverseOps(RestTemplate template) { - DataverseOperationsImplV1 tss = new DataverseOperationsImplV1(); - tss.setTemplate(template); - return tss; - } - - @FunctionalInterface - static interface GetJson { - String getJson (); - } - - private String getDataSetFileUploadResults() 
{ - return TestFileUtils.getJsonFromFile("nativeFileUploadResponse.json"); - } + @Test + public void testNativeFilePost() throws MalformedURLException { + final RestTemplate template = new RestTemplate(); + final DataverseOperationsImplV1 tss = setupDataverseOps(template); + final String persistentid = "doi://dsfh.dsdsd.sds"; + setUpServerResponse(template, "http://anyDataverse.com/api/v1/datasets/:persistentId/add?persistentId="+persistentid, + getDataSetFileUploadResults() ); + + final DataverseConfig cfg = new DataverseConfig(new URL("http://anyDataverse.com"), "any", "alias"); + tss.configure(cfg); + final Identifier id = new Identifier(); + id.setId(1234L); + id.setPersistentId(persistentid); + final DatasetFileList resp = tss.uploadNativeFile(new byte []{}, FileUploadMetadata.builder().build(), id, "any"); + assertNotNull(resp.getFiles()); + assertEquals(1, resp.getFiles().size()); + } + + private void setUpServerResponse(final RestTemplate template, final String url, final String response) { + final MockRestServiceServer server = MockRestServiceServer.bindTo(template).build(); + server.expect(ExpectedCount.once(), MockRestRequestMatchers.requestTo(url)) + .andExpect(method(HttpMethod.POST)) + .andRespond(MockRestResponseCreators.withSuccess(response, + APPLICATION_JSON)); + } + + DataverseOperationsImplV1 setUpDataset (final String url, final GetJson expectedJsonGetter) throws MalformedURLException { + final RestTemplate template = new RestTemplate(); + final DataverseOperationsImplV1 tss = setupDataverseOps(template); + setUpServerResponse(template, url, expectedJsonGetter.getJson() ); + final DataverseConfig cfg = new DataverseConfig(new URL("http://anyDataverse.com"), "any", "alias"); + tss.configure(cfg); + return tss; + } + + private DataverseOperationsImplV1 setupDataverseOps(final RestTemplate template) { + final DataverseOperationsImplV1 tss = new DataverseOperationsImplV1(); + tss.setTemplate(template); + return tss; + } + + @FunctionalInterface 
interface GetJson { + String getJson (); + } + + private String getDataSetFileUploadResults() { + return TestFileUtils.getJsonFromFile("nativeFileUploadResponse.json"); + } } diff --git a/src/test/java/com/researchspace/dataverse/http/SearchOpsMockServerTest.java b/src/test/java/com/researchspace/dataverse/http/SearchOpsMockServerTest.java index 784c78d..011cd57 100644 --- a/src/test/java/com/researchspace/dataverse/http/SearchOpsMockServerTest.java +++ b/src/test/java/com/researchspace/dataverse/http/SearchOpsMockServerTest.java @@ -1,6 +1,18 @@ -/* - * - */ +/**
+Copyright 2016 ResearchSpace
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
*/ package com.researchspace.dataverse.http; import static org.junit.Assert.assertEquals; @@ -32,169 +44,154 @@ import com.researchspace.dataverse.search.entities.SearchResults; import com.researchspace.dataverse.search.entities.SearchType; import com.researchspace.dataverse.testutils.TestFileUtils; -/**
-Copyright 2016 ResearchSpace
 
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
 
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
-*/ public class SearchOpsMockServerTest { - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testSimpleSearch() throws MalformedURLException { - SearchConfig scfg = SearchConfig.builder().q("trees").build(); - RestTemplate template = new RestTemplate(); - SearchOperationsImplV1 tss = setupSearchOps(template); - setUpServerResponse(template, "http://anyDataverse.com/api/v1/search?q=trees", getSimpleQueryResult() ); - - DataverseConfig cfg = new DataverseConfig(new URL("http://anyDataverse.com"), "any", "alias"); - tss.configure(cfg); - DataverseResponse> resp = tss.search(scfg); - assertNotNull(resp.getData()); - assertEquals("trees", resp.getData().getQ()); - } - - private void setUpServerResponse(RestTemplate template, String url, String response) { - MockRestServiceServer server = MockRestServiceServer.bindTo(template).build(); - server.expect(ExpectedCount.once(), MockRestRequestMatchers.requestTo(url)) - .andExpect(method(HttpMethod.GET)) - .andRespond(MockRestResponseCreators.withSuccess(response, - APPLICATION_JSON)); - } - - @Test - public void testComplexSearch() throws MalformedURLException { - SearchConfig scfg = SearchConfig.builder().q("trees").build(); - RestTemplate template = new RestTemplate(); - SearchOperationsImplV1 tss = setupSearchOps(template); - setUpServerResponse(template, "http://anyDataverse.com/api/v1/search?q=trees", getComplexQueryResult() ); - DataverseConfig cfg = new DataverseConfig(new URL("http://anyDataverse.com"), "any", "alias"); - tss.configure(cfg); - DataverseResponse> resp = tss.search(scfg); - assertNotNull(resp.getData()); - assertEquals("trees", resp.getData().getQ()); - SearchResults searches = resp.getData(); - assertEquals(3, searches.getTotalCount()); - assertEquals(3, searches.getCountInResponse()); - assertEquals(3, searches.getItems().size()); - assertEquals(1, searches.filterByType(SearchType.dataset).size()); - 
assertEquals(1, searches.filterByType(SearchType.dataverse).size()); - assertEquals(1, searches.filterByType(SearchType.file).size()); - } - - @Test - public void testFileSearch() throws MalformedURLException { - SearchConfig scfg = SearchConfig.builder().q("trees").type(EnumSet.of(SearchType.file)).build(); - RestTemplate template = new RestTemplate(); - SearchOperationsImplV1 tss = setupSearchOps(template); - setUpServerResponse(template, "http://anyDataverse.com/api/v1/search?q=trees&type=file", getFileQueryResult() ); - DataverseConfig cfg = new DataverseConfig(new URL("http://anyDataverse.com"), "any", "alias"); - tss.configure(cfg); - DataverseResponse> resp = tss.searchFiles(scfg); - SearchResults searches = resp.getData(); - - assertBasicSearchResultParsing(searches); - assertEquals("text/plain", searches.getItems().get(0).getFileContentType()); - } - - @Test - public void testDatasetSearch() throws MalformedURLException { - SearchConfig scfg = SearchConfig.builder().q("trees").type(EnumSet.of(SearchType.dataset)).build(); - SearchOperationsImplV1 tss = setUpSearch(scfg, "http://anyDataverse.com/api/v1/search?q=trees&type=dataset", ()->getDatasetQueryResult() ); - - DataverseResponse> resp = tss.searchDatasets(scfg); - SearchResults searches = resp.getData(); - - assertBasicSearchResultParsing(searches); - assertEquals("doi:10.5072/FK2/1FUEXN", searches.getItems().get(0).getGlobalId()); - } - - @Test - public void testDataverseSearch() throws MalformedURLException { - SearchConfig scfg = SearchConfig.builder().q("trees").type(EnumSet.of(SearchType.dataverse)).build(); - SearchOperationsImplV1 tss = setUpSearch(scfg, "http://anyDataverse.com/api/v1/search?q=trees&type=dataverse", ()->getDataverseQueryResult() ); - - DataverseResponse> resp = tss.searchDataverses(scfg); - SearchResults searches = resp.getData(); - - assertBasicSearchResultParsing(searches); - assertEquals("https://demo.dataverse.org/dataverse/trunctest", searches.getItems().get(0).getUrl()); 
- } - - SearchOperationsImplV1 setUpSearch (SearchConfig srchCfg, String url, GetJson expectedJsonGetter) throws MalformedURLException { - RestTemplate template = new RestTemplate(); - SearchOperationsImplV1 tss = setupSearchOps(template); - setUpServerResponse(template, url, expectedJsonGetter.getJson() ); - DataverseConfig cfg = new DataverseConfig(new URL("http://anyDataverse.com"), "any", "alias"); - tss.configure(cfg); - return tss; - } - - void assertBasicSearchResultParsing (SearchResults searches) { - assertNotNull(searches); - assertEquals("trees", searches.getQ()); - assertEquals(1, searches.getTotalCount()); - assertEquals(1, searches.getCountInResponse()); - assertEquals(1, searches.getItems().size()); - } - - @Test(expected=IllegalArgumentException.class) - public void testFileSearchThrowsIAEIfTypeNotFile() { - SearchConfig scfg = SearchConfig.builder().q("trees").type(EnumSet.of(SearchType.dataset)).build(); - RestTemplate template = new RestTemplate(); - SearchOperationsImplV1 tss = setupSearchOps(template); - tss.searchFiles(scfg); - } - - - private SearchOperationsImplV1 setupSearchOps(RestTemplate template) { - SearchOperationsImplV1 tss = new SearchOperationsImplV1(); - tss.setTemplate(template); - return tss; - } - - @FunctionalInterface - static interface GetJson { - - String getJson (); - - } - - - private String getSimpleQueryResult() { - return TestFileUtils.getJsonFromFile("simpleQuery.json"); - } - - //gets 3 results: file, dataset, dataverse - private String getComplexQueryResult() { - return TestFileUtils.getJsonFromFile("multiTypeSearch.json"); - } - private String getFileQueryResult() { - return TestFileUtils.getJsonFromFile("fileSearch.json"); - } - private String getDatasetQueryResult() { - return TestFileUtils.getJsonFromFile("datasetSearch.json"); - } - private String getDataverseQueryResult() { - return TestFileUtils.getJsonFromFile("dataverseSearch.json"); - } + + @Before + public void setUp() throws Exception { + } + + @After + 
public void tearDown() throws Exception { + } + + @Test + public void testSimpleSearch() throws MalformedURLException { + final SearchConfig scfg = SearchConfig.builder().q("trees").build(); + final RestTemplate template = new RestTemplate(); + final SearchOperationsImplV1 tss = setupSearchOps(template); + setUpServerResponse(template, "http://anyDataverse.com/api/v1/search?q=trees", getSimpleQueryResult() ); + + final DataverseConfig cfg = new DataverseConfig(new URL("http://anyDataverse.com"), "any", "alias"); + tss.configure(cfg); + final DataverseResponse> resp = tss.search(scfg); + assertNotNull(resp.getData()); + assertEquals("trees", resp.getData().getQ()); + } + + private void setUpServerResponse(final RestTemplate template, final String url, final String response) { + final MockRestServiceServer server = MockRestServiceServer.bindTo(template).build(); + server.expect(ExpectedCount.once(), MockRestRequestMatchers.requestTo(url)) + .andExpect(method(HttpMethod.GET)) + .andRespond(MockRestResponseCreators.withSuccess(response, + APPLICATION_JSON)); + } + + @Test + public void testComplexSearch() throws MalformedURLException { + final SearchConfig scfg = SearchConfig.builder().q("trees").build(); + final RestTemplate template = new RestTemplate(); + final SearchOperationsImplV1 tss = setupSearchOps(template); + setUpServerResponse(template, "http://anyDataverse.com/api/v1/search?q=trees", getComplexQueryResult() ); + final DataverseConfig cfg = new DataverseConfig(new URL("http://anyDataverse.com"), "any", "alias"); + tss.configure(cfg); + final DataverseResponse> resp = tss.search(scfg); + assertNotNull(resp.getData()); + assertEquals("trees", resp.getData().getQ()); + final SearchResults searches = resp.getData(); + assertEquals(3, searches.getTotalCount()); + assertEquals(3, searches.getCountInResponse()); + assertEquals(3, searches.getItems().size()); + assertEquals(1, searches.filterByType(SearchType.dataset).size()); + assertEquals(1, 
searches.filterByType(SearchType.dataverse).size()); + assertEquals(1, searches.filterByType(SearchType.file).size()); + } + + @Test + public void testFileSearch() throws MalformedURLException { + final SearchConfig scfg = SearchConfig.builder().q("trees").type(EnumSet.of(SearchType.file)).build(); + final RestTemplate template = new RestTemplate(); + final SearchOperationsImplV1 tss = setupSearchOps(template); + setUpServerResponse(template, "http://anyDataverse.com/api/v1/search?q=trees&type=file", getFileQueryResult() ); + final DataverseConfig cfg = new DataverseConfig(new URL("http://anyDataverse.com"), "any", "alias"); + tss.configure(cfg); + final DataverseResponse> resp = tss.searchFiles(scfg); + final SearchResults searches = resp.getData(); + + assertBasicSearchResultParsing(searches); + assertEquals("text/plain", searches.getItems().get(0).getFileContentType()); + } + + @Test + public void testDatasetSearch() throws MalformedURLException { + final SearchConfig scfg = SearchConfig.builder().q("trees").type(EnumSet.of(SearchType.dataset)).build(); + final SearchOperationsImplV1 tss = setUpSearch(scfg, "http://anyDataverse.com/api/v1/search?q=trees&type=dataset", ()->getDatasetQueryResult() ); + + final DataverseResponse> resp = tss.searchDatasets(scfg); + final SearchResults searches = resp.getData(); + + assertBasicSearchResultParsing(searches); + assertEquals("doi:10.5072/FK2/1FUEXN", searches.getItems().get(0).getGlobalId()); + } + + @Test + public void testDataverseSearch() throws MalformedURLException { + final SearchConfig scfg = SearchConfig.builder().q("trees").type(EnumSet.of(SearchType.dataverse)).build(); + final SearchOperationsImplV1 tss = setUpSearch(scfg, "http://anyDataverse.com/api/v1/search?q=trees&type=dataverse", ()->getDataverseQueryResult() ); + + final DataverseResponse> resp = tss.searchDataverses(scfg); + final SearchResults searches = resp.getData(); + + assertBasicSearchResultParsing(searches); + 
assertEquals("https://demo.dataverse.org/dataverse/trunctest", searches.getItems().get(0).getUrl()); + } + + SearchOperationsImplV1 setUpSearch (final SearchConfig srchCfg, final String url, final GetJson expectedJsonGetter) throws MalformedURLException { + final RestTemplate template = new RestTemplate(); + final SearchOperationsImplV1 tss = setupSearchOps(template); + setUpServerResponse(template, url, expectedJsonGetter.getJson() ); + final DataverseConfig cfg = new DataverseConfig(new URL("http://anyDataverse.com"), "any", "alias"); + tss.configure(cfg); + return tss; + } + + void assertBasicSearchResultParsing (final SearchResults searches) { + assertNotNull(searches); + assertEquals("trees", searches.getQ()); + assertEquals(1, searches.getTotalCount()); + assertEquals(1, searches.getCountInResponse()); + assertEquals(1, searches.getItems().size()); + } + + @Test(expected=IllegalArgumentException.class) + public void testFileSearchThrowsIAEIfTypeNotFile() { + final SearchConfig scfg = SearchConfig.builder().q("trees").type(EnumSet.of(SearchType.dataset)).build(); + final RestTemplate template = new RestTemplate(); + final SearchOperationsImplV1 tss = setupSearchOps(template); + tss.searchFiles(scfg); + } + + + private SearchOperationsImplV1 setupSearchOps(final RestTemplate template) { + final SearchOperationsImplV1 tss = new SearchOperationsImplV1(); + tss.setTemplate(template); + return tss; + } + + @FunctionalInterface interface GetJson { + + String getJson (); + + } + + + private String getSimpleQueryResult() { + return TestFileUtils.getJsonFromFile("simpleQuery.json"); + } + + //gets 3 results: file, dataset, dataverse + private String getComplexQueryResult() { + return TestFileUtils.getJsonFromFile("multiTypeSearch.json"); + } + private String getFileQueryResult() { + return TestFileUtils.getJsonFromFile("fileSearch.json"); + } + private String getDatasetQueryResult() { + return TestFileUtils.getJsonFromFile("datasetSearch.json"); + } + private String 
getDataverseQueryResult() { + return TestFileUtils.getJsonFromFile("dataverseSearch.json"); + } } diff --git a/src/test/java/com/researchspace/dataverse/http/SearchURLBuilderTest.java b/src/test/java/com/researchspace/dataverse/http/SearchURLBuilderTest.java index 0a52585..fed3e68 100644 --- a/src/test/java/com/researchspace/dataverse/http/SearchURLBuilderTest.java +++ b/src/test/java/com/researchspace/dataverse/http/SearchURLBuilderTest.java @@ -1,6 +1,18 @@ -/* - * - */ +/**
+Copyright 2016 ResearchSpace
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
*/ package com.researchspace.dataverse.http; import static org.junit.Assert.assertTrue; @@ -15,82 +27,68 @@ import com.researchspace.dataverse.search.entities.SearchType; import com.researchspace.dataverse.search.entities.SortBy; import com.researchspace.dataverse.search.entities.SortOrder; -/**
-Copyright 2016 ResearchSpace
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
 
-     http://www.apache.org/licenses/LICENSE-2.0
 
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
-*/ public class SearchURLBuilderTest { - - SearchURLBuilder builder; - - @Before - public void setUp() throws Exception { - builder = new SearchURLBuilder(); - } - - @After - public void tearDown() throws Exception { - } - - @Test - public void testBuildSearchUrlTypes() { - SearchConfig cfg = SearchConfig.builder() - .q("any") - .type(EnumSet.of(SearchType.dataset, SearchType.file)) - .build(); - String url = builder.buildSearchUrl("/any", cfg); - assertTrue(url, url.contains("type=dataset&type=file")); - } - - @Test - public void testBuildSearchUrlSort() { - SearchConfig cfg = SearchConfig.builder() - .q("any") - .sortBy(SortBy.name) - .build(); - String url = builder.buildSearchUrl("/any", cfg); - assertTrue(url, url.contains("sort=name")); - } - - @Test - public void testBuildSearchUrlOrder() { - SearchConfig cfg = SearchConfig.builder() - .q("any") - .sortOrder(SortOrder.asc) - .build(); - String url = builder.buildSearchUrl("/any", cfg); - assertTrue(url, url.contains("order=asc")); - } - - @Test - public void testBuildSearchUrlStart() { - SearchConfig cfg = SearchConfig.builder() - .q("any") - .start(10) - .build(); - String url = builder.buildSearchUrl("/any", cfg); - assertTrue(url, url.contains("start=10")); - } - - @Test - public void testBuildSearchUrlPerPage() { - SearchConfig cfg = SearchConfig.builder() - .q("any") - .perPage(5) - .build(); - String url = builder.buildSearchUrl("/any", cfg); - assertTrue(url, url.contains("per_page=5")); - } + + SearchURLBuilder builder; + + @Before + public void setUp() throws Exception { + builder = new SearchURLBuilder(); + } + + @After + public void tearDown() throws Exception { + } + + @Test + public void testBuildSearchUrlTypes() { + final SearchConfig cfg = SearchConfig.builder() + .q("any") + .type(EnumSet.of(SearchType.dataset, SearchType.file)) + .build(); + final String url = builder.buildSearchUrl("/any", cfg); + assertTrue(url, url.contains("type=dataset&type=file")); + } + + @Test + public void 
testBuildSearchUrlSort() { + final SearchConfig cfg = SearchConfig.builder() + .q("any") + .sortBy(SortBy.name) + .build(); + final String url = builder.buildSearchUrl("/any", cfg); + assertTrue(url, url.contains("sort=name")); + } + + @Test + public void testBuildSearchUrlOrder() { + final SearchConfig cfg = SearchConfig.builder() + .q("any") + .sortOrder(SortOrder.asc) + .build(); + final String url = builder.buildSearchUrl("/any", cfg); + assertTrue(url, url.contains("order=asc")); + } + + @Test + public void testBuildSearchUrlStart() { + final SearchConfig cfg = SearchConfig.builder() + .q("any") + .start(10) + .build(); + final String url = builder.buildSearchUrl("/any", cfg); + assertTrue(url, url.contains("start=10")); + } + + @Test + public void testBuildSearchUrlPerPage() { + final SearchConfig cfg = SearchConfig.builder() + .q("any") + .perPage(5) + .build(); + final String url = builder.buildSearchUrl("/any", cfg); + assertTrue(url, url.contains("per_page=5")); + } } diff --git a/src/test/java/com/researchspace/dataverse/search/SearchConfigTest.java b/src/test/java/com/researchspace/dataverse/search/SearchConfigTest.java index e30a8ba..6db9011 100644 --- a/src/test/java/com/researchspace/dataverse/search/SearchConfigTest.java +++ b/src/test/java/com/researchspace/dataverse/search/SearchConfigTest.java @@ -1,15 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.search; - -import com.researchspace.dataverse.search.entities.SearchConfig; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import static com.researchspace.dataverse.search.entities.SearchConfig.MAX_RESULTS_PER_PAGE; -import static org.junit.Assert.assertEquals; /**
 Copyright 2016 ResearchSpace
 
@@ -24,40 +12,49 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.search; + +import static com.researchspace.dataverse.search.entities.SearchConfig.MAX_RESULTS_PER_PAGE; +import static org.junit.Assert.assertEquals; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import com.researchspace.dataverse.search.entities.SearchConfig; + + public class SearchConfigTest { - SearchConfig cfg; - - @Before - public void setUp() throws Exception { - } - - @After - public void tearDown() throws Exception { - } - - @Test(expected = NullPointerException.class) - public void builderRequiresNonNullSearchTerm() { - SearchConfig.builder().build(); - } - - @Test(expected = IllegalArgumentException.class) - public void startCannotBeNegative() { - SearchConfig.builder().start(-1); - } - - @Test(expected = IllegalArgumentException.class) - public void perPageCannotBeNegative() { - SearchConfig.builder().perPage(-1); - } - - @Test - public void perPageHas1000Maximum() { - final int MORE_THAN_MAX = 1000000; - SearchConfig cfg = SearchConfig.builder().q("any").perPage(MORE_THAN_MAX).build(); - assertEquals(MAX_RESULTS_PER_PAGE, cfg.getPerPage()); - } + @Before + public void setUp() throws Exception { + } + + @After + public void tearDown() throws Exception { + } + + @Test(expected = NullPointerException.class) + public void builderRequiresNonNullSearchTerm() { + SearchConfig.builder().build(); + } + + @Test(expected = IllegalArgumentException.class) + public void startCannotBeNegative() { + SearchConfig.builder().start(-1); + } + + @Test(expected = IllegalArgumentException.class) + public void perPageCannotBeNegative() { + SearchConfig.builder().perPage(-1); + } + + @Test + public void perPageHas1000Maximum() { + final int MORE_THAN_MAX = 1000000; + final SearchConfig cfg = SearchConfig.builder().q("any").perPage(MORE_THAN_MAX).build(); + assertEquals(MAX_RESULTS_PER_PAGE, cfg.getPerPage()); + } } diff --git a/src/test/java/com/researchspace/dataverse/testutils/TestFileUtils.java 
b/src/test/java/com/researchspace/dataverse/testutils/TestFileUtils.java index 29c7e35..2d78c00 100644 --- a/src/test/java/com/researchspace/dataverse/testutils/TestFileUtils.java +++ b/src/test/java/com/researchspace/dataverse/testutils/TestFileUtils.java @@ -1,12 +1,3 @@ -/* - * - */ -package com.researchspace.dataverse.testutils; - -import java.io.File; -import java.io.IOException; - -import org.apache.commons.io.FileUtils; /**
 Copyright 2016 ResearchSpace
 
@@ -21,16 +12,23 @@
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
-*/ + */ +package com.researchspace.dataverse.testutils; + +import java.io.File; +import java.io.IOException; + +import org.apache.commons.io.FileUtils; + + public class TestFileUtils { - - public static String getJsonFromFile (String filename) { - try { - return FileUtils.readFileToString(new File("src/test/resources/data/json", filename)); - } catch (IOException e) { - throw new IllegalStateException("Couldn't read file " + filename); - } - } + + public static String getJsonFromFile (final String filename) { + try { + return FileUtils.readFileToString(new File("src/test/resources/data/json", filename)); + } catch (final IOException e) { + throw new IllegalStateException("Couldn't read file " + filename); + } + } }