diff --git a/dataverse b/dataverse new file mode 160000 index 0000000..d8d9df4 --- /dev/null +++ b/dataverse @@ -0,0 +1 @@ +Subproject commit d8d9df46fe9a305e7c0c539075ba229f1b8db418 diff --git a/dataversedock/0prep_deps.sh b/dataversedock/0prep_deps.sh new file mode 100755 index 0000000..056bec0 --- /dev/null +++ b/dataversedock/0prep_deps.sh @@ -0,0 +1,27 @@ +#!/bin/sh +if [ ! -d dv/deps ]; then + mkdir -p dv/deps +fi +wdir=`pwd` +if [ ! -e dv/deps/glassfish4dv.tgz ]; then + echo "glassfish dependency prep" + mkdir -p /tmp/dv-prep/gf + cd /tmp/dv-prep/gf + wget http://download.java.net/glassfish/4.1/release/glassfish-4.1.zip + wget http://search.maven.org/remotecontent?filepath=org/jboss/weld/weld-osgi-bundle/2.2.10.Final/weld-osgi-bundle-2.2.10.Final-glassfish4.jar -O weld-osgi-bundle-2.2.10.Final-glassfish4.jar + unzip glassfish-4.1.zip + rm glassfish4/glassfish/modules/weld-osgi-bundle.jar + mv weld-osgi-bundle-2.2.10.Final-glassfish4.jar glassfish4/glassfish/modules + tar zcf $wdir/dv/deps/glassfish4dv.tgz glassfish4 + cd $wdir + # assuming that folks usually have /tmp auto-clean as needed +fi + +if [ ! -e dv/deps/solr-4.6.0dv.tgz ]; then + echo "solr dependency prep" + # schema changes *should* be the only ones... + cd dv/deps/ + wget https://archive.apache.org/dist/lucene/solr/4.6.0/solr-4.6.0.tgz -O solr-4.6.0dv.tgz + cd ../../ +fi + diff --git a/dataversedock/1prep.sh b/dataversedock/1prep.sh new file mode 100755 index 0000000..73f28b3 --- /dev/null +++ b/dataversedock/1prep.sh @@ -0,0 +1,22 @@ +#!/bin/sh + +# move things necessary for integration tests into build context. 
+# this was based off the phoenix deployment; and is likely uglier and bulkier than necessary in a perfect world + +mkdir -p testdata/doc/sphinx-guides/source/_static/util/ +cp ../solr/4.6.0/schema.xml testdata/ +cp ../jhove/jhove.conf testdata/ +cd ../../ +cp -r scripts conf/docker-aio/testdata/ +cp doc/sphinx-guides/source/_static/util/pg8-createsequence-prep.sql conf/docker-aio/testdata/doc/sphinx-guides/source/_static/util/ +cp doc/sphinx-guides/source/_static/util/createsequence.sql conf/docker-aio/testdata/doc/sphinx-guides/source/_static/util/ + +# not using dvinstall.zip for setupIT.bash; but still used in install.bash for normal ops +mvn clean +./scripts/database/homebrew/custom-build-number +mvn package +cd scripts/installer +make clean +make +cp dvinstall.zip ../../conf/docker-aio/dv/install/ + diff --git a/dataversedock/Dockerfile b/dataversedock/Dockerfile new file mode 100644 index 0000000..117a168 --- /dev/null +++ b/dataversedock/Dockerfile @@ -0,0 +1,29 @@ +FROM centos:7.2.1511 +# OS dependencies +RUN yum install -y java-1.8.0-openjdk-headless sudo epel-release unzip perl curl +RUN yum install -y wget patch postgresql lynx jq + +# copy and unpack dependencies (solr, glassfish) +COPY lang.properties /opt/dv +WORKDIR /opt/dv +COPY dv /opt/dv + +COPY dv/deps/glassfish4 /opt/glassfish4 + +# glassfish port +EXPOSE 8080 + +COPY dv/deps/dvinstall.zip /opt/dv +COPY docker.patch /opt/dv + +# yeah - still not happy if glassfish isn't in /usr/local :< +RUN ln -s /opt/glassfish4 /usr/local/glassfish4 +#COPY dv/install/ /opt/dv/ +COPY install.bash /opt/dv/ +COPY entrypoint.bash /opt +COPY testdata /opt/dv/testdata +COPY testscripts/* /opt/dv/testdata/ +COPY setupIT.bash /opt/dv +COPY lang.properties /opt/dv +RUN echo "Installing and running Dataverse..." 
+CMD ["/opt/entrypoint.bash"] diff --git a/dataversedock/c7.dockerfile b/dataversedock/c7.dockerfile new file mode 100644 index 0000000..cf589ff --- /dev/null +++ b/dataversedock/c7.dockerfile @@ -0,0 +1,40 @@ +FROM centos:7 +# OS dependencies +RUN yum install -y java-1.8.0-openjdk-headless postgresql-server sudo epel-release unzip perl curl +RUN yum install -y jq + +# copy and unpack dependencies (solr, glassfish) +COPY dv /tmp/dv +COPY testdata/schema.xml /tmp/dv +RUN cd /opt ; tar zxf /tmp/dv/deps/solr-4.6.0dv.tgz +RUN cd /opt ; tar zxf /tmp/dv/deps/glassfish4dv.tgz + +RUN sudo -u postgres /usr/bin/initdb -D /var/lib/pgsql/data +#RUN sudo -u postgres createuser dvnapp + +# copy configuration related files +RUN cp /tmp/dv/pg_hba.conf /var/lib/pgsql/data/ ; cp /tmp/dv/schema.xml /opt/solr-4.6.0/example/solr/collection1/conf/schema.xml + +# skipping glassfish user and solr user (run both as root) + +#solr port +EXPOSE 8983 + +# postgres port +EXPOSE 5432 + +# glassfish port +EXPOSE 8080 + +RUN mkdir /opt/dv + +# yeah - still not happy if glassfish isn't in /usr/local :< +RUN ln -s /opt/glassfish4 /usr/local/glassfish4 +COPY dv/install/ /opt/dv/ +COPY install.bash /opt/dv/ +COPY entrypoint.bash /opt/dv/ +COPY testdata /opt/dv/testdata +COPY testscripts/* /opt/dv/testdata/ +COPY setupIT.bash /opt/dv +WORKDIR /opt/dv +CMD ["/opt/dv/entrypoint.bash"] diff --git a/dataversedock/default.config b/dataversedock/default.config new file mode 100644 index 0000000..7c99866 --- /dev/null +++ b/dataversedock/default.config @@ -0,0 +1,16 @@ +HOST_DNS_ADDRESS localhost +GLASSFISH_DIRECTORY /opt/glassfish4 +ADMIN_EMAIL +MAIL_SERVER mail.hmdc.harvard.edu +POSTGRES_ADMIN_PASSWORD secret +POSTGRES_SERVER db +POSTGRES_PORT 5432 +POSTGRES_DATABASE dvndb +POSTGRES_USER dvnapp +POSTGRES_PASSWORD secret +SOLR_LOCATION idx +TWORAVENS_LOCATION NOT INSTALLED +RSERVE_HOST localhost +RSERVE_PORT 6311 +RSERVE_USER rserve +RSERVE_PASSWORD rserve diff --git a/dataversedock/docker.patch 
b/dataversedock/docker.patch new file mode 100644 index 0000000..9f0b375 --- /dev/null +++ b/dataversedock/docker.patch @@ -0,0 +1,8 @@ +83c83 +< 'POSTGRES_SERVER', 'postgres', +--- +> 'POSTGRES_SERVER', '127.0.0.1', +89c89 +< 'SOLR_LOCATION', 'solr:8983', +--- +> 'SOLR_LOCATION', 'LOCAL', diff --git a/dataversedock/entrypoint.bash b/dataversedock/entrypoint.bash new file mode 100755 index 0000000..7d734ff --- /dev/null +++ b/dataversedock/entrypoint.bash @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +if [ ! -e "/opt/glassfish4/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/Bundle.properties" ]; then + cd /opt/dv + ./setupIT.bash +fi +cd /opt/glassfish4 +bin/asadmin stop-domain +bin/asadmin start-domain +sleep infinity + diff --git a/dataversedock/glassfish-4.1.zip b/dataversedock/glassfish-4.1.zip new file mode 100644 index 0000000..9a79f82 Binary files /dev/null and b/dataversedock/glassfish-4.1.zip differ diff --git a/dataversedock/glassfish.bash b/dataversedock/glassfish.bash new file mode 100755 index 0000000..9d51396 --- /dev/null +++ b/dataversedock/glassfish.bash @@ -0,0 +1,16 @@ +#!/bin/sh + +wdir="/opt" +if [ ! 
-e $wdir/dv/deps/glassfish4dv.tgz ]; then + echo "glassfish dependency prep" + mkdir -p /tmp/dv-prep/gf + cd $wdir/dv/deps +# wget http://download.java.net/glassfish/4.1/release/glassfish-4.1.zip +# wget http://search.maven.org/remotecontent?filepath=org/jboss/weld/weld-osgi-bundle/2.2.10.Final/weld-osgi-bundle-2.2.10.Final-glassfish4.jar -O weld-osgi-bundle-2.2.10.Final-glassfish4.jar + unzip glassfish-4.1.zip + rm glassfish4/glassfish/modules/weld-osgi-bundle.jar + mv weld-osgi-bundle-2.2.10.Final-glassfish4.jar glassfish4/glassfish/modules + tar zcf $wdir/dv/deps/glassfish4dv.tgz glassfish4 + mv glassfish4 $wdir/ + # assuming that folks usually have /tmp auto-clean as needed +fi diff --git a/dataversedock/install.bash b/dataversedock/install.bash new file mode 100755 index 0000000..febe04d --- /dev/null +++ b/dataversedock/install.bash @@ -0,0 +1,9 @@ +#!/usr/bin/env bash +unzip dvinstall.zip +cd dvinstall/ +patch -R install < ../docker.patch +./install -admin_email=pameyer+dvinstall@crystal.harvard.edu -y -f > install.out 2> install.err +curl https://raw.githubusercontent.com/scholarsportal/SP-dataverse/SP_v4.7.1/src/main/java/Bundle_fr.properties > /opt/glassfish4/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/Bundle.properties + +echo "installer complete" +cat install.err diff --git a/dataversedock/lang.properties/Bundle.properties b/dataversedock/lang.properties/Bundle.properties new file mode 100755 index 0000000..53a9b23 --- /dev/null +++ b/dataversedock/lang.properties/Bundle.properties @@ -0,0 +1,1708 @@ +dataverse=Dataverse +newDataverse=New Dataverse +hostDataverse=Host Dataverse +dataverses=Dataverses +passwd=Password +dataset=Dataset +datasets=Datasets +newDataset=New Dataset +files=Files +file=File +restricted=Restricted +restrictedaccess=Restricted Access +find=Find +search=Search +unpublished=Unpublished +cancel=Cancel +ok=OK +saveChanges=Save Changes +acceptTerms=Accept +submit=Submit +signup=Sign Up +login=Log In +email=Email 
+account=Account +requiredField=Required field +new=New +identifier=Identifier +description=Description +subject=Subject +close=Close +preview=Preview +continue=Continue +name=Name +institution=Institution +position=Position +affiliation=Affiliation +createDataverse=Create Dataverse +remove=Remove +done=Done +editor=Contributor +manager=Manager +curator=Curator +explore=Explore +download=Download +deaccession=Deaccession +share=Share +link=Link +linked=Linked +harvested=Harvested +apply=Apply +add=Add +delete=Delete +yes=Yes +no=No +previous=Previous +next=Next +first=First +last=Last +more=More... +less=Less... +select=Select... +selectedFiles=Selected Files +htmlAllowedTitle=Allowed HTML Tags +htmlAllowedMsg=This field supports only certain HTML tags. +htmlAllowedTags=, ,
,
, , ,
,
,
, ,
,

-

, , , ,
  • ,
      ,

      ,

      , , , , , , 
        + +# dataverse_header.xhtml +header.status.header=Status +header.search.title=Search all dataverses... +header.about=About +header.support=Support +header.guides=Guides +header.guides.user=User Guide +header.guides.developer=Developer Guide +header.guides.installation=Installation Guide +header.guides.api=API Guide +header.guides.admin=Admin Guide +header.signUp=Sign Up +header.logOut=Log Out +header.accountInfo=Account Information +header.dashboard=Dashboard +header.user.selectTab.dataRelated=My Data +header.user.selectTab.notifications=Notifications +header.user.selectTab.accountInfo=Account Information +header.user.selectTab.groupsAndRoles=Groups + Roles +header.user.selectTab.apiToken=API Token + +# dataverse_template.xhtml +head.meta.description=The Dataverse Project is an open source software application to share, cite and archive data. Dataverse provides a robust infrastructure for data stewards to host and archive data, while offering researchers an easy way to share and get credit for their data. +body.skip=Skip to main content + +# dataverse_footer.xhtml +footer.copyright=Copyright © {0} +footer.widget.datastored=Data is stored at {0}. +footer.widget.login=Log in to +footer.privacyPolicy=Privacy Policy +footer.poweredby=Powered by +footer.dataverseProject=The Dataverse Project + +# messages.xhtml +messages.error=Error +messages.success=Success! +messages.info=Info +messages.validation=Validation Error +messages.validation.msg=Required fields were missed or there was a validation error. Please scroll down to see details. + +# contactFormFragment.xhtml +contact.header=Contact {0} +contact.dataverse.header=Email Dataverse Contact +contact.dataset.header=Email Dataset Contact +contact.to=To +contact.support=Support +contact.from=From +contact.from.required=User email is required. +contact.from.invalid=Email is invalid. +contact.subject=Subject +contact.subject.required=Subject is required. +contact.subject.selectTab.top=Select subject... 
+contact.subject.selectTab.support=Support Question +contact.subject.selectTab.dataIssue=Data Issue +contact.msg=Message +contact.msg.required=Message text is required. +contact.send=Send Message +contact.question=Please fill this out to prove you are not a robot. +contact.sum.required=Value is required. +contact.sum.invalid=Incorrect sum, please try again. +contact.sum.converterMessage=Please enter a number. +contact.contact=Contact + +# dataverseuser.xhtml +account.info=Account Information +account.edit=Edit Account +account.apiToken=API Token +user.isShibUser=Account information cannot be edited when logged in through an institutional account. +user.helpShibUserMigrateOffShibBeforeLink=Leaving your institution? Please contact +user.helpShibUserMigrateOffShibAfterLink=for assistance. +user.helpOAuthBeforeLink=Your Dataverse account uses {0} for login. If you are interested in changing login methods, please contact +user.helpOAuthAfterLink=for assistance. +user.lostPasswdTip=If you have lost or forgotten your password, please enter your username or email address below and click Submit. We will send you an e-mail with your new password. +user.dataRelatedToMe=My Data +wasCreatedIn=, was created in +wasCreatedTo=, was added to +wasSubmittedForReview=, was submitted for review to be published in +wasPublished=, was published in +wasReturnedByReviewer=, was returned by the curator of +# TODO: Confirm that "toReview" can be deleted. +toReview=Don't forget to publish it or send it back to the contributor! +worldMap.added=dataset had a WorldMap layer data added to it. +# Bundle file editors, please note that "notification.welcome" is used in a unit test. +notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the {1}. Want to test out Dataverse features? Use our {2}. Also, check for your welcome email to verify your address. 
+notification.demoSite=Demo Site +notification.requestFileAccess=File access requested for dataset: {0}. +notification.grantFileAccess=Access granted for files in dataset: {0}. +notification.rejectFileAccess=Access rejected for requested files in dataset: {0}. +notification.createDataverse={0} was created in {1}. To learn more about what you can do with your dataverse, check out the {2}. +notification.dataverse.management.title=Dataverse Management - Dataverse User Guide +notification.createDataset={0} was created in {1}. To learn more about what you can do with a dataset, check out the {2}. +notification.dataset.management.title=Dataset Management - Dataset User Guide +notification.wasSubmittedForReview={0} was submitted for review to be published in {1}. Don''t forget to publish it or send it back to the contributor\! +notification.wasReturnedByReviewer={0} was returned by the curator of {1}. +notification.wasPublished={0} was published in {1}. +notification.worldMap.added={0}, dataset had WorldMap layer data added to it. +notification.maplayer.deletefailed=Failed to delete the map layer associated with the restricted file {0} from WorldMap. Please try again, or contact WorldMap and/or Dataverse support. (Dataset: {1}) +notification.generic.objectDeleted=The dataverse, dataset, or file for this notification has been deleted. +notification.access.granted.dataverse=You have been granted the {0} role for {1}. +notification.access.granted.dataset=You have been granted the {0} role for {1}. +notification.access.granted.datafile=You have been granted the {0} role for file in {1}. +notification.access.granted.fileDownloader.additionalDataverse={0} You now have access to all published restricted and unrestricted files in this dataverse. +notification.access.granted.fileDownloader.additionalDataset={0} You now have access to all published restricted and unrestricted files in this dataset. +notification.access.revoked.dataverse=You have been removed from a role in {0}. 
+notification.access.revoked.dataset=You have been removed from a role in {0}. +notification.access.revoked.datafile=You have been removed from a role in {0}. +notification.checksumfail=One or more files in your upload failed checksum validation for dataset {0}. Please re-run the upload script. If the problem persists, please contact support. +notification.mail.import.filesystem=Dataset {2} ({0}/dataset.xhtml?persistentId={1}) has been successfully uploaded and verified. +notification.import.filesystem=Dataset {1} has been successfully uploaded and verified. +notification.import.checksum={1}, dataset had file checksums added via a batch job. +removeNotification=Remove Notification +groupAndRoles.manageTips=Here is where you can access and manage all the groups you belong to, and the roles you have been assigned. +user.signup.tip=Why have a Dataverse account? To create your own dataverse and customize it, add datasets, or request access to restricted files. +user.signup.otherLogInOptions.tip=You can also create a Dataverse account with one of our other log in options. +user.username.illegal.tip=Between 2-60 characters, and can use "a-z", "0-9", "_" for your username. +user.username=Username +user.username.taken=This username is already taken. +user.username.invalid=This username contains an invalid character or is outside the length requirement (2-60 characters). +user.username.valid=Create a valid username of 2 to 60 characters in length containing letters (a-Z), numbers (0-9), dashes (-), underscores (_), and periods (.). +user.noPasswd=No Password +user.currentPasswd=Current Password +user.currentPasswd.tip=Please enter the current password for this account. +user.passwd.illegal.tip=Password needs to be at least 6 characters, include one letter and one number, and special characters may be used. +user.rePasswd=Retype Password +user.rePasswd.tip=Please retype the password you entered above. 
+user.firstName=Given Name +user.firstName.tip=The first name or name you would like to use for this account. +user.lastName=Family Name +user.lastName.tip=The last name you would like to use for this account. +user.email.tip=A valid email address you have access to in order to be contacted. +user.email.taken=This email address is already taken. +user.affiliation.tip=The organization with which you are affiliated. +user.position=Position +user.position.tip=Your role or title at the organization you are affiliated with; such as staff, faculty, student, etc. +user.acccountterms=General Terms of Use +user.acccountterms.tip=The terms and conditions for using the application and services. +user.acccountterms.required=Please check the box to indicate your acceptance of the General Terms of Use. +user.acccountterms.iagree=I have read and accept the Dataverse General Terms of Use as outlined above. +user.createBtn=Create Account +user.updatePassword.welcome=Welcome to Dataverse {0}, {1} +user.updatePassword.warning=With the release of our new Dataverse 4.0 upgrade, the password requirements and General Terms of Use have updated. As this is the first time you are using Dataverse since the update, you need to create a new password and agree to the new General Terms of Use. +user.updatePassword.password={0} +authenticationProvidersAvailable.tip={0}There are no active authentication providers{1}If you are a system administrator, please enable one using the API.{2}If you are not a system administrator, please contact the one for your institution. 
+ +passwdVal.passwdReq.title=Your password must contain: +passwdVal.passwdReq.goodStrength =passwords of at least {0} characters are exempt from all other requirements +passwdVal.passwdReq.lengthReq =At least {0} characters +passwdVal.passwdReq.characteristicsReq =At least 1 character from {0} of the following types: +passwdVal.passwdReq.notInclude =It may not include: +passwdVal.passwdReq.consecutiveDigits =More than {0} numbers in a row +passwdVal.passwdReq.dictionaryWords =Dictionary words +passwdVal.passwdReq.unknownPasswordRule =Unknown, contact your administrator +#printf syntax used to pass to passay library +passwdVal.expireRule.errorCode =EXPIRED +passwdVal.expireRule.errorMsg =The password is over %1$s days old and has expired. +passwdVal.goodStrengthRule.errorMsg =Note: passwords are always valid with a %1$s or more character length regardless. +passwdVal.goodStrengthRule.errorCode =NO_GOODSTRENGTH +passwdVal.passwdReset.resetLinkTitle =Password Reset Link +passwdVal.passwdReset.resetLinkDesc =Your password reset link is not valid +passwdVal.passwdReset.valBlankLog =new password is blank +passwdVal.passwdReset.valFacesError =Password Error +passwdVal.passwdReset.valFacesErrorDesc =Please enter a new password for your account. +passwdVal.passwdValBean.warnDictionaryRead =Dictionary was set, but none was read in. +passwdVal.passwdValBean.warnDictionaryObj =PwDictionaries not set and no default password file found: +passwdVal.passwdValBean.warnSetStrength =The PwGoodStrength {0} value competes with the PwMinLength value of {1} and is added to {2} + +#loginpage.xhtml +login.System=Login System +login.forgot.text=Forgot your password? +login.builtin=Dataverse Account +login.institution=Institutional Account +login.institution.blurb=Log in or sign up with your institutional account — learn more. +login.institution.support.beforeLink=Leaving your institution? Please contact +login.institution.support.afterLink=for assistance. 
+login.builtin.credential.usernameOrEmail=Username/Email +login.builtin.credential.password=Password +login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account? +# how do we exercise login.error? Via a password upgrade failure? See https://github.com/IQSS/dataverse/pull/2922 +login.error=Error validating the username, email address, or password. Please try again. If the problem persists, contact an administrator. +user.error.cannotChangePassword=Sorry, your password cannot be changed. Please contact your system administrator. +user.error.wrongPassword=Sorry, wrong password. +login.button=Log In with {0} +login.button.orcid=Create or Connect your ORCID +# authentication providers +auth.providers.title=Other options +auth.providers.tip=You can convert a Dataverse account to use one of the options above. Learn more. +auth.providers.title.builtin=Username/Email +auth.providers.title.shib=Your Institution +auth.providers.title.orcid=ORCID +auth.providers.title.google=Google +auth.providers.title.github=GitHub +auth.providers.blurb=Log in or sign up with your {0} account — learn more. Having trouble? Please contact {3} for assistance. +auth.providers.persistentUserIdName.orcid=ORCID iD +auth.providers.persistentUserIdName.github=ID +auth.providers.persistentUserIdTooltip.orcid=ORCID provides a persistent digital identifier that distinguishes you from other researchers. +auth.providers.persistentUserIdTooltip.github=GitHub assigns a unique number to every user. +auth.providers.orcid.insufficientScope=Dataverse was not granted the permission to read user data from ORCID. 
+# Friendly AuthenticationProvider names +authenticationProvider.name.builtin=Dataverse +authenticationProvider.name.null=(provider is unknown) +authenticationProvider.name.github=GitHub +authenticationProvider.name.google=Google +authenticationProvider.name.orcid=ORCiD +authenticationProvider.name.orcid-sandbox=ORCiD Sandbox +authenticationProvider.name.shib=Shibboleth + +#confirmemail.xhtml +confirmEmail.pageTitle=Email Verification +confirmEmail.submitRequest=Verify Email +confirmEmail.submitRequest.success=A verification email has been sent to {0}. Note, the verify link will expire after {1}. +confirmEmail.details.success=Email address verified! +confirmEmail.details.failure=We were unable to verify your email address. Please navigate to your Account Information page and click the "Verify Email" button. +confirmEmail.details.goToAccountPageButton=Go to Account Information +confirmEmail.notVerified=Not Verified +confirmEmail.verified=Verified + +#shib.xhtml +shib.btn.convertAccount=Convert Account +shib.btn.createAccount=Create Account +shib.askToConvert=Would you like to convert your Dataverse account to always use your institutional log in? +# Bundle file editors, please note that "shib.welcomeExistingUserMessage" is used in a unit test +shib.welcomeExistingUserMessage=Your institutional log in for {0} matches an email address already being used for a Dataverse account. By entering your current Dataverse password below, your existing Dataverse account can be converted to use your institutional log in. After converting, you will only need to use your institutional log in. 
+# Bundle file editors, please note that "shib.welcomeExistingUserMessageDefaultInstitution" is used in a unit test +shib.welcomeExistingUserMessageDefaultInstitution=your institution +shib.dataverseUsername=Dataverse Username +shib.currentDataversePassword=Current Dataverse Password +shib.accountInformation=Account Information +shib.offerToCreateNewAccount=This information is provided by your institution and will be used to create your Dataverse account. +shib.passwordRejected=Validation Error - Your account can only be converted if you provide the correct password for your existing account. + +# oauth2/firstLogin.xhtml +oauth2.btn.convertAccount=Convert Existing Account +oauth2.btn.createAccount=Create New Account +oauth2.askToConvert=Would you like to convert your Dataverse account to always use your institutional log in? +oauth2.welcomeExistingUserMessage=Your institutional log in for {0} matches an email address already being used for a Dataverse account. By entering your current Dataverse password below, your existing Dataverse account can be converted to use your institutional log in. After converting, you will only need to use your institutional log in. +oauth2.welcomeExistingUserMessageDefaultInstitution=your institution +oauth2.dataverseUsername=Dataverse Username +oauth2.currentDataversePassword=Current Dataverse Password +oauth2.chooseUsername=Username: +oauth2.passwordRejected=Validation Error - Wrong username or password. +# oauth2.newAccount.title=Account Creation +oauth2.newAccount.welcomeWithName=Welcome to Dataverse, {0} +oauth2.newAccount.welcomeNoName=Welcome to Dataverse +# oauth2.newAccount.email=Email +# oauth2.newAccount.email.tip=Dataverse uses this email to notify you of issues regarding your data. +oauth2.newAccount.suggestedEmails=Suggested Email Addresses: +oauth2.newAccount.username=Username +oauth2.newAccount.username.tip=This username will be your unique identifier as a Dataverse user. 
+oauth2.newAccount.explanation=This information is provided by {0} and will be used to create your {1} account. To log in again, you will have to use the {0} log in option. +oauth2.newAccount.suggestConvertInsteadOfCreate=If you already have a {0} account, you will need to convert your account. +# oauth2.newAccount.tabs.convertAccount=Convert Existing Account +oauth2.newAccount.buttons.convertNewAccount=Convert Account +oauth2.newAccount.emailTaken=Email already taken. Consider merging the corresponding account instead. +oauth2.newAccount.emailOk=Email OK. +oauth2.newAccount.emailInvalid=Invalid email address. +# oauth2.newAccount.usernameTaken=Username already taken. +# oauth2.newAccount.usernameOk=Username OK. + +# oauth2/convert.xhtml +# oauth2.convertAccount.title=Account Conversion +oauth2.convertAccount.explanation=Please enter your {0} account username or email and password to convert your account to the {1} log in option. Learn more about converting your account. +oauth2.convertAccount.username=Existing username +oauth2.convertAccount.password=Password +oauth2.convertAccount.authenticationFailed=Authentication failed - bad username or password. +oauth2.convertAccount.buttonTitle=Convert Account +oauth2.convertAccount.success=Your Dataverse account is now associated with your {0} account. + +# oauth2/callback.xhtml +oauth2.callback.page.title=OAuth Callback +oauth2.callback.message=Authentication Error - Dataverse could not authenticate your ORCID login. Please make sure you authorize your ORCID account to connect with Dataverse. For more details about the information being requested, see the User Guide. + +# tab on dataverseuser.xhtml +apitoken.title=API Token +apitoken.message=Your API Token is displayed below after it has been created. Check out our {0}API Guide{1} for more information on using your API Token with the Dataverse APIs. +apitoken.notFound=API Token for {0} has not been created. 
+apitoken.generateBtn=Create Token +apitoken.regenerateBtn=Recreate Token + +#dashboard.xhtml +dashboard.title=Dashboard +dashboard.card.harvestingclients.header=Harvesting Clients +dashboard.card.harvestingclients.btn.manage=Manage Clients +dashboard.card.harvestingclients.clients={0, choice, 0#Clients|1#Client|2#Clients} +dashboard.card.harvestingclients.datasets={0, choice, 0#Datasets|1#Dataset|2#Datasets} +dashboard.card.harvestingserver.header=Harvesting Server +dashboard.card.harvestingserver.enabled=OAI server enabled +dashboard.card.harvestingserver.disabled=OAI server disabled +dashboard.card.harvestingserver.status=Status +dashboard.card.harvestingserver.sets={0, choice, 0#Sets|1#Set|2#Sets} +dashboard.card.harvestingserver.btn.manage=Manage Server +dashboard.card.metadataexport.header=Metadata Export +dashboard.card.metadataexport.message=Dataset metadata export is only available through the {0} API. Learn more in the {0} {1}API Guide{2}. + +#harvestclients.xhtml +harvestclients.title=Manage Harvesting Clients +harvestclients.toptip= - Harvesting can be scheduled to run at a specific time or on demand. Harvesting can be initiated here or via the REST API. +harvestclients.noClients.label=No clients are configured. +harvestclients.noClients.why.header=What is Harvesting? +harvestclients.noClients.why.reason1=Harvesting is a process of exchanging metadata with other repositories. As a harvesting client, your Dataverse gathers metadata records from remote sources. These can be other Dataverse instances, or other archives that support OAI-PMH, the standard harvesting protocol. +harvestclients.noClients.why.reason2=Harvested metadata records are searchable by users. Clicking on a harvested dataset in the search results takes the user to the original repository. Harvested datasets cannot be edited in your Dataverse installation. 
+harvestclients.noClients.how.header=How To Use Harvesting +harvestclients.noClients.how.tip1=To harvest metadata, a Harvesting Client is created and configured for each remote repository. Note that when creating a client you will need to select an existing local dataverse to host harvested datasets. +harvestclients.noClients.how.tip2=Harvested records can be kept in sync with the original repository through scheduled incremental updates, for example, daily or weekly. Alternatively, harvests can be run on demand, from this page or via the REST API. +harvestclients.noClients.getStarted=To get started, click on the Add Client button above. To learn more about Harvesting, visit the Harvesting section of the User Guide. +harvestclients.btn.add=Add Client +harvestclients.tab.header.name=Nickname +harvestclients.tab.header.url=URL +harvestclients.tab.header.lastrun=Last Run +harvestclients.tab.header.lastresults=Last Results +harvestclients.tab.header.action=Actions +harvestclients.tab.header.action.btn.run=Run Harvesting +harvestclients.tab.header.action.btn.edit=Edit +harvestclients.tab.header.action.btn.delete=Delete +harvestclients.tab.header.action.btn.delete.dialog.header=Delete Harvesting Client +harvestclients.tab.header.action.btn.delete.dialog.warning=Are you sure you want to delete the harvesting client "{0}"? Deleting the client will delete all datasets harvested from this remote server. +harvestclients.tab.header.action.btn.delete.dialog.tip=Note, this action may take a while to process, depending on the number of harvested datasets. +harvestclients.tab.header.action.delete.infomessage=Harvesting client is being deleted. Note, that this may take a while, depending on the amount of harvested content. +harvestclients.actions.runharvest.success=Successfully started an asynchronous harvest for client "{0}". Please reload the page to check on the harvest results. 
+harvestclients.newClientDialog.step1=Step 1 of 4 - Client Information +harvestclients.newClientDialog.title.new=Create Harvesting Client +harvestclients.newClientDialog.help=Configure a client to harvest content from a remote server. +harvestclients.newClientDialog.nickname=Nickname +harvestclients.newClientDialog.nickname.helptext=Consists of letters, digits, underscores (_) and dashes (-). +harvestclients.newClientDialog.nickname.required=Client nickname cannot be empty! +harvestclients.newClientDialog.nickname.invalid=Client nickname can contain only letters, digits, underscores (_) and dashes (-); and must be at most 30 characters. +harvestclients.newClientDialog.nickname.alreadyused=This nickname is already used. +harvestclients.newClientDialog.type=Server Protocol +harvestclients.newClientDialog.type.helptext=Only the OAI server protocol is currently supported. +harvestclients.newClientDialog.type.OAI=OAI +harvestclients.newClientDialog.type.Nesstar=Nesstar +harvestclients.newClientDialog.url=Server URL +harvestclients.newClientDialog.url.tip=URL of a harvesting resource. +harvestclients.newClientDialog.url.watermark=Remote harvesting server, http://... +harvestclients.newClientDialog.url.helptext.notvalidated=URL of a harvesting resource. Once you click 'Next', we will try to establish a connection to the server in order to verify that it is working, and to obtain extra information about its capabilities. +harvestclients.newClientDialog.url.required=A valid harvesting server address is required. +harvestclients.newClientDialog.url.invalid=Invalid URL. Failed to establish connection and receive a valid server response. +harvestclients.newClientDialog.url.noresponse=Failed to establish connection to the server. +harvestclients.newClientDialog.url.badresponse=Invalid response from the server. 
+harvestclients.newClientDialog.dataverse=Local Dataverse +harvestclients.newClientDialog.dataverse.tip=Dataverse that will host the datasets harvested from this remote resource. +harvestclients.newClientDialog.dataverse.menu.enterName=Enter Dataverse Alias +harvestclients.newClientDialog.dataverse.menu.header=Dataverse Name (Affiliate), Alias +harvestclients.newClientDialog.dataverse.menu.invalidMsg=No matches found +harvestclients.newClientDialog.dataverse.required=You must select an existing dataverse for this harvesting client. +harvestclients.newClientDialog.step2=Step 2 of 4 - Format +harvestclients.newClientDialog.oaiSets=OAI Set +harvestclients.newClientDialog.oaiSets.tip=Harvesting sets offered by this OAI server. +harvestclients.newClientDialog.oaiSets.noset=None +harvestclients.newClientDialog.oaiSets.helptext=Selecting "none" will harvest the default set, as defined by the server. Often this will be the entire body of content across all sub-sets. +harvestclients.newClientDialog.oaiSets.helptext.noset=This OAI server does not support named sets. The entire body of content offered by the server will be harvested. +harvestclients.newClientDialog.oaiMetadataFormat=Metadata Format +harvestclients.newClientDialog.oaiMetadataFormat.tip=Metadata formats offered by the remote server. +harvestclients.newClientDialog.oaiMetadataFormat.required=Please select the metadata format to harvest from this archive. +harvestclients.newClientDialog.step3=Step 3 of 4 - Schedule +harvestclients.newClientDialog.schedule=Schedule +harvestclients.newClientDialog.schedule.tip=Schedule harvesting to run automatically daily or weekly. +harvestclients.newClientDialog.schedule.time.none.helptext=Leave harvesting unscheduled to run on demand only. 
+harvestclients.newClientDialog.schedule.none=None +harvestclients.newClientDialog.schedule.daily=Daily +harvestclients.newClientDialog.schedule.weekly=Weekly +harvestclients.newClientDialog.schedule.time=Time +harvestclients.newClientDialog.schedule.day=Day +harvestclients.newClientDialog.schedule.time.am=AM +harvestclients.newClientDialog.schedule.time.pm=PM +harvestclients.newClientDialog.schedule.time.helptext=Scheduled times are in your local time. +harvestclients.newClientDialog.btn.create=Create Client +harvestclients.newClientDialog.success=Successfully created harvesting client "{0}". +harvestclients.newClientDialog.step4=Step 4 of 4 - Display +harvestclients.newClientDialog.harvestingStyle=Archive Type +harvestclients.newClientDialog.harvestingStyle.tip=Type of remote archive. +harvestclients.newClientDialog.harvestingStyle.helptext=Select the archive type that best describes this remote server in order to properly apply formatting rules and styles to the harvested metadata as they are shown in the search results. Note that improperly selecting the type of the remote archive can result in incomplete entries in the search results, and a failure to redirect the user to the archival source of the data. +harvestclients.viewEditDialog.title=Edit Harvesting Client +harvestclients.viewEditDialog.archiveUrl=Archive URL +harvestclients.viewEditDialog.archiveUrl.tip=The URL of the archive that serves the data harvested by this client, which is used in search results for links to the original sources of the harvested content. +harvestclients.viewEditDialog.archiveUrl.helptext=Edit if this URL differs from the Server URL. +harvestclients.viewEditDialog.archiveDescription=Archive Description +harvestclients.viewEditDialog.archiveDescription.tip=Description of the archival source of the harvested content, displayed in search results. +harvestclients.viewEditDialog.archiveDescription.default.generic=This Dataset is harvested from our partners. 
Clicking the link will take you directly to the archival source of the data. +harvestclients.viewEditDialog.btn.save=Save Changes +harvestclients.newClientDialog.title.edit=Edit Group {0} + +#harvestset.xhtml +harvestserver.title=Manage Harvesting Server +harvestserver.toptip= - Define sets of local datasets that will be available for harvesting by remote clients. +harvestserver.service.label=OAI Server +harvestserver.service.enabled=Enabled +harvestserver.service.disabled=Disabled +harvestserver.service.disabled.msg=Harvesting Server is currently disabled. +harvestserver.service.empty=No sets are configured. +harvestserver.service.enable.success=OAI Service has been successfully enabled. +harvestserver.noSets.why.header=What is a Harvesting Server? +harvestserver.noSets.why.reason1=Harvesting is a process of exchanging metadata with other repositories. As a harvesting server, your Dataverse can make some of the local dataset metadata available to remote harvesting clients. These can be other Dataverse instances, or any other clients that support OAI-PMH harvesting protocol. +harvestserver.noSets.why.reason2=Only the published, unrestricted datasets in your Dataverse can be harvested. Remote clients normally keep their records in sync through scheduled incremental updates, daily or weekly, thus minimizing the load on your server. Note that it is only the metadata that are harvested. Remote harvesters will generally not attempt to download the data files themselves. +harvestserver.noSets.how.header=How to run a Harvesting Server? +harvestserver.noSets.how.tip1=Harvesting server can be enabled or disabled on this page. +harvestserver.noSets.how.tip2=Once the service is enabled, you can define collections of local datasets that will be available to remote harvesters as OAI Sets. 
Sets are defined by search queries (for example, authorName:king; or parentId:1234 - to select all the datasets that belong to the dataverse specified; or dsPersistentId:"doi:1234/" to select all the datasets with the persistent identifier authority specified). Consult the Search API section of the Dataverse User Guide for more information on the search queries. +harvestserver.noSets.getStarted=To get started, enable the OAI server and click on the Add Set button. To learn more about Harvesting, visit the Harvesting section of the User Guide. +harvestserver.btn.add=Add Set +harvestserver.tab.header.spec=OAI setSpec +harvestserver.tab.header.description=Description +harvestserver.tab.header.definition=Definition Query +harvestserver.tab.header.stats=Datasets +harvestserver.tab.col.stats.empty=No records (empty set) +harvestserver.tab.col.stats.results={0} {0, choice, 0#datasets|1#dataset|2#datasets} ({1} {1, choice, 0#records|1#record|2#records} exported, {2} marked as deleted) +harvestserver.tab.header.action=Actions +harvestserver.tab.header.action.btn.export=Run Export +harvestserver.actions.runreexport.success=Successfully started an asynchronous re-export job for OAI set "{0}" (please reload the page to check on the export progress). +harvestserver.tab.header.action.btn.edit=Edit +harvestserver.tab.header.action.btn.delete=Delete +harvestserver.tab.header.action.btn.delete.dialog.header=Delete Harvesting Set +harvestserver.tab.header.action.btn.delete.dialog.tip=Are you sure you want to delete the OAI set "{0}"? You cannot undo a delete! +harvestserver.tab.header.action.delete.infomessage=Selected harvesting set is being deleted. (this may take a few moments) +harvestserver.newSetDialog.title.new=Create Harvesting Set +harvestserver.newSetDialog.help=Define a set of local datasets available for harvesting to remote clients. 
+harvestserver.newSetDialog.setspec=Name/OAI setSpec +harvestserver.newSetDialog.setspec.tip=A unique name (OAI setSpec) identifying this set. +harvestserver.newSetDialog.setspec.helptext=Consists of letters, digits, underscores (_) and dashes (-). +harvestserver.editSetDialog.setspec.helptext=The name can not be changed once the set has been created. +harvestserver.newSetDialog.setspec.required=Name (OAI setSpec) cannot be empty! +harvestserver.newSetDialog.setspec.invalid=Name (OAI setSpec) can contain only letters, digits, underscores (_) and dashes (-). +harvestserver.newSetDialog.setspec.alreadyused=This set name (OAI setSpec) is already used. +harvestserver.newSetDialog.setdescription=Description +harvestserver.newSetDialog.setdescription.tip=Provide a brief description for this OAI set. +harvestserver.newSetDialog.setdescription.required=Set description cannot be empty! +harvestserver.newSetDialog.setquery=Definition Query +harvestserver.newSetDialog.setquery.tip=Search query that defines the content of the dataset. +harvestserver.newSetDialog.setquery.helptext=Example query: authorName:king +harvestserver.newSetDialog.setquery.required=Search query cannot be left empty! +harvestserver.newSetDialog.setquery.results=Search query returned {0} datasets! +harvestserver.newSetDialog.setquery.empty=WARNING: Search query returned no results! +harvestserver.newSetDialog.btn.create=Create Set +harvestserver.newSetDialog.success=Successfully created harvesting set "{0}". +harvestserver.viewEditDialog.title=Edit Harvesting Set +harvestserver.viewEditDialog.btn.save=Save Changes + +#dashboard-users.xhtml +dashboard.card.users=Users +dashboard.card.users.header=Dashboard - User List +dashboard.card.users.super=Superusers +dashboard.card.users.manage=Manage Users +dashboard.card.users.message=List and manage users. +dashboard.list_users.searchTerm.watermark=Search these users... 
+dashboard.list_users.tbl_header.userId=ID +dashboard.list_users.tbl_header.userIdentifier=Username +dashboard.list_users.tbl_header.name=Name +dashboard.list_users.tbl_header.lastName=Last Name +dashboard.list_users.tbl_header.firstName=First Name +dashboard.list_users.tbl_header.email=Email +dashboard.list_users.tbl_header.affiliation=Affiliation +dashboard.list_users.tbl_header.roles=Roles +dashboard.list_users.tbl_header.position=Position +dashboard.list_users.tbl_header.isSuperuser=Superuser +dashboard.list_users.tbl_header.authProviderFactoryAlias=Authentication +dashboard.list_users.tbl_header.createdTime=Created Time +dashboard.list_users.tbl_header.lastLoginTime=Last Login Time +dashboard.list_users.tbl_header.lastApiUseTime=Last API Use Time +dashboard.list_users.tbl_header.roles.removeAll=Remove All +dashboard.list_users.tbl_header.roles.removeAll.header=Remove All Roles +dashboard.list_users.tbl_header.roles.removeAll.confirmationText=Are you sure you want to remove all roles for user {0}? +dashboard.list_users.removeAll.message.success=All roles have been removed for user {0}. +dashboard.list_users.removeAll.message.failure=Failed to remove roles for user {0}. + +dashboard.list_users.toggleSuperuser=Edit Superuser Status +dashboard.list_users.toggleSuperuser.confirmationText.add=Are you sure you want to enable superuser status for user {0}? +dashboard.list_users.toggleSuperuser.confirmationText.remove=Are you sure you want to disable superuser status for user {0}? +dashboard.list_users.toggleSuperuser.confirm=Continue +dashboard.list_users.api.auth.invalid_apikey=The API key is invalid. +dashboard.list_users.api.auth.not_superuser=Forbidden. You must be a superuser. 
+ +#MailServiceBean.java +notification.email.create.dataverse.subject={0}: Your dataverse has been created +notification.email.create.dataset.subject={0}: Your dataset has been created +notification.email.request.file.access.subject={0}: Access has been requested for a restricted file +notification.email.grant.file.access.subject={0}: You have been granted access to a restricted file +notification.email.rejected.file.access.subject={0}: Your request for access to a restricted file has been rejected +notification.email.update.maplayer={0}: WorldMap layer added to dataset +notification.email.maplayer.deletefailed.subject={0}: Failed to delete WorldMap layer +notification.email.maplayer.deletefailed.text=We failed to delete the WorldMap layer associated with the restricted file {0}, and any related data that may still be publicly available on the WorldMap site. Please try again, or contact WorldMap and/or Dataverse support. (Dataset: {1}) +notification.email.submit.dataset.subject={0}: Your dataset has been submitted for review +notification.email.publish.dataset.subject={0}: Your dataset has been published +notification.email.returned.dataset.subject={0}: Your dataset has been returned +notification.email.create.account.subject={0}: Your account has been created +notification.email.assign.role.subject={0}: You have been assigned a role +notification.email.revoke.role.subject={0}: Your role has been revoked +notification.email.verifyEmail.subject={0}: Verify your email address +notification.email.greeting=Hello, \n +# Bundle file editors, please note that "notification.email.welcome" is used in a unit test +notification.email.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the User Guide at {1}/{2}/user or contact {3} at {4} for assistance. +notification.email.welcomeConfirmEmailAddOn=\n\nPlease verify your email address at {0} . Note, the verify link will expire after {1}. 
Send another verification email by visiting your account page. +notification.email.requestFileAccess=File access requested for dataset: {0}. Manage permissions at {1}. +notification.email.grantFileAccess=Access granted for files in dataset: {0} (view at {1}). +notification.email.rejectFileAccess=Your request for access was rejected for the requested files in the dataset: {0} (view at {1}). If you have any questions about why your request was rejected, you may reach the dataset owner using the "Contact" link on the upper right corner of the dataset page. +# Bundle file editors, please note that "notification.email.createDataverse" is used in a unit test +notification.email.createDataverse=Your new dataverse named {0} (view at {1} ) was created in {2} (view at {3} ). To learn more about what you can do with your dataverse, check out the Dataverse Management - User Guide at {4}/{5}/user/dataverse-management.html . +# Bundle file editors, please note that "notification.email.createDataset" is used in a unit test +notification.email.createDataset=Your new dataset named {0} (view at {1} ) was created in {2} (view at {3} ). To learn more about what you can do with a dataset, check out the Dataset Management - User Guide at {4}/{5}/user/dataset-management.html . +notification.email.wasSubmittedForReview={0} (view at {1}) was submitted for review to be published in {2} (view at {3}). Don''t forget to publish it or send it back to the contributor\! +notification.email.wasReturnedByReviewer={0} (view at {1}) was returned by the curator of {2} (view at {3}). +notification.email.wasPublished={0} (view at {1}) was published in {2} (view at {3}). +notification.email.worldMap.added={0} (view at {1}) had WorldMap layer data added to it. +notification.email.closing=\n\nThank you,\n{0} +notification.email.assignRole=You are now {0} for the {1} "{2}" (view at {3}). +notification.email.revokeRole=One of your roles for the {0} "{1}" has been revoked (view at {2}). 
+notification.email.changeEmail=Hello, {0}.{1}\n\nPlease contact us if you did not intend this change or if you need assistance. +hours=hours +hour=hour +minutes=minutes +minute=minute +notification.email.checksumfail.subject={0}: Your upload failed checksum validation +notification.email.import.filesystem.subject=Dataset {0} has been successfully uploaded and verified +notification.email.import.checksum.subject={0}: Your file checksum job has completed + +# passwordreset.xhtml +pageTitle.passwdReset.pre=Account Password Reset +passwdReset.token=token : +passwdReset.userLookedUp=user looked up : +passwdReset.emailSubmitted=email submitted : +passwdReset.details={0} Password Reset{1} - To initiate the password reset process, please provide your email address. +passwdReset.submitRequest=Submit Password Request +passwdReset.successSubmit.tip=If this email is associated with an account, then an email will be sent with further instructions to {0}. +passwdReset.debug=DEBUG +passwdReset.resetUrl=The reset URL is +passwdReset.noEmail.tip=No email was actually sent because a user could not be found using the provided email address {0}, but we don't mention this because we don't want malicious users to use the form to determine if there is an account associated with an email address. +passwdReset.illegalLink.tip=Your password reset link is not valid. If you need to reset your password, {0}click here{1} in order to request that your password be reset again. +passwdReset.newPasswd.details={0} Reset Password{1} \u2013 Our password requirements have changed. Please pick a strong password that matches the criteria below. +passwdReset.newPasswd=New Password +passwdReset.rePasswd=Retype Password +passwdReset.resetBtn=Reset Password + +# dataverse.xhtml +dataverse.title=The project, department, university, professor, or journal this dataverse will contain data for. +dataverse.enterName=Enter name... +dataverse.host.title=The dataverse which contains this data. 
+dataverse.identifier.title=Short name used for the URL of this dataverse. +dataverse.affiliation.title=The organization with which this dataverse is affiliated. +dataverse.category=Category +dataverse.category.title=The type that most closely reflects this dataverse. +dataverse.type.selectTab.top=Select one... +dataverse.type.selectTab.researchers=Researcher +dataverse.type.selectTab.researchProjects=Research Project +dataverse.type.selectTab.journals=Journal +dataverse.type.selectTab.organizationsAndInsitutions=Organization or Institution +dataverse.type.selectTab.teachingCourses=Teaching Course +dataverse.type.selectTab.uncategorized=Uncategorized +dataverse.type.selectTab.researchGroup=Research Group +dataverse.type.selectTab.laboratory=Laboratory +dataverse.type.selectTab.department=Department +dataverse.description.title=A summary describing the purpose, nature, or scope of this dataverse. +dataverse.email=Email +dataverse.email.title=The e-mail address(es) of the contact(s) for the dataverse. +dataverse.share.dataverseShare=Share Dataverse +dataverse.share.dataverseShare.tip=Share this dataverse on your favorite social media networks. +dataverse.share.dataverseShare.shareText=View this dataverse. +dataverse.subject.title=Subject(s) covered in this dataverse. +dataverse.metadataElements=Metadata Fields +dataverse.metadataElements.tip=Choose the metadata fields to use in dataset templates and when adding a dataset to this dataverse. +dataverse.metadataElements.from.tip=Use metadata fields from {0} +dataverse.resetModifications=Reset Modifications +dataverse.resetModifications.text=Are you sure you want to reset the selected metadata fields? If you do this, any customizations (hidden, required, optional) you have done will no longer appear. 
+dataverse.field.required=(Required) +dataverse.field.example1= (Examples: +dataverse.field.example2=) +dataverse.field.set.tip=[+] View fields + set as hidden, required, or optional +dataverse.field.set.view=[+] View fields +dataverse.field.requiredByDataverse=Required by Dataverse +dataverse.facetPickList.text=Browse/Search Facets +dataverse.facetPickList.tip=Choose the metadata fields to use as facets for browsing datasets and dataverses in this dataverse. +dataverse.facetPickList.facetsFromHost.text=Use browse/search facets from {0} +dataverse.facetPickList.metadataBlockList.all=All Metadata Fields +dataverse.edit=Edit +dataverse.option.generalInfo=General Information +dataverse.option.themeAndWidgets=Theme + Widgets +dataverse.option.featuredDataverse=Featured Dataverses +dataverse.option.permissions=Permissions +dataverse.option.dataverseGroups=Groups +dataverse.option.datasetTemplates=Dataset Templates +dataverse.option.datasetGuestbooks=Dataset Guestbooks +dataverse.option.deleteDataverse=Delete Dataverse +dataverse.publish.btn=Publish +dataverse.publish.header=Publish Dataverse +dataverse.nopublished=No Published Dataverses +dataverse.nopublished.tip=In order to use this feature you must have at least one published dataverse. +dataverse.contact=Email Dataverse Contact +dataset.link=Link Dataset +dataverse.link=Link Dataverse +dataverse.link.btn.tip=Link to Your Dataverse +dataverse.link.yourDataverses=Your {0, choice, 1#Dataverse|2#Dataverses} +dataverse.link.save=Save Linked Dataverse +dataset.link.save=Save Linked Dataset +dataverse.link.dataverse.choose=Choose which of your dataverses you would like to link this dataverse to. +dataverse.link.dataset.choose=Choose which of your dataverses you would like to link this dataset to. +dataverse.link.no.choice=You have one dataverse you can add linked dataverses and datasets in. +dataverse.link.no.linkable=To be able to link a dataverse or dataset, you need to have your own dataverse. 
Click on the Add Data button on the homepage to get started. +dataverse.link.no.linkable.remaining=You have already linked all of your eligible dataverses. +dataverse.savedsearch.link=Link Search +dataverse.savedsearch.searchquery=Search +dataverse.savedsearch.filterQueries=Facets +dataverse.savedsearch.save=Save Linked Search +dataverse.savedsearch.dataverse.choose=Choose which of your dataverses you would like to link this search to. +dataverse.savedsearch.no.choice=You have one dataverse to which you may add a saved search. +# Bundle file editors, please note that "dataverse.savedsearch.save.success" is used in a unit test +dataverse.saved.search.success=The saved search has been successfully linked to {0}. +dataverse.saved.search.failure=The saved search was not able to be linked. +dataverse.linked.success= {0} has been successfully linked to {1}. +dataverse.linked.success.wait= {0} has been successfully linked to {1}. Please wait for its contents to appear. +dataverse.linked.internalerror={0} has been successfully linked to {1} but contents will not appear until an internal error has been fixed. +dataverse.page.pre=Previous +dataverse.page.next=Next +dataverse.byCategory=Dataverses by Category +dataverse.displayFeatured=Display the dataverses selected below on the homepage for this dataverse. +dataverse.selectToFeature=Select dataverses to feature on the homepage of this dataverse. +dataverse.publish.tip=Are you sure you want to publish your dataverse? Once you do so it must remain published. +dataverse.publish.failed.tip=This dataverse cannot be published because the dataverse it is in has not been published. +dataverse.publish.failed=Cannot publish dataverse. +dataverse.publish.success=Your dataverse is now public. +dataverse.publish.failure=This dataverse was not able to be published. +dataverse.delete.tip=Are you sure you want to delete your dataverse? You cannot undelete this dataverse. 
+dataverse.delete=Delete Dataverse +dataverse.delete.success=Your dataverse has been deleted. +dataverse.delete.failure=This dataverse was not able to be deleted. +# Bundle file editors, please note that "dataverse.create.success" is used in a unit test because it's so fancy with two parameters +dataverse.create.success=You have successfully created your dataverse! To learn more about what you can do with your dataverse, check out the User Guide. +dataverse.create.failure=This dataverse was not able to be created. +dataverse.create.authenticatedUsersOnly=Only authenticated users can create dataverses. +dataverse.update.success=You have successfully updated your dataverse! +dataverse.update.failure=This dataverse was not able to be updated. + +# rolesAndPermissionsFragment.xhtml + +# advanced.xhtml +advanced.search.header.dataverses=Dataverses +advanced.search.dataverses.name.tip=The project, department, university, professor, or journal this Dataverse will contain data for. +advanced.search.dataverses.affiliation.tip=The organization with which this Dataverse is affiliated. +advanced.search.dataverses.description.tip=A summary describing the purpose, nature, or scope of this Dataverse. +advanced.search.dataverses.subject.tip=Domain-specific Subject Categories that are topically relevant to this Dataverse. +advanced.search.header.datasets=Datasets +advanced.search.header.files=Files +advanced.search.files.name.tip=The name given to identify the file. +advanced.search.files.description.tip=A summary describing the file and its variables. +advanced.search.files.fileType=File Type +advanced.search.files.fileType.tip=The extension for a file, e.g. CSV, zip, Stata, R, PDF, JPEG, etc. +advanced.search.files.variableName=Variable Name +advanced.search.files.variableName.tip=The name of the variable's column in the data frame. +advanced.search.files.variableLabel=Variable Label +advanced.search.files.variableLabel.tip=A short description of the variable. 
+ +# search-include-fragment.xhtml +dataverse.search.advancedSearch=Advanced Search +dataverse.search.input.watermark=Search this dataverse... +account.search.input.watermark=Search this data... +dataverse.search.btn.find=Find +dataverse.results.btn.addData=Add Data +dataverse.results.btn.addData.newDataverse=New Dataverse +dataverse.results.btn.addData.newDataset=New Dataset +dataverse.results.dialog.addDataGuest.header=Add Data +dataverse.results.dialog.addDataGuest.msg=You need to Log In to create a dataverse or add a dataset. +dataverse.results.dialog.addDataGuest.msg.signup=You need to Sign Up or Log In to create a dataverse or add a dataset. +dataverse.results.types.dataverses=Dataverses +dataverse.results.types.datasets=Datasets +dataverse.results.types.files=Files +# Bundle file editors, please note that "dataverse.results.empty.zero" is used in a unit test +dataverse.results.empty.zero=There are no dataverses, datasets, or files that match your search. Please try a new search by using other or broader terms. You can also check out the search guide for tips. +# Bundle file editors, please note that "dataverse.results.empty.hidden" is used in a unit test +dataverse.results.empty.hidden=There are no search results based on how you have narrowed your search. You can check out the search guide for tips. +dataverse.results.empty.browse.guest.zero=This dataverse currently has no dataverses, datasets, or files. Please log in to see if you are able to add to it. +dataverse.results.empty.browse.guest.hidden=There are no dataverses within this dataverse. Please log in to see if you are able to add to it. +dataverse.results.empty.browse.loggedin.noperms.zero=This dataverse currently has no dataverses, datasets, or files. You can use the Email Dataverse Contact button above to ask about this dataverse or request access for this dataverse. +dataverse.results.empty.browse.loggedin.noperms.hidden=There are no dataverses within this dataverse. 
+dataverse.results.empty.browse.loggedin.perms.zero=This dataverse currently has no dataverses, datasets, or files. You can add to it by using the Add Data button on this page. +account.results.empty.browse.loggedin.perms.zero=You have no dataverses, datasets, or files associated with your account. You can add a dataverse or dataset by clicking the Add Data button above. Read more about adding data in the User Guide. +dataverse.results.empty.browse.loggedin.perms.hidden=There are no dataverses within this dataverse. You can add to it by using the Add Data button on this page. +dataverse.results.empty.link.technicalDetails=More technical details +dataverse.search.facet.error=There was an error with your search parameters. Please clear your search and try again. +dataverse.results.count.toofresults={0} to {1} of {2} {2, choice, 0#Results|1#Result|2#Results} +dataverse.results.paginator.current=(Current) +dataverse.results.btn.sort=Sort +dataverse.results.btn.sort.option.nameAZ=Name (A-Z) +dataverse.results.btn.sort.option.nameZA=Name (Z-A) +dataverse.results.btn.sort.option.newest=Newest +dataverse.results.btn.sort.option.oldest=Oldest +dataverse.results.btn.sort.option.relevance=Relevance +dataverse.results.cards.foundInMetadata=Found in Metadata Fields: +dataverse.results.cards.files.tabularData=Tabular Data +dataverse.results.solrIsDown=Please note: Due to an internal error, browsing and searching is not available. +dataverse.theme.title=Theme +dataverse.theme.inheritCustomization.title=For this dataverse, use the same theme as the parent dataverse. +dataverse.theme.inheritCustomization.label=Inherit Theme +dataverse.theme.inheritCustomization.checkbox=Inherit theme from {0} +dataverse.theme.logo=Logo +dataverse.theme.logo.tip=Supported image types are JPG, TIF, or PNG and should be no larger than 500 KB. The maximum display size for an image file in a dataverse's theme is 940 pixels wide by 120 pixels high. 
+dataverse.theme.logo.format=Logo Format +dataverse.theme.logo.format.selectTab.square=Square +dataverse.theme.logo.format.selectTab.rectangle=Rectangle +dataverse.theme.logo.alignment=Logo Alignment +dataverse.theme.logo.alignment.selectTab.left=Left +dataverse.theme.logo.alignment.selectTab.center=Center +dataverse.theme.logo.alignment.selectTab.right=Right +dataverse.theme.logo.backColor=Logo Background Color +dataverse.theme.logo.image.upload=Upload Image +dataverse.theme.tagline=Tagline +dataverse.theme.website=Website +dataverse.theme.linkColor=Link Color +dataverse.theme.txtColor=Text Color +dataverse.theme.backColor=Background Color +dataverse.theme.success=You have successfully updated the theme for this dataverse! +dataverse.theme.failure=The dataverse theme has not been updated. +dataverse.theme.logo.image=Logo Image +dataverse.theme.logo.image.title=The logo or image file you wish to display in the header of this dataverse. +dataverse.theme.logo.image.uploadNewFile=Upload New File +dataverse.theme.logo.image.invalidMsg=The image could not be uploaded. Please try again with a JPG, TIF, or PNG file. +dataverse.theme.logo.image.uploadImgFile=Upload Image File +dataverse.theme.logo.format.title=The shape for the logo or image file you upload for this dataverse. +dataverse.theme.logo.format.selectTab.square2=Square +dataverse.theme.logo.format.selectTab.rectangle2=Rectangle +dataverse.theme.logo.alignment.title=Where the logo or image should display in the header. +dataverse.theme.logo.alignment.selectTab.left2=Left +dataverse.theme.logo.alignment.selectTab.center2=Center +dataverse.theme.logo.alignment.selectTab.right2=Right +dataverse.theme.logo.backColor.title=Select a color to display behind the logo of this dataverse. +dataverse.theme.headerColor=Header Colors +dataverse.theme.headerColor.tip=Colors you select to style the header of this dataverse. 
+dataverse.theme.backColor.title=Color for the header area that contains the image, tagline, URL, and text. +dataverse.theme.linkColor.title=Color for the link to display as. +dataverse.theme.txtColor.title=Color for the tagline text and the name of this dataverse. +dataverse.theme.tagline.title=A phrase or sentence that describes this dataverse. +dataverse.theme.tagline.tip=Provide a tagline that is 140 characters or less. +dataverse.theme.website.title=URL for your personal website, institution, or any website that relates to this dataverse. +dataverse.theme.website.tip=The website will be linked behind the tagline. To have a website listed, you must also provide a tagline. +dataverse.theme.website.watermark=Your personal site, http://... +dataverse.theme.website.invalidMsg=Invalid URL. +dataverse.theme.disabled=The theme for the root dataverse has been administratively disabled with the :DisableRootDataverseTheme database setting. +dataverse.widgets.title=Widgets +dataverse.widgets.notPublished.why.header=Why Use Widgets? +dataverse.widgets.notPublished.why.reason1=Increases the web visibility of your data by allowing you to embed your dataverse and datasets into your personal or project website. +dataverse.widgets.notPublished.why.reason2=Allows others to browse your dataverse and datasets without leaving your personal or project website. +dataverse.widgets.notPublished.how.header=How To Use Widgets +dataverse.widgets.notPublished.how.tip1=To use widgets, your dataverse and datasets need to be published. +dataverse.widgets.notPublished.how.tip2=After publishing, code will be available on this page for you to copy and add to your personal or project website. +dataverse.widgets.notPublished.how.tip3=Do you have an OpenScholar website? If so, learn more about adding the Dataverse widgets to your website here. +dataverse.widgets.notPublished.getStarted=To get started, publish your dataverse. 
To learn more about Widgets, visit the Theme + Widgets section of the User Guide. +dataverse.widgets.tip=Copy and paste this code into the HTML on your site. To learn more about Widgets, visit the Theme + Widgets section of the User Guide. +dataverse.widgets.searchBox.txt=Dataverse Search Box +dataverse.widgets.searchBox.tip=Add a way for visitors on your website to be able to search Dataverse. +dataverse.widgets.dataverseListing.txt=Dataverse Listing +dataverse.widgets.dataverseListing.tip=Add a way for visitors on your website to be able to view your dataverses and datasets, sort, or browse through them. +dataverse.widgets.advanced.popup.header=Widget Advanced Options +dataverse.widgets.advanced.prompt=Forward dataset citation persistent URL's to your personal website. The page you submit as your Personal Website URL must contain the code snippet for the Dataverse Listing widget. +dataverse.widgets.advanced.url.label=Personal Website URL +dataverse.widgets.advanced.url.watermark=http://www.example.com/page-name +dataverse.widgets.advanced.invalid.message=Please enter a valid URL +dataverse.widgets.advanced.success.message=Successfully updated your Personal Website URL +dataverse.widgets.advanced.failure.message=The dataverse Personal Website URL has not been updated. + +# permissions-manage.xhtml +dataverse.permissions.title=Permissions +dataverse.permissions.dataset.title=Dataset Permissions +dataverse.permissions.access.accessBtn=Edit Access +dataverse.permissions.usersOrGroups=Users/Groups +dataverse.permissions.usersOrGroups.assignBtn=Assign Roles to Users/Groups +dataverse.permissions.usersOrGroups.createGroupBtn=Create Group +dataverse.permissions.usersOrGroups.description=All the users and groups that have access to your dataverse. 
+dataverse.permissions.usersOrGroups.tabHeader.userOrGroup=User/Group Name (Affiliation) +dataverse.permissions.usersOrGroups.tabHeader.id=ID +dataverse.permissions.usersOrGroups.tabHeader.role=Role +dataverse.permissions.usersOrGroups.tabHeader.action=Action +dataverse.permissions.usersOrGroups.assignedAt=Role assigned at {0} +dataverse.permissions.usersOrGroups.removeBtn=Remove Assigned Role +dataverse.permissions.usersOrGroups.removeBtn.confirmation=Are you sure you want to remove this role assignment? +dataverse.permissions.roles=Roles +dataverse.permissions.roles.add=Add New Role +dataverse.permissions.roles.description=All the roles set up in your dataverse, that you can assign to users and groups. +dataverse.permissions.roles.edit=Edit Role +dataverse.permissions.roles.copy=Copy Role + +# permissions-manage-files.xhtml +dataverse.permissionsFiles.title=Restricted File Permissions +dataverse.permissionsFiles.usersOrGroups=Users/Groups +dataverse.permissionsFiles.usersOrGroups.assignBtn=Grant Access to Users/Groups +dataverse.permissionsFiles.usersOrGroups.description=All the users and groups that have access to restricted files in this dataset. +dataverse.permissionsFiles.usersOrGroups.tabHeader.userOrGroup=User/Group Name (Affiliation) +dataverse.permissionsFiles.usersOrGroups.tabHeader.id=ID +dataverse.permissionsFiles.usersOrGroups.tabHeader.email=Email +dataverse.permissionsFiles.usersOrGroups.tabHeader.files=Files +dataverse.permissionsFiles.usersOrGroups.tabHeader.access=Access +dataverse.permissionsFiles.usersOrGroups.file=File +dataverse.permissionsFiles.usersOrGroups.files=Files +dataverse.permissionsFiles.usersOrGroups.invalidMsg=There are no users or groups with access to the restricted files in this dataset. 
+dataverse.permissionsFiles.files=Restricted Files +dataverse.permissionsFiles.files.label={0, choice, 0#Restricted Files|1#Restricted File|2#Restricted Files} +dataverse.permissionsFiles.files.description=All the restricted files in this dataset. +dataverse.permissionsFiles.files.tabHeader.fileName=File Name +dataverse.permissionsFiles.files.tabHeader.roleAssignees=Users/Groups +dataverse.permissionsFiles.files.tabHeader.access=Access +dataverse.permissionsFiles.files.tabHeader.publishedRestrictedState=Published +dataverse.permissionsFiles.files.tabHeader.draftRestrictedState=Draft +dataverse.permissionsFiles.files.deleted=Deleted +dataverse.permissionsFiles.files.public=Public +dataverse.permissionsFiles.files.restricted=Restricted +dataverse.permissionsFiles.files.roleAssignee=User/Group +dataverse.permissionsFiles.files.roleAssignees=Users/Groups +dataverse.permissionsFiles.files.roleAssignees.label={0, choice, 0#Users/Groups|1#User/Group|2#Users/Groups} +dataverse.permissionsFiles.files.assignBtn=Assign Access +dataverse.permissionsFiles.files.invalidMsg=There are no restricted files in this dataset. +dataverse.permissionsFiles.files.requested=Requested Files +dataverse.permissionsFiles.files.selected=Selecting {0} of {1} {2} +dataverse.permissionsFiles.viewRemoveDialog.header=File Access +dataverse.permissionsFiles.viewRemoveDialog.removeBtn=Remove Access +dataverse.permissionsFiles.viewRemoveDialog.removeBtn.confirmation=Are you sure you want to remove access to this file? Once access has been removed, the user or group will no longer be able to download this file. +dataverse.permissionsFiles.assignDialog.header=Grant File Access +dataverse.permissionsFiles.assignDialog.description=Grant file access to users and groups. +dataverse.permissionsFiles.assignDialog.userOrGroup=Users/Groups +dataverse.permissionsFiles.assignDialog.userOrGroup.enterName=Enter User/Group Name +dataverse.permissionsFiles.assignDialog.userOrGroup.invalidMsg=No matches found. 
+dataverse.permissionsFiles.assignDialog.userOrGroup.requiredMsg=Please select at least one user or group. +dataverse.permissionsFiles.assignDialog.fileName=File Name +dataverse.permissionsFiles.assignDialog.grantBtn=Grant +dataverse.permissionsFiles.assignDialog.rejectBtn=Reject + +# permissions-configure.xhtml +dataverse.permissions.accessDialog.header=Edit Access +dataverse.permissions.description=Current access configuration to your dataverse. +dataverse.permissions.tip=Select if all users or only certain users are able to add to this dataverse, by clicking the Edit Access button. +dataverse.permissions.Q1=Who can add to this dataverse? +dataverse.permissions.Q1.answer1=Anyone adding to this dataverse needs to be given access +dataverse.permissions.Q1.answer2=Anyone with a Dataverse account can add sub dataverses +dataverse.permissions.Q1.answer3=Anyone with a Dataverse account can add datasets +dataverse.permissions.Q1.answer4=Anyone with a Dataverse account can add sub dataverses and datasets +dataverse.permissions.Q2=When a user adds a new dataset to this dataverse, which role should be automatically assigned to them on that dataset? +dataverse.permissions.Q2.answer.editor.description=- Edit metadata, upload files, and edit files, edit Terms, Guestbook, Submit datasets for review +dataverse.permissions.Q2.answer.manager.description=- Edit metadata, upload files, and edit files, edit Terms, Guestbook, File Restrictions (Files Access + Use) +dataverse.permissions.Q2.answer.curator.description=- Edit metadata, upload files, and edit files, edit Terms, Guestbook, File Restrictions (Files Access + Use), Edit Permissions/Assign Roles + Publish + +# roles-assign.xhtml +dataverse.permissions.usersOrGroups.assignDialog.header=Assign Role +dataverse.permissions.usersOrGroups.assignDialog.description=Grant permissions to users and groups by assigning them a role. 
+dataverse.permissions.usersOrGroups.assignDialog.userOrGroup=Users/Groups +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.enterName=Enter User/Group Name +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.invalidMsg=No matches found. +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.requiredMsg=Please select at least one user or group. +dataverse.permissions.usersOrGroups.assignDialog.role.description=These are the permissions associated with the selected role. +dataverse.permissions.usersOrGroups.assignDialog.role.warning=Assigning the {0} role means the user(s) will also have the {0} role applied to all {1} within this {2}. +dataverse.permissions.usersOrGroups.assignDialog.role.requiredMsg=Please select a role to assign. + +# roles-edit.xhtml +dataverse.permissions.roles.header=Edit Role +dataverse.permissions.roles.name=Role Name +dataverse.permissions.roles.name.title=Enter a name for the role. +dataverse.permissions.roles.id=Identifier +dataverse.permissions.roles.id.title=Enter a name for the alias. +dataverse.permissions.roles.description.title=Describe the role (1000 characters max). +dataverse.permissions.roles.description.counter={0} characters remaining +dataverse.permissions.roles.roleList.header=Role Permissions +dataverse.permissions.roles.roleList.authorizedUserOnly=Permissions with the information icon indicate actions that can be performed by users not logged into Dataverse. + +# explicitGroup-new-dialog.xhtml +dataverse.permissions.explicitGroupEditDialog.title.new=Create Group +dataverse.permissions.explicitGroupEditDialog.title.edit=Edit Group {0} +dataverse.permissions.explicitGroupEditDialog.help=Add users or other groups to this group. +dataverse.permissions.explicitGroupEditDialog.groupIdentifier=Group Identifier +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.tip=Short name used for the ID of this group. 
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.required=Group identifier cannot be empty +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.invalid=Group identifier can contain only letters, digits, underscores (_) and dashes (-) +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.helpText=Consists of letters, digits, underscores (_) and dashes (-) +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.taken=Group identifier already used in this dataverse +dataverse.permissions.explicitGroupEditDialog.groupName=Group Name +dataverse.permissions.explicitGroupEditDialog.groupName.required=Group name cannot be empty +dataverse.permissions.explicitGroupEditDialog.groupDescription=Description +dataverse.permissions.explicitGroupEditDialog.roleAssigneeName=User/Group +dataverse.permissions.explicitGroupEditDialog.roleAssigneeNames=Users/Groups +dataverse.permissions.explicitGroupEditDialog.createGroup=Create Group + +# manage-templates.xhtml +dataset.manageTemplates.pageTitle=Manage Dataset Templates +dataset.manageTemplates.select.txt=Include Templates from {0} +dataset.manageTemplates.createBtn=Create Dataset Template +dataset.manageTemplates.saveNewTerms=Save Dataset Template +dataset.manageTemplates.noTemplates.why.header=Why Use Templates? +dataset.manageTemplates.noTemplates.why.reason1=Templates are useful when you have several datasets that have the same information in multiple metadata fields that you would prefer not to have to keep manually typing in. +dataset.manageTemplates.noTemplates.why.reason2=Templates can be used to input instructions for those uploading datasets into your dataverse if you have a specific way you want a metadata field to be filled out. 
+dataset.manageTemplates.noTemplates.how.header=How To Use Templates +dataset.manageTemplates.noTemplates.how.tip1=Templates are created at the dataverse level, can be deleted (so it does not show for future datasets), set to default (not required), and can be copied so you do not have to start over when creating a new template with similar metadata from another template. When a template is deleted, it does not impact the datasets that have used the template already. +dataset.manageTemplates.noTemplates.how.tip2=Please note that the ability to choose which metadata fields are hidden, required, or optional is done on the General Information page for this dataverse. +dataset.manageTemplates.noTemplates.getStarted=To get started, click on the Create Dataset Template button above. To learn more about templates, visit the Dataset Templates section of the User Guide. +dataset.manageTemplates.tab.header.templte=Template Name +dataset.manageTemplates.tab.header.date=Date Created +dataset.manageTemplates.tab.header.usage=Usage +dataset.manageTemplates.tab.header.action=Action +dataset.manageTemplates.tab.action.btn.makeDefault=Make Default +dataset.manageTemplates.tab.action.btn.default=Default +dataset.manageTemplates.tab.action.btn.view=View +dataset.manageTemplates.tab.action.btn.copy=Copy +dataset.manageTemplates.tab.action.btn.edit=Edit +dataset.manageTemplates.tab.action.btn.edit.metadata=Metadata +dataset.manageTemplates.tab.action.btn.edit.terms=Terms +dataset.manageTemplates.tab.action.btn.delete=Delete +dataset.manageTemplates.tab.action.btn.delete.dialog.tip=Are you sure you want to delete this template? A new dataset will not be able to use this template. 
+dataset.manageTemplates.tab.action.btn.delete.dialog.header=Delete Template +dataset.manageTemplates.tab.action.btn.view.dialog.header=Dataset Template Preview +dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate=Dataset Template +dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate.title=The dataset template which prepopulates info into the form automatically. +dataset.manageTemplates.tab.action.noedit.createdin=Template created at {0} +dataset.manageTemplates.delete.usedAsDefault=This template is the default template for the following dataverse(s). It will be removed as default as well. +dataset.manageTemplates.info.message.notEmptyTable=Create, clone, edit, view, or delete dataset templates. Create a dataset template to prefill metadata fields with standard values, such as author affiliation, to help users create datasets in this dataverse. You can also add help text directly into the metadata fields to give users more information on what to add to these metadata fields. + +# metadataFragment.xhtml + +# template.xhtml +dataset.template.name.tip=The name of the dataset template. +dataset.template.returnBtn=Return to Manage Templates +dataset.template.name.title=Enter a unique name for the template. +template.asterisk.tip=Asterisks indicate metadata fields that users will be required to fill out while adding a dataset to this dataverse. +dataset.template.popup.create.title=Create Template +dataset.template.popup.create.text=Do you want to add default Terms of Use and/or Access? +dataset.create.add.terms=Save + Add Terms + +# manage-groups.xhtml +dataverse.manageGroups.pageTitle=Manage Dataverse Groups +dataverse.manageGroups.createBtn=Create Group +dataverse.manageGroups.noGroups.why.header=Why Use Groups? +dataverse.manageGroups.noGroups.why.reason1=Groups allow you to assign roles and permissions for many users at once. 
+dataverse.manageGroups.noGroups.why.reason2=You can use groups to manage multiple different kinds of users (students, collaborators, etc.) +dataverse.manageGroups.noGroups.how.header=How To Use Groups +dataverse.manageGroups.noGroups.how.tip1=A group can contain both users and other groups. +dataverse.manageGroups.noGroups.how.tip2=You can assign permissions to a group in the "Permissions" view. +dataverse.manageGroups.noGroups.getStarted=To get started, click on the Create Group button above. +dataverse.manageGroups.tab.header.name=Group Name +dataverse.manageGroups.tab.header.id=Group ID +dataverse.manageGroups.tab.header.membership=Membership +dataverse.manageGroups.tab.header.action=Action +dataverse.manageGroups.tab.action.btn.view=View +dataverse.manageGroups.tab.action.btn.copy=Copy +dataverse.manageGroups.tab.action.btn.enable=Enable +dataverse.manageGroups.tab.action.btn.disable=Disable +dataverse.manageGroups.tab.action.btn.edit=Edit +dataverse.manageGroups.tab.action.btn.viewCollectedData=View Collected Data +dataverse.manageGroups.tab.action.btn.delete=Delete +dataverse.manageGroups.tab.action.btn.delete.dialog.header=Delete Group +dataverse.manageGroups.tab.action.btn.delete.dialog.tip=Are you sure you want to delete this group? You cannot undelete a group. +dataverse.manageGroups.tab.action.btn.view.dialog.header=Dataverse Group +dataverse.manageGroups.tab.action.btn.view.dialog.group=Group Name +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.name=Member Name +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.type=Member Type +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.action=Action +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.delete=Delete +dataverse.manageGroups.tab.action.btn.view.dialog.groupMembers=Group Members +dataverse.manageGroups.tab.action.btn.view.dialog.enterName=Enter User/Group Name +dataverse.manageGroups.tab.action.btn.view.dialog.invalidMsg=No matches found. 
+ +# manage-guestbooks.xhtml +dataset.manageGuestbooks.pageTitle=Manage Dataset Guestbooks +dataset.manageGuestbooks.include=Include Guestbooks from {0} +dataset.manageGuestbooks.createBtn=Create Dataset Guestbook +dataset.manageGuestbooks.download.all.responses=Download All Responses +dataset.manageGuestbooks.download.responses=Download Responses +dataset.manageGuestbooks.noGuestbooks.why.header=Why Use Guestbooks? +dataset.manageGuestbooks.noGuestbooks.why.reason1=Guestbooks allow you to collect data about who is downloading the files from your datasets. You can decide to collect account information (username, given name & last name, affiliation, etc.) as well as create custom questions (e.g., What do you plan to use this data for?). +dataset.manageGuestbooks.noGuestbooks.why.reason2=You can download the data collected from the enabled guestbooks to be able to store it outside of Dataverse. +dataset.manageGuestbooks.noGuestbooks.how.header=How To Use Guestbooks +dataset.manageGuestbooks.noGuestbooks.how.tip1=A guestbook can be used for multiple datasets but only one guestbook can be used for a dataset. +dataset.manageGuestbooks.noGuestbooks.how.tip2=Custom questions can have free form text answers or have a user select an answer from several options. +dataset.manageGuestbooks.noGuestbooks.getStarted=To get started, click on the Create Dataset Guestbook button above. To learn more about Guestbooks, visit the Dataset Guestbook section of the User Guide. 
+dataset.manageGuestbooks.tab.header.name=Guestbook Name +dataset.manageGuestbooks.tab.header.date=Date Created +dataset.manageGuestbooks.tab.header.usage=Usage +dataset.manageGuestbooks.tab.header.responses=Responses +dataset.manageGuestbooks.tab.header.action=Action +dataset.manageGuestbooks.tab.action.btn.view=Preview +dataset.manageGuestbooks.tab.action.btn.copy=Copy +dataset.manageGuestbooks.tab.action.btn.enable=Enable +dataset.manageGuestbooks.tab.action.btn.disable=Disable +dataset.manageGuestbooks.tab.action.btn.edit=Edit +dataset.manageGuestbooks.tab.action.btn.preview=Preview +dataset.manageGuestbooks.tab.action.btn.viewCollectedData=View Responses +dataset.manageGuestbooks.tab.action.btn.delete=Delete +dataset.manageGuestbooks.tab.action.btn.delete.dialog.header=Delete Guestbook +dataset.manageGuestbooks.tab.action.btn.delete.dialog.tip=Are you sure you want to delete this guestbook? You cannot undelete a guestbook. +dataset.manageGuestbooks.tab.action.btn.view.dialog.header=Preview Guestbook +dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook.title=Upon downloading files the guestbook asks for the following information. +dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook=Guestbook Name +dataset.manageGuestbooks.tab.action.btn.viewCollectedData.dialog.header=Dataset Guestbook Collected Data +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData.title=User data collected by the guestbook. +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData=Collected Data +dataset.manageGuestbooks.tab.action.noedit.createdin=Guestbook created at {0} +dataset.manageGuestbooks.message.deleteSuccess=The guestbook has been deleted. +dataset.manageGuestbooks.message.deleteFailure=The guestbook cannot be deleted. +dataset.manageGuestbooks.message.editSuccess=The guestbook has been updated. +dataset.manageGuestbooks.message.editFailure=The guestbook could not be updated. 
+dataset.manageGuestbooks.message.enableSuccess=The guestbook has been enabled. +dataset.manageGuestbooks.message.enableFailure=The guestbook could not be enabled. +dataset.manageGuestbooks.message.disableSuccess=The guestbook has been disabled. +dataset.manageGuestbooks.message.disableFailure=The guestbook could not be disabled. +dataset.manageGuestbooks.tip.title=Manage Dataset Guestbooks +dataset.manageGuestbooks.tip.downloadascsv=Click \"Download All Responses\" to download all collected guestbook responses for this dataverse, as a CSV file. To navigate and analyze your collected responses, we recommend importing this CSV file into Excel, Google Sheets or similar software. +dataset.guestbooksResponses.dataset=Dataset +dataset.guestbooksResponses.date=Date +dataset.guestbooksResponses.type=Type +dataset.guestbooksResponses.file=File +dataset.guestbooksResponses.tip.title=Guestbook Responses +dataset.guestbooksResponses.count.responses={0} {0, choice, 0#Responses|1#Response|2#Responses} +dataset.guestbooksResponses.count.toofresults={0} to {1} of {2} {2, choice, 0#Responses|1#Response|2#Responses} +dataset.guestbooksResponses.tip.downloadascsv=Click \"Download Responses\" to download all collected responses for this guestbook, as a CSV file. To navigate and analyze your collected responses, we recommend importing this CSV file into Excel, Google Sheets or similar software. +dataset.guestbooksResponses.tooManyResponses.message=Note: this guestbook has too many responses to display on this page. Only the most recent {0} responses are shown below. Click \"Download Responses\" to download all collected responses ({1} total) as a CSV file. + +# guestbook-responses.xhtml +dataset.guestbookResponses.pageTitle=Guestbook Responses + +# guestbook.xhtml +dataset.manageGuestbooks.guestbook.name=Guestbook Name +dataset.manageGuestbooks.guestbook.name.tip=Enter a unique name for this Guestbook. 
+dataset.manageGuestbooks.guestbook.dataCollected=Data Collected +dataset.manageGuestbooks.guestbook.dataCollected.description=Dataverse account information that will be collected when a user downloads a file. Check the ones that will be required. +dataset.manageGuestbooks.guestbook.customQuestions=Custom Questions +dataset.manageGuestbooks.guestbook.accountInformation=Account Information +dataset.manageGuestbooks.guestbook.required=(Required) +dataset.manageGuestbooks.guestbook.optional=(Optional) +dataset.manageGuestbooks.guestbook.customQuestions.description=Create your own questions to have users provide more than their account information when they download a file. Questions can be required or optional and answers can be text or multiple choice. +dataset.manageGuestbooks.guestbook.customQuestions.questionType=Question Type +dataset.manageGuestbooks.guestbook.customQuestions.questionText=Question Text +dataset.manageGuestbooks.guestbook.customQuestions.responseOptions=Response Options +dataset.manageGuestbooks.guestbook.customQuestions.questionType.text=Text +dataset.manageGuestbooks.guestbook.customQuestions.questionType.multiple=Multiple Choice + +# guestbookResponseFragment.xhtml +dataset.guestbookResponse.guestbook.additionalQuestions=Additional Questions +dataset.guestbookResponse.guestbook.responseTooLong=Please limit response to 255 characters + +# dataset.xhtml +dataset.configureBtn=Configure +dataset.pageTitle=Add New Dataset +dataset.editBtn=Edit +dataset.editBtn.itemLabel.upload=Files (Upload) +dataset.editBtn.itemLabel.metadata=Metadata +dataset.editBtn.itemLabel.terms=Terms +dataset.editBtn.itemLabel.permissions=Permissions +dataset.editBtn.itemLabel.thumbnailsAndWidgets=Thumbnails + Widgets +dataset.editBtn.itemLabel.privateUrl=Private URL +dataset.editBtn.itemLabel.permissionsDataset=Dataset +dataset.editBtn.itemLabel.permissionsFile=Restricted Files +dataset.editBtn.itemLabel.deleteDataset=Delete Dataset 
+dataset.editBtn.itemLabel.deleteDraft=Delete Draft Version +dataset.editBtn.itemLabel.deaccession=Deaccession Dataset +dataset.exportBtn=Export Metadata +dataset.exportBtn.itemLabel.ddi=DDI +dataset.exportBtn.itemLabel.dublinCore=Dublin Core +dataset.exportBtn.itemLabel.schemaDotOrg=Schema.org JSON-LD +dataset.exportBtn.itemLabel.json=JSON +metrics.title=Metrics +metrics.title.tip=View more metrics information +metrics.comingsoon=Coming soon... +metrics.views=Views +metrics.downloads={0, choice, 0#Downloads|1#Download|2#Downloads} +metrics.citations=Citations +metrics.shares=Shares +dataset.publish.btn=Publish +dataset.publish.header=Publish Dataset +dataset.rejectBtn=Return to Author +dataset.submitBtn=Submit for Review +dataset.disabledSubmittedBtn=Submitted for Review +dataset.submitMessage=You will not be able to make changes to this dataset while it is in review. +dataset.submit.success=Your dataset has been submitted for review. +dataset.inreview.infoMessage=\u2013 This dataset is currently under review prior to publication. +dataset.submit.failure=Dataset Submission Failed - {0} +dataset.submit.failure.null=Can't submit for review. Dataset is null. +dataset.submit.failure.isReleased=Latest version of dataset is already released. Only draft versions can be submitted for review. +dataset.submit.failure.inReview=You cannot submit this dataset for review because it is already in review. +dataset.rejectMessage=Return this dataset to contributor for modification. +dataset.rejectWatermark=Please enter a reason for returning this dataset to its author(s). +dataset.reject.enterReason=Reason for return to author is required +dataset.reject.enterReason.header=Required entry +dataset.reject.success=This dataset has been sent back to the contributor. +dataset.reject.failure=Dataset Submission Return Failed - {0} +dataset.reject.datasetNull=Cannot return the dataset to the author(s) because it is null. 
+dataset.reject.datasetNotInReview=This dataset cannot be returned to the author(s) because the latest version is not In Review. The author(s) needs to click Submit for Review first. +dataset.publish.tip=Are you sure you want to publish this dataset? Once you do so it must remain published. +dataset.publishBoth.tip=Once you publish this dataset it must remain published. +dataset.unregistered.tip= This dataset is unregistered. We will attempt to register it before publishing. +dataset.republish.tip=Are you sure you want to republish this dataset? +dataset.selectVersionNumber=Select if this is a minor or major version update. +dataset.majorRelease=Major Release +dataset.minorRelease=Minor Release +dataset.majorRelease.tip=Due to the nature of changes to the current draft this will be a major release ({0}) +dataset.mayNotBePublished=Cannot publish dataset. +dataset.mayNotPublish.administrator= This dataset cannot be published until {0} is published by its administrator. +dataset.mayNotPublish.both= This dataset cannot be published until {0} is published. Would you like to publish both right now? +dataset.mayNotPublish.twoGenerations= This dataset cannot be published until {0} and {1} are published. +dataset.mayNotBePublished.both.button=Yes, Publish Both +dataset.viewVersion.unpublished=View Unpublished Version +dataset.viewVersion.published=View Published Version +dataset.email.datasetContactBtn=Email Dataset Contact +dataset.email.hiddenMessage= +dataset.email.messageSubject=Test Message Subject +dataset.email.datasetLinkBtn.tip=Link Dataset to Your Dataverse +dataset.share.datasetShare=Share Dataset +dataset.share.datasetShare.tip=Share this dataset on your favorite social media networks. +dataset.share.datasetShare.shareText=View this dataset. +dataset.locked.message=Dataset Locked +dataset.locked.inReview.message=Submitted for Review +dataset.publish.error=This dataset may not be published because the {0} Service is currently inaccessible. Please try again. 
Does the issue continue to persist? +dataset.publish.error.doi=This dataset may not be published because the DOI update failed. +dataset.delete.error=Could not deaccession the dataset because the {0} update failed. +dataset.publish.worldMap.deleteConfirm=Please note that your data and map on WorldMap will be removed due to restricted file access changes in this dataset version which you are publishing. Do you want to continue? +dataset.publish.workflow.inprogress=Publish workflow in progress +dataset.versionUI.draft=Draft +dataset.versionUI.inReview=In Review +dataset.versionUI.unpublished=Unpublished +dataset.versionUI.deaccessioned=Deaccessioned +dataset.cite.title.released=DRAFT VERSION will be replaced in the citation with V1 once the dataset has been published. +dataset.cite.title.draft=DRAFT VERSION will be replaced in the citation with the selected version once the dataset has been published. +dataset.cite.title.deassessioned=DEACCESSIONED VERSION has been added to the citation for this version since it is no longer available. +dataset.cite.standards.tip=Learn about Data Citation Standards. +dataset.cite.downloadBtn=Cite Dataset +dataset.cite.downloadBtn.xml=EndNote XML +dataset.cite.downloadBtn.ris=RIS +dataset.cite.downloadBtn.bib=BibTeX +dataset.create.authenticatedUsersOnly=Only authenticated users can create datasets. +dataset.deaccession.reason=Deaccession Reason +dataset.beAccessedAt=The dataset can now be accessed at: +dataset.descriptionDisplay.title=Description +dataset.keywordDisplay.title=Keyword +dataset.subjectDisplay.title=Subject +dataset.contact.tip=Use email button above to contact. +dataset.asterisk.tip=Asterisks indicate required fields +dataset.message.uploadFiles=Upload Dataset Files - You can drag and drop files from your desktop, directly into the upload widget. +dataset.message.editMetadata=Edit Dataset Metadata - Add more metadata about this dataset to help others easily find it. 
+dataset.message.editTerms=Edit Dataset Terms - Update this dataset's terms of use. +dataset.message.locked.editNotAllowedInReview=Dataset cannot be edited due to In Review dataset lock. +dataset.message.locked.downloadNotAllowedInReview=Dataset file(s) may not be downloaded due to In Review dataset lock. +dataset.message.locked.downloadNotAllowed=Dataset file(s) may not be downloaded due to dataset lock. +dataset.message.locked.editNotAllowed=Dataset cannot be edited due to dataset lock. +dataset.message.createSuccess=This dataset has been created. +dataset.message.linkSuccess= {0} has been successfully linked to {1}. +dataset.message.metadataSuccess=The metadata for this dataset has been updated. +dataset.message.termsSuccess=The terms for this dataset have been updated. +dataset.message.filesSuccess=The files for this dataset have been updated. +dataset.message.publishSuccess=This dataset has been published. +dataset.message.only.authenticatedUsers=Only authenticated users may release Datasets. +dataset.message.deleteSuccess=This dataset has been deleted. +dataset.message.bulkFileUpdateSuccess=The selected files have been updated. +dataset.message.bulkFileDeleteSuccess=The selected files have been deleted. +datasetVersion.message.deleteSuccess=This dataset draft has been deleted. +datasetVersion.message.deaccessionSuccess=The selected version(s) have been deaccessioned. +dataset.message.deaccessionSuccess=This dataset has been deaccessioned. +dataset.message.validationError=Validation Error - Required fields were missed or there was a validation error. Please scroll down to see details. +dataset.message.publishFailure=The dataset could not be published. +dataset.message.metadataFailure=The metadata could not be updated. +dataset.message.filesFailure=The files could not be updated. +dataset.message.bulkFileDeleteFailure=The selected files could not be deleted. +dataset.message.files.ingestFailure=The file(s) could not be ingested. 
+dataset.message.deleteFailure=This dataset draft could not be deleted. +dataset.message.deaccessionFailure=This dataset could not be deaccessioned. +dataset.message.createFailure=The dataset could not be created. +dataset.message.termsFailure=The dataset terms could not be updated. +dataset.message.publicInstall=File Access - Files are stored on a publicly accessible storage server. +dataset.metadata.publicationDate=Publication Date +dataset.metadata.publicationDate.tip=The publication date of a dataset. +dataset.metadata.persistentId=Dataset Persistent ID +dataset.metadata.persistentId.tip=The unique persistent identifier for a Dataset, which can be a Handle or DOI in Dataverse. +dataset.versionDifferences.termsOfUseAccess=Terms of Use and Access +dataset.versionDifferences.termsOfUseAccessChanged=Terms of Use/Access Changed +file.viewDiffDialog.restricted=Restricted +dataset.template.tip=Changing the template will clear any fields you may have entered data into. +dataset.noTemplate.label=None +dataset.noSelectedFiles.header=Select File(s) +dataset.noSelectedFilesForDownload=Please select a file or files to be downloaded. +dataset.noSelectedFilesForRequestAccess=Please select a file or files for access request. +dataset.noSelectedFilesForDelete=Please select a file or files to be deleted. +dataset.noSelectedFilesForMetadataEdit=Please select a file or files to be edited. +dataset.noSelectedFilesForRestrict=Please select unrestricted file(s) to be restricted. +dataset.noSelectedFilesForUnRestrict=Please select restricted file(s) to be unrestricted. +dataset.inValidSelectedFilesForDownload=Restricted Files Selected +dataset.noValidSelectedFilesForDownload=The restricted file(s) selected may not be downloaded because you have not been granted access. +dataset.mixedSelectedFilesForDownload=The restricted file(s) selected may not be downloaded because you have not been granted access. 
+dataset.downloadUnrestricted=Click Continue to download the files you have access to download. +dataset.requestAccessToRestrictedFiles=You may request access to the restricted file(s) by clicking the Request Access button. +dataset.privateurl.infoMessageAuthor=Unpublished Dataset Private URL - Privately share this dataset before it is published: {0} +dataset.privateurl.infoMessageReviewer=Unpublished Dataset Private URL - This unpublished dataset is being privately shared. You will not be able to access it when logged into your Dataverse account. +dataset.privateurl.header=Unpublished Dataset Private URL +dataset.privateurl.tip=Use a Private URL to allow those without Dataverse accounts to access your unpublished dataset. For more information about the Private URL feature, please refer to the User Guide. +dataset.privateurl.absent=Private URL has not been created. +dataset.privateurl.createPrivateUrl=Create Private URL +dataset.privateurl.disablePrivateUrl=Disable Private URL +dataset.privateurl.disablePrivateUrlConfirm=Yes, Disable Private URL +dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Private URL? If you have shared the Private URL with others they will no longer be able to use it to access your unpublished dataset. +dataset.privateurl.cannotCreate=Private URL can only be used with unpublished versions of datasets. +dataset.privateurl.roleassigeeTitle=Private URL Enabled +dataset.privateurl.createdSuccess=Success! +dataset.privateurl.disabledSuccess=You have successfully disabled the Private URL for this unpublished dataset. +dataset.privateurl.noPermToCreate=To create a Private URL you must have the following permissions: {0}. +file.count={0} {0, choice, 0#Files|1#File|2#Files} +file.count.selected={0} {0, choice, 0#Files Selected|1#File Selected|2#Files Selected} +file.selectToAddBtn=Select Files to Add +file.selectToAdd.tipLimit=File upload limit is {0} bytes per file. 
+file.selectToAdd.tipMoreInformation=For more information about supported file formats, please refer to the User Guide. +file.selectToAdd.dragdropMsg=Drag and drop files here. +file.createUploadDisabled=Once you have saved your dataset, you can upload your data using the "Upload Files" button on the dataset page. For more information about supported file formats, please refer to the User Guide. +file.fromDropbox=Upload from Dropbox +file.fromDropbox.tip=Files can also be uploaded directly from Dropbox. +file.replace.original=Original File +file.editFiles=Edit Files +file.bulkUpdate=Bulk Update +file.uploadFiles=Upload Files +file.replaceFile=Replace File +file.notFound.tip=There are no files in this dataset. +file.noSelectedFiles.tip=There are no selected files to display. +file.noUploadedFiles.tip=Files you upload will appear here. +file.replace=Replace +file.replaced.warning.header=Edit File +file.replaced.warning.draft.warningMessage=You can not replace a file that has been replaced in a dataset draft. In order to replace it with a different file you must delete the dataset draft. Note that doing so will discard any other changes within this draft. +file.replaced.warning.previous.warningMessage=You can not edit a file that has been replaced in a previous dataset version. In order to edit it you must go to the most recently published version of the file. +file.alreadyDeleted.previous.warningMessage=This file has already been deleted in the current version. It may not be edited. +file.delete=Delete +file.metadata=Metadata +file.deleted.success=Files "{0}" will be permanently deleted from this version of this dataset once you click on the Save Changes button. +file.deleted.replacement.success=The replacement file has been deleted. +file.editAccess=Edit Access +file.restrict=Restrict +file.unrestrict=Unrestrict +file.restricted.success=Files "{0}" will be restricted once you click on the Save Changes button. 
+file.download.header=Download +file.download.subset.header=Download Data Subset +file.preview=Preview: +file.previewMap=Preview Map: +file.fileName=File Name +file.type.tabularData=Tabular Data +file.originalChecksumType=Original File {0} +file.checksum.exists.tip=A file with this checksum already exists in the dataset. +file.selectedThumbnail=Thumbnail +file.selectedThumbnail.tip=The thumbnail for this file is used as the default thumbnail for the dataset. Click 'Advanced Options' button of another file to select that file. +file.cloudStorageAccess=Cloud Storage Access +file.cloudStorageAccess.tip=The container name for this dataset needed to access files in cloud storage. +file.cloudStorageAccess.help=To directly access this data in the {2} cloud environment, use the container name in the Cloud Storage Access box below. To learn more about the cloud environment, visit the Cloud Storage Access section of the User Guide. +file.copy=Copy +file.compute=Compute +file.rsyncUpload.info=Follow these steps to upload your data. To learn more about the upload process and how to prepare your data, please refer to the User Guide. +file.rsyncUpload.noScriptAvailable=Rsync script not available! +file.rsyncUpload.filesExist=You can not upload additional files to this dataset. +file.rsyncUpload.step1=Make sure your data is stored under a single directory. All files within this directory and its subdirectories will be uploaded to your dataset. +file.rsyncUpload.step2=Download this file upload script: +file.rsyncUpload.step2.downloadScriptButton=Download Script +file.rsyncUpload.step3=Open a terminal window in the same directory you saved the script and run this command: bash ./{0} +file.rsyncUpload.step4=Follow the instructions in the script. It will ask for a full path (beginning with "/") to the directory containing your data. Note: this script will expire after 7 days. 
+file.rsyncUpload.inProgressMessage.summary=DCM File Upload +file.rsyncUpload.inProgressMessage.details=This dataset is locked until the data files have been transferred and verified. + +file.metaData.dataFile.dataTab.variables=Variables +file.metaData.dataFile.dataTab.observations=Observations +file.metaData.viewOnWorldMap=Explore on WorldMap +file.addDescription=Add file description... +file.tags=Tags +file.editTags=Edit Tags +file.editTagsDialog.tip=Select existing file tags or create new tags to describe your files. Each file can have more than one tag. +file.editTagsDialog.select=File Tags +file.editTagsDialog.selectedTags=Selected Tags +file.editTagsDialog.selectedTags.none=No tags selected +file.editTagsDialog.add=Custom File Tag +file.editTagsDialog.add.tip=Creating a new tag will add it as a tag option for all files in this dataset. +file.editTagsDialog.newName=Add new file tag... +dataset.removeUnusedFileTags.label=Delete Tags +dataset.removeUnusedFileTags.tip=Select to delete Custom File Tags not used by the files in the dataset. +dataset.removeUnusedFileTags.check=Delete tags not being used +file.setThumbnail=Set Thumbnail +file.setThumbnail.header=Set Dataset Thumbnail +file.datasetThumbnail=Dataset Thumbnail +file.datasetThumbnail.tip=Select to use this image as the thumbnail image that is displayed in the search results for this dataset. +file.setThumbnail.confirmation=Are you sure you want to set this image as your dataset thumbnail? There is already an image uploaded to be the thumbnail and this action will remove it. +file.useThisIamge=Use this image as the dataset thumbnail image +file.advancedOptions=Advanced Options +file.advancedIngestOptions=Advanced Ingest Options +file.assignedDataverseImage.success={0} has been saved as the thumbnail for this dataset. +file.assignedTabFileTags.success=The tag(s) were successfully added for {0}. 
+file.tabularDataTags=Tabular Data Tags +file.tabularDataTags.tip=Select a tag to describe the type(s) of data this is (survey, time series, geospatial, etc). +file.spss-savEncoding=Language Encoding +file.spss-savEncoding.title=Select the language used for encoding this SPSS (sav) Data file. +file.spss-savEncoding.current=Current Selection: +file.spss-porExtraLabels=Variable Labels +file.spss-porExtraLabels.title=Upload an additional text file with extra variable labels. +file.spss-porExtraLabels.selectToAddBtn=Select File to Add +file.ingestFailed.header=Upload Completed with Errors +file.ingestFailed.message=Tabular data ingest failed. +file.map=Map +file.mapData=Map Data +file.mapData.worldMap=WorldMap +file.mapData.unpublished.header=Data Not Published +file.mapData.unpublished.message=In order to map your data with WorldMap, your data must be published. Please publish this dataset, then retry the Map Data feature. +file.downloadBtn.format.all=All File Formats + Information +file.downloadBtn.format.tab=Tab-Delimited +file.downloadBtn.format.original=Original File Format ({0}) +file.downloadBtn.format.rdata=RData Format +file.downloadBtn.format.var=Variable Metadata +file.downloadBtn.format.citation=Data File Citation +file.more.information.link=Link to more file information for +file.requestAccess=Request Access +file.requestAccess.dialog.msg=You need to Log In to request access to this file. +file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access to this file. +file.accessRequested=Access Requested +file.restrictions=File Restrictions +file.restrictions.description=Limit access to published files by marking them as restricted. Provide users Terms of Access and allow them to request access. +file.restrictions.worldmap.warning=Please note, once your file access changes are published your map on WorldMap will be deleted and the Explore on WorldMap feature will be removed. +file.ingestInProgress=Ingest in progress... 
+file.dataFilesTab.metadata.header=Metadata +file.dataFilesTab.metadata.addBtn=Add + Edit Metadata +file.dataFilesTab.terms.header=Terms +file.dataFilesTab.terms.editTermsBtn=Edit Terms Requirements +file.dataFilesTab.terms.list.termsOfUse.header=Terms of Use +file.dataFilesTab.terms.list.termsOfUse.waiver=Waiver +file.dataFilesTab.terms.list.termsOfUse.waiver.title=The waiver informs data downloaders how they can use this dataset. +file.dataFilesTab.terms.list.termsOfUse.waiver.txt=CC0 - "Public Domain Dedication" +file.dataFilesTab.terms.list.termsOfUse.waiver.description=Datasets will default to a CC0 public domain dedication. CC0 facilitates reuse and extensibility of research data. Our Community Norms as well as good scientific practices expect that proper credit is given via citation. If you are unable to give datasets a CC0 waiver you may enter custom Terms of Use for datasets. +file.dataFilesTab.terms.list.termsOfUse.no.waiver.txt=No waiver has been selected for this dataset. +file.dataFilesTab.terms.list.termsOfUse.waiver.txt.description=Our Community Norms as well as good scientific practices expect that proper credit is given via citation. Please use the data citation above, generated by the Dataverse. +file.dataFilesTab.terms.list.termsOfUse.waiver.select.CCO=Yes, apply CC0 - "Public Domain Dedication" +file.dataFilesTab.terms.list.termsOfUse.waiver.select.notCCO=No, do not apply CC0 - "Public Domain Dedication" +file.dataFilesTab.terms.list.termsOfUse.waiver.select.tip=This is what end users will see displayed on this dataset +file.dataFilesTab.terms.list.termsOfUse.termsOfUse=Terms of Use +file.dataFilesTab.terms.list.termsOfUse.termsOfUse.title=Outlines how this data can be used once downloaded. +file.dataFilesTab.terms.list.termsOfUse.termsOfUse.description=If you are unable to use CC0 for datasets you are able to set custom terms of use. Here is an example of a Data Usage Agreement for datasets that have de-identified human subject data. 
+file.dataFilesTab.terms.list.termsOfUse.addInfo=Additional Information +file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration=Confidentiality Declaration +file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration.title=Indicates whether signing of a confidentiality declaration is needed to access a resource. +file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions=Special Permissions +file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions.title=Determine if any special permissions are required to access a resource (e.g., if a form is needed and where to access the form). +file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions=Restrictions +file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions.title=Any restrictions on access to or use of the collection, such as privacy certification or distribution restrictions, should be indicated here. These can be restrictions applied by the author, producer, or disseminator of the data collection. If the data are restricted to only a certain class of user, specify which type. +file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements=Citation Requirements +file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements.title=Include special/explicit citation requirements for data to be cited properly in articles or other publications that are based on analysis of the data. For standard data citation requirements refer to our Community Norms. +file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements=Depositor Requirements +file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements.title=Information regarding user responsibility for informing Dataset Depositors, Authors or Curators of their use of data through providing citations to the published work or providing copies of the manuscripts. 
+file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions=Conditions +file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions.title=Any additional information that will assist the user in understanding the access and use conditions of the Dataset. +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer=Disclaimer +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer.title=Information regarding responsibility for uses of the Dataset. +file.dataFilesTab.terms.list.termsOfAccess.header=Restricted Files + Terms of Access +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles=Restricted Files +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.title=The number of restricted files in this dataset. +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=There {0, choice, 0#are|1#is|2#are} {0} restricted {0, choice, 0#files|1#file|2#files} in this dataset. +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess=Terms of Access +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess.title=Information on how and if users can gain access to the restricted files in this dataset. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess=Request Access +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title=If checked, users can request access to the restricted files in this dataset. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.request=Users may request access to files. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.notRequest=Users may not request access to files. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.enableBtn=Enable access request +file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace=Data Access Place +file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace.title=If the data is not only in Dataverse, list the location(s) where the data are currently stored. 
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive=Original Archive +file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive.title=Archive from which the data was obtained. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus=Availability Status +file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus.title=Statement of Dataset availability. A depositor may need to indicate that a Dataset is unavailable because it is embargoed for a period of time, because it has been superseded, because a new edition is imminent, etc. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess=Contact for Access +file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess.title=If different from the Dataset Contact, this is the Contact person or organization (include email or full address, and telephone number if available) that controls access to a collection. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection=Size of Collection +file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection.tip=Summary of the number of physical files that exist in a Dataset, recording the number of files that contain data and noting whether the collection contains machine readable documentation and/or other supplementary files and information, such as code, data dictionaries, data definition statements, or data collection instruments. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion=Study Completion +file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion.title=Relationship of the data collected to the amount of data coded and stored in the Dataset. Information as to why certain items of collected information were not included in the dataset or a specific data file should be provided. 
+file.dataFilesTab.terms.list.guestbook=Guestbook +file.dataFilesTab.terms.list.guestbook.title=User information (i.e., name, email, institution, and position) will be collected when files are downloaded. +file.dataFilesTab.terms.list.guestbook.noSelected.tip=No guestbook is assigned to this dataset, you will not be prompted to provide any information on file download. +file.dataFilesTab.terms.list.guestbook.noSelected.admin.tip=There are no guestbooks available in {0} to assign to this dataset. +file.dataFilesTab.terms.list.guestbook.inUse.tip=The following guestbook will prompt a user to provide additional information when downloading a file. +file.dataFilesTab.terms.list.guestbook.viewBtn=Preview Guestbook +file.dataFilesTab.terms.list.guestbook.select.tip=Select a guestbook to have a user provide additional information when downloading a file. +file.dataFilesTab.terms.list.guestbook.noAvailable.tip=There are no guestbooks enabled in {0}. To create a guestbook, return to {0}, click the "Edit" button and select the "Dataset Guestbooks" option. +file.dataFilesTab.terms.list.guestbook.clearBtn=Clear Selection + +file.dataFilesTab.dataAccess=Data Access +file.dataFilesTab.dataAccess.info=This data file can be accessed through a terminal window, using the commands below. For more information about downloading and verifying data, see our User Guide. +file.dataFilesTab.dataAccess.info.draft=Data files can not be accessed until the dataset draft has been published. For more information about downloading and verifying data, see our User Guide. +file.dataFilesTab.dataAccess.local.label=Local Access +file.dataFilesTab.dataAccess.download.label=Download Access +file.dataFilesTab.dataAccess.verify.label=Verify Data +file.dataFilesTab.dataAccess.local.tooltip=If this data is locally available to you, this is its file path. +file.dataFilesTab.dataAccess.download.tooltip=Download this data from your preferred mirror by running this command. 
+file.dataFilesTab.dataAccess.verify.tooltip=This command runs a checksum to verify the integrity of the data you have downloaded. + +file.dataFilesTab.versions=Versions +file.dataFilesTab.versions.headers.dataset=Dataset +file.dataFilesTab.versions.headers.summary=Summary +file.dataFilesTab.versions.headers.contributors=Contributors +file.dataFilesTab.versions.headers.published=Published +file.dataFilesTab.versions.viewDiffBtn=View Differences +file.dataFilesTab.versions.citationMetadata=Citation Metadata: +file.dataFilesTab.versions.added=Added +file.dataFilesTab.versions.removed=Removed +file.dataFilesTab.versions.changed=Changed +file.dataFilesTab.versions.replaced=Replaced +file.dataFilesTab.versions.original=Original +file.dataFilesTab.versions.replacment=Replacement +file.dataFilesTab.versions.additionalCitationMetadata=Additional Citation Metadata: +file.dataFilesTab.versions.description.draft=This is a draft version. +file.dataFilesTab.versions.description.deaccessioned=Due to the previous version being deaccessioned, there are no difference notes available for this published version. +file.dataFilesTab.versions.description.firstPublished=This is the first published version. +file.dataFilesTab.versions.description.deaccessionedReason=Deaccessioned Reason: +file.dataFilesTab.versions.description.beAccessedAt=The dataset can now be accessed at: +file.dataFilesTab.versions.viewDetails.btn=View Details +file.dataFilesTab.versions.widget.viewMoreInfo=To view more information about the versions of this dataset, and to edit it if this is your dataset, please visit the full version of this dataset at the {2}. +file.deleteDialog.tip=Are you sure you want to delete this dataset? You cannot undelete this dataset. +file.deleteDialog.header=Delete Dataset +file.deleteDraftDialog.tip=Are you sure you want to delete this draft version? You cannot undelete this draft. 
+file.deleteDraftDialog.header=Delete Draft Version +file.deleteFileDialog.tip=The file(s) will be deleted after you click on the Save Changes button on the bottom of this page. +file.deleteFileDialog.immediate=The file will be deleted after you click on the Delete button. +file.deleteFileDialog.multiple.immediate=The file(s) will be deleted after you click on the Delete button. +file.deleteFileDialog.header=Delete Files +file.deleteFileDialog.failed.tip=Files will not be removed from previously published versions of the dataset. +file.deaccessionDialog.tip=Once you deaccession this dataset it will no longer be viewable by the public. +file.deaccessionDialog.version=Version +file.deaccessionDialog.reason.question1=Which version(s) do you want to deaccession? +file.deaccessionDialog.reason.question2=What is the reason for deaccession? +file.deaccessionDialog.reason.selectItem.identifiable=There is identifiable data in one or more files +file.deaccessionDialog.reason.selectItem.beRetracted=The research article has been retracted +file.deaccessionDialog.reason.selectItem.beTransferred=The dataset has been transferred to another repository +file.deaccessionDialog.reason.selectItem.IRB=IRB request +file.deaccessionDialog.reason.selectItem.legalIssue=Legal issue or Data Usage Agreement +file.deaccessionDialog.reason.selectItem.notValid=Not a valid dataset +file.deaccessionDialog.reason.selectItem.other=Other (Please type reason in space provided below) +file.deaccessionDialog.enterInfo=Please enter additional information about the reason for deaccession. +file.deaccessionDialog.leaveURL=If applicable, please leave a URL where this dataset can be accessed after deaccessioning. +file.deaccessionDialog.leaveURL.watermark=Optional dataset site, http://... +file.deaccessionDialog.deaccession.tip=Are you sure you want to deaccession? The selected version(s) will no longer be viewable by the public. 
+file.deaccessionDialog.deaccessionDataset.tip=Are you sure you want to deaccession this dataset? It will no longer be viewable by the public. +file.deaccessionDialog.dialog.selectVersion.tip=Please select version(s) for deaccessioning. +file.deaccessionDialog.dialog.selectVersion.header=Please Select Version(s) +file.deaccessionDialog.dialog.reason.tip=Please select reason for deaccessioning. +file.deaccessionDialog.dialog.reason.header=Please Select Reason +file.deaccessionDialog.dialog.url.tip=Please enter valid forwarding URL. +file.deaccessionDialog.dialog.url.header=Invalid URL +file.deaccessionDialog.dialog.textForReason.tip=Please enter text for reason for deaccessioning. +file.deaccessionDialog.dialog.textForReason.header=Enter additional information +file.deaccessionDialog.dialog.limitChar.tip=Text for reason for deaccessioning may be no longer than 1000 characters. +file.deaccessionDialog.dialog.limitChar.header=Limit 1000 characters +file.viewDiffDialog.header=Version Differences Details +file.viewDiffDialog.dialog.warning=Please select two versions to view the differences. +file.viewDiffDialog.version=Version +file.viewDiffDialog.lastUpdated=Last Updated +file.viewDiffDialog.fileID=File ID +file.viewDiffDialog.fileName=Name +file.viewDiffDialog.fileType=Type +file.viewDiffDialog.fileSize=Size +file.viewDiffDialog.category=Tag(s) +file.viewDiffDialog.description=Description +file.viewDiffDialog.fileReplaced=File Replaced +file.viewDiffDialog.filesReplaced=File(s) Replaced +file.viewDiffDialog.files.header=Files +file.viewDiffDialog.msg.draftFound= This is the "DRAFT" version. +file.viewDiffDialog.msg.draftNotFound=The "DRAFT" version was not found. +file.viewDiffDialog.msg.versionFound= This is version "{0}". +file.viewDiffDialog.msg.versionNotFound=Version "{0}" was not found. +file.metadataTip=Metadata Tip: After adding the dataset, click the Edit Dataset button to add more metadata. 
+file.addBtn=Save Dataset +file.dataset.allFiles=All Files from this Dataset +file.downloadDialog.header=Dataset Terms +file.downloadDialog.tip=Please confirm and/or complete the information needed below in order to continue. +file.requestAccessTermsDialog.tip=Please confirm and/or complete the information needed below in order to request access to files in this dataset. +file.search.placeholder=Search this dataset... +file.results.btn.sort=Sort +file.results.btn.sort.option.nameAZ=Name (A-Z) +file.results.btn.sort.option.nameZA=Name (Z-A) +file.results.btn.sort.option.newest=Newest +file.results.btn.sort.option.oldest=Oldest +file.results.btn.sort.option.size=Size +file.results.btn.sort.option.type=Type +file.compute.fileRestricted=File Restricted +file.compute.fileAccessDenied=You cannot compute on this restricted file because you do not have permission to access it. +file.configure.Button=Configure +file.configure.launchMessage.details=Please refresh this page once you have finished configuring your +dataset.compute.datasetCompute=Dataset Compute Not Supported +dataset.compute.datasetAccessDenied=You cannot compute on this dataset because you do not have permission to access all of the restricted files. +dataset.compute.datasetComputeDisabled=You cannot compute on this dataset because this functionality is not enabled yet. Please click on a file to access computing features. + +# dataset-widgets.xhtml +dataset.widgets.title=Dataset Thumbnail + Widgets +dataset.widgets.notPublished.why.header=Why Use Widgets? +dataset.widgets.notPublished.why.reason1=Increases the web visibility of your data by allowing you to embed your dataverse and datasets into your personal or project website. +dataset.widgets.notPublished.why.reason2=Allows others to browse your dataverse and datasets without leaving your personal or project website. 
+dataset.widgets.notPublished.how.header=How To Use Widgets +dataset.widgets.notPublished.how.tip1=To use widgets, your dataverse and datasets need to be published. +dataset.widgets.notPublished.how.tip2=After publishing, code will be available on this page for you to copy and add to your personal or project website. +dataset.widgets.notPublished.how.tip3=Do you have an OpenScholar website? If so, learn more about adding the Dataverse widgets to your website here. +dataset.widgets.notPublished.getStarted=To get started, publish your dataset. To learn more about Widgets, visit the Widgets section of the User Guide. +dataset.widgets.editAdvanced=Edit Advanced Options +dataset.widgets.editAdvanced.tip=Advanced Options – Additional options for configuring your widget on your personal or project website. +dataset.widgets.tip=Copy and paste this code into the HTML on your site. To learn more about Widgets, visit the Widgets section of the User Guide. +dataset.widgets.citation.txt=Dataset Citation +dataset.widgets.citation.tip=Add a citation for your dataset to your personal or project website. +dataset.widgets.datasetFull.txt=Dataset +dataset.widgets.datasetFull.tip=Add a way for visitors on your website to be able to view your datasets, download files, etc. +dataset.widgets.advanced.popup.header=Widget Advanced Options +dataset.widgets.advanced.prompt=Forward persistent URLs in your dataset citation to your personal website. +dataset.widgets.advanced.url.label=Personal Website URL +dataset.widgets.advanced.url.watermark=http://www.example.com/page-name +dataset.widgets.advanced.invalid.message=Please enter a valid URL +dataset.widgets.advanced.success.message=Successfully updated your Personal Website URL +dataset.widgets.advanced.failure.message=The dataverse Personal Website URL has not been updated. 
+dataset.thumbnailsAndWidget.breadcrumbs.title=Thumbnail + Widgets +dataset.thumbnailsAndWidget.thumbnails.title=Thumbnail +dataset.thumbnailsAndWidget.widgets.title=Widgets +dataset.thumbnailsAndWidget.thumbnailImage=Thumbnail Image +dataset.thumbnailsAndWidget.thumbnailImage.title=The logo or image file you wish to display as the thumbnail of this dataset. +dataset.thumbnailsAndWidget.thumbnailImage.tip=Supported image types are JPG, TIF, or PNG and should be no larger than {0} KB. The maximum display size for an image file as a dataset thumbnail is 48 pixels wide by 48 pixels high. +dataset.thumbnailsAndWidget.thumbnailImage.default=Default Icon +dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable=Select Available File +dataset.thumbnailsAndWidget.thumbnailImage.selectThumbnail=Select Thumbnail +dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable.title=Select a thumbnail from those available as image data files that belong to your dataset. +dataset.thumbnailsAndWidget.thumbnailImage.uploadNew=Upload New File +dataset.thumbnailsAndWidget.thumbnailImage.uploadNew.title=Upload an image file as your dataset thumbnail, which will be stored separately from the data files that belong to your dataset. +dataset.thumbnailsAndWidget.thumbnailImage.upload=Upload Image +dataset.thumbnailsAndWidget.thumbnailImage.upload.invalidMsg=The image could not be uploaded. Please try again with a JPG, TIF, or PNG file. +dataset.thumbnailsAndWidget.success=Dataset thumbnail updated. +dataset.thumbnailsAndWidget.removeThumbnail=Remove Thumbnail +dataset.thumbnailsAndWidget.removeThumbnail.tip=You are only removing this image as the dataset thumbnail, not removing it from your dataset. To do that, go to the Edit Files page. +dataset.thumbnailsAndWidget.availableThumbnails=Available Thumbnails +dataset.thumbnailsAndWidget.availableThumbnails.tip=Select a thumbnail from the data files that belong to your dataset. 
Continue back to the Thumbnail + Widgets page to save your changes. + +# file.xhtml +file.share.fileShare=Share File +file.share.fileShare.tip=Share this file on your favorite social media networks. +file.share.fileShare.shareText=View this file. +file.title.label=Title +file.citation.label=Citation +file.cite.downloadBtn=Cite Data File +file.general.metadata.label=General Metadata +file.description.label=Description +file.tags.label=Tags +file.lastupdated.label=Last Updated +file.DatasetVersion=Version +file.metadataTab.fileMetadata.header=File Metadata +file.metadataTab.fileMetadata.persistentid.label=Data File Persistent ID +file.metadataTab.fileMetadata.downloadUrl.label=Download URL +file.metadataTab.fileMetadata.unf.label=UNF +file.metadataTab.fileMetadata.size.label=Size +file.metadataTab.fileMetadata.type.label=Type +file.metadataTab.fileMetadata.description.label=Description +file.metadataTab.fileMetadata.publicationDate.label=Publication Date +file.metadataTab.fileMetadata.depositDate.label=Deposit Date +file.metadataTab.fitsMetadata.header=FITS Metadata +file.metadataTab.provenance.header=File Provenance +file.metadataTab.provenance.body=File Provenance information coming in a later release... 
+file.versionDifferences.noChanges=No changes associated with this version +file.versionDifferences.fileNotInVersion=File not included in this version +file.versionDifferences.actionChanged=Changed +file.versionDifferences.actionAdded=Added +file.versionDifferences.actionRemoved=Removed +file.versionDifferences.actionReplaced=Replaced +file.versionDifferences.fileMetadataGroupTitle=File Metadata +file.versionDifferences.fileTagsGroupTitle=File Tags +file.versionDifferences.descriptionDetailTitle=Description +file.versionDifferences.fileNameDetailTitle=File Name +file.versionDifferences.fileAccessTitle=File Access +file.versionDifferences.fileRestricted=Restricted +file.versionDifferences.fileUnrestricted=Unrestricted +file.versionDifferences.fileGroupTitle=File + +# File Ingest +ingest.csv.invalidHeader=Invalid header row. One of the cells is empty. +ingest.csv.lineMismatch=Mismatch between line counts in first and final passes!, {0} found on first pass, but {1} found on second. +ingest.csv.recordMismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found. +ingest.csv.nullStream=Stream can't be null. + +# editdatafile.xhtml + +# editFilesFragment.xhtml +file.edit.error.file_exceeds_limit=This file exceeds the size limit. +# File metadata error +file.metadata.datafiletag.not_tabular=You cannot add Tabular Data Tags to a non-tabular file. + +# File Edit Success +file.message.editSuccess=This file has been updated. +file.message.deleteSuccess=The file has been deleted. +file.message.replaceSuccess=This file has been replaced. + +# File Add/Replace operation messages +file.addreplace.file_size_ok=File size is in range. +file.addreplace.error.file_exceeds_limit=This file size ({0}) exceeds the size limit of {1} bytes. +file.addreplace.error.dataset_is_null=The dataset cannot be null. +file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null. 
+find.dataset.error.dataset_id_is_null=When accessing a dataset based on Persistent ID, a {0} query parameter must be present. +find.dataset.error.dataset.not.found.persistentId=Dataset with Persistent ID {0} not found. +find.dataset.error.dataset.not.found.id=Dataset with ID {0} not found. +find.dataset.error.dataset.not.found.bad.id=Bad dataset ID number: {0}. +file.addreplace.error.dataset_id_not_found=There was no dataset found for ID: +file.addreplace.error.no_edit_dataset_permission=You do not have permission to edit this dataset. +file.addreplace.error.filename_undetermined=The file name cannot be determined. +file.addreplace.error.file_content_type_undetermined=The file content type cannot be determined. +file.addreplace.error.file_upload_failed=The file upload failed. +file.addreplace.error.duplicate_file=This file already exists in the dataset. +file.addreplace.error.existing_file_to_replace_id_is_null=The ID of the existing file to replace must be provided. +file.addreplace.error.existing_file_to_replace_not_found_by_id=Replacement file not found. There was no file found for ID: {0} +file.addreplace.error.existing_file_to_replace_is_null=The file to replace cannot be null. +file.addreplace.error.existing_file_to_replace_not_in_dataset=The file to replace does not belong to this dataset. +file.addreplace.error.existing_file_not_in_latest_published_version=You cannot replace a file that is not in the most recently published dataset. (The file is unpublished or was deleted from a previous version.) +file.addreplace.content_type.header=File Type Different +file.addreplace.error.replace.new_file_has_different_content_type=The original file ({0}) and replacement file ({1}) are different file types. +file.addreplace.error.replace.new_file_same_as_replacement=You cannot replace a file with the exact same file. +file.addreplace.error.unpublished_file_cannot_be_replaced=You cannot replace an unpublished file. Please delete it instead of replacing it. 
+file.addreplace.error.ingest_create_file_err=There was an error when trying to add the new file. +file.addreplace.error.initial_file_list_empty=An error occurred and the new file was not added. +file.addreplace.error.initial_file_list_more_than_one=You cannot replace a single file with multiple files. The file you uploaded was ingested into multiple files. +file.addreplace.error.final_file_list_empty=There are no files to add. (This error should not happen if steps called in sequence.) +file.addreplace.error.only_replace_operation=This should only be called for file replace operations! +file.addreplace.error.failed_to_remove_old_file_from_dataset=Unable to remove old file from new DatasetVersion. +file.addreplace.error.add.add_file_error=Failed to add file to dataset. +file.addreplace.error.phase2_called_early_no_new_files=There was an error saving the dataset - no new files found. +file.addreplace.success.add=File successfully added! +file.addreplace.success.replace=File successfully replaced! +file.addreplace.error.auth=The API key is invalid. +file.addreplace.error.invalid_datafile_tag=Not a valid Tabular Data Tag: + +# 500.xhtml +error.500.page.title=500 Internal Server Error +error.500.message=Internal Server Error - An unexpected error was encountered, no more information is available. + +# 404.xhtml +error.404.page.title=404 Not Found +error.404.message=Page Not Found - The page you are looking for was not found. + +# 403.xhtml +error.403.page.title=403 Not Authorized +error.403.message=Not Authorized - You are not authorized to view this page. + +# general error - support message +error.support.message= If you believe this is an error, please contact {0} for assistance. + +# citation-frame.xhtml +citationFrame.banner.message=If the site below does not load, the archived data can be found in the {0} {1}. 
{2} +citationFrame.banner.message.here=here +citationFrame.banner.closeIcon=Close this message, go to dataset +citationFrame.banner.countdownMessage= This message will close in +citationFrame.banner.countdownMessage.seconds=seconds + +# Friendly AuthenticationProvider names +authenticationProvider.name.builtin=Dataverse +authenticationProvider.name.null=(provider is unknown) +authenticationProvider.name.github=GitHub +authenticationProvider.name.google=Google +authenticationProvider.name.orcid=ORCiD +authenticationProvider.name.orcid-sandbox=ORCiD Sandbox +authenticationProvider.name.shib=Shibboleth +ingest.csv.invalidHeader=Invalid header row. One of the cells is empty. +ingest.csv.lineMismatch=Mismatch between line counts in first and final passes!, {0} found on first pass, but {1} found on second. +ingest.csv.recordMismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found. +ingest.csv.nullStream=Stream can't be null. diff --git a/dataversedock/lang.properties/Bundle_de.properties b/dataversedock/lang.properties/Bundle_de.properties new file mode 100644 index 0000000..8504a63 --- /dev/null +++ b/dataversedock/lang.properties/Bundle_de.properties @@ -0,0 +1,1711 @@ +dataverse=Dataverse Deutschland +newDataverse=New Dataverse +hostDataverse=Host Dataverse +dataverses=Dataverses +passwd=Password +dataset=Dataset +datasets=Datasets +newDataset=New Dataset +files=Files +file=File +restricted=Restricted +restrictedaccess=Restricted Access +find=Find +search=Search +unpublished=Unpublished +cancel=Cancel +ok=OK +saveChanges=Save Changes +acceptTerms=Accept +submit=Submit +signup=Sign Up +login=Einloggen +email=Email +account=Account +requiredField=Required field +new=New +identifier=Identifier +description=Description +subject=Subject +close=Close +preview=Preview +continue=Continue +name=Name +institution=Institution +position=Position +affiliation=Affiliation +createDataverse=Create Dataverse +remove=Remove +done=Done 
+editor=Contributor +manager=Manager +curator=Curator +explore=Explore +download=Download +deaccession=Deaccession +share=Share +link=Link +linked=Linked +harvested=Harvested +apply=Apply +add=Add +delete=Delete +yes=Yes +no=No +previous=Previous +next=Next +first=First +last=Last +more=More... +less=Less... +select=Select... +selectedFiles=Selected Files +htmlAllowedTitle=Allowed HTML Tags +htmlAllowedMsg=This field supports only certain HTML tags. +htmlAllowedTags=, ,
&lt;a&gt;, &lt;b&gt;, &lt;blockquote&gt;, &lt;br&gt;, &lt;code&gt;, &lt;del&gt;, &lt;dd&gt;, &lt;dl&gt;, &lt;dt&gt;, &lt;em&gt;, &lt;hr&gt;, &lt;h1&gt;-&lt;h3&gt;, &lt;i&gt;, &lt;img&gt;, &lt;kbd&gt;, &lt;li&gt;, &lt;ol&gt;, &lt;p&gt;, &lt;pre&gt;, &lt;s&gt;, &lt;sup&gt;, &lt;sub&gt;, &lt;strong&gt;, &lt;strike&gt;, &lt;u&gt;, &lt;ul&gt;
+ +# dataverse_header.xhtml +header.status.header=Status +header.search.title=Search all dataverses... +header.about=About +header.support=Unterst\u00fctzung +header.guides=Guides +header.guides.user=Benutzerhandbuch +header.guides.developer=Developer Guide +header.guides.installation=Installation Guide +header.guides.api=API Guide +header.guides.admin=Admin Guide +header.signUp=Anmelden +header.logOut=Log Out +header.accountInfo=Account Information +header.dashboard=Dashboard +header.user.selectTab.dataRelated=My Data +header.user.selectTab.notifications=Notifications +header.user.selectTab.accountInfo=Account Information +header.user.selectTab.groupsAndRoles=Groups + Roles +header.user.selectTab.apiToken=API Token + +# dataverse_template.xhtml +head.meta.description=The Dataverse Project is an open source software application to share, cite and archive data. Dataverse provides a robust infrastructure for data stewards to host and archive data, while offering researchers an easy way to share and get credit for their data. +body.skip=Skip to main content + +# dataverse_footer.xhtml +footer.copyright=Copyright \u00A9 {0} +footer.widget.datastored=Data is stored at {0}. +footer.widget.login=Log in to +footer.privacyPolicy=Privacy Policy +footer.poweredby=Powered by +footer.dataverseProject=The Dataverse Project + +# messages.xhtml +messages.error=Error +messages.success=Success! +messages.info=Info +messages.validation=Validation Error +messages.validation.msg=Required fields were missed or there was a validation error. Please scroll down to see details. + +# contactFormFragment.xhtml +contact.header=Contact {0} +contact.dataverse.header=Email Dataverse Contact +contact.dataset.header=Email Dataset Contact +contact.to=To +contact.support=Support +contact.from=From +contact.from.required=User email is required. +contact.from.invalid=Email is invalid. +contact.subject=Subject +contact.subject.required=Subject is required. 
+contact.subject.selectTab.top=Select subject... +contact.subject.selectTab.support=Support Question +contact.subject.selectTab.dataIssue=Data Issue +contact.msg=Message +contact.msg.required=Message text is required. +contact.send=Send Message +contact.question=Please fill this out to prove you are not a robot. +contact.sum.required=Value is required. +contact.sum.invalid=Incorrect sum, please try again. +contact.sum.converterMessage=Please enter a number. +contact.contact=Contact + +# dataverseuser.xhtml +account.info=Account Information +account.edit=Edit Account +account.apiToken=API Token +user.isShibUser=Account information cannot be edited when logged in through an institutional account. +user.helpShibUserMigrateOffShibBeforeLink=Leaving your institution? Please contact +user.helpShibUserMigrateOffShibAfterLink=for assistance. +user.helpOAuthBeforeLink=Your Dataverse account uses {0} for login. If you are interested in changing login methods, please contact +user.helpOAuthAfterLink=for assistance. +user.lostPasswdTip=If you have lost or forgotten your password, please enter your username or email address below and click Submit. We will send you an e-mail with your new password. +user.dataRelatedToMe=My Data +wasCreatedIn=, was created in +wasCreatedTo=, was added to +wasSubmittedForReview=, was submitted for review to be published in +wasPublished=, was published in +wasReturnedByReviewer=, was returned by the curator of +# TODO: Confirm that "toReview" can be deleted. +toReview=Don't forget to publish it or send it back to the contributor! +worldMap.added=dataset had a WorldMap layer data added to it. +# Bundle file editors, please note that "notification.welcome" is used in a unit test. +notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the {1}. Want to test out Dataverse features? Use our {2}. Also, check for your welcome email to verify your address. 
+notification.demoSite=Demo Site +notification.requestFileAccess=File access requested for dataset: {0}. +notification.grantFileAccess=Access granted for files in dataset: {0}. +notification.rejectFileAccess=Access rejected for requested files in dataset: {0}. +notification.createDataverse={0} was created in {1} . To learn more about what you can do with your dataverse, check out the {2}. +notification.dataverse.management.title=Dataverse Management - Dataverse User Guide +notification.createDataset={0} was created in {1}. To learn more about what you can do with a dataset, check out the {2}. +notification.dataset.management.title=Dataset Management - Dataset User Guide +notification.wasSubmittedForReview={0} was submitted for review to be published in {1}. Don''t forget to publish it or send it back to the contributor\! +notification.wasReturnedByReviewer={0} was returned by the curator of {1}. +notification.wasPublished={0} was published in {1}. +notification.worldMap.added={0}, dataset had WorldMap layer data added to it. +notification.maplayer.deletefailed=Failed to delete the map layer associated with the restricted file {0} from WorldMap. Please try again, or contact WorldMap and/or Dataverse support. (Dataset: {1}) +notification.generic.objectDeleted=The dataverse, dataset, or file for this notification has been deleted. +notification.access.granted.dataverse=You have been granted the {0} role for {1}. +notification.access.granted.dataset=You have been granted the {0} role for {1}. +notification.access.granted.datafile=You have been granted the {0} role for file in {1}. +notification.access.granted.fileDownloader.additionalDataverse={0} You now have access to all published restricted and unrestricted files in this dataverse. +notification.access.granted.fileDownloader.additionalDataset={0} You now have access to all published restricted and unrestricted files in this dataset. +notification.access.revoked.dataverse=You have been removed from a role in {0}. 
+notification.access.revoked.dataset=You have been removed from a role in {0}. +notification.access.revoked.datafile=You have been removed from a role in {0}. +notification.checksumfail=One or more files in your upload failed checksum validation for dataset {0}. Please re-run the upload script. If the problem persists, please contact support. +notification.mail.import.filesystem=Dataset {2} ({0}/dataset.xhtml?persistentId={1}) has been successfully uploaded and verified. +notification.import.filesystem=Dataset {1} has been successfully uploaded and verified. +notification.import.checksum={1}, dataset had file checksums added via a batch job. +removeNotification=Remove Notification +groupAndRoles.manageTips=Here is where you can access and manage all the groups you belong to, and the roles you have been assigned. +user.signup.tip=Why have a Dataverse account? To create your own dataverse and customize it, add datasets, or request access to restricted files. +user.signup.otherLogInOptions.tip=You can also create a Dataverse account with one of our other log in options. +user.username.illegal.tip=Between 2-60 characters, and can use "a-z", "0-9", "_" for your username. +user.username=Username +user.username.taken=This username is already taken. +user.username.invalid=This username contains an invalid character or is outside the length requirement (2-60 characters). +user.username.valid=Create a valid username of 2 to 60 characters in length containing letters (a-Z), numbers (0-9), dashes (-), underscores (_), and periods (.). +user.noPasswd=No Password +user.currentPasswd=Current Password +user.currentPasswd.tip=Please enter the current password for this account. +user.passwd.illegal.tip=Password needs to be at least 6 characters, include one letter and one number, and special characters may be used. +user.rePasswd=Retype Password +user.rePasswd.tip=Please retype the password you entered above. 
+user.firstName=Given Name +user.firstName.tip=The first name or name you would like to use for this account. +user.lastName=Family Name +user.lastName.tip=The last name you would like to use for this account. +user.email.tip=A valid email address you have access to in order to be contacted. +user.email.taken=This email address is already taken. +user.affiliation.tip=The organization with which you are affiliated. +user.position=Position +user.position.tip=Your role or title at the organization you are affiliated with; such as staff, faculty, student, etc. +user.acccountterms=General Terms of Use +user.acccountterms.tip=The terms and conditions for using the application and services. +user.acccountterms.required=Please check the box to indicate your acceptance of the General Terms of Use. +user.acccountterms.iagree=I have read and accept the Dataverse General Terms of Use as outlined above. +user.createBtn=Create Account +user.updatePassword.welcome=Welcome to Dataverse {0}, {1} +user.updatePassword.warning=With the release of our new Dataverse 4.0 upgrade, the password requirements and General Terms of Use have updated. As this is the first time you are using Dataverse since the update, you need to create a new password and agree to the new General Terms of Use. +user.updatePassword.password={0} +authenticationProvidersAvailable.tip={0}There are no active authentication providers{1}If you are a system administrator, please enable one using the API.{2}If you are not a system administrator, please contact the one for your institution. 
+ +passwdVal.passwdReq.title=Your password must contain: +passwdVal.passwdReq.goodStrength =passwords of at least {0} characters are exempt from all other requirements +passwdVal.passwdReq.lengthReq =At least {0} characters +passwdVal.passwdReq.characteristicsReq =At least 1 character from {0} of the following types: +passwdVal.passwdReq.notInclude =It may not include: +passwdVal.passwdReq.consecutiveDigits =More than {0} numbers in a row +passwdVal.passwdReq.dictionaryWords =Dictionary words +passwdVal.passwdReq.unknownPasswordRule =Unknown, contact your administrator +#printf syntax used to pass to passay library +passwdVal.expireRule.errorCode =EXPIRED +passwdVal.expireRule.errorMsg =The password is over %1$s days old and has expired. +passwdVal.goodStrengthRule.errorMsg =Note: passwords are always valid with a %1$s or more character length regardless. +passwdVal.goodStrengthRule.errorCode =NO_GOODSTRENGTH +passwdVal.passwdReset.resetLinkTitle =Password Reset Link +passwdVal.passwdReset.resetLinkDesc =Your password reset link is not valid +passwdVal.passwdReset.valBlankLog =new password is blank +passwdVal.passwdReset.valFacesError =Password Error +passwdVal.passwdReset.valFacesErrorDesc =Please enter a new password for your account. +passwdVal.passwdValBean.warnDictionaryRead =Dictionary was set, but none was read in. +passwdVal.passwdValBean.warnDictionaryObj =PwDictionaries not set and no default password file found: +passwdVal.passwdValBean.warnSetStrength =The PwGoodStrength {0} value competes with the PwMinLength value of {1} and is added to {2} + +#loginpage.xhtml +login.System=Login System +login.forgot.text=Forgot your password? +login.builtin=Dataverse Account +login.institution=Institutional Account +login.institution.blurb=Log in or sign up with your institutional account — learn more. +login.institution.support.beforeLink=Leaving your institution? Please contact +login.institution.support.afterLink=for assistance. 
+login.builtin.credential.usernameOrEmail=Username/Email +login.builtin.credential.password=Password +login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account? +# how do we exercise login.error? Via a password upgrade failure? See https://github.com/IQSS/dataverse/pull/2922 +login.error=Error validating the username, email address, or password. Please try again. If the problem persists, contact an administrator. +user.error.cannotChangePassword=Sorry, your password cannot be changed. Please contact your system administrator. +user.error.wrongPassword=Sorry, wrong password. +login.button=Log In with {0} +login.button.orcid=Create or Connect your ORCID +# authentication providers +auth.providers.title=Other options +auth.providers.tip=You can convert a Dataverse account to use one of the options above. Learn more. +auth.providers.title.builtin=Username/Email +auth.providers.title.shib=Your Institution +auth.providers.title.orcid=ORCID +auth.providers.title.google=Google +auth.providers.title.github=GitHub +auth.providers.blurb=Log in or sign up with your {0} account — learn more. Having trouble? Please contact {3} for assistance. +auth.providers.persistentUserIdName.orcid=ORCID iD +auth.providers.persistentUserIdName.github=ID +auth.providers.persistentUserIdTooltip.orcid=ORCID provides a persistent digital identifier that distinguishes you from other researchers. +auth.providers.persistentUserIdTooltip.github=GitHub assigns a unique number to every user. +auth.providers.orcid.insufficientScope=Dataverse was not granted the permission to read user data from ORCID. 
+# Friendly AuthenticationProvider names +authenticationProvider.name.builtin=Dataverse +authenticationProvider.name.null=(provider is unknown) +authenticationProvider.name.github=GitHub +authenticationProvider.name.google=Google +authenticationProvider.name.orcid=ORCiD +authenticationProvider.name.orcid-sandbox=ORCiD Sandbox +authenticationProvider.name.shib=Shibboleth + +#confirmemail.xhtml +confirmEmail.pageTitle=Email Verification +confirmEmail.submitRequest=Verify Email +confirmEmail.submitRequest.success=A verification email has been sent to {0}. Note, the verify link will expire after {1}. +confirmEmail.details.success=Email address verified! +confirmEmail.details.failure=We were unable to verify your email address. Please navigate to your Account Information page and click the "Verify Email" button. +confirmEmail.details.goToAccountPageButton=Go to Account Information +confirmEmail.notVerified=Not Verified +confirmEmail.verified=Verified + +#shib.xhtml +shib.btn.convertAccount=Convert Account +shib.btn.createAccount=Create Account +shib.askToConvert=Would you like to convert your Dataverse account to always use your institutional log in? +# Bundle file editors, please note that "shib.welcomeExistingUserMessage" is used in a unit test +shib.welcomeExistingUserMessage=Your institutional log in for {0} matches an email address already being used for a Dataverse account. By entering your current Dataverse password below, your existing Dataverse account can be converted to use your institutional log in. After converting, you will only need to use your institutional log in. 
+# Bundle file editors, please note that "shib.welcomeExistingUserMessageDefaultInstitution" is used in a unit test +shib.welcomeExistingUserMessageDefaultInstitution=your institution +shib.dataverseUsername=Dataverse Username +shib.currentDataversePassword=Current Dataverse Password +shib.accountInformation=Account Information +shib.offerToCreateNewAccount=This information is provided by your institution and will be used to create your Dataverse account. +shib.passwordRejected=Validation Error - Your account can only be converted if you provide the correct password for your existing account. + +# oauth2/firstLogin.xhtml +oauth2.btn.convertAccount=Convert Existing Account +oauth2.btn.createAccount=Create New Account +oauth2.askToConvert=Would you like to convert your Dataverse account to always use your institutional log in? +oauth2.welcomeExistingUserMessage=Your institutional log in for {0} matches an email address already being used for a Dataverse account. By entering your current Dataverse password below, your existing Dataverse account can be converted to use your institutional log in. After converting, you will only need to use your institutional log in. +oauth2.welcomeExistingUserMessageDefaultInstitution=your institution +oauth2.dataverseUsername=Dataverse Username +oauth2.currentDataversePassword=Current Dataverse Password +oauth2.chooseUsername=Username: +oauth2.passwordRejected=Validation Error - Wrong username or password. +# oauth2.newAccount.title=Account Creation +oauth2.newAccount.welcomeWithName=Welcome to Dataverse, {0} +oauth2.newAccount.welcomeNoName=Welcome to Dataverse +# oauth2.newAccount.email=Email +# oauth2.newAccount.email.tip=Dataverse uses this email to notify you of issues regarding your data. +oauth2.newAccount.suggestedEmails=Suggested Email Addresses: +oauth2.newAccount.username=Username +oauth2.newAccount.username.tip=This username will be your unique identifier as a Dataverse user. 
+oauth2.newAccount.explanation=This information is provided by {0} and will be used to create your {1} account. To log in again, you will have to use the {0} log in option. +oauth2.newAccount.suggestConvertInsteadOfCreate=If you already have a {0} account, you will need to convert your account. +# oauth2.newAccount.tabs.convertAccount=Convert Existing Account +oauth2.newAccount.buttons.convertNewAccount=Convert Account +oauth2.newAccount.emailTaken=Email already taken. Consider merging the corresponding account instead. +oauth2.newAccount.emailOk=Email OK. +oauth2.newAccount.emailInvalid=Invalid email address. +# oauth2.newAccount.usernameTaken=Username already taken. +# oauth2.newAccount.usernameOk=Username OK. + +# oauth2/convert.xhtml +# oauth2.convertAccount.title=Account Conversion +oauth2.convertAccount.explanation=Please enter your {0} account username or email and password to convert your account to the {1} log in option. Learn more about converting your account. +oauth2.convertAccount.username=Existing username +oauth2.convertAccount.password=Password +oauth2.convertAccount.authenticationFailed=Authentication failed - bad username or password. +oauth2.convertAccount.buttonTitle=Convert Account +oauth2.convertAccount.success=Your Dataverse account is now associated with your {0} account. + +# oauth2/callback.xhtml +oauth2.callback.page.title=OAuth Callback +oauth2.callback.message=Authentication Error - Dataverse could not authenticate your ORCID login. Please make sure you authorize your ORCID account to connect with Dataverse. For more details about the information being requested, see the User Guide. + +# tab on dataverseuser.xhtml +apitoken.title=API Token +apitoken.message=Your API Token is displayed below after it has been created. Check out our {0}API Guide{1} for more information on using your API Token with the Dataverse APIs. +apitoken.notFound=API Token for {0} has not been created. 
+apitoken.generateBtn=Create Token +apitoken.regenerateBtn=Recreate Token + +#dashboard.xhtml +dashboard.title=Dashboard +dashboard.card.harvestingclients.header=Harvesting Clients +dashboard.card.harvestingclients.btn.manage=Manage Clients +dashboard.card.harvestingclients.clients={0, choice, 0#Clients|1#Client|2#Clients} +dashboard.card.harvestingclients.datasets={0, choice, 0#Datasets|1#Dataset|2#Datasets} +dashboard.card.harvestingserver.header=Harvesting Server +dashboard.card.harvestingserver.enabled=OAI server enabled +dashboard.card.harvestingserver.disabled=OAI server disabled +dashboard.card.harvestingserver.status=Status +dashboard.card.harvestingserver.sets={0, choice, 0#Sets|1#Set|2#Sets} +dashboard.card.harvestingserver.btn.manage=Manage Server +dashboard.card.metadataexport.header=Metadata Export +dashboard.card.metadataexport.message=Dataset metadata export is only available through the {0} API. Learn more in the {0} {1}API Guide{2}. + +#harvestclients.xhtml +harvestclients.title=Manage Harvesting Clients +harvestclients.toptip= - Harvesting can be scheduled to run at a specific time or on demand. Harvesting can be initiated here or via the REST API. +harvestclients.noClients.label=No clients are configured. +harvestclients.noClients.why.header=What is Harvesting? +harvestclients.noClients.why.reason1=Harvesting is a process of exchanging metadata with other repositories. As a harvesting client, your Dataverse gathers metadata records from remote sources. These can be other Dataverse instances, or other archives that support OAI-PMH, the standard harvesting protocol. +harvestclients.noClients.why.reason2=Harvested metadata records are searchable by users. Clicking on a harvested dataset in the search results takes the user to the original repository. Harvested datasets cannot be edited in your Dataverse installation. 
+harvestclients.noClients.how.header=How To Use Harvesting +harvestclients.noClients.how.tip1=To harvest metadata, a Harvesting Client is created and configured for each remote repository. Note that when creating a client you will need to select an existing local dataverse to host harvested datasets. +harvestclients.noClients.how.tip2=Harvested records can be kept in sync with the original repository through scheduled incremental updates, for example, daily or weekly. Alternatively, harvests can be run on demand, from this page or via the REST API. +harvestclients.noClients.getStarted=To get started, click on the Add Client button above. To learn more about Harvesting, visit the Harvesting section of the User Guide. +harvestclients.btn.add=Add Client +harvestclients.tab.header.name=Nickname +harvestclients.tab.header.url=URL +harvestclients.tab.header.lastrun=Last Run +harvestclients.tab.header.lastresults=Last Results +harvestclients.tab.header.action=Actions +harvestclients.tab.header.action.btn.run=Run Harvesting +harvestclients.tab.header.action.btn.edit=Edit +harvestclients.tab.header.action.btn.delete=Delete +harvestclients.tab.header.action.btn.delete.dialog.header=Delete Harvesting Client +harvestclients.tab.header.action.btn.delete.dialog.warning=Are you sure you want to delete the harvesting client "{0}"? Deleting the client will delete all datasets harvested from this remote server. +harvestclients.tab.header.action.btn.delete.dialog.tip=Note, this action may take a while to process, depending on the number of harvested datasets. +harvestclients.tab.header.action.delete.infomessage=Harvesting client is being deleted. Note, that this may take a while, depending on the amount of harvested content. +harvestclients.actions.runharvest.success=Successfully started an asynchronous harvest for client "{0}" . Please reload the page to check on the harvest results). 
+harvestclients.newClientDialog.step1=Step 1 of 4 - Client Information +harvestclients.newClientDialog.title.new=Create Harvesting Client +harvestclients.newClientDialog.help=Configure a client to harvest content from a remote server. +harvestclients.newClientDialog.nickname=Nickname +harvestclients.newClientDialog.nickname.helptext=Consists of letters, digits, underscores (_) and dashes (-). +harvestclients.newClientDialog.nickname.required=Client nickname cannot be empty! +harvestclients.newClientDialog.nickname.invalid=Client nickname can contain only letters, digits, underscores (_) and dashes (-); and must be at most 30 characters. +harvestclients.newClientDialog.nickname.alreadyused=This nickname is already used. +harvestclients.newClientDialog.type=Server Protocol +harvestclients.newClientDialog.type.helptext=Only the OAI server protocol is currently supported. +harvestclients.newClientDialog.type.OAI=OAI +harvestclients.newClientDialog.type.Nesstar=Nesstar +harvestclients.newClientDialog.url=Server URL +harvestclients.newClientDialog.url.tip=URL of a harvesting resource. +harvestclients.newClientDialog.url.watermark=Remote harvesting server, http://... +harvestclients.newClientDialog.url.helptext.notvalidated=URL of a harvesting resource. Once you click 'Next', we will try to establish a connection to the server in order to verify that it is working, and to obtain extra information about its capabilities. +harvestclients.newClientDialog.url.required=A valid harvesting server address is required. +harvestclients.newClientDialog.url.invalid=Invalid URL. Failed to establish connection and receive a valid server response. +harvestclients.newClientDialog.url.noresponse=Failed to establish connection to the server. +harvestclients.newClientDialog.url.badresponse=Invalid response from the server. 
+harvestclients.newClientDialog.dataverse=Local Dataverse +harvestclients.newClientDialog.dataverse.tip=Dataverse that will host the datasets harvested from this remote resource. +harvestclients.newClientDialog.dataverse.menu.enterName=Enter Dataverse Alias +harvestclients.newClientDialog.dataverse.menu.header=Dataverse Name (Affiliate), Alias +harvestclients.newClientDialog.dataverse.menu.invalidMsg=No matches found +harvestclients.newClientDialog.dataverse.required=You must select an existing dataverse for this harvesting client. +harvestclients.newClientDialog.step2=Step 2 of 4 - Format +harvestclients.newClientDialog.oaiSets=OAI Set +harvestclients.newClientDialog.oaiSets.tip=Harvesting sets offered by this OAI server. +harvestclients.newClientDialog.oaiSets.noset=None +harvestclients.newClientDialog.oaiSets.helptext=Selecting "none" will harvest the default set, as defined by the server. Often this will be the entire body of content across all sub-sets. +harvestclients.newClientDialog.oaiSets.helptext.noset=This OAI server does not support named sets. The entire body of content offered by the server will be harvested. +harvestclients.newClientDialog.oaiMetadataFormat=Metadata Format +harvestclients.newClientDialog.oaiMetadataFormat.tip=Metadata formats offered by the remote server. +harvestclients.newClientDialog.oaiMetadataFormat.required=Please select the metadata format to harvest from this archive. +harvestclients.newClientDialog.step3=Step 3 of 4 - Schedule +harvestclients.newClientDialog.schedule=Schedule +harvestclients.newClientDialog.schedule.tip=Schedule harvesting to run automatically daily or weekly. +harvestclients.newClientDialog.schedule.time.none.helptext=Leave harvesting unscheduled to run on demand only. 
+harvestclients.newClientDialog.schedule.none=None +harvestclients.newClientDialog.schedule.daily=Daily +harvestclients.newClientDialog.schedule.weekly=Weekly +harvestclients.newClientDialog.schedule.time=Time +harvestclients.newClientDialog.schedule.day=Day +harvestclients.newClientDialog.schedule.time.am=AM +harvestclients.newClientDialog.schedule.time.pm=PM +harvestclients.newClientDialog.schedule.time.helptext=Scheduled times are in your local time. +harvestclients.newClientDialog.btn.create=Create Client +harvestclients.newClientDialog.success=Successfully created harvesting client "{0}". +harvestclients.newClientDialog.step4=Step 4 of 4 - Display +harvestclients.newClientDialog.harvestingStyle=Archive Type +harvestclients.newClientDialog.harvestingStyle.tip=Type of remote archive. +harvestclients.newClientDialog.harvestingStyle.helptext=Select the archive type that best describes this remote server in order to properly apply formatting rules and styles to the harvested metadata as they are shown in the search results. Note that improperly selecting the type of the remote archive can result in incomplete entries in the search results, and a failure to redirect the user to the archival source of the data. +harvestclients.viewEditDialog.title=Edit Harvesting Client +harvestclients.viewEditDialog.archiveUrl=Archive URL +harvestclients.viewEditDialog.archiveUrl.tip=The URL of the archive that serves the data harvested by this client, which is used in search results for links to the original sources of the harvested content. +harvestclients.viewEditDialog.archiveUrl.helptext=Edit if this URL differs from the Server URL. +harvestclients.viewEditDialog.archiveDescription=Archive Description +harvestclients.viewEditDialog.archiveDescription.tip=Description of the archival source of the harvested content, displayed in search results. +harvestclients.viewEditDialog.archiveDescription.default.generic=This Dataset is harvested from our partners. 
Clicking the link will take you directly to the archival source of the data. +harvestclients.viewEditDialog.btn.save=Save Changes +harvestclients.newClientDialog.title.edit=Edit Group {0} + +#harvestset.xhtml +harvestserver.title=Manage Harvesting Server +harvestserver.toptip= - Define sets of local datasets that will be available for harvesting by remote clients. +harvestserver.service.label=OAI Server +harvestserver.service.enabled=Enabled +harvestserver.service.disabled=Disabled +harvestserver.service.disabled.msg=Harvesting Server is currently disabled. +harvestserver.service.empty=No sets are configured. +harvestserver.service.enable.success=OAI Service has been successfully enabled. +harvestserver.noSets.why.header=What is a Harvesting Server? +harvestserver.noSets.why.reason1=Harvesting is a process of exchanging metadata with other repositories. As a harvesting server, your Dataverse can make some of the local dataset metadata available to remote harvesting clients. These can be other Dataverse instances, or any other clients that support OAI-PMH harvesting protocol. +harvestserver.noSets.why.reason2=Only the published, unrestricted datasets in your Dataverse can be harvested. Remote clients normally keep their records in sync through scheduled incremental updates, daily or weekly, thus minimizing the load on your server. Note that it is only the metadata that are harvested. Remote harvesters will generally not attempt to download the data files themselves. +harvestserver.noSets.how.header=How to run a Harvesting Server? +harvestserver.noSets.how.tip1=Harvesting server can be enabled or disabled on this page. +harvestserver.noSets.how.tip2=Once the service is enabled, you can define collections of local datasets that will be available to remote harvesters as OAI Sets. 
Sets are defined by search queries (for example, authorName:king; or parentId:1234 - to select all the datasets that belong to the dataverse specified; or dsPersistentId:"doi:1234/" to select all the datasets with the persistent identifier authority specified). Consult the Search API section of the Dataverse User Guide for more information on the search queries. +harvestserver.noSets.getStarted=To get started, enable the OAI server and click on the Add Set button. To learn more about Harvesting, visit the Harvesting section of the User Guide. +harvestserver.btn.add=Add Set +harvestserver.tab.header.spec=OAI setSpec +harvestserver.tab.header.description=Description +harvestserver.tab.header.definition=Definition Query +harvestserver.tab.header.stats=Datasets +harvestserver.tab.col.stats.empty=No records (empty set) +harvestserver.tab.col.stats.results={0} {0, choice, 0#datasets|1#dataset|2#datasets} ({1} {1, choice, 0#records|1#record|2#records} exported, {2} marked as deleted) +harvestserver.tab.header.action=Actions +harvestserver.tab.header.action.btn.export=Run Export +harvestserver.actions.runreexport.success=Successfully started an asynchronous re-export job for OAI set "{0}" (please reload the page to check on the export progress). +harvestserver.tab.header.action.btn.edit=Edit +harvestserver.tab.header.action.btn.delete=Delete +harvestserver.tab.header.action.btn.delete.dialog.header=Delete Harvesting Set +harvestserver.tab.header.action.btn.delete.dialog.tip=Are you sure you want to delete the OAI set "{0}"? You cannot undo a delete! +harvestserver.tab.header.action.delete.infomessage=Selected harvesting set is being deleted. (this may take a few moments) +harvestserver.newSetDialog.title.new=Create Harvesting Set +harvestserver.newSetDialog.help=Define a set of local datasets available for harvesting to remote clients. 
+harvestserver.newSetDialog.setspec=Name/OAI setSpec +harvestserver.newSetDialog.setspec.tip=A unique name (OAI setSpec) identifying this set. +harvestserver.newSetDialog.setspec.helptext=Consists of letters, digits, underscores (_) and dashes (-). +harvestserver.editSetDialog.setspec.helptext=The name can not be changed once the set has been created. +harvestserver.newSetDialog.setspec.required=Name (OAI setSpec) cannot be empty! +harvestserver.newSetDialog.setspec.invalid=Name (OAI setSpec) can contain only letters, digits, underscores (_) and dashes (-). +harvestserver.newSetDialog.setspec.alreadyused=This set name (OAI setSpec) is already used. +harvestserver.newSetDialog.setdescription=Description +harvestserver.newSetDialog.setdescription.tip=Provide a brief description for this OAI set. +harvestserver.newSetDialog.setdescription.required=Set description cannot be empty! +harvestserver.newSetDialog.setquery=Definition Query +harvestserver.newSetDialog.setquery.tip=Search query that defines the content of the dataset. +harvestserver.newSetDialog.setquery.helptext=Example query: authorName:king +harvestserver.newSetDialog.setquery.required=Search query cannot be left empty! +harvestserver.newSetDialog.setquery.results=Search query returned {0} datasets! +harvestserver.newSetDialog.setquery.empty=WARNING: Search query returned no results! +harvestserver.newSetDialog.btn.create=Create Set +harvestserver.newSetDialog.success=Successfully created harvesting set "{0}". +harvestserver.viewEditDialog.title=Edit Harvesting Set +harvestserver.viewEditDialog.btn.save=Save Changes + +#dashboard-users.xhtml +dashboard.card.users=Users +dashboard.card.users.header=Dashboard - User List +dashboard.card.users.super=Superusers +dashboard.card.users.manage=Manage Users +dashboard.card.users.message=List and manage users. +dashboard.list_users.searchTerm.watermark=Search these users... 
+dashboard.list_users.tbl_header.userId=ID +dashboard.list_users.tbl_header.userIdentifier=Username +dashboard.list_users.tbl_header.name=Name +dashboard.list_users.tbl_header.lastName=Last Name +dashboard.list_users.tbl_header.firstName=First Name +dashboard.list_users.tbl_header.email=Email +dashboard.list_users.tbl_header.affiliation=Affiliation +dashboard.list_users.tbl_header.roles=Roles +dashboard.list_users.tbl_header.position=Position +dashboard.list_users.tbl_header.isSuperuser=Superuser +dashboard.list_users.tbl_header.authProviderFactoryAlias=Authentication +dashboard.list_users.tbl_header.createdTime=Created Time +dashboard.list_users.tbl_header.lastLoginTime=Last Login Time +dashboard.list_users.tbl_header.lastApiUseTime=Last API Use Time +dashboard.list_users.tbl_header.roles.removeAll=Remove All +dashboard.list_users.tbl_header.roles.removeAll.header=Remove All Roles +dashboard.list_users.tbl_header.roles.removeAll.confirmationText=Are you sure you want to remove all roles for user {0}? +dashboard.list_users.removeAll.message.success=All roles have been removed for user {0}. +dashboard.list_users.removeAll.message.failure=Failed to remove roles for user {0}. + +dashboard.list_users.toggleSuperuser=Edit Superuser Status +dashboard.list_users.toggleSuperuser.confirmationText.add=Are you sure you want to enable superuser status for user {0}? +dashboard.list_users.toggleSuperuser.confirmationText.remove=Are you sure you want to disable superuser status for user {0}? +dashboard.list_users.toggleSuperuser.confirm=Continue +dashboard.list_users.api.auth.invalid_apikey=The API key is invalid. +dashboard.list_users.api.auth.not_superuser=Forbidden. You must be a superuser. 
+ +#MailServiceBean.java +notification.email.create.dataverse.subject={0}: Your dataverse has been created +notification.email.create.dataset.subject={0}: Your dataset has been created +notification.email.request.file.access.subject={0}: Access has been requested for a restricted file +notification.email.grant.file.access.subject={0}: You have been granted access to a restricted file +notification.email.rejected.file.access.subject={0}: Your request for access to a restricted file has been rejected +notification.email.update.maplayer={0}: WorldMap layer added to dataset +notification.email.maplayer.deletefailed.subject={0}: Failed to delete WorldMap layer +notification.email.maplayer.deletefailed.text=We failed to delete the WorldMap layer associated with the restricted file {0}, and any related data that may still be publicly available on the WorldMap site. Please try again, or contact WorldMap and/or Dataverse support. (Dataset: {1}) +notification.email.submit.dataset.subject={0}: Your dataset has been submitted for review +notification.email.publish.dataset.subject={0}: Your dataset has been published +notification.email.returned.dataset.subject={0}: Your dataset has been returned +notification.email.create.account.subject={0}: Your account has been created +notification.email.assign.role.subject={0}: You have been assigned a role +notification.email.revoke.role.subject={0}: Your role has been revoked +notification.email.verifyEmail.subject={0}: Verify your email address +notification.email.greeting=Hello, \n +# Bundle file editors, please note that "notification.email.welcome" is used in a unit test +notification.email.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the User Guide at {1}/{2}/user or contact {3} at {4} for assistance. +notification.email.welcomeConfirmEmailAddOn=\n\nPlease verify your email address at {0} . Note, the verify link will expire after {1}. 
Send another verification email by visiting your account page. +notification.email.requestFileAccess=File access requested for dataset: {0}. Manage permissions at {1}. +notification.email.grantFileAccess=Access granted for files in dataset: {0} (view at {1}). +notification.email.rejectFileAccess=Your request for access was rejected for the requested files in the dataset: {0} (view at {1}). If you have any questions about why your request was rejected, you may reach the dataset owner using the "Contact" link on the upper right corner of the dataset page. +# Bundle file editors, please note that "notification.email.createDataverse" is used in a unit test +notification.email.createDataverse=Your new dataverse named {0} (view at {1} ) was created in {2} (view at {3} ). To learn more about what you can do with your dataverse, check out the Dataverse Management - User Guide at {4}/{5}/user/dataverse-management.html . +# Bundle file editors, please note that "notification.email.createDataset" is used in a unit test +notification.email.createDataset=Your new dataset named {0} (view at {1} ) was created in {2} (view at {3} ). To learn more about what you can do with a dataset, check out the Dataset Management - User Guide at {4}/{5}/user/dataset-management.html . +notification.email.wasSubmittedForReview={0} (view at {1}) was submitted for review to be published in {2} (view at {3}). Don''t forget to publish it or send it back to the contributor\! +notification.email.wasReturnedByReviewer={0} (view at {1}) was returned by the curator of {2} (view at {3}). +notification.email.wasPublished={0} (view at {1}) was published in {2} (view at {3}). +notification.email.worldMap.added={0} (view at {1}) had WorldMap layer data added to it. +notification.email.closing=\n\nThank you,\n{0} +notification.email.assignRole=You are now {0} for the {1} "{2}" (view at {3}). +notification.email.revokeRole=One of your roles for the {0} "{1}" has been revoked (view at {2}). 
+notification.email.changeEmail=Hello, {0}.{1}\n\nPlease contact us if you did not intend this change or if you need assistance.
+hours=hours
+hour=hour
+minutes=minutes
+minute=minute
+notification.email.checksumfail.subject={0}: Your upload failed checksum validation
+notification.email.import.filesystem.subject=Dataset {0} has been successfully uploaded and verified
+notification.email.import.checksum.subject={0}: Your file checksum job has completed
+
+# passwordreset.xhtml
+pageTitle.passwdReset.pre=Account Password Reset
+passwdReset.token=token :
+passwdReset.userLookedUp=user looked up :
+passwdReset.emailSubmitted=email submitted :
+passwdReset.details={0} Password Reset{1} - To initiate the password reset process, please provide your email address.
+passwdReset.submitRequest=Submit Password Request
+passwdReset.successSubmit.tip=If this email is associated with an account, then an email will be sent with further instructions to {0}.
+passwdReset.debug=DEBUG
+passwdReset.resetUrl=The reset URL is
+passwdReset.noEmail.tip=No email was actually sent because a user could not be found using the provided email address {0} but we don't mention this because we don't want malicious users to use the form to determine if there is an account associated with an email address.
+passwdReset.illegalLink.tip=Your password reset link is not valid. If you need to reset your password, {0}click here{1} in order to request that your password be reset again.
+passwdReset.newPasswd.details={0} Reset Password{1} \u2013 Our password requirements have changed. Please pick a strong password that matches the criteria below.
+passwdReset.newPasswd=New Password
+passwdReset.rePasswd=Retype Password
+passwdReset.resetBtn=Reset Password
+
+# dataverse.xhtml
+dataverse.title=The project, department, university, professor, or journal this dataverse will contain data for.
+dataverse.enterName=Enter name...
+dataverse.host.title=The dataverse which contains this data.
+dataverse.identifier.title=Short name used for the URL of this dataverse. +dataverse.affiliation.title=The organization with which this dataverse is affiliated. +dataverse.category=Category +dataverse.category.title=The type that most closely reflects this dataverse. +dataverse.type.selectTab.top=Select one... +dataverse.type.selectTab.researchers=Researcher +dataverse.type.selectTab.researchProjects=Research Project +dataverse.type.selectTab.journals=Journal +dataverse.type.selectTab.organizationsAndInsitutions=Organization or Institution +dataverse.type.selectTab.teachingCourses=Teaching Course +dataverse.type.selectTab.uncategorized=Uncategorized +dataverse.type.selectTab.researchGroup=Research Group +dataverse.type.selectTab.laboratory=Laboratory +dataverse.type.selectTab.department=Department +dataverse.description.title=A summary describing the purpose, nature, or scope of this dataverse. +dataverse.email=Email +dataverse.email.title=The e-mail address(es) of the contact(s) for the dataverse. +dataverse.share.dataverseShare=Share Dataverse +dataverse.share.dataverseShare.tip=Share this dataverse on your favorite social media networks. +dataverse.share.dataverseShare.shareText=View this dataverse. +dataverse.subject.title=Subject(s) covered in this dataverse. +dataverse.metadataElements=Metadata Fields +dataverse.metadataElements.tip=Choose the metadata fields to use in dataset templates and when adding a dataset to this dataverse. +dataverse.metadataElements.from.tip=Use metadata fields from {0} +dataverse.resetModifications=Reset Modifications +dataverse.resetModifications.text=Are you sure you want to reset the selected metadata fields? If you do this, any customizations (hidden, required, optional) you have done will no longer appear. 
+dataverse.field.required=(Required) +dataverse.field.example1= (Examples: +dataverse.field.example2=) +dataverse.field.set.tip=[+] View fields + set as hidden, required, or optional +dataverse.field.set.view=[+] View fields +dataverse.field.requiredByDataverse=Required by Dataverse +dataverse.facetPickList.text=Browse/Search Facets +dataverse.facetPickList.tip=Choose the metadata fields to use as facets for browsing datasets and dataverses in this dataverse. +dataverse.facetPickList.facetsFromHost.text=Use browse/search facets from {0} +dataverse.facetPickList.metadataBlockList.all=All Metadata Fields +dataverse.edit=Edit +dataverse.option.generalInfo=General Information +dataverse.option.themeAndWidgets=Theme + Widgets +dataverse.option.featuredDataverse=Featured Dataverses +dataverse.option.permissions=Permissions +dataverse.option.dataverseGroups=Groups +dataverse.option.datasetTemplates=Dataset Templates +dataverse.option.datasetGuestbooks=Dataset Guestbooks +dataverse.option.deleteDataverse=Delete Dataverse +dataverse.publish.btn=Publish +dataverse.publish.header=Publish Dataverse +dataverse.nopublished=No Published Dataverses +dataverse.nopublished.tip=In order to use this feature you must have at least one published dataverse. +dataverse.contact=Email Dataverse Contact +dataset.link=Link Dataset +dataverse.link=Link Dataverse +dataverse.link.btn.tip=Link to Your Dataverse +dataverse.link.yourDataverses=Your {0, choice, 1#Dataverse|2#Dataverses} +dataverse.link.save=Save Linked Dataverse +dataset.link.save=Save Linked Dataset +dataverse.link.dataverse.choose=Choose which of your dataverses you would like to link this dataverse to. +dataverse.link.dataset.choose=Choose which of your dataverses you would like to link this dataset to. +dataverse.link.no.choice=You have one dataverse you can add linked dataverses and datasets in. +dataverse.link.no.linkable=To be able to link a dataverse or dataset, you need to have your own dataverse. 
Click on the Add Data button on the homepage to get started. +dataverse.link.no.linkable.remaining=You have already linked all of your eligible dataverses. +dataverse.savedsearch.link=Link Search +dataverse.savedsearch.searchquery=Search +dataverse.savedsearch.filterQueries=Facets +dataverse.savedsearch.save=Save Linked Search +dataverse.savedsearch.dataverse.choose=Choose which of your dataverses you would like to link this search to. +dataverse.savedsearch.no.choice=You have one dataverse to which you may add a saved search. +# Bundle file editors, please note that "dataverse.savedsearch.save.success" is used in a unit test +dataverse.saved.search.success=The saved search has been successfully linked to {0}. +dataverse.saved.search.failure=The saved search was not able to be linked. +dataverse.linked.success= {0} has been successfully linked to {1}. +dataverse.linked.success.wait= {0} has been successfully linked to {1}. Please wait for its contents to appear. +dataverse.linked.internalerror={0} has been successfully linked to {1} but contents will not appear until an internal error has been fixed. +dataverse.page.pre=Previous +dataverse.page.next=Next +dataverse.byCategory=Dataverses by Category +dataverse.displayFeatured=Display the dataverses selected below on the homepage for this dataverse. +dataverse.selectToFeature=Select dataverses to feature on the homepage of this dataverse. +dataverse.publish.tip=Are you sure you want to publish your dataverse? Once you do so it must remain published. +dataverse.publish.failed.tip=This dataverse cannot be published because the dataverse it is in has not been published. +dataverse.publish.failed=Cannot publish dataverse. +dataverse.publish.success=Your dataverse is now public. +dataverse.publish.failure=This dataverse was not able to be published. +dataverse.delete.tip=Are you sure you want to delete your dataverse? You cannot undelete this dataverse. 
+dataverse.delete=Delete Dataverse +dataverse.delete.success=Your dataverse has been deleted. +dataverse.delete.failure=This dataverse was not able to be deleted. +# Bundle file editors, please note that "dataverse.create.success" is used in a unit test because it's so fancy with two parameters +dataverse.create.success=You have successfully created your dataverse! To learn more about what you can do with your dataverse, check out the User Guide. +dataverse.create.failure=This dataverse was not able to be created. +dataverse.create.authenticatedUsersOnly=Only authenticated users can create dataverses. +dataverse.update.success=You have successfully updated your dataverse! +dataverse.update.failure=This dataverse was not able to be updated. + +# rolesAndPermissionsFragment.xhtml + +# advanced.xhtml +advanced.search.header.dataverses=Dataverses +advanced.search.dataverses.name.tip=The project, department, university, professor, or journal this Dataverse will contain data for. +advanced.search.dataverses.affiliation.tip=The organization with which this Dataverse is affiliated. +advanced.search.dataverses.description.tip=A summary describing the purpose, nature, or scope of this Dataverse. +advanced.search.dataverses.subject.tip=Domain-specific Subject Categories that are topically relevant to this Dataverse. +advanced.search.header.datasets=Datasets +advanced.search.header.files=Files +advanced.search.files.name.tip=The name given to identify the file. +advanced.search.files.description.tip=A summary describing the file and its variables. +advanced.search.files.fileType=File Type +advanced.search.files.fileType.tip=The extension for a file, e.g. CSV, zip, Stata, R, PDF, JPEG, etc. +advanced.search.files.variableName=Variable Name +advanced.search.files.variableName.tip=The name of the variable's column in the data frame. +advanced.search.files.variableLabel=Variable Label +advanced.search.files.variableLabel.tip=A short description of the variable. 
+
+# search-include-fragment.xhtml
+dataverse.search.advancedSearch=Advanced Search
+dataverse.search.input.watermark=Search this dataverse...
+account.search.input.watermark=Search this data...
+dataverse.search.btn.find=Find
+dataverse.results.btn.addData=Add Data
+dataverse.results.btn.addData.newDataverse=New Dataverse
+dataverse.results.btn.addData.newDataset=New Dataset
+dataverse.results.dialog.addDataGuest.header=Add Data
+dataverse.results.dialog.addDataGuest.msg=You need to Log In to create a dataverse or add a dataset.
+dataverse.results.dialog.addDataGuest.msg.signup=You need to Sign Up or Log In to create a dataverse or add a dataset.
+dataverse.results.types.dataverses=Dataverses
+dataverse.results.types.datasets=Datasets
+dataverse.results.types.files=Files
+# Bundle file editors, please note that "dataverse.results.empty.zero" is used in a unit test
+dataverse.results.empty.zero=There are no dataverses, datasets, or files that match your search. Please try a new search by using other or broader terms. You can also check out the search guide for tips.
+# Bundle file editors, please note that "dataverse.results.empty.hidden" is used in a unit test
+dataverse.results.empty.hidden=There are no search results based on how you have narrowed your search. You can check out the search guide for tips.
+dataverse.results.empty.browse.guest.zero=This dataverse currently has no dataverses, datasets, or files. Please log in to see if you are able to add to it.
+dataverse.results.empty.browse.guest.hidden=There are no dataverses within this dataverse. Please log in to see if you are able to add to it.
+dataverse.results.empty.browse.loggedin.noperms.zero=This dataverse currently has no dataverses, datasets, or files. You can use the Email Dataverse Contact button above to ask about this dataverse or request access for this dataverse.
+dataverse.results.empty.browse.loggedin.noperms.hidden=There are no dataverses within this dataverse. +dataverse.results.empty.browse.loggedin.perms.zero=This dataverse currently has no dataverses, datasets, or files. You can add to it by using the Add Data button on this page. +account.results.empty.browse.loggedin.perms.zero=You have no dataverses, datasets, or files associated with your account. You can add a dataverse or dataset by clicking the Add Data button above. Read more about adding data in the User Guide. +dataverse.results.empty.browse.loggedin.perms.hidden=There are no dataverses within this dataverse. You can add to it by using the Add Data button on this page. +dataverse.results.empty.link.technicalDetails=More technical details +dataverse.search.facet.error=There was an error with your search parameters. Please clear your search and try again. +dataverse.results.count.toofresults={0} to {1} of {2} {2, choice, 0#Results|1#Result|2#Results} +dataverse.results.paginator.current=(Current) +dataverse.results.btn.sort=Sort +dataverse.results.btn.sort.option.nameAZ=Name (A-Z) +dataverse.results.btn.sort.option.nameZA=Name (Z-A) +dataverse.results.btn.sort.option.newest=Newest +dataverse.results.btn.sort.option.oldest=Oldest +dataverse.results.btn.sort.option.relevance=Relevance +dataverse.results.cards.foundInMetadata=Found in Metadata Fields: +dataverse.results.cards.files.tabularData=Tabular Data +dataverse.results.solrIsDown=Please note: Due to an internal error, browsing and searching is not available. +dataverse.theme.title=Theme +dataverse.theme.inheritCustomization.title=For this dataverse, use the same theme as the parent dataverse. +dataverse.theme.inheritCustomization.label=Inherit Theme +dataverse.theme.inheritCustomization.checkbox=Inherit theme from {0} +dataverse.theme.logo=Logo +dataverse.theme.logo.tip=Supported image types are JPG, TIF, or PNG and should be no larger than 500 KB. 
The maximum display size for an image file in a dataverse's theme is 940 pixels wide by 120 pixels high. +dataverse.theme.logo.format=Logo Format +dataverse.theme.logo.format.selectTab.square=Square +dataverse.theme.logo.format.selectTab.rectangle=Rectangle +dataverse.theme.logo.alignment=Logo Alignment +dataverse.theme.logo.alignment.selectTab.left=Left +dataverse.theme.logo.alignment.selectTab.center=Center +dataverse.theme.logo.alignment.selectTab.right=Right +dataverse.theme.logo.backColor=Logo Background Color +dataverse.theme.logo.image.upload=Upload Image +dataverse.theme.tagline=Tagline +dataverse.theme.website=Website +dataverse.theme.linkColor=Link Color +dataverse.theme.txtColor=Text Color +dataverse.theme.backColor=Background Color +dataverse.theme.success=You have successfully updated the theme for this dataverse! +dataverse.theme.failure=The dataverse theme has not been updated. +dataverse.theme.logo.image=Logo Image +dataverse.theme.logo.image.title=The logo or image file you wish to display in the header of this dataverse. +dataverse.theme.logo.image.uploadNewFile=Upload New File +dataverse.theme.logo.image.invalidMsg=The image could not be uploaded. Please try again with a JPG, TIF, or PNG file. +dataverse.theme.logo.image.uploadImgFile=Upload Image File +dataverse.theme.logo.format.title=The shape for the logo or image file you upload for this dataverse. +dataverse.theme.logo.format.selectTab.square2=Square +dataverse.theme.logo.format.selectTab.rectangle2=Rectangle +dataverse.theme.logo.alignment.title=Where the logo or image should display in the header. +dataverse.theme.logo.alignment.selectTab.left2=Left +dataverse.theme.logo.alignment.selectTab.center2=Center +dataverse.theme.logo.alignment.selectTab.right2=Right +dataverse.theme.logo.backColor.title=Select a color to display behind the logo of this dataverse. +dataverse.theme.headerColor=Header Colors +dataverse.theme.headerColor.tip=Colors you select to style the header of this dataverse. 
+dataverse.theme.backColor.title=Color for the header area that contains the image, tagline, URL, and text. +dataverse.theme.linkColor.title=Color for the link to display as. +dataverse.theme.txtColor.title=Color for the tagline text and the name of this dataverse. +dataverse.theme.tagline.title=A phrase or sentence that describes this dataverse. +dataverse.theme.tagline.tip=Provide a tagline that is 140 characters or less. +dataverse.theme.website.title=URL for your personal website, institution, or any website that relates to this dataverse. +dataverse.theme.website.tip=The website will be linked behind the tagline. To have a website listed, you must also provide a tagline. +dataverse.theme.website.watermark=Your personal site, http://... +dataverse.theme.website.invalidMsg=Invalid URL. +dataverse.theme.disabled=The theme for the root dataverse has been administratively disabled with the :DisableRootDataverseTheme database setting. +dataverse.widgets.title=Widgets +dataverse.widgets.notPublished.why.header=Why Use Widgets? +dataverse.widgets.notPublished.why.reason1=Increases the web visibility of your data by allowing you to embed your dataverse and datasets into your personal or project website. +dataverse.widgets.notPublished.why.reason2=Allows others to browse your dataverse and datasets without leaving your personal or project website. +dataverse.widgets.notPublished.how.header=How To Use Widgets +dataverse.widgets.notPublished.how.tip1=To use widgets, your dataverse and datasets need to be published. +dataverse.widgets.notPublished.how.tip2=After publishing, code will be available on this page for you to copy and add to your personal or project website. +dataverse.widgets.notPublished.how.tip3=Do you have an OpenScholar website? If so, learn more about adding the Dataverse widgets to your website here. +dataverse.widgets.notPublished.getStarted=To get started, publish your dataverse. 
To learn more about Widgets, visit the Theme + Widgets section of the User Guide.
+dataverse.widgets.tip=Copy and paste this code into the HTML on your site. To learn more about Widgets, visit the Theme + Widgets section of the User Guide.
+dataverse.widgets.searchBox.txt=Dataverse Search Box
+dataverse.widgets.searchBox.tip=Add a way for visitors on your website to be able to search Dataverse.
+dataverse.widgets.dataverseListing.txt=Dataverse Listing
+dataverse.widgets.dataverseListing.tip=Add a way for visitors on your website to be able to view your dataverses and datasets, sort, or browse through them.
+dataverse.widgets.advanced.popup.header=Widget Advanced Options
+dataverse.widgets.advanced.prompt=Forward dataset citation persistent URLs to your personal website. The page you submit as your Personal Website URL must contain the code snippet for the Dataverse Listing widget.
+dataverse.widgets.advanced.url.label=Personal Website URL
+dataverse.widgets.advanced.url.watermark=http://www.example.com/page-name
+dataverse.widgets.advanced.invalid.message=Please enter a valid URL
+dataverse.widgets.advanced.success.message=Successfully updated your Personal Website URL
+dataverse.widgets.advanced.failure.message=The dataverse Personal Website URL has not been updated.
+
+# permissions-manage.xhtml
+dataverse.permissions.title=Permissions
+dataverse.permissions.dataset.title=Dataset Permissions
+dataverse.permissions.access.accessBtn=Edit Access
+dataverse.permissions.usersOrGroups=Users/Groups
+dataverse.permissions.usersOrGroups.assignBtn=Assign Roles to Users/Groups
+dataverse.permissions.usersOrGroups.createGroupBtn=Create Group
+dataverse.permissions.usersOrGroups.description=All the users and groups that have access to your dataverse.
+dataverse.permissions.usersOrGroups.tabHeader.userOrGroup=User/Group Name (Affiliation) +dataverse.permissions.usersOrGroups.tabHeader.id=ID +dataverse.permissions.usersOrGroups.tabHeader.role=Role +dataverse.permissions.usersOrGroups.tabHeader.action=Action +dataverse.permissions.usersOrGroups.assignedAt=Role assigned at {0} +dataverse.permissions.usersOrGroups.removeBtn=Remove Assigned Role +dataverse.permissions.usersOrGroups.removeBtn.confirmation=Are you sure you want to remove this role assignment? +dataverse.permissions.roles=Roles +dataverse.permissions.roles.add=Add New Role +dataverse.permissions.roles.description=All the roles set up in your dataverse, that you can assign to users and groups. +dataverse.permissions.roles.edit=Edit Role +dataverse.permissions.roles.copy=Copy Role + +# permissions-manage-files.xhtml +dataverse.permissionsFiles.title=Restricted File Permissions +dataverse.permissionsFiles.usersOrGroups=Users/Groups +dataverse.permissionsFiles.usersOrGroups.assignBtn=Grant Access to Users/Groups +dataverse.permissionsFiles.usersOrGroups.description=All the users and groups that have access to restricted files in this dataset. +dataverse.permissionsFiles.usersOrGroups.tabHeader.userOrGroup=User/Group Name (Affiliation) +dataverse.permissionsFiles.usersOrGroups.tabHeader.id=ID +dataverse.permissionsFiles.usersOrGroups.tabHeader.email=Email +dataverse.permissionsFiles.usersOrGroups.tabHeader.files=Files +dataverse.permissionsFiles.usersOrGroups.tabHeader.access=Access +dataverse.permissionsFiles.usersOrGroups.file=File +dataverse.permissionsFiles.usersOrGroups.files=Files +dataverse.permissionsFiles.usersOrGroups.invalidMsg=There are no users or groups with access to the restricted files in this dataset. 
+dataverse.permissionsFiles.files=Restricted Files +dataverse.permissionsFiles.files.label={0, choice, 0#Restricted Files|1#Restricted File|2#Restricted Files} +dataverse.permissionsFiles.files.description=All the restricted files in this dataset. +dataverse.permissionsFiles.files.tabHeader.fileName=File Name +dataverse.permissionsFiles.files.tabHeader.roleAssignees=Users/Groups +dataverse.permissionsFiles.files.tabHeader.access=Access +dataverse.permissionsFiles.files.tabHeader.publishedRestrictedState=Published +dataverse.permissionsFiles.files.tabHeader.draftRestrictedState=Draft +dataverse.permissionsFiles.files.deleted=Deleted +dataverse.permissionsFiles.files.public=Public +dataverse.permissionsFiles.files.restricted=Restricted +dataverse.permissionsFiles.files.roleAssignee=User/Group +dataverse.permissionsFiles.files.roleAssignees=Users/Groups +dataverse.permissionsFiles.files.roleAssignees.label={0, choice, 0#Users/Groups|1#User/Group|2#Users/Groups} +dataverse.permissionsFiles.files.assignBtn=Assign Access +dataverse.permissionsFiles.files.invalidMsg=There are no restricted files in this dataset. +dataverse.permissionsFiles.files.requested=Requested Files +dataverse.permissionsFiles.files.selected=Selecting {0} of {1} {2} +dataverse.permissionsFiles.viewRemoveDialog.header=File Access +dataverse.permissionsFiles.viewRemoveDialog.removeBtn=Remove Access +dataverse.permissionsFiles.viewRemoveDialog.removeBtn.confirmation=Are you sure you want to remove access to this file? Once access has been removed, the user or group will no longer be able to download this file. +dataverse.permissionsFiles.assignDialog.header=Grant File Access +dataverse.permissionsFiles.assignDialog.description=Grant file access to users and groups. +dataverse.permissionsFiles.assignDialog.userOrGroup=Users/Groups +dataverse.permissionsFiles.assignDialog.userOrGroup.enterName=Enter User/Group Name +dataverse.permissionsFiles.assignDialog.userOrGroup.invalidMsg=No matches found. 
+dataverse.permissionsFiles.assignDialog.userOrGroup.requiredMsg=Please select at least one user or group. +dataverse.permissionsFiles.assignDialog.fileName=File Name +dataverse.permissionsFiles.assignDialog.grantBtn=Grant +dataverse.permissionsFiles.assignDialog.rejectBtn=Reject + +# permissions-configure.xhtml +dataverse.permissions.accessDialog.header=Edit Access +dataverse.permissions.description=Current access configuration to your dataverse. +dataverse.permissions.tip=Select if all users or only certain users are able to add to this dataverse, by clicking the Edit Access button. +dataverse.permissions.Q1=Who can add to this dataverse? +dataverse.permissions.Q1.answer1=Anyone adding to this dataverse needs to be given access +dataverse.permissions.Q1.answer2=Anyone with a Dataverse account can add sub dataverses +dataverse.permissions.Q1.answer3=Anyone with a Dataverse account can add datasets +dataverse.permissions.Q1.answer4=Anyone with a Dataverse account can add sub dataverses and datasets +dataverse.permissions.Q2=When a user adds a new dataset to this dataverse, which role should be automatically assigned to them on that dataset? +dataverse.permissions.Q2.answer.editor.description=- Edit metadata, upload files, and edit files, edit Terms, Guestbook, Submit datasets for review +dataverse.permissions.Q2.answer.manager.description=- Edit metadata, upload files, and edit files, edit Terms, Guestbook, File Restrictions (Files Access + Use) +dataverse.permissions.Q2.answer.curator.description=- Edit metadata, upload files, and edit files, edit Terms, Guestbook, File Restrictions (Files Access + Use), Edit Permissions/Assign Roles + Publish + +# roles-assign.xhtml +dataverse.permissions.usersOrGroups.assignDialog.header=Assign Role +dataverse.permissions.usersOrGroups.assignDialog.description=Grant permissions to users and groups by assigning them a role. 
+dataverse.permissions.usersOrGroups.assignDialog.userOrGroup=Users/Groups +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.enterName=Enter User/Group Name +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.invalidMsg=No matches found. +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.requiredMsg=Please select at least one user or group. +dataverse.permissions.usersOrGroups.assignDialog.role.description=These are the permissions associated with the selected role. +dataverse.permissions.usersOrGroups.assignDialog.role.warning=Assigning the {0} role means the user(s) will also have the {0} role applied to all {1} within this {2}. +dataverse.permissions.usersOrGroups.assignDialog.role.requiredMsg=Please select a role to assign. + +# roles-edit.xhtml +dataverse.permissions.roles.header=Edit Role +dataverse.permissions.roles.name=Role Name +dataverse.permissions.roles.name.title=Enter a name for the role. +dataverse.permissions.roles.id=Identifier +dataverse.permissions.roles.id.title=Enter a name for the alias. +dataverse.permissions.roles.description.title=Describe the role (1000 characters max). +dataverse.permissions.roles.description.counter={0} characters remaining +dataverse.permissions.roles.roleList.header=Role Permissions +dataverse.permissions.roles.roleList.authorizedUserOnly=Permissions with the information icon indicate actions that can be performed by users not logged into Dataverse. + +# explicitGroup-new-dialog.xhtml +dataverse.permissions.explicitGroupEditDialog.title.new=Create Group +dataverse.permissions.explicitGroupEditDialog.title.edit=Edit Group {0} +dataverse.permissions.explicitGroupEditDialog.help=Add users or other groups to this group. +dataverse.permissions.explicitGroupEditDialog.groupIdentifier=Group Identifier +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.tip=Short name used for the ID of this group. 
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.required=Group identifier cannot be empty +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.invalid=Group identifier can contain only letters, digits, underscores (_) and dashes (-) +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.helpText=Consists of letters, digits, underscores (_) and dashes (-) +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.taken=Group identifier already used in this dataverse +dataverse.permissions.explicitGroupEditDialog.groupName=Group Name +dataverse.permissions.explicitGroupEditDialog.groupName.required=Group name cannot be empty +dataverse.permissions.explicitGroupEditDialog.groupDescription=Description +dataverse.permissions.explicitGroupEditDialog.roleAssigneeName=User/Group +dataverse.permissions.explicitGroupEditDialog.roleAssigneeNames=Users/Groups +dataverse.permissions.explicitGroupEditDialog.createGroup=Create Group + +# manage-templates.xhtml +dataset.manageTemplates.pageTitle=Manage Dataset Templates +dataset.manageTemplates.select.txt=Include Templates from {0} +dataset.manageTemplates.createBtn=Create Dataset Template +dataset.manageTemplates.saveNewTerms=Save Dataset Template +dataset.manageTemplates.noTemplates.why.header=Why Use Templates? +dataset.manageTemplates.noTemplates.why.reason1=Templates are useful when you have several datasets that have the same information in multiple metadata fields that you would prefer not to have to keep manually typing in. +dataset.manageTemplates.noTemplates.why.reason2=Templates can be used to input instructions for those uploading datasets into your dataverse if you have a specific way you want a metadata field to be filled out. 
+dataset.manageTemplates.noTemplates.how.header=How To Use Templates +dataset.manageTemplates.noTemplates.how.tip1=Templates are created at the dataverse level, can be deleted (so it does not show for future datasets), set to default (not required), and can be copied so you do not have to start over when creating a new template with similar metadata from another template. When a template is deleted, it does not impact the datasets that have used the template already. +dataset.manageTemplates.noTemplates.how.tip2=Please note that the ability to choose which metadata fields are hidden, required, or optional is done on the General Information page for this dataverse. +dataset.manageTemplates.noTemplates.getStarted=To get started, click on the Create Dataset Template button above. To learn more about templates, visit the Dataset Templates section of the User Guide. +dataset.manageTemplates.tab.header.templte=Template Name +dataset.manageTemplates.tab.header.date=Date Created +dataset.manageTemplates.tab.header.usage=Usage +dataset.manageTemplates.tab.header.action=Action +dataset.manageTemplates.tab.action.btn.makeDefault=Make Default +dataset.manageTemplates.tab.action.btn.default=Default +dataset.manageTemplates.tab.action.btn.view=View +dataset.manageTemplates.tab.action.btn.copy=Copy +dataset.manageTemplates.tab.action.btn.edit=Edit +dataset.manageTemplates.tab.action.btn.edit.metadata=Metadata +dataset.manageTemplates.tab.action.btn.edit.terms=Terms +dataset.manageTemplates.tab.action.btn.delete=Delete +dataset.manageTemplates.tab.action.btn.delete.dialog.tip=Are you sure you want to delete this template? A new dataset will not be able to use this template. 
+dataset.manageTemplates.tab.action.btn.delete.dialog.header=Delete Template +dataset.manageTemplates.tab.action.btn.view.dialog.header=Dataset Template Preview +dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate=Dataset Template +dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate.title=The dataset template which prepopulates info into the form automatically. +dataset.manageTemplates.tab.action.noedit.createdin=Template created at {0} +dataset.manageTemplates.delete.usedAsDefault=This template is the default template for the following dataverse(s). It will be removed as default as well. +dataset.manageTemplates.info.message.notEmptyTable=Create, clone, edit, view, or delete dataset templates. Create a dataset template to prefill metadata fields with standard values, such as author affiliation, to help users create datasets in this dataverse. You can also add help text directly into the metadata fields to give users more information on what to add to these metadata fields. + +# metadataFragment.xhtml + +# template.xhtml +dataset.template.name.tip=The name of the dataset template. +dataset.template.returnBtn=Return to Manage Templates +dataset.template.name.title=Enter a unique name for the template. +template.asterisk.tip=Asterisks indicate metadata fields that users will be required to fill out while adding a dataset to this dataverse. +dataset.template.popup.create.title=Create Template +dataset.template.popup.create.text=Do you want to add default Terms of Use and/or Access? +dataset.create.add.terms=Save + Add Terms + +# manage-groups.xhtml +dataverse.manageGroups.pageTitle=Manage Dataverse Groups +dataverse.manageGroups.createBtn=Create Group +dataverse.manageGroups.noGroups.why.header=Why Use Groups? +dataverse.manageGroups.noGroups.why.reason1=Groups allow you to assign roles and permissions for many users at once. 
+dataverse.manageGroups.noGroups.why.reason2=You can use groups to manage multiple different kinds of users (students, collaborators, etc.) +dataverse.manageGroups.noGroups.how.header=How To Use Groups +dataverse.manageGroups.noGroups.how.tip1=A group can contain both users and other groups. +dataverse.manageGroups.noGroups.how.tip2=You can assign permissions to a group in the "Permissions" view. +dataverse.manageGroups.noGroups.getStarted=To get started, click on the Create Group button above. +dataverse.manageGroups.tab.header.name=Group Name +dataverse.manageGroups.tab.header.id=Group ID +dataverse.manageGroups.tab.header.membership=Membership +dataverse.manageGroups.tab.header.action=Action +dataverse.manageGroups.tab.action.btn.view=View +dataverse.manageGroups.tab.action.btn.copy=Copy +dataverse.manageGroups.tab.action.btn.enable=Enable +dataverse.manageGroups.tab.action.btn.disable=Disable +dataverse.manageGroups.tab.action.btn.edit=Edit +dataverse.manageGroups.tab.action.btn.viewCollectedData=View Collected Data +dataverse.manageGroups.tab.action.btn.delete=Delete +dataverse.manageGroups.tab.action.btn.delete.dialog.header=Delete Group +dataverse.manageGroups.tab.action.btn.delete.dialog.tip=Are you sure you want to delete this group? You cannot undelete a group. +dataverse.manageGroups.tab.action.btn.view.dialog.header=Dataverse Group +dataverse.manageGroups.tab.action.btn.view.dialog.group=Group Name +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.name=Member Name +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.type=Member Type +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.action=Action +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.delete=Delete +dataverse.manageGroups.tab.action.btn.view.dialog.groupMembers=Group Members +dataverse.manageGroups.tab.action.btn.view.dialog.enterName=Enter User/Group Name +dataverse.manageGroups.tab.action.btn.view.dialog.invalidMsg=No matches found. 
+ +# manage-guestbooks.xhtml +dataset.manageGuestbooks.pageTitle=Manage Dataset Guestbooks +dataset.manageGuestbooks.include=Include Guestbooks from {0} +dataset.manageGuestbooks.createBtn=Create Dataset Guestbook +dataset.manageGuestbooks.download.all.responses=Download All Responses +dataset.manageGuestbooks.download.responses=Download Responses +dataset.manageGuestbooks.noGuestbooks.why.header=Why Use Guestbooks? +dataset.manageGuestbooks.noGuestbooks.why.reason1=Guestbooks allow you to collect data about who is downloading the files from your datasets. You can decide to collect account information (username, given name & last name, affiliation, etc.) as well as create custom questions (e.g., What do you plan to use this data for?). +dataset.manageGuestbooks.noGuestbooks.why.reason2=You can download the data collected from the enabled guestbooks to be able to store it outside of Dataverse. +dataset.manageGuestbooks.noGuestbooks.how.header=How To Use Guestbooks +dataset.manageGuestbooks.noGuestbooks.how.tip1=A guestbook can be used for multiple datasets but only one guestbook can be used for a dataset. +dataset.manageGuestbooks.noGuestbooks.how.tip2=Custom questions can have free form text answers or have a user select an answer from several options. +dataset.manageGuestbooks.noGuestbooks.getStarted=To get started, click on the Create Dataset Guestbook button above. To learn more about Guestbooks, visit the Dataset Guestbook section of the User Guide. 
+dataset.manageGuestbooks.tab.header.name=Guestbook Name +dataset.manageGuestbooks.tab.header.date=Date Created +dataset.manageGuestbooks.tab.header.usage=Usage +dataset.manageGuestbooks.tab.header.responses=Responses +dataset.manageGuestbooks.tab.header.action=Action +dataset.manageGuestbooks.tab.action.btn.view=Preview +dataset.manageGuestbooks.tab.action.btn.copy=Copy +dataset.manageGuestbooks.tab.action.btn.enable=Enable +dataset.manageGuestbooks.tab.action.btn.disable=Disable +dataset.manageGuestbooks.tab.action.btn.edit=Edit +dataset.manageGuestbooks.tab.action.btn.preview=Preview +dataset.manageGuestbooks.tab.action.btn.viewCollectedData=View Responses +dataset.manageGuestbooks.tab.action.btn.delete=Delete +dataset.manageGuestbooks.tab.action.btn.delete.dialog.header=Delete Guestbook +dataset.manageGuestbooks.tab.action.btn.delete.dialog.tip=Are you sure you want to delete this guestbook? You cannot undelete a guestbook. +dataset.manageGuestbooks.tab.action.btn.view.dialog.header=Preview Guestbook +dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook.title=Upon downloading files the guestbook asks for the following information. +dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook=Guestbook Name +dataset.manageGuestbooks.tab.action.btn.viewCollectedData.dialog.header=Dataset Guestbook Collected Data +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData.title=User data collected by the guestbook. +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData=Collected Data +dataset.manageGuestbooks.tab.action.noedit.createdin=Guestbook created at {0} +dataset.manageGuestbooks.message.deleteSuccess=The guestbook has been deleted. +dataset.manageGuestbooks.message.deleteFailure=The guestbook cannot be deleted. +dataset.manageGuestbooks.message.editSuccess=The guestbook has been updated. +dataset.manageGuestbooks.message.editFailure=The guestbook could not be updated. 
+dataset.manageGuestbooks.message.enableSuccess=The guestbook has been enabled. +dataset.manageGuestbooks.message.enableFailure=The guestbook could not be enabled. +dataset.manageGuestbooks.message.disableSuccess=The guestbook has been disabled. +dataset.manageGuestbooks.message.disableFailure=The guestbook could not be disabled. +dataset.manageGuestbooks.tip.title=Manage Dataset Guestbooks +dataset.manageGuestbooks.tip.downloadascsv=Click \"Download All Responses\" to download all collected guestbook responses for this dataverse, as a CSV file. To navigate and analyze your collected responses, we recommend importing this CSV file into Excel, Google Sheets or similar software. +dataset.guestbooksResponses.dataset=Dataset +dataset.guestbooksResponses.date=Date +dataset.guestbooksResponses.type=Type +dataset.guestbooksResponses.file=File +dataset.guestbooksResponses.tip.title=Guestbook Responses +dataset.guestbooksResponses.count.responses={0} {0, choice, 0#Responses|1#Response|2#Responses} +dataset.guestbooksResponses.count.toofresults={0} to {1} of {2} {2, choice, 0#Responses|1#Response|2#Responses} +dataset.guestbooksResponses.tip.downloadascsv=Click \"Download Responses\" to download all collected responses for this guestbook, as a CSV file. To navigate and analyze your collected responses, we recommend importing this CSV file into Excel, Google Sheets or similar software. +dataset.guestbooksResponses.tooManyResponses.message=Note: this guestbook has too many responses to display on this page. Only the most recent {0} responses are shown below. Click \"Download Responses\" to download all collected responses ({1} total) as a CSV file. + +# guestbook-responses.xhtml +dataset.guestbookResponses.pageTitle=Guestbook Responses + +# guestbook.xhtml +dataset.manageGuestbooks.guestbook.name=Guestbook Name +dataset.manageGuestbooks.guestbook.name.tip=Enter a unique name for this Guestbook. 
+dataset.manageGuestbooks.guestbook.dataCollected=Data Collected +dataset.manageGuestbooks.guestbook.dataCollected.description=Dataverse account information that will be collected when a user downloads a file. Check the ones that will be required. +dataset.manageGuestbooks.guestbook.customQuestions=Custom Questions +dataset.manageGuestbooks.guestbook.accountInformation=Account Information +dataset.manageGuestbooks.guestbook.required=(Required) +dataset.manageGuestbooks.guestbook.optional=(Optional) +dataset.manageGuestbooks.guestbook.customQuestions.description=Create your own questions to have users provide more than their account information when they download a file. Questions can be required or optional and answers can be text or multiple choice. +dataset.manageGuestbooks.guestbook.customQuestions.questionType=Question Type +dataset.manageGuestbooks.guestbook.customQuestions.questionText=Question Text +dataset.manageGuestbooks.guestbook.customQuestions.responseOptions=Response Options +dataset.manageGuestbooks.guestbook.customQuestions.questionType.text=Text +dataset.manageGuestbooks.guestbook.customQuestions.questionType.multiple=Multiple Choice + +# guestbookResponseFragment.xhtml +dataset.guestbookResponse.guestbook.additionalQuestions=Additional Questions +dataset.guestbookResponse.guestbook.responseTooLong=Please limit response to 255 characters + +# dataset.xhtml +dataset.configureBtn=Configure +dataset.pageTitle=Add New Dataset +dataset.editBtn=Edit +dataset.editBtn.itemLabel.upload=Files (Upload) +dataset.editBtn.itemLabel.metadata=Metadata +dataset.editBtn.itemLabel.terms=Terms +dataset.editBtn.itemLabel.permissions=Permissions +dataset.editBtn.itemLabel.thumbnailsAndWidgets=Thumbnails + Widgets +dataset.editBtn.itemLabel.privateUrl=Private URL +dataset.editBtn.itemLabel.permissionsDataset=Dataset +dataset.editBtn.itemLabel.permissionsFile=Restricted Files +dataset.editBtn.itemLabel.deleteDataset=Delete Dataset 
+dataset.editBtn.itemLabel.deleteDraft=Delete Draft Version +dataset.editBtn.itemLabel.deaccession=Deaccession Dataset +dataset.exportBtn=Export Metadata +dataset.exportBtn.itemLabel.ddi=DDI +dataset.exportBtn.itemLabel.dublinCore=Dublin Core +dataset.exportBtn.itemLabel.schemaDotOrg=Schema.org JSON-LD +dataset.exportBtn.itemLabel.json=JSON +metrics.title=Metrics +metrics.title.tip=View more metrics information +metrics.comingsoon=Coming soon... +metrics.views=Views +metrics.downloads={0, choice, 0#Downloads|1#Download|2#Downloads} +metrics.citations=Citations +metrics.shares=Shares +dataset.publish.btn=Publish +dataset.publish.header=Publish Dataset +dataset.rejectBtn=Return to Author +dataset.submitBtn=Submit for Review +dataset.disabledSubmittedBtn=Submitted for Review +dataset.submitMessage=You will not be able to make changes to this dataset while it is in review. +dataset.submit.success=Your dataset has been submitted for review. +dataset.inreview.infoMessage=\u2013 This dataset is currently under review prior to publication. +dataset.submit.failure=Dataset Submission Failed - {0} +dataset.submit.failure.null=Can't submit for review. Dataset is null. +dataset.submit.failure.isReleased=Latest version of dataset is already released. Only draft versions can be submitted for review. +dataset.submit.failure.inReview=You cannot submit this dataset for review because it is already in review. +dataset.rejectMessage=Return this dataset to contributor for modification. +dataset.rejectWatermark=Please enter a reason for returning this dataset to its author(s). +dataset.reject.enterReason=Reason for return to author is required +dataset.reject.enterReason.header=Required entry +dataset.reject.success=This dataset has been sent back to the contributor. +dataset.reject.failure=Dataset Submission Return Failed - {0} +dataset.reject.datasetNull=Cannot return the dataset to the author(s) because it is null. 
+dataset.reject.datasetNotInReview=This dataset cannot be returned to the author(s) because the latest version is not In Review. The author(s) needs to click Submit for Review first. +dataset.publish.tip=Are you sure you want to publish this dataset? Once you do so it must remain published. +dataset.publishBoth.tip=Once you publish this dataset it must remain published. +dataset.unregistered.tip= This dataset is unregistered. We will attempt to register it before publishing. +dataset.republish.tip=Are you sure you want to republish this dataset? +dataset.selectVersionNumber=Select if this is a minor or major version update. +dataset.majorRelease=Major Release +dataset.minorRelease=Minor Release +dataset.majorRelease.tip=Due to the nature of changes to the current draft this will be a major release ({0}) +dataset.mayNotBePublished=Cannot publish dataset. +dataset.mayNotPublish.administrator= This dataset cannot be published until {0} is published by its administrator. +dataset.mayNotPublish.both= This dataset cannot be published until {0} is published. Would you like to publish both right now? +dataset.mayNotPublish.twoGenerations= This dataset cannot be published until {0} and {1} are published. +dataset.mayNotBePublished.both.button=Yes, Publish Both +dataset.viewVersion.unpublished=View Unpublished Version +dataset.viewVersion.published=View Published Version +dataset.email.datasetContactBtn=Email Dataset Contact +dataset.email.hiddenMessage= +dataset.email.messageSubject=Test Message Subject +dataset.email.datasetLinkBtn.tip=Link Dataset to Your Dataverse +dataset.share.datasetShare=Share Dataset +dataset.share.datasetShare.tip=Share this dataset on your favorite social media networks. +dataset.share.datasetShare.shareText=View this dataset. +dataset.locked.message=Dataset Locked +dataset.locked.inReview.message=Submitted for Review +dataset.publish.error=This dataset may not be published because the {0} Service is currently inaccessible. Please try again. 
Does the issue continue to persist? +dataset.publish.error.doi=This dataset may not be published because the DOI update failed. +dataset.delete.error=Could not deaccession the dataset because the {0} update failed. +dataset.publish.worldMap.deleteConfirm=Please note that your data and map on WorldMap will be removed due to restricted file access changes in this dataset version which you are publishing. Do you want to continue? +dataset.publish.workflow.inprogress=Publish workflow in progress +dataset.versionUI.draft=Draft +dataset.versionUI.inReview=In Review +dataset.versionUI.unpublished=Unpublished +dataset.versionUI.deaccessioned=Deaccessioned +dataset.cite.title.released=DRAFT VERSION will be replaced in the citation with V1 once the dataset has been published. +dataset.cite.title.draft=DRAFT VERSION will be replaced in the citation with the selected version once the dataset has been published. +dataset.cite.title.deassessioned=DEACCESSIONED VERSION has been added to the citation for this version since it is no longer available. +dataset.cite.standards.tip=Learn about Data Citation Standards. +dataset.cite.downloadBtn=Cite Dataset +dataset.cite.downloadBtn.xml=EndNote XML +dataset.cite.downloadBtn.ris=RIS +dataset.cite.downloadBtn.bib=BibTeX +dataset.create.authenticatedUsersOnly=Only authenticated users can create datasets. +dataset.deaccession.reason=Deaccession Reason +dataset.beAccessedAt=The dataset can now be accessed at: +dataset.descriptionDisplay.title=Description +dataset.keywordDisplay.title=Keyword +dataset.subjectDisplay.title=Subject +dataset.contact.tip=Use email button above to contact. +dataset.asterisk.tip=Asterisks indicate required fields +dataset.message.uploadFiles=Upload Dataset Files - You can drag and drop files from your desktop, directly into the upload widget. +dataset.message.editMetadata=Edit Dataset Metadata - Add more metadata about this dataset to help others easily find it. 
+dataset.message.editTerms=Edit Dataset Terms - Update this dataset's terms of use. +dataset.message.locked.editNotAllowedInReview=Dataset cannot be edited due to In Review dataset lock. +dataset.message.locked.downloadNotAllowedInReview=Dataset file(s) may not be downloaded due to In Review dataset lock. +dataset.message.locked.downloadNotAllowed=Dataset file(s) may not be downloaded due to dataset lock. +dataset.message.locked.editNotAllowed=Dataset cannot be edited due to dataset lock. +dataset.message.createSuccess=This dataset has been created. +dataset.message.linkSuccess= {0} has been successfully linked to {1}. +dataset.message.metadataSuccess=The metadata for this dataset has been updated. +dataset.message.termsSuccess=The terms for this dataset have been updated. +dataset.message.filesSuccess=The files for this dataset have been updated. +dataset.message.publishSuccess=This dataset has been published. +dataset.message.only.authenticatedUsers=Only authenticated users may release Datasets. +dataset.message.deleteSuccess=This dataset has been deleted. +dataset.message.bulkFileUpdateSuccess=The selected files have been updated. +dataset.message.bulkFileDeleteSuccess=The selected files have been deleted. +datasetVersion.message.deleteSuccess=This dataset draft has been deleted. +datasetVersion.message.deaccessionSuccess=The selected version(s) have been deaccessioned. +dataset.message.deaccessionSuccess=This dataset has been deaccessioned. +dataset.message.files.ingestSuccess=The file(s) have been successfully ingested. You can now explore them with TwoRavens or download them in alternative formats. +dataset.message.validationError=Validation Error - Required fields were missed or there was a validation error. Please scroll down to see details. +dataset.message.publishFailure=The dataset could not be published. +dataset.message.metadataFailure=The metadata could not be updated. +dataset.message.filesFailure=The files could not be updated. 
+dataset.message.bulkFileDeleteFailure=The selected files could not be deleted. +dataset.message.files.ingestFailure=The file(s) could not be ingested. +dataset.message.deleteFailure=This dataset draft could not be deleted. +dataset.message.deaccessionFailure=This dataset could not be deaccessioned. +dataset.message.createFailure=The dataset could not be created. +dataset.message.termsFailure=The dataset terms could not be updated. +dataset.message.publicInstall=File Access - Files are stored on a publicly accessible storage server. +dataset.metadata.publicationDate=Publication Date +dataset.metadata.publicationDate.tip=The publication date of a dataset. +dataset.metadata.persistentId=Dataset Persistent ID +dataset.metadata.persistentId.tip=The unique persistent identifier for a Dataset, which can be a Handle or DOI in Dataverse. +dataset.versionDifferences.termsOfUseAccess=Terms of Use and Access +dataset.versionDifferences.termsOfUseAccessChanged=Terms of Use/Access Changed +file.viewDiffDialog.restricted=Restricted +dataset.template.tip=Changing the template will clear any fields you may have entered data into. +dataset.noTemplate.label=None +dataset.noSelectedFiles.header=Select File(s) +dataset.noSelectedFilesForDownload=Please select a file or files to be downloaded. +dataset.noSelectedFilesForRequestAccess=Please select a file or files for access request. +dataset.noSelectedFilesForDelete=Please select a file or files to be deleted. +dataset.noSelectedFilesForMetadataEdit=Please select a file or files to be edited. +dataset.noSelectedFilesForRestrict=Please select unrestricted file(s) to be restricted. +dataset.noSelectedFilesForUnRestrict=Please select restricted file(s) to be unrestricted. +dataset.inValidSelectedFilesForDownload=Restricted Files Selected +dataset.noValidSelectedFilesForDownload=The restricted file(s) selected may not be downloaded because you have not been granted access. 
+dataset.mixedSelectedFilesForDownload=The restricted file(s) selected may not be downloaded because you have not been granted access. +dataset.downloadUnrestricted=Click Continue to download the files you have access to download. +dataset.requestAccessToRestrictedFiles=You may request access to the restricted file(s) by clicking the Request Access button. +dataset.privateurl.infoMessageAuthor=Unpublished Dataset Private URL - Privately share this dataset before it is published: {0} +dataset.privateurl.infoMessageReviewer=Unpublished Dataset Private URL - This unpublished dataset is being privately shared. You will not be able to access it when logged into your Dataverse account. +dataset.privateurl.header=Unpublished Dataset Private URL +dataset.privateurl.tip=Use a Private URL to allow those without Dataverse accounts to access your unpublished dataset. For more information about the Private URL feature, please refer to the User Guide. +dataset.privateurl.absent=Private URL has not been created. +dataset.privateurl.createPrivateUrl=Create Private URL +dataset.privateurl.disablePrivateUrl=Disable Private URL +dataset.privateurl.disablePrivateUrlConfirm=Yes, Disable Private URL +dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Private URL? If you have shared the Private URL with others they will no longer be able to use it to access your unpublished dataset. +dataset.privateurl.cannotCreate=Private URL can only be used with unpublished versions of datasets. +dataset.privateurl.roleassigeeTitle=Private URL Enabled +dataset.privateurl.createdSuccess=Success! +dataset.privateurl.disabledSuccess=You have successfully disabled the Private URL for this unpublished dataset. +dataset.privateurl.noPermToCreate=To create a Private URL you must have the following permissions: {0}. 
+file.count={0} {0, choice, 0#Files|1#File|2#Files} +file.count.selected={0} {0, choice, 0#Files Selected|1#File Selected|2#Files Selected} +file.selectToAddBtn=Select Files to Add +file.selectToAdd.tipLimit=File upload limit is {0} bytes per file. +file.selectToAdd.tipMoreInformation=For more information about supported file formats, please refer to the User Guide. +file.selectToAdd.dragdropMsg=Drag and drop files here. +file.createUploadDisabled=Once you have saved your dataset, you can upload your data using the "Upload Files" button on the dataset page. For more information about supported file formats, please refer to the User Guide. +file.fromDropbox=Upload from Dropbox +file.fromDropbox.tip=Files can also be uploaded directly from Dropbox. +file.replace.original=Original File +file.editFiles=Edit Files +file.bulkUpdate=Bulk Update +file.uploadFiles=Upload Files +file.replaceFile=Replace File +file.notFound.tip=There are no files in this dataset. +file.noSelectedFiles.tip=There are no selected files to display. +file.noUploadedFiles.tip=Files you upload will appear here. +file.replace=Replace +file.replaced.warning.header=Edit File +file.replaced.warning.draft.warningMessage=You can not replace a file that has been replaced in a dataset draft. In order to replace it with a different file you must delete the dataset draft. Note that doing so will discard any other changes within this draft. +file.replaced.warning.previous.warningMessage=You can not edit a file that has been replaced in a previous dataset version. In order to edit it you must go to the most recently published version of the file. +file.alreadyDeleted.previous.warningMessage=This file has already been deleted in current version. It may not be edited. +file.delete=Delete +file.metadata=Metadata +file.deleted.success=Files "{0}" will be permanently deleted from this version of this dataset once you click on the Save Changes button. 
+file.deleted.replacement.success=The replacement file has been deleted.
+file.editAccess=Edit Access
+file.restrict=Restrict
+file.unrestrict=Unrestrict
+file.restricted.success=Files "{0}" will be restricted once you click on the Save Changes button.
+file.download.header=Download
+file.download.subset.header=Download Data Subset
+file.preview=Preview:
+file.previewMap=Preview Map:
+file.fileName=File Name
+file.type.tabularData=Tabular Data
+file.originalChecksumType=Original File {0}
+file.checksum.exists.tip=A file with this checksum already exists in the dataset.
+file.selectedThumbnail=Thumbnail
+file.selectedThumbnail.tip=The thumbnail for this file is used as the default thumbnail for the dataset. Click 'Advanced Options' button of another file to select that file.
+file.cloudStorageAccess=Cloud Storage Access
+file.cloudStorageAccess.tip=The container name for this dataset needed to access files in cloud storage.
+file.cloudStorageAccess.help=To directly access this data in the {2} cloud environment, use the container name in the Cloud Storage Access box below. To learn more about the cloud environment, visit the Cloud Storage Access section of the User Guide.
+file.copy=Copy
+file.compute=Compute
+file.rsyncUpload.info=Follow these steps to upload your data. To learn more about the upload process and how to prepare your data, please refer to the User Guide.
+file.rsyncUpload.noScriptAvailable=Rsync script not available!
+file.rsyncUpload.filesExist=You can not upload additional files to this dataset.
+file.rsyncUpload.step1=Make sure your data is stored under a single directory. All files within this directory and its subdirectories will be uploaded to your dataset. 
+file.rsyncUpload.step2=Download this file upload script: +file.rsyncUpload.step2.downloadScriptButton=Download Script +file.rsyncUpload.step3=Open a terminal window in the same directory you saved the script and run this command: bash ./{0} +file.rsyncUpload.step4=Follow the instructions in the script. It will ask for a full path (beginning with "/") to the directory containing your data. Note: this script will expire after 7 days. +file.rsyncUpload.inProgressMessage.summary=DCM File Upload +file.rsyncUpload.inProgressMessage.details=This dataset is locked until the data files have been transferred and verified. + +file.metaData.dataFile.dataTab.variables=Variables +file.metaData.dataFile.dataTab.observations=Observations +file.metaData.viewOnWorldMap=Explore on WorldMap +file.addDescription=Add file description... +file.tags=Tags +file.editTags=Edit Tags +file.editTagsDialog.tip=Select existing file tags or create new tags to describe your files. Each file can have more than one tag. +file.editTagsDialog.select=File Tags +file.editTagsDialog.selectedTags=Selected Tags +file.editTagsDialog.selectedTags.none=No tags selected +file.editTagsDialog.add=Custom File Tag +file.editTagsDialog.add.tip=Creating a new tag will add it as a tag option for all files in this dataset. +file.editTagsDialog.newName=Add new file tag... +dataset.removeUnusedFileTags.label=Delete Tags +dataset.removeUnusedFileTags.tip=Select to delete Custom File Tags not used by the files in the dataset. +dataset.removeUnusedFileTags.check=Delete tags not being used +file.setThumbnail=Set Thumbnail +file.setThumbnail.header=Set Dataset Thumbnail +file.datasetThumbnail=Dataset Thumbnail +file.datasetThumbnail.tip=Select to use this image as the thumbnail image that is displayed in the search results for this dataset. +file.setThumbnail.confirmation=Are you sure you want to set this image as your dataset thumbnail? There is already an image uploaded to be the thumbnail and this action will remove it. 
+file.useThisIamge=Use this image as the dataset thumbnail image +file.advancedOptions=Advanced Options +file.advancedIngestOptions=Advanced Ingest Options +file.assignedDataverseImage.success={0} has been saved as the thumbnail for this dataset. +file.assignedTabFileTags.success=The tag(s) were successfully added for {0}. +file.tabularDataTags=Tabular Data Tags +file.tabularDataTags.tip=Select a tag to describe the type(s) of data this is (survey, time series, geospatial, etc). +file.spss-savEncoding=Language Encoding +file.spss-savEncoding.title=Select the language used for encoding this SPSS (sav) Data file. +file.spss-savEncoding.current=Current Selection: +file.spss-porExtraLabels=Variable Labels +file.spss-porExtraLabels.title=Upload an additional text file with extra variable labels. +file.spss-porExtraLabels.selectToAddBtn=Select File to Add +file.ingestFailed.header=Upload Completed with Errors +file.ingestFailed.message=Tabular data ingest failed. +file.explore.twoRavens=TwoRavens +file.map=Map +file.mapData=Map Data +file.mapData.worldMap=WorldMap +file.mapData.unpublished.header=Data Not Published +file.mapData.unpublished.message=In order to map your data with WorldMap, your data must be published. Please publish this dataset, then retry the Map Data feature. +file.downloadBtn.format.all=All File Formats + Information +file.downloadBtn.format.tab=Tab-Delimited +file.downloadBtn.format.original=Original File Format ({0}) +file.downloadBtn.format.rdata=RData Format +file.downloadBtn.format.var=Variable Metadata +file.downloadBtn.format.citation=Data File Citation +file.more.information.link=Link to more file information for +file.requestAccess=Request Access +file.requestAccess.dialog.msg=You need to Log In to request access to this file. +file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access to this file. 
+file.accessRequested=Access Requested +file.restrictions=File Restrictions +file.restrictions.description=Limit access to published files by marking them as restricted. Provide users Terms of Access and allow them to request access. +file.restrictions.worldmap.warning=Please note, once your file access changes are published your map on WorldMap will be deleted and the Explore on WorldMap feature will be removed. +file.ingestInProgress=Ingest in progress... +file.dataFilesTab.metadata.header=Metadata +file.dataFilesTab.metadata.addBtn=Add + Edit Metadata +file.dataFilesTab.terms.header=Terms +file.dataFilesTab.terms.editTermsBtn=Edit Terms Requirements +file.dataFilesTab.terms.list.termsOfUse.header=Terms of Use +file.dataFilesTab.terms.list.termsOfUse.waiver=Waiver +file.dataFilesTab.terms.list.termsOfUse.waiver.title=The waiver informs data downloaders how they can use this dataset. +file.dataFilesTab.terms.list.termsOfUse.waiver.txt=CC0 - "Public Domain Dedication" +file.dataFilesTab.terms.list.termsOfUse.waiver.description=Datasets will default to a CC0 public domain dedication . CC0 facilitates reuse and extensibility of research data. Our Community Norms as well as good scientific practices expect that proper credit is given via citation. If you are unable to give datasets a CC0 waiver you may enter custom Terms of Use for datasets. +file.dataFilesTab.terms.list.termsOfUse.no.waiver.txt=No waiver has been selected for this dataset. +file.dataFilesTab.terms.list.termsOfUse.waiver.txt.description=Our Community Norms as well as good scientific practices expect that proper credit is given via citation. Please use the data citation above, generated by the Dataverse. 
+file.dataFilesTab.terms.list.termsOfUse.waiver.select.CCO=Yes, apply CC0 - "Public Domain Dedication"
+file.dataFilesTab.terms.list.termsOfUse.waiver.select.notCCO=No, do not apply CC0 - "Public Domain Dedication"
+file.dataFilesTab.terms.list.termsOfUse.waiver.select.tip=This is what end users will see displayed on this dataset.
+file.dataFilesTab.terms.list.termsOfUse.termsOfUse=Terms of Use
+file.dataFilesTab.terms.list.termsOfUse.termsOfUse.title=Outlines how this data can be used once downloaded.
+file.dataFilesTab.terms.list.termsOfUse.termsOfUse.description=If you are unable to use CC0 for datasets you are able to set custom terms of use. Here is an example of a Data Usage Agreement for datasets that have de-identified human subject data.
+file.dataFilesTab.terms.list.termsOfUse.addInfo=Additional Information
+file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration=Confidentiality Declaration
+file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration.title=Indicates whether signing of a confidentiality declaration is needed to access a resource.
+file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions=Special Permissions
+file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions.title=Determine if any special permissions are required to access a resource (e.g., if a form is needed and where to access the form).
+file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions=Restrictions
+file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions.title=Any restrictions on access to or use of the collection, such as privacy certification or distribution restrictions, should be indicated here. These can be restrictions applied by the author, producer, or disseminator of the data collection. If the data are restricted to only a certain class of user, specify which type. 
+file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements=Citation Requirements +file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements.title=Include special/explicit citation requirements for data to be cited properly in articles or other publications that are based on analysis of the data. For standard data citation requirements refer to our Community Norms. +file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements=Depositor Requirements +file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements.title=Information regarding user responsibility for informing Dataset Depositors, Authors or Curators of their use of data through providing citations to the published work or providing copies of the manuscripts. +file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions=Conditions +file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions.title=Any additional information that will assist the user in understanding the access and use conditions of the Dataset. +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer=Disclaimer +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer.title=Information regarding responsibility for uses of the Dataset. +file.dataFilesTab.terms.list.termsOfAccess.header=Restricted Files + Terms of Access +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles=Restricted Files +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.title=The number of restricted files in this dataset. +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=There {0, choice, 0#are|1#is|2#are} {0} restricted {0, choice, 0#files|1#file|2#files} in this dataset. +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess=Terms of Access +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess.title=Information on how and if users can gain access to the restricted files in this dataset. 
+file.dataFilesTab.terms.list.termsOfAccess.requestAccess=Request Access +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title=If checked, users can request access to the restricted files in this dataset. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.request=Users may request access to files. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.notRequest=Users may not request access to files. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.enableBtn=Enable access request +file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace=Data Access Place +file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace.title=If the data is not only in Dataverse, list the location(s) where the data are currently stored. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive=Original Archive +file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive.title=Archive from which the data was obtained. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus=Availability Status +file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus.title=Statement of Dataset availability. A depositor may need to indicate that a Dataset is unavailable because it is embargoed for a period of time, because it has been superseded, because a new edition is imminent, etc. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess=Contact for Access +file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess.title=If different from the Dataset Contact, this is the Contact person or organization (include email or full address, and telephone number if available) that controls access to a collection. 
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection=Size of Collection +file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection.tip=Summary of the number of physical files that exist in a Dataset, recording the number of files that contain data and noting whether the collection contains machine readable documentation and/or other supplementary files and information, such as code, data dictionaries, data definition statements, or data collection instruments. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion=Study Completion +file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion.title=Relationship of the data collected to the amount of data coded and stored in the Dataset. Information as to why certain items of collected information were not included in the dataset or a specific data file should be provided. +file.dataFilesTab.terms.list.guestbook=Guestbook +file.dataFilesTab.terms.list.guestbook.title=User information (i.e., name, email, institution, and position) will be collected when files are downloaded. +file.dataFilesTab.terms.list.guestbook.noSelected.tip=No guestbook is assigned to this dataset, you will not be prompted to provide any information on file download. +file.dataFilesTab.terms.list.guestbook.noSelected.admin.tip=There are no guestbooks available in {0} to assign to this dataset. +file.dataFilesTab.terms.list.guestbook.inUse.tip=The following guestbook will prompt a user to provide additional information when downloading a file. +file.dataFilesTab.terms.list.guestbook.viewBtn=Preview Guestbook +file.dataFilesTab.terms.list.guestbook.select.tip=Select a guestbook to have a user provide additional information when downloading a file. +file.dataFilesTab.terms.list.guestbook.noAvailable.tip=There are no guestbooks enabled in {0}. To create a guestbook, return to {0}, click the "Edit" button and select the "Dataset Guestbooks" option. 
+file.dataFilesTab.terms.list.guestbook.clearBtn=Clear Selection + +file.dataFilesTab.dataAccess=Data Access +file.dataFilesTab.dataAccess.info=This data file can be accessed through a terminal window, using the commands below. For more information about downloading and verifying data, see our User Guide. +file.dataFilesTab.dataAccess.info.draft=Data files can not be accessed until the dataset draft has been published. For more information about downloading and verifying data, see our User Guide. +file.dataFilesTab.dataAccess.local.label=Local Access +file.dataFilesTab.dataAccess.download.label=Download Access +file.dataFilesTab.dataAccess.verify.label=Verify Data +file.dataFilesTab.dataAccess.local.tooltip=If this data is locally available to you, this is its file path. +file.dataFilesTab.dataAccess.download.tooltip=Download this data from your preferred mirror by running this command. +file.dataFilesTab.dataAccess.verify.tooltip=This command runs a checksum to verify the integrity of the data you have downloaded. + +file.dataFilesTab.versions=Versions +file.dataFilesTab.versions.headers.dataset=Dataset +file.dataFilesTab.versions.headers.summary=Summary +file.dataFilesTab.versions.headers.contributors=Contributors +file.dataFilesTab.versions.headers.published=Published +file.dataFilesTab.versions.viewDiffBtn=View Differences +file.dataFilesTab.versions.citationMetadata=Citation Metadata: +file.dataFilesTab.versions.added=Added +file.dataFilesTab.versions.removed=Removed +file.dataFilesTab.versions.changed=Changed +file.dataFilesTab.versions.replaced=Replaced +file.dataFilesTab.versions.original=Original +file.dataFilesTab.versions.replacment=Replacement +file.dataFilesTab.versions.additionalCitationMetadata=Additional Citation Metadata: +file.dataFilesTab.versions.description.draft=This is a draft version. 
+file.dataFilesTab.versions.description.deaccessioned=Due to the previous version being deaccessioned, there are no difference notes available for this published version. +file.dataFilesTab.versions.description.firstPublished=This is the first published version. +file.dataFilesTab.versions.description.deaccessionedReason=Deaccessioned Reason: +file.dataFilesTab.versions.description.beAccessedAt=The dataset can now be accessed at: +file.dataFilesTab.versions.viewDetails.btn=View Details +file.dataFilesTab.versions.widget.viewMoreInfo=To view more information about the versions of this dataset, and to edit it if this is your dataset, please visit the full version of this dataset at the {2}. +file.deleteDialog.tip=Are you sure you want to delete this dataset? You cannot undelete this dataset. +file.deleteDialog.header=Delete Dataset +file.deleteDraftDialog.tip=Are you sure you want to delete this draft version? You cannot undelete this draft. +file.deleteDraftDialog.header=Delete Draft Version +file.deleteFileDialog.tip=The file(s) will be deleted after you click on the Save Changes button on the bottom of this page. +file.deleteFileDialog.immediate=The file will be deleted after you click on the Delete button. +file.deleteFileDialog.multiple.immediate=The file(s) will be deleted after you click on the Delete button. +file.deleteFileDialog.header=Delete Files +file.deleteFileDialog.failed.tip=Files will not be removed from previously published versions of the dataset. +file.deaccessionDialog.tip=Once you deaccession this dataset it will no longer be viewable by the public. +file.deaccessionDialog.version=Version +file.deaccessionDialog.reason.question1=Which version(s) do you want to deaccession? +file.deaccessionDialog.reason.question2=What is the reason for deaccession? 
+file.deaccessionDialog.reason.selectItem.identifiable=There is identifiable data in one or more files +file.deaccessionDialog.reason.selectItem.beRetracted=The research article has been retracted +file.deaccessionDialog.reason.selectItem.beTransferred=The dataset has been transferred to another repository +file.deaccessionDialog.reason.selectItem.IRB=IRB request +file.deaccessionDialog.reason.selectItem.legalIssue=Legal issue or Data Usage Agreement +file.deaccessionDialog.reason.selectItem.notValid=Not a valid dataset +file.deaccessionDialog.reason.selectItem.other=Other (Please type reason in space provided below) +file.deaccessionDialog.enterInfo=Please enter additional information about the reason for deaccession. +file.deaccessionDialog.leaveURL=If applicable, please leave a URL where this dataset can be accessed after deaccessioning. +file.deaccessionDialog.leaveURL.watermark=Optional dataset site, http://... +file.deaccessionDialog.deaccession.tip=Are you sure you want to deaccession? The selected version(s) will no longer be viewable by the public. +file.deaccessionDialog.deaccessionDataset.tip=Are you sure you want to deaccession this dataset? It will no longer be viewable by the public. +file.deaccessionDialog.dialog.selectVersion.tip=Please select version(s) for deaccessioning. +file.deaccessionDialog.dialog.selectVersion.header=Please Select Version(s) +file.deaccessionDialog.dialog.reason.tip=Please select reason for deaccessioning. +file.deaccessionDialog.dialog.reason.header=Please Select Reason +file.deaccessionDialog.dialog.url.tip=Please enter valid forwarding URL. +file.deaccessionDialog.dialog.url.header=Invalid URL +file.deaccessionDialog.dialog.textForReason.tip=Please enter text for reason for deaccessioning. +file.deaccessionDialog.dialog.textForReason.header=Enter additional information +file.deaccessionDialog.dialog.limitChar.tip=Text for reason for deaccessioning may be no longer than 1000 characters. 
+file.deaccessionDialog.dialog.limitChar.header=Limit 1000 characters +file.viewDiffDialog.header=Version Differences Details +file.viewDiffDialog.dialog.warning=Please select two versions to view the differences. +file.viewDiffDialog.version=Version +file.viewDiffDialog.lastUpdated=Last Updated +file.viewDiffDialog.fileID=File ID +file.viewDiffDialog.fileName=Name +file.viewDiffDialog.fileType=Type +file.viewDiffDialog.fileSize=Size +file.viewDiffDialog.category=Tag(s) +file.viewDiffDialog.description=Description +file.viewDiffDialog.fileReplaced=File Replaced +file.viewDiffDialog.filesReplaced=File(s) Replaced +file.viewDiffDialog.files.header=Files +file.viewDiffDialog.msg.draftFound= This is the "DRAFT" version. +file.viewDiffDialog.msg.draftNotFound=The "DRAFT" version was not found. +file.viewDiffDialog.msg.versionFound= This is version "{0}". +file.viewDiffDialog.msg.versionNotFound=Version "{0}" was not found. +file.metadataTip=Metadata Tip: After adding the dataset, click the Edit Dataset button to add more metadata. +file.addBtn=Save Dataset +file.dataset.allFiles=All Files from this Dataset +file.downloadDialog.header=Download File +file.downloadDialog.tip=Please confirm and/or complete the information needed below in order to download files in this dataset. +file.downloadDialog.termsTip=I accept these Terms of Use. +file.requestAccessTermsDialog.tip=Please confirm and/or complete the information needed below in order to request access to files in this dataset. +file.search.placeholder=Search this dataset... 
+file.results.btn.sort=Sort +file.results.btn.sort.option.nameAZ=Name (A-Z) +file.results.btn.sort.option.nameZA=Name (Z-A) +file.results.btn.sort.option.newest=Newest +file.results.btn.sort.option.oldest=Oldest +file.results.btn.sort.option.size=Size +file.results.btn.sort.option.type=Type +file.compute.fileRestricted=File Restricted +file.compute.fileAccessDenied=You cannot compute on this restricted file because you do not have permission to access it. +file.configure.Button=Configure +file.configure.launchMessage.details=Please refresh this page once you have finished configuring your +dataset.compute.datasetCompute=Dataset Compute Not Supported +dataset.compute.datasetAccessDenied=You cannot compute on this dataset because you do not have permission to access all of the restricted files. +dataset.compute.datasetComputeDisabled=You cannot compute on this dataset because this functionality is not enabled yet. Please click on a file to access computing features. + +# dataset-widgets.xhtml +dataset.widgets.title=Dataset Thumbnail + Widgets +dataset.widgets.notPublished.why.header=Why Use Widgets? +dataset.widgets.notPublished.why.reason1=Increases the web visibility of your data by allowing you to embed your dataverse and datasets into your personal or project website. +dataset.widgets.notPublished.why.reason2=Allows others to browse your dataverse and datasets without leaving your personal or project website. +dataset.widgets.notPublished.how.header=How To Use Widgets +dataset.widgets.notPublished.how.tip1=To use widgets, your dataverse and datasets need to be published. +dataset.widgets.notPublished.how.tip2=After publishing, code will be available on this page for you to copy and add to your personal or project website. +dataset.widgets.notPublished.how.tip3=Do you have an OpenScholar website? If so, learn more about adding the Dataverse widgets to your website here. +dataset.widgets.notPublished.getStarted=To get started, publish your dataset. 
To learn more about Widgets, visit the Widgets section of the User Guide.
+dataset.widgets.editAdvanced=Edit Advanced Options
+dataset.widgets.editAdvanced.tip=Advanced Options – Additional options for configuring your widget on your personal or project website.
+dataset.widgets.tip=Copy and paste this code into the HTML on your site. To learn more about Widgets, visit the Widgets section of the User Guide.
+dataset.widgets.citation.txt=Dataset Citation
+dataset.widgets.citation.tip=Add a citation for your dataset to your personal or project website.
+dataset.widgets.datasetFull.txt=Dataset
+dataset.widgets.datasetFull.tip=Add a way for visitors on your website to be able to view your datasets, download files, etc.
+dataset.widgets.advanced.popup.header=Widget Advanced Options
+dataset.widgets.advanced.prompt=Forward persistent URLs in your dataset citation to your personal website.
+dataset.widgets.advanced.url.label=Personal Website URL
+dataset.widgets.advanced.url.watermark=http://www.example.com/page-name
+dataset.widgets.advanced.invalid.message=Please enter a valid URL
+dataset.widgets.advanced.success.message=Successfully updated your Personal Website URL
+dataset.widgets.advanced.failure.message=The dataverse Personal Website URL has not been updated.
+dataset.thumbnailsAndWidget.breadcrumbs.title=Thumbnail + Widgets
+dataset.thumbnailsAndWidget.thumbnails.title=Thumbnail
+dataset.thumbnailsAndWidget.widgets.title=Widgets
+dataset.thumbnailsAndWidget.thumbnailImage=Thumbnail Image
+dataset.thumbnailsAndWidget.thumbnailImage.title=The logo or image file you wish to display as the thumbnail of this dataset.
+dataset.thumbnailsAndWidget.thumbnailImage.tip=Supported image types are JPG, TIF, or PNG and should be no larger than {0} KB. The maximum display size for an image file as a dataset thumbnail is 48 pixels wide by 48 pixels high. 
+dataset.thumbnailsAndWidget.thumbnailImage.default=Default Icon +dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable=Select Available File +dataset.thumbnailsAndWidget.thumbnailImage.selectThumbnail=Select Thumbnail +dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable.title=Select a thumbnail from those available as image data files that belong to your dataset. +dataset.thumbnailsAndWidget.thumbnailImage.uploadNew=Upload New File +dataset.thumbnailsAndWidget.thumbnailImage.uploadNew.title=Upload an image file as your dataset thumbnail, which will be stored separately from the data files that belong to your dataset. +dataset.thumbnailsAndWidget.thumbnailImage.upload=Upload Image +dataset.thumbnailsAndWidget.thumbnailImage.upload.invalidMsg=The image could not be uploaded. Please try again with a JPG, TIF, or PNG file. +dataset.thumbnailsAndWidget.success=Dataset thumbnail updated. +dataset.thumbnailsAndWidget.removeThumbnail=Remove Thumbnail +dataset.thumbnailsAndWidget.removeThumbnail.tip=You are only removing this image as the dataset thumbnail, not removing it from your dataset. To do that, go to the Edit Files page. +dataset.thumbnailsAndWidget.availableThumbnails=Available Thumbnails +dataset.thumbnailsAndWidget.availableThumbnails.tip=Select a thumbnail from the data files that belong to your dataset. Continue back to the Thumbnail + Widgets page to save your changes. + +# file.xhtml +file.share.fileShare=Share File +file.share.fileShare.tip=Share this file on your favorite social media networks. +file.share.fileShare.shareText=View this file. 
+file.title.label=Title +file.citation.label=Citation +file.cite.downloadBtn=Cite Data File +file.general.metadata.label=General Metadata +file.description.label=Description +file.tags.label=Tags +file.lastupdated.label=Last Updated +file.DatasetVersion=Version +file.metadataTab.fileMetadata.header=File Metadata +file.metadataTab.fileMetadata.persistentid.label=Data File Persistent ID +file.metadataTab.fileMetadata.downloadUrl.label=Download URL +file.metadataTab.fileMetadata.unf.label=UNF +file.metadataTab.fileMetadata.size.label=Size +file.metadataTab.fileMetadata.type.label=Type +file.metadataTab.fileMetadata.description.label=Description +file.metadataTab.fileMetadata.publicationDate.label=Publication Date +file.metadataTab.fileMetadata.depositDate.label=Deposit Date +file.metadataTab.fitsMetadata.header=FITS Metadata +file.metadataTab.provenance.header=File Provenance +file.metadataTab.provenance.body=File Provenance information coming in a later release... +file.versionDifferences.noChanges=No changes associated with this version +file.versionDifferences.fileNotInVersion=File not included in this version +file.versionDifferences.actionChanged=Changed +file.versionDifferences.actionAdded=Added +file.versionDifferences.actionRemoved=Removed +file.versionDifferences.actionReplaced=Replaced +file.versionDifferences.fileMetadataGroupTitle=File Metadata +file.versionDifferences.fileTagsGroupTitle=File Tags +file.versionDifferences.descriptionDetailTitle=Description +file.versionDifferences.fileNameDetailTitle=File Name +file.versionDifferences.fileAccessTitle=File Access +file.versionDifferences.fileRestricted=Restricted +file.versionDifferences.fileUnrestricted=Unrestricted +file.versionDifferences.fileGroupTitle=File + +# File Ingest +ingest.csv.invalidHeader=Invalid header row. One of the cells is empty. +ingest.csv.lineMismatch=Mismatch between line counts in first and final passes!, {0} found on first pass, but {1} found on second. 
+ingest.csv.recordMismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found. +ingest.csv.nullStream=Stream can't be null. + +# editdatafile.xhtml + +# editFilesFragment.xhtml +file.edit.error.file_exceeds_limit=This file exceeds the size limit. +# File metadata error +file.metadata.datafiletag.not_tabular=You cannot add Tabular Data Tags to a non-tabular file. + +# File Edit Success +file.message.editSuccess=This file has been updated. +file.message.deleteSuccess=The file has been deleted. +file.message.replaceSuccess=This file has been replaced. + +# File Add/Replace operation messages +file.addreplace.file_size_ok=File size is in range. +file.addreplace.error.file_exceeds_limit=This file size ({0}) exceeds the size limit of {1} bytes. +file.addreplace.error.dataset_is_null=The dataset cannot be null. +file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null. +find.dataset.error.dataset_id_is_null=When accessing a dataset based on Persistent ID, a {0} query parameter must be present. +find.dataset.error.dataset.not.found.persistentId=Dataset with Persistent ID {0} not found. +find.dataset.error.dataset.not.found.id=Dataset with ID {0} not found. +find.dataset.error.dataset.not.found.bad.id=Bad dataset ID number: {0}. +file.addreplace.error.dataset_id_not_found=There was no dataset found for ID: +file.addreplace.error.no_edit_dataset_permission=You do not have permission to edit this dataset. +file.addreplace.error.filename_undetermined=The file name cannot be determined. +file.addreplace.error.file_content_type_undetermined=The file content type cannot be determined. +file.addreplace.error.file_upload_failed=The file upload failed. +file.addreplace.error.duplicate_file=This file already exists in the dataset. +file.addreplace.error.existing_file_to_replace_id_is_null=The ID of the existing file to replace must be provided. +file.addreplace.error.existing_file_to_replace_not_found_by_id=Replacement file not found. 
There was no file found for ID: {0} +file.addreplace.error.existing_file_to_replace_is_null=The file to replace cannot be null. +file.addreplace.error.existing_file_to_replace_not_in_dataset=The file to replace does not belong to this dataset. +file.addreplace.error.existing_file_not_in_latest_published_version=You cannot replace a file that is not in the most recently published dataset. (The file is unpublished or was deleted from a previous version.) +file.addreplace.content_type.header=File Type Different +file.addreplace.error.replace.new_file_has_different_content_type=The original file ({0}) and replacement file ({1}) are different file types. +file.addreplace.error.replace.new_file_same_as_replacement=You cannot replace a file with the exact same file. +file.addreplace.error.unpublished_file_cannot_be_replaced=You cannot replace an unpublished file. Please delete it instead of replacing it. +file.addreplace.error.ingest_create_file_err=There was an error when trying to add the new file. +file.addreplace.error.initial_file_list_empty=An error occurred and the new file was not added. +file.addreplace.error.initial_file_list_more_than_one=You cannot replace a single file with multiple files. The file you uploaded was ingested into multiple files. +file.addreplace.error.final_file_list_empty=There are no files to add. (This error should not happen if steps called in sequence.) +file.addreplace.error.only_replace_operation=This should only be called for file replace operations! +file.addreplace.error.failed_to_remove_old_file_from_dataset=Unable to remove old file from new DatasetVersion. +file.addreplace.error.add.add_file_error=Failed to add file to dataset. +file.addreplace.error.phase2_called_early_no_new_files=There was an error saving the dataset - no new files found. +file.addreplace.success.add=File successfully added! +file.addreplace.success.replace=File successfully replaced! +file.addreplace.error.auth=The API key is invalid. 
+file.addreplace.error.invalid_datafile_tag=Not a valid Tabular Data Tag: + +# 500.xhtml +error.500.page.title=500 Internal Server Error +error.500.message=Internal Server Error - An unexpected error was encountered, no more information is available. + +# 404.xhtml +error.404.page.title=404 Not Found +error.404.message=Page Not Found - The page you are looking for was not found. + +# 403.xhtml +error.403.page.title=403 Not Authorized +error.403.message=Not Authorized - You are not authorized to view this page. + +# general error - support message +error.support.message= If you believe this is an error, please contact {0} for assistance. + +# citation-frame.xhtml +citationFrame.banner.message=If the site below does not load, the archived data can be found in the {0} {1}. {2} +citationFrame.banner.message.here=here +citationFrame.banner.closeIcon=Close this message, go to dataset +citationFrame.banner.countdownMessage= This message will close in +citationFrame.banner.countdownMessage.seconds=seconds + +# Friendly AuthenticationProvider names +authenticationProvider.name.builtin=Dataverse +authenticationProvider.name.null=(provider is unknown) +authenticationProvider.name.github=GitHub +authenticationProvider.name.google=Google +authenticationProvider.name.orcid=ORCiD +authenticationProvider.name.orcid-sandbox=ORCiD Sandbox +authenticationProvider.name.shib=Shibboleth +ingest.csv.invalidHeader=Invalid header row. One of the cells is empty. +ingest.csv.lineMismatch=Mismatch between line counts in first and final passes!, {0} found on first pass, but {1} found on second. +ingest.csv.recordMismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found. +ingest.csv.nullStream=Stream can't be null. 
diff --git a/dataversedock/lang.properties/Bundle_es_ES.properties b/dataversedock/lang.properties/Bundle_es_ES.properties new file mode 100644 index 0000000..09921b0 --- /dev/null +++ b/dataversedock/lang.properties/Bundle_es_ES.properties @@ -0,0 +1,1386 @@ +# New labels for Spanish translation +#EMailValidator +emailvalidator.notValid={0} is not a valid email address. + +#dataset.xhtml +Author=Autor +Contact=Contacto +Description=Descripci\u00f3n +Keyword=Palabra Clave +Notes=Notas +Depositor=Depositante +Name=Nombre +Affiliation=Afiliaci\u00f3n +Identifier\u0020Scheme=Identifier Scheme +Identifier=Identificador +E-mail=E-mail +Text=Texto +Date=Fecha +Term=T\u00e9rmino +Vocabulary=Vocabulario +Vocabulary\u0020URL=URL del Vocabulario + +Topic\u0020Classification=Clasificaci\u00f3n por Temas +Related\u0020Publication=Publicaci\u00f3n Relacionada +Subtitle=Subt\u00edtulo +Alternative\u0020Title=T\u00edtulo Alternativo +Other\u0020ID=Otro ID +Agency=Agencia +Citation=Cita +ID\u0020Type=Tipo de ID +ID\u0020Number=N\u00famero de ID +URL=URL +Producer=Productor +Abbreviation=Abreviaci\u00f3n +Production\u0020Place=Lugar de Production +Type=Tipo +Grant\u0020Information=Informaci\u00f3n de Subvenci\u00f3n +Grant\u0020Agency=Agencia Subvencionadora +Grant\u0020Number=N\u00famero de Subvenci\u00f3n +Distributor=Distribuidor +Logo\u0020URL=URL del Logo +Time\u0020Period\u0020Covered=Cobertura Temporal +Start=Inicio +End=Fin +Date\u0020of\u0020Collection=Fecha de Recolecci\u00f3n +Series=Series +Information=Informaci\u00f3n +Software=Software +Version=Versi\u00f3n +Related\u0020Material=Material Relacionado +Related\u0020Datasets=Datasets Relacionados +Other\u0020References=Otras Referencias +Data\u0020Sources=Fuentes de Datos +Origin\u0020of\u0020Sources=Origen de las Fuentes +Characteristic\u0020of\u0020Sources\u0020Noted=Caracter\u00edsticas de las Fuentes +Documentation\u0020and\u0020Access\u0020to\u0020Sources=Documentaci\u00f3n y Acceso a las Fuentes 
+Geographic\u0020Coverage=Cobertura Geogr\u00e1fica +Country\u0020/\u0020Nation=Pa\u00eds / Naci\u00f3n +State\u0020/\u0020Province=Estado / Provincia +City=Ciudad +Other=Otro +Geographic\u0020Unit=Unidad Geogr\u00e1fica +Geographic\u0020Bounding\u0020Box=Cuadro de Delimitaci\u00f3n Geogr\u00e1fica +West\u0020Longitude=Longitud Oeste +East\u0020Longitude=Longitud Este +North\u0020Latitude=Latitud Norte +South\u0020Latitude=Latitud Sur +Unit\u0020of\u0020Analysis=Unidad de An\u00e1lisis +Universe=Universo +Time\u0020Method=M\u00e9todo Temporal +Data\u0020Collector=Recolector de Datos +Collector\u0020Training=Recolecci\u00f3n de Pruebas +Frequency=Frequencia +Sampling\u0020Procedure=Procedimiento de Muestreo +Target\u0020Sample\u0020Size=Tama\u00f1o del objetivo del Muestreo +Actual=Real +Formula=F\u00f3rmula +Major\u0020Deviations\u0020for\u0020Sample\u0020Design=Major Deviations for Sample Design +Collection\u0020Mode=M\u00e9todo de Recolecci\u00f3n +Type\u0020of\u0020Research\u0020Instrument=Tipo de Instrumento de Investigaci\u00f3n +Characteristics\u0020of\u0020Data\u0020Collection\u0020Situation=Caracter\u00edsticas de la situaci\u00f3n de la Colecci\u00f3n de Datos +Actions\u0020to\u0020Minimize\u0020Losses=Acciones para Minimizar P\u00e9rdidas +Control\u0020Operations=Operaciones de Control +Weighting=Ponderaci\u00f3n +Cleaning\u0020Operations=Operaciones de Limpieza +Study\u0020Level\u0020Error\u0020Notes=Nivel de Estudio de las Anotaciones de Errores +Response\u0020Rate=Tasa de Respuesta +Estimates\u0020of\u0020Sampling\u0020Error=Estimaci\u00f3n del Error de Muestreo +Other\u0020Forms\u0020of\u0020Data\u0020Appraisal=Otras Formas de Estimaci\u00f3n de Datos +Facility=Servicio +Instrument=Instrumento +Object=Objeto +Spatial\u0020Resolution=Resoluci\u00f3n Espacial +Spectral\u0020Resolution=Resoluci\u00f3n Espectral +Time\u0020Resolution=Resoluci\u00f3n Temporal +Bandpass=Paso Banda +Central\u0020Wavelength\u0020(m)=Longitud de Onda Central (m) 
+Wavelength\u0020Range=Rango de Longitud de Onda +Minimum\u0020(m)=M\u00ednimo (m) +Maximum\u0020(m)=M\u00e1ximo (m) +Dataset\u0020Date\u0020Range=Rango de Fechas del Dataset +Sky\u0020Coverage=Cobertura del Cielo +Depth\u0020Coverage=Cobertura Profunda +Object\u0020Density=Densidad del Objeto +Object\u0020Count=N\u00famero de Objetos +Fraction\u0020of\u0020Sky=Porci\u00f3n de Cielo +Polarization=Polarizaci\u00f3n +RedshiftType=Tipo de Corrimiento al Rojo +Redshift\u0020Resolution=Resoluci\u00f3n del Corrimiento al Rojo +Redshift\u0020Value=Valor del Corrimiento al Rojo +Minimum=M\u00ednimo +Maximum=M\u00e1ximo +Design\u0020Type=Tipo de Dise\u00f1o +Factor\u0020Type=Tipo de Factor +Organism=Organismo +Other\u0020Organism=Otro Organismo +Measurement\u0020Type=Tipo de Medidas +Other\u0020Measurement\u0020Type=Otros tipos de Medidas +Technology\u0020Type=Tipo de Tecnolog\u00eda +Technology\u0020Platform=Plataforma Tecnol\u00f3gica +Cell\u0020Type=Tipo de Celda +Journal=Revista +Volume=Volumen +Issue=N\u00famero +Type\u0020of\u0020Article=Tipo de Art\u00edculo +Identifier\u0020Scheme=Esquema del Identificador + +Citation\u0020Metadata=Metadatos de Cita +Geospatial\u0020Metadata=Metadatos Geoespaciales +Social\u0020Science\u0020and\u0020Humanities\u0020Metadata=Metadatos de Ciencias Sociales y Humanidades +Astronomy\u0020and\u0020Astrophysics\u0020Metadata=Metadatos de Astronom\u00eda y Astrof\u00edsica +Life\u0020Sciences\u0020Metadata=Metadatos de Ciencias de la Vida +Journal\u0020Metadata=Metadatos de Revista + +file.dataFilesTab.fileRestrictions=Restricciones del Fichero +datset.replicationDataFor=Datos de r\u00e9plica para: + +#Permission.java +permission.addDataverseDataverse=A\u00f1adir un dataverse dentro de otro dataverse +permission.deleteDataset=Borrar la versi\u00f3n preliminar del dataset +permission.deleteDataverse=Borrar un dataverse sin publicar +permission.publishDataset=Publicar un dataset +permission.publishDataverse=Publicar un dataverse 
+permission.managePermissionsDataset=Administrar los permisos de un dataset +permission.managePermissionsDataverse=Administrar los permisos de un dataverse +permission.editDataset=Editar los metadatos de un dataset +permission.editDataverse=Editar los metadatos, facetas, personalizaci\u00f3n y plantillas de un dataverse +permission.downloadFile=Descargar un fichero +permission.viewUnpublishedDataset=Ver un dataset sin publicar y sus ficheros +permission.viewUnpublishedDataverse=Ver un dataverse sin publicar +permission.addDatasetDataverse=A\u00f1adir un dataset a un dataverse + +#ManagePermissionsPage and ManageFilePermissionsPage +permission.roleWasRemoved=El rol {0} fue eliminado para {1}. +permission.roleNotAbleToBeRemoved=La asignaci\u00f3n del rol no pudo ser eliminada. +permission.permissionsMissing=Perdidos los permisos de {0}. +permission.fileAccessGranted=El acceso a ficheros para {0} fue concedido. +permission.fileAccessRejected=El acceso a ficheros para {0} fue rechazado. +permission.roleAssignedToFor=rol {0} asignado a {1} para {2}. +permission.roleNotAbleToBeAssigned=El rol no pudo ser asignado. +permission.defaultPermissionDataverseUpdated=Se han actualizado los permisos por defecto para este dataverse. +permission.CannotAssigntDefaultPermissions=No se pueden asignar los permisos por defecto. +permission.errorAssigningRole=Error asignando rol: {0} +permission.updated=actualizado +permission.created=creado +permission.roleWas=El rol ha sido {0}. Para asignarle a un usuario y/o grupo, pincha en el bot\u00f3n "Asignar Roles a Usuarios/Grupos" en la secci\u00f3n "Usuarios/Grupos" de esta p\u00e1gina. +permission.roleNotSaved=No se pudo guardar el rol. 
+permission.anyoneWithAccount=Cualquiera con cuenta en Dataverse + +#datasetFieldForEditFragment.xhtml +dataset.AddReplication=A\u00f1ada "Datos de R\u00e9plica" al T\u00edtulo + +#result_message_only.html +result.status=Estado +result.role=Rol +result.to=De +result.of=A +result.result=Resultado +result.results=Resultados + +#loginpage.xhtml bundle [fc.credential.title]. Username and Password translation +Username=Usuario +Password=Contrase\u00f1a + +#search-include-fragment.xhtml bundle[facetCategory.friendlyName] +Dataverse\u0020Category=Categor\u00eda del Dataverse +Publication\u0020Date=Fecha de Publicaci\u00f3n +Author\u0020Name=Autor +Subject=Materia +Deposit\u0020Date=Fecha de Dep\u00f3sito +File\u0020Type=Tipo de Fichero +File\u0020Tag=Etiqueta de Fichero +Access=Acceso +Keyword\u0020Term=Palabra Clave +Author\u0020Affiliation=Afiliaci\u00f3n +Language=Idioma +Kind\u0020of\u0020Data=Tipo de Datos +Publication\u0020Status=Estado de la Publicaci\u00f3n + +#dataverseuser.xhtml bundle [DataverseUserPage.editMode == 'CREATE' ? 'Password' : 'New Password'] +user.password=Contrase\u00f1a +user.newPassword=Nueva Contrase\u00f1a + +#mydata_fragment.xhtml +mydataFragment.infoAccess=Aqu\u00ed puedes ver todos los dataverses, datasets y ficheros a los que tienes acceso. Puedes filtrarlos por estado de publicaci\u00f3n y rol del usuario. +mydataFragment.moreResults=Ver m\u00e1s resultados +mydataFragment.publicacionStatus=Estado de la publicaci\u00f3n +mydataFragment.roles=Roles +mydataFragment.resultsByUserName=Resultados por nombre de usuario +mydataFragment.search=Buscar en mis datos... 
+ +Published=Publicado +Unpublished=Sin Publicar +Draft=Versi\u00f3n preliminar +In\u0020Review=En Revisi\u00f3n +Deaccessioned=Retirado + +Admin=Administrador +File\u0020Downloader=Descarga de Ficheros +Dataverse\u0020+\u0020Dataset\u0020Creator=Creador de Dataverses + Datasets +Dataverse\u0020Creator=Creador de Dataverses +Dataset\u0020Creator=Creador de Datasets +Contributor=Colaborador +Curator=Conservador/Revisor +Member=Miembro + +#webapp/search/advanced.xhtml #{bundle[item.displayName] +Title=T\u00edtulo +Description\u0020Text=Texto de Descripci\u00f3n +Topic\u0020Classification\u0020Term=T\u00e9rmino en su Clasificaci\u00f3n por Temas +Related\u0020Publication\u0020Citation=Cita de la Publicaci\u00f3n Relacionada +Related\u0020Publication\u0020ID\u0020Type=Tipo de ID de la Publicaci\u00f3n Relacionada +Related\u0020Publication\u0020ID\u0020Number=N\u00famero de ID de la Publicaci\u00f3n Relacionada +Producer\u0020Name=Nombre del Productor +Production\u0020Date=Fecha de Producci\u00f3n +Contributor\u0020Type=Tipo de Colaborador +Contributor\u0020Name=Nombre del Colaborador +Distributor\u0020Name=Nombre del Distribuidor +Distribution\u0020Date=Fecha de Distribuci\u00f3n +Time\u0020Period\u0020Covered\u0020Start=Inicio del Tiempo de Cobertura +Time\u0020Period\u0020Covered\u0020End=Fin del Tiempo de Cobertura +Series\u0020Name=Nombre de las Series + +#SystemConfig +system.app.terms=No hay Condiciones de Uso en esta instalaci\u00f3n de Dataverse. +system.api.terms=No hay Condiciones de Uso para el API en esta instalaci\u00f3n de Dataverse. + +#messages.xhtml +iqbs.message.validationErrorStrong={0}Error de Validaci\u00f3n{1} - Hab\u00eda campos obligatorios sin rellenar o hubo un error de validaci\u00f3n. Por favor, haga scroll para ver los detalles. +iqbs.message.success=\u00a1\u00c9xito! 
+iqbs.message.info=Info +iqbs.message.error=Error + +#iqbs/messages.xhtml +Please=Por favor, +contact\u0020support=contacte con su administrador + +#LoginPage.java +login.UserName=Por favor, introduzca su Usuario +login.Password=Por favor, introduzca su Contrase\u00f1a + +# PasswordResetPage.java +passwordReset.initiated=Se ha iniciado el reinicio de contrase\u00f1a + +# BuiltinUserPage +userPage.informationUpdated=Se ha actualizado la informaci\u00f3n de su cuenta con \u00e9xito. +userPage.passwordChanged=Ha cambiado con \u00e9xito su contrase\u00f1a. +userPage.usernameIncorrect=Nombre de usuario o e-mail err\u00f3neo +userPage.passwordStillNull=la contrase\u00f1a est\u00e1 todav\u00eda vac\u00eda. +userPage.passwordNotComplex=La contrase\u00f1a es demasiado simple. La contrase\u00f1a tiene que tener por lo menos una letra, un n\u00famero y {0} caracteres de longitud. +userPage.newPasswordNotBlank=la nueva contrase\u00f1a no est\u00e1 vac\u00eda +userPage.newPasswordBlankRetype=La nueva contrase\u00f1a est\u00e1 vac\u00eda, escr\u00edbala de nuevo. +userPage.newPasswordBlank=la nueva contrase\u00f1a est\u00e1 vac\u00eda +userPage.passwordIncorrect=La contrase\u00f1a es incorrecta. +userPage.passwordNotBlank=la contrase\u00f1a actual no est\u00e1 vac\u00eda +userPage.passwordBlankRetype=La contrase\u00f1a est\u00e1 vac\u00eda, escr\u00edbala de nuevo. +userPage.passwordError=Error con la contrase\u00f1a +userPage.passwordBlank=la contrase\u00f1a actual est\u00e1 vac\u00eda + +# DataRetrieverAPI +noResultsFound=Lo siento, no se encontraron resultados. 
+ +# End of new labels for Spanish translation + + +dataverse=Dataverse +newDataverse=Nuevo Dataverse +hostDataverse=Servidor Dataverse +passwd=Contrase\u00f1a +dataset=Dataset +newDataset=Nuevo Dataset +files=Ficheros +file=Fichero +restricted=Restringido +restrictedaccess=Acceso Restringido +find=Buscar +search=Buscar +unpublished=Sin publicar +cancel=Cancelar +saveChanges=Guardar Cambios +acceptTerms=Aceptar +submit=Enviar +signup=Crear Cuenta +login=Iniciar Sesi\u00f3n +email=E-mail +account=Cuenta +requiredField=Campo obligatorio +new=Nuevo/a +identifier=Identificador +description=Descripci\u00f3n +subject=Materia +close=Cerrar +continue=Continuar +name=Nombre +institution=Instituci\u00f3n +position=Posici\u00f3n +affiliation=Afiliaci\u00f3n +createDataverse=Crear Dataverse +remove=Borrar +done=Hecho +editor=Editor +manager=Administrador +curator=Conservador/Revisor +explore=Explorar +download=Descargar +deaccession=Retirada +linked=Enlazado +harvested=Recolectado +add=A\u00f1adir +delete=Borrar +yes=S\u00ed +no=No +previous=Anterior +next=Siguiente +more=M\u00e1s... +less=Menos... +select=Seleccionar... +selectedFiles=Ficheros Seleccionados +htmlAllowedTitle=Etiquetas HTML Permitidas +htmlAllowedMsg=Este campo s\u00f3lo admite algunas etiquetas HTML. +htmlAllowedTags=, ,
            <a>, <b>, <blockquote>, <br/>, <code>, <del>, <dd>, <dl>, <dt>, <em>, <hr>, <h1>-<h3>, <i>, <img>, <kbd>, <li>, <ol>, <p>, <pre>, <s>, <sup>, <sub>, <strong>, <strike>, <u>, <ul> 
                + +# dataverse_header.xhtml + +header.status.header=Estado +header.search.title=Buscar en todos los dataverses... +header.about=Acerca de +header.support=Soporte +header.guides=Gu\u00edas +header.guides.user=Gu\u00eda de Usuario +header.guides.developers=Gu\u00eda del Desarrollador +header.guides.installation=Gu\u00eda de Instalaci\u00f3n +header.guides.api=Gu\u00eda del API +header.signUp=Crear Cuenta +header.logOut=Salir +header.accountInfo=Informaci\u00f3n de la Cuenta +header.user.selectTab.dataRelated=Mis Datos +header.user.selectTab.notifications=Notificaciones +header.user.selectTab.groupsAndRoles=Grupos + Roles + +# dataverse_template.xhtml + +head.meta.description=El proyecto Dataverse es una aplicaci\u00f3n de software de c\u00f3digo abierto para compartir, citar y archivar datos. Dataverse proporciona a los administradores de datos una infraestructura robusta para administrar, alojar y archivar datos, a la vez que ofrece a los investigadores una forma sencilla de compartir y obtener reconocimiento por sus datos. +body.skip=Ir al contenido principal +footer.codeAvailable=C\u00f3digo disponible en +footer.dataverseOnGitHub=Dataverse en GitHub +footer.dataverseProjectOn=Proyecto Dataverse en +footer.Twitter=Twitter +footer.dataScienceIQSS=Data Science at The Institute for Quantitative Social Science +footer.copyright=Copyright © 2015, The President & Fellows of Harvard College +footer.privacyPolicy=Pol\u00edtica de Privacidad +footer.poweredby=Powered by +# Should we translate the project name in main page? +footer.dataverseProject=The Dataverse Project + +# contactFormFragment.xhtml + +contact.header=Contactar con el Soporte de Dataverse +contact.dataverse.header=Contactar con el Administrador de Dataverse +contact.dataset.header=Contactar con el Administrador de Dataset +contact.to=Para +contact.support=Soporte de Dataverse +contact.from=De +contact.from.required=El e-mail es obligatorio. +contact.from.invalid=El e-mail no es v\u00e1lido. 
+contact.subject=Asunto +contact.subject.required=El asunto es obligatorio. +contact.subject.selectTab.top=Seleccionar asunto... +contact.subject.selectTab.support=Pregunta de Soporte +contact.subject.selectTab.dataIssue=Problema con los datos +contact.msg=Mensaje +contact.msg.required=El texto del mensaje es obligatorio. +contact.send=Enviar Mensaje +contact.question=Por favor, rellena este campo para demostrar que no eres un robot. +contact.sum.required=El valor es obligatorio. +contact.sum.invalid=Suma err\u00f3nea. Por favor, prueba otra vez. +contact.sum.converterMessage=Por favor, introduce un n\u00famero. +contact.contact=Contacto + + +# dataverseuser.xhtml + +account.info=Informaci\u00f3n de la Cuenta +account.edit=Editar Cuenta +apiTaken=Token del API +user.toEditDetail=Su sesi\u00f3n se ha validado a trav\u00e9s de su instituci\u00f3n. Si necesita actualizar alguna informaci\u00f3n, por favor, contacte con su instituci\u00f3n. +user.lostPasswdTip=Si pierde u olvida su contrase\u00f1a, por favor, introduzca su identificador de usuario o e-mail m\u00e1s abajo y pulse en Enviar. Le enviaremos un e-mail con su nueva contrase\u00f1a. +user.dataRelatedToMe=Mis Datos +wasCreatedIn=, fue creado el +wasCreatedTo=, fue a\u00f1adido a +wasPublished=, fue publicado el +wasSubmittedForReview=, fue enviado a revisi\u00f3n para su publicaci\u00f3n el +wasReturnedByReviewer=, fue devuelto por el conservador/revisor de +toReview=\!No olvide publicarlo o devolverlo al colaborador\! +worldMap.added=el dataset ten\u00eda a\u00f1adida una capa de datos de WorldMap. +notification.welcome=Bienvenido a {0} {1}! \u00bfNecesita ayuda? Revise la Gu\u00eda de Usuario o pregunte en el Grupo de Google Comunidad Dataverse (en ingl\u00e9s). +notification.requestFileAccess=Acceso a ficheros solicitado para el dataset: {1}. +notification.grantFileAccess=Acceso permitido para los ficheros del dataset: {1}. +notification.rejectFileAccess=Acceso denegado para los ficheros del dataset: {1}. 
+notification.createDataverse={1} creado en {3}. Para aprender m\u00e1s sobre lo que puede hacer con su dataverse, puede ver la Gu\u00eda de Usuario. +notification.createDataset={1} creado en {3}. Para saber m\u00e1s sobre lo que puede hacer con un dataset puede ver la Gu\u00eda de Usuario. +notification.wasSubmittedForReview={1}, fue enviado a revisi\u00f3n para ser publicado en {3}. \!No olvide publicarlo o devolverlo a los colaboradores\! + +# How translate curator?. We have choose conservador/revisor, but this translation should be revised +notification.wasReturnedByReviewer={1}, fue devuelto por el conservador/revisor de {3}. +notification.wasPublished={1}, fue publicado en {3}. +notification.worldMap.added=El dataset {1} tiene una capa de datos WorldMap a\u00f1adida. +notification.generic.objectDeleted=El dataverse, dataset o fichero relacionado con esta notificaci\u00f3n se ha eliminado. +notification.access.granted.dataverse=Tienes asignado el rol {0} para {2}. +notification.access.granted.dataset=Tienes asignado el rol {0} para {2}. +notification.access.granted.datafile=Tienes asignado el rol {0} para acceso a ficheros en {2}. +notification.access.revoked.dataverse=Se le ha dado de baja de un rol en {2}. +notification.access.revoked.dataset=Se le ha dado de baja de un rol en {2}. +notification.access.revoked.datafile=Se le ha dado de baja de un rol en {2}. +removeNotification=Borrar Notificaci\u00f3n + + +groupAndRoles.manageTips=Aqu\u00ed puede acceder y administrar los grupos a los que pertenece y los roles que tiene asignados. +user.signup.tip=\u00bfPor qu\u00e9 tener una cuenta en Dataverse? Para crear su propio dataverse y personalizarlo, a\u00f1adir datasets o solicitar acceso a ficheros con acceso restringido. +user.institutionLogIn.tip=\u00bfQuiere crear tu cuenta usando su instituci\u00f3n? aqu\u00ed. +user.username.illegal.tip=Entre 2 y 60 caracteres, puede usar "a-z", "0-9", "_" para su identificador de usuario. 
+user.username=Identificador de usuario +user.username.taken=Este identificador no est\u00e1 disponible. +user.noPasswd=Sin Contrase\u00f1a +user.currentPasswd=Contrase\u00f1a Actual + + +user.currentPasswd.tip=Por favor, introduzca la contrase\u00f1a para esta cuenta. +user.passwd.illegal.tip=La contrase\u00f1a ha de tener al menos 6 caracteres e incluir una letra y un n\u00famero. Se pueden usar caracteres especiales. +user.rePasswd=Confirme su contrase\u00f1a + +user.rePasswd.tip=Por favor, escriba de nuevo su contrase\u00f1a. +user.firstName=Nombre +user.firstName.tip=Su nombre o el nombre que le gustar\u00eda usar en esta cuenta. +user.lastName=Apellidos +user.lastName.tip=Los apellidos que le gustar\u00eda usar en esta cuenta. + + + +user.email.tip=Una direcci\u00f3n v\u00e1lida de e-mail para poder contactar con usted. +user.email.taken=Esta direcci\u00f3n de e-mail ya est\u00e1 en uso. +user.affiliation.tip=La organizaci\u00f3n a la que pertenece. +user.position=Puesto +user.position.tip=Su puesto o t\u00edtulo en la organizaci\u00f3n a la que pertenece; por ejemplo personal, profesor, estudiante, etc. +user.acccountterms=Condiciones Generales de Uso +user.acccountterms.tip=Los t\u00e9rminos y condiciones para utilizar la aplicaci\u00f3n y sus servicios. +user.acccountterms.required=Por favor, seleccione la casilla de selecci\u00f3n para indicar que acepta las condiciones generales de uso. +user.acccountterms.iagree=He le\u00eddo y acepto las Condiciones Generales de Uso de Dataverse. +user.createBtn=Crear Cuenta +user.updatePassword.welcome=Bienvenido a Dataverse {0}, {1} +user.updatePassword.warning=Con la actualizaci\u00f3n a Dataverse 4.2, los requisitos de las contrase\u00f1as y las condiciones generales de uso se han actualizados. Al ser la primera vez que usa Dataverse desde la actualizaci\u00f3n, necesita crear una nueva contrase\u00f1a y aceptar las Condiciones Generales de Uso. 
+user.updatePassword.password=Cree una contrase\u00f1a con un m\u00ednimo de seis caracteres que contenga por lo menos una letra y un n\u00famero. +authenticationProvidersAvailable.tip={0}No hay proveedores de autentificaci\u00f3n activos{1}Si es usted el administrador del sistema, por favor, act\u00edvelos usando el API.{2}Si no lo es, por favor, contacte con el administrador de su instituci\u00f3n. +login.System=Iniciar Sesi\u00f3n +login.forgot.text=\u00bfOlvid\u00f3 su contrase\u00f1a? +login.institution=Inicio de Sesi\u00f3n con su Instituci\u00f3n +login.invaliduserpassword=El nombre de usuario y/o la contrase\u00f1a que ha introducido no es v\u00e1lido. \u00bfNecesita ayuda para acceder a su cuenta? +login.error=Error en la validaci\u00f3n de su usuario y/o contrase\u00f1a. Int\u00e9ntelo de nuevo. Si el problema contin\u00faa, contacte con un administrador. +#shib.xhtml +shib.btn.acceptAndConvert=Aceptar las Condiciones y Convertir la Cuenta +shib.btn.acceptAndCreate=Aceptar las Condiciones y Crear la Cuenta +shib.welcome=Bienvenido/a, + +shib.welcomeExistingUserMessage=El e-mail proporcionado para la autentificaci\u00f3n de {0} coincide con una cuenta ya existente en Dataverse. Si quiere asociar su cuenta en Dataverse con la autentificaci\u00f3n de {0}, por favor, introduzca la contrase\u00f1a de su cuenta en Dataverse, revise las Condiciones Generales de Uso y, pulse el bot\u00f3n de Aceptar las Condiciones y Convertir la Cuenta. +shib.dataverseUsername=Usuario de Dataverse +shib.currentDataversePassword=Contrase\u00f1a Actual de Dataverse +shib.accountInformation=Informaci\u00f3n de la Cuenta +shib.offerToCreateNewAccount=Por favor, acepte las Condiciones Generales de Uso de Dataverse para crear su cuenta. + +shib.passwordRejected=Para convertir su cuenta es necesario indicar la contrase\u00f1a correcta de su cuenta anterior. +#apitoken.xhtml +apitoken.title=Token del API +apitoken.message=Aqu\u00ed tiene su token del API. 
Revise nuestra {0}Gu\u00eda del API{1} para m\u00e1s informaci\u00f3n. +apitoken.generateBtn=Generar Token +apitoken.regenerateBtn=Regenerar Token + +#MailServiceBean.java + +notification.email.create.dataverse.subject=Dataverse: Se ha creado su dataverse +notification.email.create.dataset.subject=Dataverse: Se ha creado su dataset +notification.email.request.file.access.subject=Dataverse: Se ha solicitado acceso a un fichero con acceso restringido +notification.email.grant.file.access.subject=Dataverse: Se le ha concedido acceso a un fichero con acceso restringido +notification.email.rejected.file.access.subject=Dataverse: Su solicitud de acceso a un fichero con acceso restringido ha sido denegada +notification.email.update.maplayer=Dataverse: Capa de WorldMap a\u00f1adida al dataset +notification.email.submit.dataset.subject=Dataverse: Se ha enviado su dataset para su revisi\u00f3n +notification.email.publish.dataset.subject=Dataverse: Se ha publicado su dataset +notification.email.returned.dataset.subject=Dataverse: Se ha devuelto su dataset +notification.email.create.account.subject=Dataverse: Se ha creado su cuenta +notification.email.assign.role.subject=Dataverse: Ha sido asignado a un rol +notification.email.revoke.role.subject=Dataverse: Su rol ha sido rechazado + +notification.email.greeting=Hola, \n +notification.email.welcome=\u00a1Bienvenido a Dataverse 4.2! Por favor, eche un vistazo general, pruebe todo lo que quiera y contacte con webmaster@consorciomadrono.es si tiene dudas o comentarios. +notification.email.requestFileAccess=Es necesario tener acceso al fichero para el dataset: {0}. Administrar permisos en {1}. +notification.email.grantFileAccess=Acceso permitido a los ficheros en el dataset: {0} (ver en {1}). +notification.email.rejectFileAccess=Acceso denegado para los ficheros pedidos en el dataset: {0} (ver en {1}). 
+# Bundle file editors, please note that "notification.email.createDataverse" is used in a unit test +notification.email.createDataverse=Su nuevo dataverse llamado {0} (ver en {1} ) fue creado en {2} (ver en {3} ). Para saber m\u00e1s sobre lo que puedes hacer con su dataverse, revise la Gu\u00eda de Usuario en {4}/{5}/user/dataverse-management.html . +# Bundle file editors, please note that "notification.email.createDataset" is used in a unit test +notification.email.createDataset=Su nuevo dataset llamado {0} (ver en {1} ) se ha creado en {2} (ver en {3} ). Para saber m\u00e1s sobre lo que puede hacer con un dataset, revise la Gu\u00eda de Usuario en {4}/{5}/user/dataset-management.html . +notification.email.wasSubmittedForReview={0} (ver en {1}) se ha enviado a revisi\u00f3n para ser publicado en {2} (ver en {3}). \!No olvide publicarlo o devolverlo a los colaboradores\! +notification.email.wasReturnedByReviewer={0} (ver en {1}) fue devuelto por el conservador/revisor de {2} (ver en {3}). +notification.email.wasPublished={0} (ver en {1}) fue publicado en {2} (ver en {3}). +notification.email.worldMap.added={0} (ver en {1}) ten\u00eda una capa de datos de WorldMap a\u00f1adida. +notification.email.closing=\n\nGracias,\nEl Proyecto Dataverse +notification.email.assignRole=Ahora eres {0} para el {1} "{2}" (ver en {3}). +notification.email.revokeRole=Uno de sus roles para el {0} "{1}" se ha eliminado (ver en {2}). + +# passwordreset.xhtml + +pageTitle.passwdReset.pre=Reinicio de Contrase\u00f1a +passwdReset.token=token : +passwdReset.userLookedUp=usuario buscado : +passwdReset.emailSubmitted=e-mail enviado : +passwdReset.details={0} Reinicio de Contrase\u00f1a{1} - Para iniciar el proceso de reinicio de contrase\u00f1a, por favor, indique su direcci\u00f3n de e-mail. 
+passwdReset.submitRequest=Enviar Petici\u00f3n de Contrase\u00f1a +passwdReset.successSubmit.tip=Si este e-mail est\u00e1 asociada con una cuenta, se le enviar\u00e1 un mensaje con instrucciones adicionales a {0}. +passwdReset.debug=DEBUG +passwdReset.resetUrl=La URL de reinicio es +passwdReset.noEmail.tip=No se envi\u00f3 el e-mail porque no se encontr\u00f3 un usuario con esa direcci\u00f3n de e-mail {0}, pero no hemos avisado para no alertar a posibles usuarios maliciosos que usan este m\u00e9todo para averiguar si una cuenta est\u00e1 asociada con una direcci\u00f3n de e-mail. +passwdReset.illegalLink.tip=El enlace para el reinicio de tu contrase\u00f1a no es v\u00e1lido. Si necesitas reiniciar tu contrase\u00f1a, {0}pulsa aqu\u00ed{1} para solicitar que tu contrase\u00f1a sea reiniciada de nuevo. +passwdReset.newPasswd.details={0} Nueva Contrase\u00f1a{1} \u00e2\u0080\u0093 Por favor, elija una contrase\u00f1a robusta que tenga por lo menos seis caracteres y que contenga al menos una letra y un n\u00famero. +passwdReset.newPasswd=Nueva Contrase\u00f1a +passwdReset.rePasswd=Re-escribir Contrase\u00f1a +passwdReset.resetBtn=Reiniciar Contrase\u00f1a + +# dataverse.xhtml + +dataverse.title=El proyecto, departamento, universidad o profesor propietario de los datos de este dataverse. +dataverse.enterName=Introduzca el nombre... +dataverse.host.title=El dataverse que contiene estos datos. +dataverse.identifier.title=Nombre corto utilizado para la URL de este dataverse. +dataverse.affiliation.title=La organizaci\u00f3n a la que pertenece este dataverse. + +dataverse.category=Categor\u00eda +dataverse.category.title=El tipo con el que mejor se identifica este dataverse. +dataverse.type.selectTab.top=Selecciona uno... 
+dataverse.type.selectTab.researchers=Investigador +dataverse.type.selectTab.researchProjects=Proyecto de Investigaci\u00f3n +dataverse.type.selectTab.journals=Revista +dataverse.type.selectTab.organizationsAndInsitutions=Organizaci\u00f3n o Instituci\u00f3n +dataverse.type.selectTab.teachingCourses=Curso +dataverse.type.selectTab.uncategorized=Sin Categorizar + +dataverse.description.title=Un resumen que describa el prop\u00f3sito, naturaleza o alcance de este dataverse. +dataverse.email=E-mail +dataverse.email.title=La(s) direcci\u00f3n(es) de e-mail de los contactos para el dataverse. +dataverse.share.dataverseShare=Compartir Dataverse +dataverse.share.dataverseShare.tip=Compartir este dataverse en sus redes sociales favoritas. +dataverse.share.dataverseShare.shareText=Ver este dataverse. + +dataverse.subject.title=Materia(s) cubierta(s) por este dataverse. + +dataverse.metadataElements=Campos de Metadatos +dataverse.metadataElements.tip=Elija un campo de metadatos para usar en las plantillas del dataset y cuando se a\u00f1ada un dataset a este dataverse. +dataverse.metadataElements.from.tip=Usar campos de metadatos desde {0} +dataverse.resetModifications=Reiniciar Modificaciones +dataverse.resetModifications.text=\u00bfEst\u00e1 seguro de que quiere reiniciar los campos de metadatos seleccionados?. Si lo hace, cualquier personalizaci\u00f3n (oculto, obligatorio, opcional) que haya hecho desaparecer\u00e1. +dataverse.field.required=(Obligatorio) +dataverse.field.example1= (Ejemplos: +dataverse.field.example2=) +dataverse.field.set.tip=[+] Ver campos + marcar como ocultos, obligatorios u opcionales +dataverse.field.set.view=[+] Ver Campos +dataverse.field.requiredByDataverse=Obligatorio para Dataverse +dataverse.facetPickList.text=Navegar/Buscar Facetas +dataverse.facetPickList.tip=Elija los campos de metadatos que se usar\u00e1n como facetas para navegar por datasets y dataverses en este dataverse. 
+dataverse.facetPickList.facetsFromHost.text=Use navegar/buscar facetas desde {0}
+dataverse.facetPickList.metadataBlockList.all=Todos los Campos de Metadatos
+
+dataverse.edit=Editar
+dataverse.option.generalInfo=Informaci\u00f3n General
+dataverse.option.themeAndWidgets=Tema + Widgets
+dataverse.option.featuredDataverse=Dataverses Destacados
+dataverse.option.permissions=Permisos
+dataverse.option.dataverseGroups=Grupos
+dataverse.option.datasetTemplates=Plantillas de Dataset
+dataverse.option.datasetGuestbooks=Libros de Visitas del Dataset
+dataverse.option.deleteDataverse=Eliminar Dataverse
+dataverse.publish.btn=Publicar
+dataverse.publish.header=Publicar Dataverse
+dataverse.nopublished=Dataverses sin Publicar
+dataverse.nopublished.tip=Para usar esta funcionalidad ha de tener publicado al menos un dataverse.
+dataverse.contact=Enviar E-Mail al Contacto del Dataverse
+dataset.link=Enlace al Dataset
+dataverse.link=Enlace al Dataverse
+dataverse.link.btn.tip=Enlace a su Dataverse
+dataverse.link.yourDataverses=Su {0, choice, 1#Dataverse|2#Dataverses}
+dataverse.link.save=Guardar el Dataverse Enlazado
+dataset.link.save=Guardar el Dataset Enlazado
+dataverse.link.dataverse.choose=Elija a cual de sus dataverses quiere enlazar este dataverse.
+dataverse.link.dataset.choose=Elija a cual de sus dataverses quiere enlazar este dataset.
+dataverse.link.no.choice=Tiene un dataverse al que puede a\u00f1adir dataverses y datasets enlazados.
+dataverse.link.no.linkable=Para enlazar un dataverse o un dataset, necesita tener su propio dataverse. Pulse el bot\u00f3n A\u00f1adir Datos de la p\u00e1gina de inicio para comenzar.
+dataverse.link.no.linkable.remaining=Todos sus dataverses seleccionables ya est\u00e1n enlazados.
+dataverse.savedsearch.link=Buscar Enlaces +dataverse.savedsearch.searchquery=Buscar +dataverse.savedsearch.filterQueries=Facetas +dataverse.savedsearch.save=Guardar la B\u00fasqueda Enlazada +dataverse.savedsearch.dataverse.choose=Seleccione a cual de tus dataverses le gustar\u00eda enlazar esta b\u00fasqueda. +dataverse.savedsearch.no.choice=Tiene un dataverse al cual puede a\u00f1adir una b\u00fasqueda guardada. +dataverse.linked.success= {0} se ha enlazado con \u00e9xito a {3}. +dataverse.linked.success.wait= {0} se ha enlazado con \u00e9xito a {3}. Por favor, espere para que su contenido sea visible. +dataverse.linked.internalerror={0} se ha enlazado con \u00e9xito a {3} pero el contenido no aparecer\u00e1 hasta que no se corrija un error interno. +dataverse.page.pre=Anterior +dataverse.page.next=Siguiente +dataverse.byCategory=Dataverses por Categor\u00eda +dataverse.displayFeatured=Mostrar los dataverses seleccionados abajo en la p\u00e1gina de inicio de este dataverse. +dataverse.selectToFeature=Seleccione los dataverses para presentar en la p\u00e1gina de inicio de este dataverse. +dataverse.publish.tip=\u00bfEst\u00e1 seguro de que quiere publicar su dataverse? Una vez hecho esto, deber\u00e1 permanecer publicado. +dataverse.publish.failed.tip=Este dataverse no puede publicarse porque el dataverse al que pertenece no ha sido publicado. +dataverse.publish.failed=No se puede publicar el dataverse. +dataverse.publish.success=Su dataverse es ahora p\u00fablico. +dataverse.publish.failure=No fue posible publicar este dataverse. +dataverse.delete.tip=\u00bfEst\u00e1 seguro de que quiere eliminar este dataverse? No podr\u00e1 recuperarlo. +dataverse.delete=Eliminar Dataverse +dataverse.delete.success=Su dataverse se ha eliminado. +dataverse.delete.failure=Este dataverse no puede ser eliminado. 
+# Bundle file editors, please note that "dataverse.create.success" is used in a unit test because it's so fancy with two parameters +dataverse.create.success=\u00a1Ha creado su dataverse con \u00e9xito!. Para saber que m\u00e1s cosas puede hacer con su dataverse, revise la Gu\u00eda de Usuario. +dataverse.create.failure=No se pudo crear este dataverse. +dataverse.create.authenticatedUsersOnly=S\u00f3lo pueden crear dataverses los usuarios autorizados. +dataverse.update.success=\u00a1Ha actualizado su dataverse con \u00e9xito! +dataverse.update.failure=Este dataverse no pudo ser actualizado + +# rolesAndPermissionsFragment.xhtml + +# advanced.xhtml +advanced.search.header.dataverses=Dataverses +advanced.search.dataverses.name.tip=El proyecto, departamento, universidad o profesor del que este Dataverse tiene datos. +advanced.search.dataverses.affiliation.tip=La organizaci\u00f3n con la que est\u00e1 afiliada este Dataverse. +advanced.search.dataverses.description.tip=Un resumen describiendo el prop\u00f3sito, naturaleza o \u00e1mbito de este Dataverse. +advanced.search.dataverses.subject.tip=Materias que son relevantes para este Dataverse en su dominio espec\u00edfico. +advanced.search.header.datasets=Datasets +advanced.search.header.files=Ficheros +advanced.search.files.name.tip=El nombre que identifica el fichero. +advanced.search.files.description.tip=Un resumen describiendo el fichero y sus variables. +advanced.search.files.fileType=Tipo de fichero +advanced.search.files.fileType.tip=Extensi\u00f3n de un fichero, p.e. CSV, zip, Stata, R, PDF, JPEG, etc. +advanced.search.files.variableName=Nombre de Variable +advanced.search.files.variableName.tip=El nombre de la columna de la variable en el marco de los datos. +advanced.search.files.variableLabel=Etiqueta de la variable +advanced.search.files.variableLabel.tip=Una descripci\u00f3n corta de la variable. 
+
+# search-include-fragment.xhtml
+
+dataverse.search.advancedSearch=B\u00fasqueda Avanzada
+dataverse.search.input.watermark=Buscar en este dataverse...
+account.search.input.watermark=Buscar en estos datos...
+dataverse.search.btn.find=Buscar
+
+dataverse.results.btn.addData=A\u00f1adir datos
+dataverse.results.btn.addData.newDataverse=Nuevo Dataverse
+dataverse.results.btn.addData.newDataset=Nuevo Dataset
+dataverse.results.dialog.addDataGuest.header=A\u00f1adir datos
+dataverse.results.dialog.addDataGuest.msg=Necesita Identificarse para crear un dataverse o a\u00f1adir un dataset.
+dataverse.results.dialog.addDataGuest.msg.signup=Necesita Crear una Cuenta o Identificarse para crear un dataverse o a\u00f1adir un dataset.
+dataverse.results.types.dataverses=Dataverses
+dataverse.results.types.datasets=Datasets
+dataverse.results.types.files=Ficheros
+# Bundle file editors, please note that "dataverse.results.empty.zero" is used in a unit test
+dataverse.results.empty.zero=No hay dataverses, datasets o ficheros que concuerden con su b\u00fasqueda. Por favor, busque de nuevo usando otras palabras o t\u00e9rminos m\u00e1s generales. Tambi\u00e9n puede probar los consejos de la gu\u00eda de b\u00fasqueda.
+# Bundle file editors, please note that "dataverse.results.empty.hidden" is used in a unit test
+dataverse.results.empty.hidden= No hay resultados de b\u00fasqueda de acuerdo a los criterios de su b\u00fasqueda. Puede probar los consejos de la gu\u00eda de b\u00fasqueda.
+dataverse.results.empty.browse.guest.zero=Este dataverse no contiene dataverses, datasets, ni ficheros. Por favor identif\u00edquese para ver si puede a\u00f1adirlos.
+dataverse.results.empty.browse.guest.hidden=Este dataverse no contiene dataverses. Por favor identif\u00edquese para ver si puede a\u00f1adirlos.
+dataverse.results.empty.browse.loggedin.noperms.zero= Este dataverse no contiene dataverses, datasets, ni ficheros. 
Puede usar el bot\u00f3n Contactar con el Administrador del Dataverse para pedir m\u00e1s informaci\u00f3n o solicitar acceso a este dataverse. +dataverse.results.empty.browse.loggedin.noperms.hidden=Este dataverse no contiene dataverses. +dataverse.results.empty.browse.loggedin.perms.zero=Este dataverse no contiene dataverses, datasets, ni ficheros. Puede a\u00f1adirlos usando el bot\u00f3n A\u00f1adir Datos de esta p\u00e1gina. +account.results.empty.browse.loggedin.perms.zero=No hay dataverses, datasets, ni ficheros asociados a su cuenta. Puede a\u00f1adir un dataverse o un dataset usando el bot\u00f3n A\u00f1adir Datos m\u00e1s arriba. Tiene m\u00e1s informaci\u00f3n de como a\u00f1adir datos en la Gu\u00eda de Usuario. +dataverse.results.empty.browse.loggedin.perms.hidden=Este dataverse no contiene dataverses. Puede a\u00f1adirlos usando el bot\u00f3n A\u00f1adir Datos en esta p\u00e1gina. +dataverse.results.empty.link.technicalDetails=M\u00e1s detalles t\u00e9cnicos + +dataverse.results.count.toofresults={0} a {1} de {2} {2, choice, 0#Resultados|1#Resultado|2#Resultados} +dataverse.results.paginator.current=(Actual) +dataverse.results.btn.sort=Ordenar +dataverse.results.btn.sort.option.nameAZ=Nombre (A-Z) +dataverse.results.btn.sort.option.nameZA=Nombre (Z-A) +dataverse.results.btn.sort.option.newest=M\u00e1s nuevos +dataverse.results.btn.sort.option.oldest=M\u00e1s antiguos +dataverse.results.btn.sort.option.relevance=Relevancia + +dataverse.results.cards.foundInMetadata=Encontrados en Campos de Metadatos: +dataverse.results.cards.files.tabularData=Datos Tabulares + +dataverse.results.solrIsDown=Nota: Debido a un error interno, la b\u00fasqueda y la navegaci\u00f3n no est\u00e1n disponibles. + +# themeAndWidgetsFragment.xhtml + +dataverse.theme.title=Tema +dataverse.theme.inheritCustomization.title=Selecciona aqu\u00ed para usar el tema actual. 
+dataverse.theme.inheritCustomization.label=Heredar la personalizaci\u00f3n +dataverse.theme.inheritCustomization.checkbox=Heredar la personalizaci\u00f3n desde {0} +dataverse.theme.logo=Logo +dataverse.theme.logo.tip=No pueden subirse ficheros mayores de 500 kb. El \u00e1rea m\u00e1xima de visualizaci\u00f3n para los ficheros es de 940 pixels de ancho por 120 pixels de alto. +dataverse.theme.logo.format=Formato del Logo +dataverse.theme.logo.format.selectTab.square=Cuadrado +dataverse.theme.logo.format.selectTab.rectangle=Rect\u00e1ngulo +dataverse.theme.logo.alignment=Alineaci\u00f3n del Logo +dataverse.theme.logo.alignment.selectTab.left=Izquierda +dataverse.theme.logo.alignment.selectTab.center=Centro +dataverse.theme.logo.alignment.selectTab.right=Derecha +dataverse.theme.logo.backColor=Color de Fondo del Logo +dataverse.theme.logo.image.upload=Subir Imagen +dataverse.theme.tagline=Eslogan +dataverse.theme.website=Sitio Web +dataverse.theme.linkColor=Color del Enlace +dataverse.theme.txtColor=Color del Texto +dataverse.theme.backColor=Color de Fondo +dataverse.theme.success=\u00a1Has actualizado el tema de este dataverse con \u00e9xito! +dataverse.theme.failure=El tema del dataverse no se ha actualizado. +dataverse.theme.logo.image=Imagen del Logo +dataverse.theme.logo.image.title=El logo o fichero de imagen que quiere poner en la cabecera de este dataverse. +dataverse.theme.logo.image.uploadNewFile=Subir Fichero Nuevo +dataverse.theme.logo.image.invalidMsg=No se puede subir la imagen. Por favor, int\u00e9ntelo otra vez con un fichero jpeg, tiff, o png. +dataverse.theme.logo.image.uploadImgFile=Subir Fichero de Imagen +dataverse.theme.logo.format.title=La forma del logo o fichero de imagen que va a subir a este dataverse. +dataverse.theme.logo.format.selectTab.square2=Cuadrado +dataverse.theme.logo.format.selectTab.rectangle2=Rect\u00e1ngulo +dataverse.theme.logo.alignment.title=D\u00f3nde deber\u00eda mostrarse el logo o imagen en la cabecera. 
+dataverse.theme.logo.alignment.selectTab.left2=Izquierda +dataverse.theme.logo.alignment.selectTab.center2=Centro +dataverse.theme.logo.alignment.selectTab.right2=Derecha +dataverse.theme.logo.backColor.title=Elija un color para mostrar detr\u00e1s del logo de este dataverse. +dataverse.theme.headerColor=Colores de Cabecera +dataverse.theme.headerColor.tip=Los colores que seleccione para personalizar el estilo de cabecera de este dataverse. +dataverse.theme.backColor.title=Color para el \u00e1rea de cabecera que contiene la imagen, el eslogan, la URL y el texto. +dataverse.theme.linkColor.title=Color en que debe mostrarse el enlace. +dataverse.theme.txtColor.title=Color para el texto del eslogan y el nombre de este dataverse. +dataverse.theme.tagline.title=Frase o texto que describe este dataverse. +dataverse.theme.tagline.tip=Indique un eslogan de 140 caracteres como mucho. +dataverse.theme.website.title=URL de su web personal, instituci\u00f3n, o cualquier web relacionada con este dataverse. +dataverse.theme.website.tip=El portal web ser\u00e1 enlazado en el eslogan. Para que se muestre una web, debe elegir un eslogan. +dataverse.theme.website.watermark=Su sitio personal, http://... +dataverse.theme.website.invalidMsg=URL Inv\u00e1lida. + +dataverse.widgets.title=Widgets +dataverse.widgets.tip=Copie y pegue este c\u00f3digo en el HTML de su web. +dataverse.widgets.searchBox.txt=Cuadro de B\u00fasqueda de Dataverse. +dataverse.widgets.searchBox.tip=Proporcione un m\u00e9todo en el que los visitantes de su sitio web puedan buscar en Dataverse. +dataverse.widgets.dataverseListing.txt=Listado del Dataverse +dataverse.widgets.dataverseListing.tip=Proporcione un m\u00e9todo en el que los visitantes de su sitio web puedan ver sus dataverses y datasets, ordenar, o moverse por ellos. 
+ +# permissions-manage.xhtml + +dataverse.permissions.title=Permisos +dataverse.permissions.dataset.title=Permisos del Dataset +dataverse.permissions.access.accessBtn=Editar Acceso +dataverse.permissions.usersOrGroups=Usuarios/Grupos +dataverse.permissions.usersOrGroups.assignBtn=Asignar Roles a Usuarios/Grupos +dataverse.permissions.usersOrGroups.createGroupBtn=Crear Grupo +dataverse.permissions.usersOrGroups.description=Aqu\u00ed tiene todos los usuarios y grupos que han accedido a su dataverse. +dataverse.permissions.usersOrGroups.tabHeader.userOrGroup=Nombre (Afiliaci\u00f3n) del Usuario/Grupo +dataverse.permissions.usersOrGroups.tabHeader.id=ID +dataverse.permissions.usersOrGroups.tabHeader.role=Rol +dataverse.permissions.usersOrGroups.tabHeader.action=Acci\u00f3n +dataverse.permissions.usersOrGroups.assignedAt=Rol Asignado a {0} +dataverse.permissions.usersOrGroups.removeBtn=Eliminar el Rol Asignado +dataverse.permissions.usersOrGroups.removeBtn.confirmation=\u00bfEst\u00e1 seguro de que quiere eliminar esta asignaci\u00f3n de rol? + +dataverse.permissions.roles=Roles +dataverse.permissions.roles.add=A\u00f1adir un Nuevo Rol +dataverse.permissions.roles.description=Estos son todos los roles asignados a su dataverse a los que puede asignar usuarios y grupos. +dataverse.permissions.roles.edit=Editar Rol +dataverse.permissions.roles.copy=Copiar Rol + +# permissions-manage-files.xhtml + +dataverse.permissionsFiles.title=Permisos del Fichero + +dataverse.permissionsFiles.usersOrGroups=Usuarios/Grupos +dataverse.permissionsFiles.usersOrGroups.assignBtn=Dar Acceso a Usuarios/Grupos +dataverse.permissionsFiles.usersOrGroups.description=Estos son todos los usuarios y grupos que tienen acceso a ficheros en este dataset. 
+dataverse.permissionsFiles.usersOrGroups.tabHeader.userOrGroup=Nombre (Afiliaci\u00f3n) del Usuario/Grupo +dataverse.permissionsFiles.usersOrGroups.tabHeader.id=ID +dataverse.permissionsFiles.usersOrGroups.tabHeader.email=E-mail +dataverse.permissionsFiles.usersOrGroups.tabHeader.files=Ficheros +dataverse.permissionsFiles.usersOrGroups.tabHeader.access=Acceso +dataverse.permissionsFiles.usersOrGroups.file=Fichero +dataverse.permissionsFiles.usersOrGroups.files=Ficheros +dataverse.permissionsFiles.usersOrGroups.invalidMsg=No hay usuarios ni grupos con acceso a los ficheros con acceso restringido de este dataset. + +dataverse.permissionsFiles.files=Ficheros +dataverse.permissionsFiles.files.description=Estos son todos los ficheros de acceso restringido en este dataset. +dataverse.permissionsFiles.files.tabHeader.fileName=Nombre del Fichero +dataverse.permissionsFiles.files.tabHeader.roleAssignees=Usuarios/Grupos +dataverse.permissionsFiles.files.tabHeader.access=Acceso +dataverse.permissionsFiles.files.tabHeader.publishedRestrictedState=Publicado +dataverse.permissionsFiles.files.tabHeader.draftRestrictedState=Preliminar +dataverse.permissionsFiles.files.deleted=Eliminado +dataverse.permissionsFiles.files.public=P\u00fablico +dataverse.permissionsFiles.files.restricted=Restringido +dataverse.permissionsFiles.files.roleAssignee=Usuario/Grupo +dataverse.permissionsFiles.files.roleAssignees=Usuarios/Grupos +dataverse.permissionsFiles.files.assignBtn=Asignar Accesos +dataverse.permissionsFiles.files.invalidMsg=No hay ficheros con acceso restringido en este dataset. + +dataverse.permissionsFiles.viewRemoveDialog.header=Acceso al Fichero +dataverse.permissionsFiles.viewRemoveDialog.removeBtn=Eliminar Acceso +dataverse.permissionsFiles.viewRemoveDialog.removeBtn.confirmation=\u00bfEst\u00e1 seguro de querer eliminar el acceso a este fichero?. Una vez eliminado, ni el usuario ni el grupo podr\u00e1n descargar este fichero. 
+
+dataverse.permissionsFiles.assignDialog.header=Permitir Acceso al Fichero
+dataverse.permissionsFiles.assignDialog.description=Permitir Acceso al Fichero para usuarios y grupos.
+dataverse.permissionsFiles.assignDialog.userOrGroup=Usuario/Grupo
+dataverse.permissionsFiles.assignDialog.userOrGroup.title=Usuario/Grupo
+dataverse.permissionsFiles.assignDialog.userOrGroup.enterName=Introduzca el nombre del Usuario/Grupo
+dataverse.permissionsFiles.assignDialog.userOrGroup.invalidMsg=No se encuentran coincidencias.
+dataverse.permissionsFiles.assignDialog.userOrGroup.requiredMsg=Por favor, seleccione al menos un usuario o grupo.
+dataverse.permissionsFiles.assignDialog.file=Fichero
+dataverse.permissionsFiles.assignDialog.grantBtn=Permitir
+dataverse.permissionsFiles.assignDialog.rejectBtn=Rechazar
+
+# permissions-configure.xhtml
+dataverse.permissions.accessDialog.header=Editar Acceso
+dataverse.permissions.description=Esta es la configuraci\u00f3n de acceso a su dataverse.
+dataverse.permissions.Q1=\u00bfQui\u00e9n puede a\u00f1adir en este dataverse?
+dataverse.permissions.Q1.answer1=Cualquiera que a\u00f1ada a este dataverse, necesita tener acceso
+dataverse.permissions.Q1.answer2=Cualquiera con una cuenta en Dataverse puede a\u00f1adir sub dataverses
+dataverse.permissions.Q1.answer3=Cualquiera con una cuenta en Dataverse puede a\u00f1adir datasets
+dataverse.permissions.Q1.answer4=Cualquiera con una cuenta en Dataverse puede a\u00f1adir sub dataverses y datasets
+dataverse.permissions.Q2=\u00bfCu\u00e1l deber\u00eda ser el rol por defecto para alguien que a\u00f1ada datasets a este dataverse?
+dataverse.permissions.Q2.answer.editor.description=- Editar metadatos, subir ficheros y editar ficheros, editar Condiciones, Libro de Invitados, Enviar datasets a revisi\u00f3n +dataverse.permissions.Q2.answer.manager.description=- Editar metadatos, subir ficheros y editar ficheros, editar Condiciones, Libro de Invitados, Restricciones de Ficheros (Acceso a Ficheros + Uso) +dataverse.permissions.Q2.answer.curator.description=- Editar metadatos, subir ficheros y editar ficheros, editar Condiciones, Libro de Invitados, Restricciones de Ficheros (Acceso a Ficheros + Uso), Editar Permisos/Asignar Roles + Publicar + +# roles-assign.xhtml + +dataverse.permissions.usersOrGroups.assignDialog.header=Asignar Rol +dataverse.permissions.usersOrGroups.assignDialog.description=Conceder permisos a usuarios y grupos asign\u00e1ndoles a un rol. +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup=Usuario/Grupo +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.enterName=Introduzca el nombre del Usuario/Grupo +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.invalidMsg=No se encuentran coincidencias. +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.requiredMsg=Por favor, seleccione al menos un usuario o grupo. +dataverse.permissions.usersOrGroups.assignDialog.role.description=Estos son los permisos asociados con el rol seleccionado. +dataverse.permissions.usersOrGroups.assignDialog.role.requiredMsg=Por favor, selecciona el rol que quiera asignar. + +# roles-edit.xhtml + +dataverse.permissions.roles.header=Editar Rol +dataverse.permissions.roles.name=Nombre del Rol +dataverse.permissions.roles.name.title=Introduzca un nombre para el rol. +dataverse.permissions.roles.id=Identificador +dataverse.permissions.roles.id.title=Introduzca un nombre para el alias. +dataverse.permissions.roles.description.title=Describa el rol (1000 caracteres m\u00e1x). 
+dataverse.permissions.roles.description.counter=Quedan {0} caracteres
+dataverse.permissions.roles.roleList.header=Permisos del Rol
+
+# explicitGroup-new-dialog.xhtml
+
+dataverse.permissions.explicitGroupEditDialog.title.new=Crear Grupo
+dataverse.permissions.explicitGroupEditDialog.title.edit=Editar Grupo {0}
+dataverse.permissions.explicitGroupEditDialog.help=A\u00f1adir usuarios u otros grupos a este grupo.
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier=Identificador de Grupo
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.required=El Identificador de Grupo no puede estar vac\u00edo
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.invalid=El Identificador de Grupo s\u00f3lo puede tener letras, n\u00fameros, guiones bajos (_) y guiones (-)
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.helpText=Consiste en letras, n\u00fameros, guiones bajos (_) y guiones (-)
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.taken=El identificador de Grupo ya est\u00e1 en uso en este dataverse
+dataverse.permissions.explicitGroupEditDialog.groupName=Nombre del Grupo
+dataverse.permissions.explicitGroupEditDialog.groupName.required=El Nombre del grupo no puede estar vac\u00edo
+dataverse.permissions.explicitGroupEditDialog.groupDescription=Descripci\u00f3n
+dataverse.permissions.explicitGroupEditDialog.roleAssigneeName=Usuario/Grupo
+dataverse.permissions.explicitGroupEditDialog.roleAssigneeNames=Usuarios/Grupos
+dataverse.permissions.explicitGroupEditDialog.createGroup=Crear Grupo
+
+# manage-templates.xhtml
+
+dataset.manageTemplates.pageTitle=Administrar las Plantillas de Dataset
+dataset.manageTemplates.select.txt=Incluir Plantillas desde {0}
+dataset.manageTemplates.createBtn=Crear Plantilla de Dataset
+dataset.manageTemplates.noTemplates.why.header=\u00bfPor qu\u00e9 Usar Plantillas?
+dataset.manageTemplates.noTemplates.why.reason1=Las plantillas son \u00fatiles cuando tiene varios datasets con la misma informaci\u00f3n en varios campos de metadatos y prefiere no tener que introducirlos de forma manual continuamente.
+dataset.manageTemplates.noTemplates.why.reason2=Las plantillas pueden usarse para a\u00f1adir instrucciones para aquellos que a\u00f1adan datasets en su dataverse si quiere que un campo de metadatos se rellene de una forma concreta.
+dataset.manageTemplates.noTemplates.how.header=C\u00f3mo Usar Plantillas
+dataset.manageTemplates.noTemplates.how.tip1=Las plantillas se crean a nivel de dataverse, pueden borrarse (de forma que no se muestre en futuros datasets), configuradas como por defecto (no es obligatorio) y pueden ser copiadas para que no tenga que crear desde cero una nueva plantilla con metadatos similares desde otra plantilla. Eliminar una plantilla no afecta a los datasets que ya la hayan utilizado.
+dataset.manageTemplates.noTemplates.how.tip2=Por favor, tenga en cuenta que la elecci\u00f3n de que campos de metadatos est\u00e1n ocultos, son obligatorios u opcionales, se realiza en la p\u00e1gina de Informaci\u00f3n General de este dataverse.
+dataset.manageTemplates.noTemplates.getStarted=Para empezar, pulse el bot\u00f3n Crear Plantilla de Dataset. Para saber m\u00e1s sobre las plantillas, visite la secci\u00f3n Plantillas de Dataset de la Gu\u00eda de Usuario.
+dataset.manageTemplates.tab.header.templte=Nombre de Plantilla
+dataset.manageTemplates.tab.header.date=Fecha de Creaci\u00f3n
+dataset.manageTemplates.tab.header.usage=Uso
+dataset.manageTemplates.tab.header.action=Acci\u00f3n
+dataset.manageTemplates.tab.action.btn.makeDefault=Hacer por Defecto
+dataset.manageTemplates.tab.action.btn.default=Por Defecto
+dataset.manageTemplates.tab.action.btn.view=Ver
+dataset.manageTemplates.tab.action.btn.copy=Copiar
+dataset.manageTemplates.tab.action.btn.edit=Editar
+dataset.manageTemplates.tab.action.btn.edit.metadata=Metadatos
+dataset.manageTemplates.tab.action.btn.edit.terms=Condiciones
+dataset.manageTemplates.tab.action.btn.delete=Eliminar
+dataset.manageTemplates.tab.action.btn.delete.dialog.tip=\u00bfEst\u00e1 seguro de que quiere eliminar esta plantilla?. Los nuevos datasets no podr\u00e1n usar esta plantilla.
+dataset.manageTemplates.tab.action.btn.delete.dialog.header=Eliminar Plantilla
+dataset.manageTemplates.tab.action.btn.view.dialog.header=Previsualizaci\u00f3n de la Plantilla del Dataset
+dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate=Plantilla del Dataset
+dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate.title=La plantilla del dataset que rellena informaci\u00f3n previamente en el formulario de forma autom\u00e1tica.
+dataset.manageTemplates.delete.usedAsDefault=Esta plantilla es la plantilla por defecto para los siguientes dataverse/s. Tambi\u00e9n ser\u00e1 eliminada como plantilla por defecto.
+dataset.manageTemplates.info.message.notEmptyTable=Crear, clonar, editar, ver o borrar plantillas de dataset. Crear una plantilla de dataset para rellenar campos con valores est\u00e1ndar, como la afiliaci\u00f3n del autor, para ayudar a los usuarios a crear datasets en este dataverse. Tambi\u00e9n puede a\u00f1adir marcas de agua o texto de ayuda a los campos de metadatos para orientar a los usuarios sobre qu\u00e9 a\u00f1adir en estos campos de metadatos.
+ +# metadataFragment.xhtml + +# template.xhtml + +dataset.template.name.tip=El nombre de la plantilla del dataset. +dataset.template.returnBtn=Volver a Administrar Plantillas +dataset.template.name.title=Introducir un nombre \u00fanico para la plantilla. +template.asterisk.tip=Los asteriscos indican campos de metadatos obligatorios para que los rellenen los usuarios al a\u00f1adir un dataset a este dataverse. +dataset.template.popup.create.title=Crear Plantilla +dataset.template.popup.create.text=\u00bfQuiere a\u00f1adir Condiciones de Uso/Acceso por defecto? +dataset.create.add.terms=Guardar y A\u00f1adir Condiciones + +# manage-groups.xhtml + +dataverse.manageGroups.pageTitle=Administrar Grupos de Dataverse +dataverse.manageGroups.createBtn=Crear Grupo +dataverse.manageGroups.noGroups.why.header=\u00bfPor qu\u00e9 usar Grupos? +dataverse.manageGroups.noGroups.why.reason1=Los grupos le permiten asignar roles y permisos a muchos usuarios a la vez. +dataverse.manageGroups.noGroups.why.reason2=Puede usar grupos para administrar distintos tipos de usuarios (estudiantes, colaboradores, etc.) +dataverse.manageGroups.noGroups.how.header=C\u00f3mo Usar los Grupos +dataverse.manageGroups.noGroups.how.tip1=Un grupo puede contener tanto usuarios como otros grupos. +dataverse.manageGroups.noGroups.how.tip2=Puede asignar permisos a un grupo desde la vista "Permisos". +dataverse.manageGroups.noGroups.getStarted=Para comenzar, pulse el bot\u00f3n Crear Grupo. 
+dataverse.manageGroups.tab.header.name=Nombre del Grupo +dataverse.manageGroups.tab.header.id=Id del Grupo +dataverse.manageGroups.tab.header.membership=Afiliaci\u00f3n +dataverse.manageGroups.tab.header.action=Acci\u00f3n +dataverse.manageGroups.tab.action.btn.view=Vista +dataverse.manageGroups.tab.action.btn.copy=Copiar +dataverse.manageGroups.tab.action.btn.enable=Habilitar +dataverse.manageGroups.tab.action.btn.disable=Deshabilitar +dataverse.manageGroups.tab.action.btn.edit=Editar +dataverse.manageGroups.tab.action.btn.viewCollectedData=Ver los Datos Recolectados +dataverse.manageGroups.tab.action.btn.delete=Eliminar +dataverse.manageGroups.tab.action.btn.delete.dialog.header=Eliminar Grupo +dataverse.manageGroups.tab.action.btn.delete.dialog.tip=\u00bfEst\u00e1 seguro de que quiere eliminar este grupo?, No es posible deshacer esta acci\u00f3n. +dataverse.manageGroups.tab.action.btn.view.dialog.header=Grupo de Dataverse +dataverse.manageGroups.tab.action.btn.view.dialog.group=Nombre del Grupo +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.name=Nombre del Miembro +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.type=Tipo de Miembro +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.action=Acci\u00f3n +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.delete=Eliminar +dataverse.manageGroups.tab.action.btn.view.dialog.groupMembers=Miembros del Grupo +dataverse.manageGroups.tab.action.btn.view.dialog.enterName=Introduce el Nombre del Usuario/Grupo +dataverse.manageGroups.tab.action.btn.view.dialog.invalidMsg=No se encontraron coincidencias. +# manage-guestbooks.xhtml + +dataset.manageGuestbooks.pageTitle=Administrar Libros de Invitados del Dataset +dataset.manageGuestbooks.include=Incluir Libros de Invitados desde {0} +dataset.manageGuestbooks.createBtn=Crear Libros de Invitados del Dataset +dataset.manageGuestbooks.noGuestbooks.why.header=\u00bfPor qu\u00e9 Usar Libros de Invitados? 
+dataset.manageGuestbooks.noGuestbooks.why.reason1=Los libros de invitados le permiten recoger datos sobre qui\u00e9n descarga ficheros de sus datasets. Puede decidir recoger informaci\u00f3n de la cuenta (nombre de usuario, propio y apellidos, afiliaci\u00f3n, etc.) o bien crear sus propias preguntas (ej., \u00bfC\u00f3mo piensa utilizar estos datos?).
+dataset.manageGuestbooks.noGuestbooks.why.reason2=Puede descargar los datos recogidos desde los libros de invitados habilitados para almacenarlos fuera del Dataverse.
+dataset.manageGuestbooks.noGuestbooks.how.header=C\u00f3mo Usar los Libros de Invitados
+dataset.manageGuestbooks.noGuestbooks.how.tip1=Se puede usar un libro de invitados en varios datasets, pero cada dataset s\u00f3lo puede tener un libro de invitados.
+dataset.manageGuestbooks.noGuestbooks.how.tip2=Las preguntas personalizadas pueden tener respuestas en formato de texto libre u ofrecer al usuario la posibilidad de elegir una respuesta entre varias opciones.
+dataset.manageGuestbooks.noGuestbooks.getStarted=Para comenzar, pulse el bot\u00f3n Crear Libro de Invitados del Dataset. Para saber m\u00e1s sobre los Libros de Invitados, visite la secci\u00f3n Libro de Invitados del Dataset de la Gu\u00eda de Usuario.
+dataset.manageGuestbooks.tab.header.name=Nombre del Libro de Invitados +dataset.manageGuestbooks.tab.header.date=Fecha de Creaci\u00f3n +dataset.manageGuestbooks.tab.header.usage=Uso +dataset.manageGuestbooks.tab.header.responses=Respuestas +dataset.manageGuestbooks.tab.header.action=Acci\u00f3n +dataset.manageGuestbooks.tab.action.btn.view=Ver +dataset.manageGuestbooks.tab.action.btn.copy=Copiar +dataset.manageGuestbooks.tab.action.btn.enable=Habilitar +dataset.manageGuestbooks.tab.action.btn.disable=Deshabilitar +dataset.manageGuestbooks.tab.action.btn.edit=Editar +dataset.manageGuestbooks.tab.action.btn.viewCollectedData=Ver Datos Recogidos +dataset.manageGuestbooks.tab.action.btn.delete=Eliminar +dataset.manageGuestbooks.tab.action.btn.delete.dialog.header=Eliminar Libro de Invitados +dataset.manageGuestbooks.tab.action.btn.delete.dialog.tip=\u00bfEst\u00e1 seguro de que quiere eliminar este libro de invitados? No podr\u00e1 deshacer la acci\u00f3n. +dataset.manageGuestbooks.tab.action.btn.view.dialog.header=Libro de Invitados del Dataset +dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook.title=Tras descargar los ficheros del libro de invitados pregunta por la informaci\u00f3n siguiente. +dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook=Nombre del Libro de Invitados +dataset.manageGuestbooks.tab.action.btn.viewCollectedData.dialog.header=Datos Recogidos por Libro de Invitados del Dataset +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData.title=Datos del usuario recogidos por el libro de invitados. +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData=Datos Recogidos +dataset.manageGuestbooks.message.deleteSuccess=El libro de invitados se ha eliminado. +dataset.manageGuestbooks.message.deleteFailure=El libro de invitados no se puede eliminar. +dataset.manageGuestbooks.message.editSuccess=El libro de invitados se ha actualizado.
+dataset.manageGuestbooks.message.editFailure=El libro de invitados no se puede actualizar. +dataset.manageGuestbooks.message.enableSuccess=El libro de invitados se ha habilitado. +dataset.manageGuestbooks.message.enableFailure=El libro de invitados no se puede habilitar. +dataset.manageGuestbooks.message.disableSuccess=El libro de invitados se ha deshabilitado. +dataset.manageGuestbooks.message.disableFailure=El libro de invitados no se puede deshabilitar. +dataset.guestbooksResponses.dataset=Dataset +dataset.guestbooksResponses.date=Fecha +dataset.guestbooksResponses.type=Tipo +dataset.guestbooksResponses.file=Fichero + +# guestbook-responses.xhtml +dataset.guestbookResponses.pageTitle=Ver Respuestas del Libro de Invitados + +# guestbook.xhtml + +dataset.manageGuestbooks.guestbook.name=Nombre del Libro de Invitados +dataset.manageGuestbooks.guestbook.name.tip=Introduzca un nombre \u00fanico para este Libro de Invitados. +dataset.manageGuestbooks.guestbook.dataCollected=Datos Recogidos +dataset.manageGuestbooks.guestbook.dataCollected.description=Informaci\u00f3n sobre la cuenta de Dataverse que ser\u00e1 recogida cuando un usuario descargue un fichero. Marque las que vayan a ser obligatorias. +dataset.manageGuestbooks.guestbook.customQuestions=Preguntas Personalizadas +dataset.manageGuestbooks.guestbook.customQuestions.description=Cree sus propias preguntas para que los usuarios la introduzcan aparte de su informaci\u00f3n de cuenta cuando descarguen un fichero. Las preguntas pueden ser obligatorias u opcionales y las respuestas pueden ser textuales o de selecci\u00f3n m\u00faltiple.
+dataset.manageGuestbooks.guestbook.customQuestions.questionType=Tipo de Pregunta +dataset.manageGuestbooks.guestbook.customQuestions.questionText=Texto de la Pregunta +dataset.manageGuestbooks.guestbook.customQuestions.responseOptions=Opciones de Respuesta +dataset.manageGuestbooks.guestbook.customQuestions.questionType.text=Texto +dataset.manageGuestbooks.guestbook.customQuestions.questionType.multiple=Selecci\u00f3n M\u00faltiple + +# guestbookResponseFragment.xhtml + +dataset.guestbookResponse.guestbook.additionalQuestions=Preguntas Adicionales + +# dataset.xhtml + +dataset.pageTitle=A\u00f1adir Nuevo Dataset +dataset.editBtn=Editar +dataset.editBtn.itemLabel.upload=Fichero (Subir) +dataset.editBtn.itemLabel.metadata=Metadatos +dataset.editBtn.itemLabel.terms=Condiciones +dataset.editBtn.itemLabel.permissions=Permisos +dataset.editBtn.itemLabel.deleteDataset=Eliminar Dataset +dataset.editBtn.itemLabel.deleteDraft=Eliminar Versi\u00f3n Preliminar +dataset.editBtn.itemLabel.deaccession=Eliminar Acceso al Dataset +metrics.title=Estad\u00edsticas +metrics.comingsoon=Pr\u00f3ximamente ... +metrics.views=Vistas +metrics.downloads={0, choice, 0#Descargas|1#Descarga|2#Descargas} +metrics.citations=Citas +metrics.shares=Compartido +dataset.publish.btn=Publicar +dataset.publish.header=Publicar Dataset +dataset.rejectBtn=Devolver al Autor +dataset.submitBtn=Enviar a Revisi\u00f3n +dataset.disabledSubmittedBtn=Enviado a Revisi\u00f3n +dataset.submitMessage=Enviar este dataset a revisi\u00f3n por el Conservador/Revisor de esta dataverse para su posible publicaci\u00f3n. +dataset.rejectMessage=Enviar este dataset al colaborador para su modificaci\u00f3n. +dataset.publish.tip=\u00bfEst\u00e1 seguro de que quiere publicar este dataset? Una vez hecho esto, permanecer\u00e1 publicado. +dataset.publishBoth.tip=Una vez publique el dataset, \u00e9ste quedar\u00e1 publicado. +dataset.unregistered.tip= Este dataset no est\u00e1 registrado. 
Intentaremos registrarlo antes de publicarlo. +dataset.republish.tip=\u00bfEst\u00e1 seguro de que quiere volver a publicar este dataset? +dataset.selectVersionNumber=Indique si es una actualizaci\u00f3n de versi\u00f3n mayor o menor. +dataset.majorRelease=Revisi\u00f3n Mayor +dataset.minorRelease=Revisi\u00f3n Menor +dataset.majorRelease.tip=Debido a la naturaleza de los cambios, la versi\u00f3n preliminar actual tendr\u00e1 una revisi\u00f3n mayor ({0}) +dataset.mayNotBePublished=No se puede publicar el dataset. +dataset.mayNotPublish.administrator= Este dataset no se puede publicar hasta que {1} sea publicado por su administrador. +dataset.mayNotPublish.both= Este dataset no se puede publicar hasta que {1} sea publicado. \u00bfQuiere publicar los dos ahora? +dataset.mayNotPublish.twoGenerations= Este dataset no se puede publicar hasta que {1} y {3} sean publicados. +dataset.mayNotBePublished.both.button=S\u00ed, Publicar Ambos +dataset.viewVersion.unpublished=Ver la Versi\u00f3n sin Publicar +dataset.viewVersion.published=Ver la Versi\u00f3n Publicada +dataset.email.datasetContactBtn=Mandar E-mail al Contacto del Dataset +dataset.email.hiddenMessage= +dataset.email.messageSubject=Asunto: Prueba de Mensaje +dataset.email.datasetLinkBtn.tip=Enlazar el Dataset a su Dataverse +dataset.share.datasetShare=Compartir Dataset +dataset.share.datasetShare.tip=Compartir este dataset en sus redes sociales favoritas. +dataset.share.datasetShare.shareText=Ver este dataset. + +dataset.versionUI.draft=Versi\u00f3n Preliminar +dataset.versionUI.unpublished=Sin Publicar +dataset.versionUI.deaccessioned=Sin Acceso +dataset.cite.title.released=VERSI\u00d3N PRELIMINAR que se reemplazar\u00e1 en la cita por V1 una vez el dataset haya sido publicado. +dataset.cite.title.draft=VERSI\u00d3N PRELIMINAR que se reemplazar\u00e1 en la cita por la versi\u00f3n seleccionada una vez el dataset haya sido publicado. 
+dataset.cite.title.deassessioned=La VERSI\u00d3N CON ACCESO RETIRADO se ha a\u00f1adido a la cita para esta versi\u00f3n, porque ya no est\u00e1 disponible. +dataset.cite.standards.tip=Si quiere usar estos datos, por favor, a\u00f1ada esta cita a sus recursos acad\u00e9micos. M\u00e1s informaci\u00f3n en Est\u00e1ndares de Citas de Datos. +dataset.cite.downloadBtn=Descargar Cita +dataset.cite.downloadBtn.xml=XML de EndNote +dataset.cite.downloadBtn.ris=Formato RIS +dataset.create.authenticatedUsersOnly=S\u00f3lo los usuarios identificados pueden descargar datasets. +dataset.deaccession.reason=Raz\u00f3n de Retirada de Acceso +dataset.beAccessedAt=Ahora se puede acceder al dataset en: +dataset.descriptionDisplay.title=Descripci\u00f3n +dataset.keywordDisplay.title=Palabra Clave +dataset.subjectDisplay.title=Materia +dataset.contact.tip=Utilice el bot\u00f3n de e-mail de arriba para contactar. +dataset.asterisk.tip=Los asteriscos indican campos obligatorios +dataset.message.editFiles=Subir + Editar Ficheros del Dataset - Puede usar arrastrar y soltar desde su escritorio directamente hasta el widget de subida. +dataset.message.editMetadata=Editar Metadatos del Dataset - A\u00f1ade m\u00e1s metadatos sobre este dataset para facilitar que sea encontrado f\u00e1cilmente. +dataset.message.editTerms=Editar las Condiciones del Dataset - Actualizar las condiciones de uso de este dataset. +dataset.message.createSuccess=Este dataset se ha creado. +dataset.message.linkSuccess= {0} se ha enlazado con \u00e9xito a {3}. +dataset.message.metadataSuccess=Los metadatos de este dataset se han actualizado. +dataset.message.termsSuccess=Las condiciones de este dataset se han actualizado. +dataset.message.filesSuccess=Los ficheros de este dataset se han actualizado. +dataset.message.publishSuccess=Este dataset se ha publicado. +dataset.message.deleteSuccess=Este dataset se ha eliminado. +datasetVersion.message.deleteSuccess=La versi\u00f3n preliminar de este dataset se ha eliminado.
+datasetVersion.message.deaccessionSuccess=El/Las versi\u00f3n(es) se ha(n) retirado. +dataset.message.deaccessionSuccess=Este dataset se ha retirado. +dataset.message.files.ingestSuccess=El/Los fichero(s) se han a\u00f1adido con \u00e9xito. Ahora puedes explorarlos con TwoRavens o descargarlos en formatos alternativos. +dataset.message.validationError=Error de Validaci\u00f3n - Hab\u00eda campos obligatorios sin rellenar o hubo un error de validaci\u00f3n. Por favor, haga scroll para ver los detalles. +dataset.message.publishFailure=No se ha podido publicar el dataset. +dataset.message.metadataFailure=No se han podido actualizar los metadatos. +dataset.message.filesFailure=No se han podido actualizar los ficheros. +dataset.message.files.ingestFailure=No se pudo/ieron subir el/los fichero(s). +dataset.message.deleteFailure=No se pudo eliminar la versi\u00f3n preliminar del dataset. +dataset.message.deaccessionFailure=No se puede retirar el acceso a este dataset. +dataset.message.createFailure=No pudo crearse el dataset. +dataset.message.termsFailure=No pudieron actualizarse las condiciones del dataset. +dataset.metadata.publicationDate=Fecha de Publicaci\u00f3n +dataset.metadata.publicationDate.tip=La fecha de publicaci\u00f3n del dataset. +dataset.metadata.persistentId=ID Persistente del Dataset +dataset.metadata.persistentId.tip=Identificador \u00fanico y persistente para un Dataset, el cu\u00e1l puede ser un Handle o un DOI en Dataverse. 
+dataset.versionDifferences.termsOfUseAccess=Condiciones de Uso y Acceso +dataset.versionDifferences.termsOfUseAccessChanged=Condiciones de Uso/Acceso Cambiadas +file.viewDiffDialog.restricted=Restringido +file.viewDiffDialog.md5=MD5 +# traducido en femenino porque se refiere a una plantilla +dataset.noTemplate.label=Ninguna + +file.count={0} {0, choice, 0#Ficheros|1#Fichero|2#Ficheros} +file.selectToAddBtn=Selecciona los ficheros que quiera a\u00f1adir +file.selectToAdd.tip=Si quiere m\u00e1s informaci\u00f3n sobre los formatos de fichero soportados, por favor, visite la Gu\u00eda de Uso. +file.fromDropbox=Subir desde Dropbox +file.fromDropbox.tip=Los ficheros, tambi\u00e9n pueden ser actualizados directamente desde Dropbox. +file.fromDropbox.description=Arrastre y suelte los ficheros aqu\u00ed. + +file.uploadOrEdit=Subir + Editar Ficheros +file.notFound.tip=No hay ficheros en este dataset. +file.delete=Eliminar +file.deleted.success=Los ficheros {0} ser\u00e1n eliminados de forma permanente de esta versi\u00f3n de este dataset una vez pulses el bot\u00f3n Guardar Cambios. +file.restrict=Restringido +file.unrestrict=Eliminar restricci\u00f3n +file.restricted.success=El/Los fichero(s) {0} ser\u00e1n de acceso restringido cuando pulse el bot\u00f3n Guardar Cambios al final de esta p\u00e1gina. +file.download.header=Descargar +file.preview=Previsualizaci\u00f3n: +file.fileName=Nombre del Fichero +file.type.tabularData=Datos Tabulares +file.MD5=MD5 +file.MD5.origal=MD5 del Fichero Original +file.MD5.exists.tip=Ya existe en este dataset un fichero con ese MD5. +file.selectedThumbnail=Miniatura +file.selectedThumbnail.tip=La miniatura para este fichero se usa como miniatura para el dataset. Pulse el bot\u00f3n 'Opciones Avanzadas' de otro fichero para seleccionarlo. 
+ +file.metaData.dataFile.dataTab.variables=Variables +file.metaData.dataFile.dataTab.observations=Observaciones +file.metaData.viewOnWorldMap=Ver en WorldMap +file.addDescription=A\u00f1adir descripci\u00f3n del fichero... +file.editTags=Editar Etiquetas +file.editTagsDialog.tip=Seleccionar etiquetas existentes o crear otras nuevas que describan sus ficheros. Cuando se crea una etiqueta nueva, \u00e9sta se a\u00f1ade como una opci\u00f3n de etiqueta para todos los ficheros de este dataset. Cada fichero puede tener m\u00e1s de una etiqueta. +file.editTagsDialog.select=Etiquetas de Fichero +file.editTagsDialog.add=Etiqueta de Fichero Personalizada +file.editTagsDialog.newName=A\u00f1adir nueva etiqueta de fichero... +file.setThumbnail=Establecer Previsualizaci\u00f3n +file.setThumbnail.header=Establecer Previsualizaci\u00f3n para el Dataset +file.datasetThumbnail=Previsualizaci\u00f3n del Dataset +file.datasetThumbnail.tip=Seleccionar esta imagen para mostrarla como previsualizaci\u00f3n en los resultados de b\u00fasqueda para este dataset. +file.useThisIamge=Usar esta imagen como imagen de previsualizaci\u00f3n de este dataset +file.advancedOptions=Opciones Avanzadas +file.advancedIngestOptions=Opciones Avanzadas de Procesamiento +file.assignedDataverseImage.success={0} se ha guardado como la previsualizaci\u00f3n de este dataset. +file.assignedTabFileTags.success=La(s) etiqueta(s) se han a\u00f1adido con \u00e9xito a {0}. +file.tabularDataTags=Etiquetas de Datos Tabulares +file.tabularDataTags.title=Seleccione una etiqueta para describir el/los tipo/s de datos de los que se trata (encuesta, series temporales, geoespacial, etc.). Esta etiqueta es s\u00f3lo para ficheros de Excel, SPSS, Stata o R Data. +file.tabularDataTags.tip=Etiquetas espec\u00edficas de ficheros de datos para identificar qu\u00e9 tipo(s) de datos tiene un fichero.
+file.spss-savEncoding=Codificaci\u00f3n de Idioma +file.spss-savEncoding.title=Seleccione el idioma usado para codificar este fichero de datos SPSS (sav). +file.spss-savEncoding.current=Selecci\u00f3n actual: +file.spss-porExtraLabels=Etiquetas de Variables +file.spss-porExtraLabels.title=Subir un fichero adicional de texto con etiquetas de variable extra. +file.spss-porExtraLabels.selectToAddBtn=Seleccionar el Fichero a A\u00f1adir +file.ingestFailed=Fall\u00f3 el Procesamiento de los Datos Tabulares +file.mapData=Mapear Datos +file.mapData.viewMap=Explorar +file.mapData.unpublished.header=Datos No Publicados +file.mapData.unpublished.message=Para situar tus datos en WorldMap, estos datos han de ser publicados. Por favor, publique este dataset y pruebe de nuevo la funcionalidad Mapa de Datos. +file.downloadBtn.format.all=Todos los Formatos de Ficheros + Informaci\u00f3n +file.downloadBtn.format.tab=Delimitados por Tabuladores +file.downloadBtn.format.original=Formato de Fichero Original ({0}) +file.downloadBtn.format.rdata=Formato RData +file.downloadBtn.format.var=Metadatos Variables +file.downloadBtn.format.citation=Citas de Fichero de Datos +file.more.information.link=Enlace a m\u00e1s informaci\u00f3n sobre el fichero sobre + +file.requestAccess=Petici\u00f3n de acceso +file.requestAccess.dialog.msg=Tiene que Identificarse para solicitar acceso a este fichero. +file.requestAccess.dialog.msg.signup=Tiene que Crear una Cuenta o Identificarse para solicitar acceso a este fichero. +file.accessRequested=Acceso Pedido + +file.ingestInproGress=Procesamiento en curso... 
+ +file.dataFilesTab.metadata.header=Metadatos +file.dataFilesTab.metadata.addBtn=A\u00f1adir + Editar Metadatos +file.dataFilesTab.terms.header=Condiciones +file.dataFilesTab.terms.editTermsBtn=Editar los Requisitos de las Condiciones +file.dataFilesTab.terms.list.termsOfUse.header=Condiciones de Uso +# waiver es m\u00e1s bien renuncia, pero en el contexto, en espa\u00f1ol, creo que es mejor usar licencia. +file.dataFilesTab.terms.list.termsOfUse.waiver=Licencia +file.dataFilesTab.terms.list.termsOfUse.waiver.title=La licencia informa a quienes descargan datos sobre c\u00f3mo se puede usar este dataset. +file.dataFilesTab.terms.list.termsOfUse.waiver.txt=CC0 - "Dedicaci\u00f3n de Dominio P\u00fablico" +file.dataFilesTab.terms.list.termsOfUse.waiver.description=Los datasets tendr\u00e1n por defecto una licencia CC0 dedicaci\u00f3n de dominio p\u00fablico. La licencia CC0 facilita la reutilizaci\u00f3n y ampliaci\u00f3n de nuestros datos de investigaci\u00f3n. Tanto las Normas de la Comunidad como las buenas pr\u00e1cticas cient\u00edficas, esperan que se d\u00e9 cr\u00e9dito al material usado mediante citas. Si no puede dar a sus datasets una licencia CC0, puede introducir Condiciones de Uso personalizadas para sus datasets. +file.dataFilesTab.terms.list.termsOfUse.no.waiver.txt=No se ha seleccionado ninguna licencia para este dataset. +file.dataFilesTab.terms.list.termsOfUse.waiver.txt.description=Tanto nuestras Normas de la Comunidad como las buenas pr\u00e1cticas cient\u00edficas, esperan que se d\u00e9 cr\u00e9dito al material usado mediante citas. Por favor, use la siguiente cita de datos generada por Dataverse.
+file.dataFilesTab.terms.list.termsOfUse.waiver.select.CCO=S\u00ed, usar CC0 - "Dedicaci\u00f3n de Dominio P\u00fablico" +file.dataFilesTab.terms.list.termsOfUse.waiver.select.notCCO=No, no usar CC0 - "Dedicaci\u00f3n de Dominio P\u00fablico" +file.dataFilesTab.terms.list.termsOfUse.waiver.select.tip=Esto es lo que ver\u00e1n los usuarios finales en este dataset +file.dataFilesTab.terms.list.termsOfUse.termsOfUse=Condiciones de Uso +file.dataFilesTab.terms.list.termsOfUse.termsOfUse.title=Describe c\u00f3mo pueden usarse estos datos una vez descargados. +file.dataFilesTab.terms.list.termsOfUse.termsOfUse.description=Si no puede dar una licencia CC0 a sus datasets, puede introducir Condiciones de Uso personalizadas. \u00c9ste es un ejemplo de Condiciones de uso de Datos para datasets que tienen datos anonimizados relacionados con personas. +file.dataFilesTab.terms.list.termsOfUse.addInfo=Informaci\u00f3n Adicional +file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration=Declaraci\u00f3n de Confidencialidad +file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration.title=Indica si se necesita firmar una declaraci\u00f3n de confidencialidad para acceder a un recurso. +file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions=Permisos Especiales +file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions.title=Determina si son necesarios permisos especiales para acceder a un recurso (p.e., si es necesario rellenar un formulario y d\u00f3nde se accede a dicho formulario). +file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions=Restricciones +file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions.title=Todas las restricciones de acceso o uso de la colecci\u00f3n, tales como certificaciones de privacidad o restricciones en su distribuci\u00f3n, se deber\u00edan indicar aqu\u00ed. Estas restricciones pueden estar impuestas por el autor, el productor o el diseminador de la colecci\u00f3n de datos.
Si la restricci\u00f3n de datos afecta s\u00f3lo a un tipo de usuarios, indique a qu\u00e9 tipo. +file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements=Requisitos de Citas +file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements.title=Incluya requisitos especiales/expl\u00edcitos para datos que necesiten ser citados de forma adecuada en art\u00edculos u otras publicaciones basadas en el an\u00e1lisis de datos. Para los requisitos de cita est\u00e1ndar, dir\u00edjase a las Normas de nuestra Comunidad. +file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements=Requisitos del Depositante +file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements.title=Informaci\u00f3n relacionada con la responsabilidad de los usuarios para Depositantes, Autores y Conservadores/Revisores sobre el uso de los datos, indicando las citas a los trabajos publicados o proporcionando las copias de los manuscritos. +file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions=Condiciones +file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions.title=Cualquier informaci\u00f3n adicional que ayude al usuario a entender las condiciones de uso y acceso del Dataset. +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer=Renuncia de Responsabilidad +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer.title=Informaci\u00f3n relacionada con la responsabilidad de uso del Dataset. + +file.dataFilesTab.terms.list.termsOfAccess.header=Ficheros de Acceso Restringido + Condiciones de Acceso +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles=Ficheros de Acceso Restringido +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.title=El n\u00famero de ficheros de acceso restringido en este dataset. +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=Hay {0} {0, choice, 0#ficheros|1#fichero|2#ficheros} de acceso restringido en este dataset. 
+file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess=Condiciones de Acceso +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess.title=Informaci\u00f3n sobre c\u00f3mo conseguir acceso a los ficheros con acceso restringido en este dataset y si los usuarios pueden conseguirlo. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess=Pedir acceso +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title=Si est\u00e1 seleccionado, los usuarios pueden solicitar acceso a los ficheros de acceso restringido en este dataset. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.request=Los usuarios pueden solicitar acceso a los ficheros. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.notRequest=Los usuarios no pueden solicitar acceso a los ficheros. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.enableBtn=Habilitar la solicitud de acceso + +file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace=Lugar de Acceso a los Datos +file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace.title=Si los datos no est\u00e1n s\u00f3lo en Dataverse, indique la(s) localizaci\u00f3n(es) donde los datos est\u00e1n almacenados actualmente. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive=Archivo Original +file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive.title=Archivo desde el que se obtuvieron los datos. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus=Estado de Disponibilidad +file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus.title=Disponibilidad del Dataset. Un depositante puede indicar que un Dataset no est\u00e1 disponible porque tiene un per\u00edodo de embargo, porque se ha sustituido, porque habr\u00e1 una nueva edici\u00f3n inminentemente, etc. 
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess=Contactar para Solicitar Acceso +file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess.title=Si es diferente del Contacto del Dataset, esta es la persona u organizaci\u00f3n de contacto (incluir el e-mail o la direcci\u00f3n completa, y el n\u00famero de tel\u00e9fono si est\u00e1 disponible) que controla el acceso a la colecci\u00f3n. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection=Tama\u00f1o de la Colecci\u00f3n +file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection.tip=Resumen del n\u00famero de ficheros f\u00edsicos que contiene el Dataset, indicando el n\u00famero de ficheros que contienen datos y haciendo notar si la colecci\u00f3n contiene documentaci\u00f3n legible por m\u00e1quinas y/o otra informaci\u00f3n adicional como c\u00f3digo, diccionarios de datos, declaraciones de definici\u00f3n de datos o instrumentos de recolecci\u00f3n de datos. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion=Conclusi\u00f3n del Estudio +file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion.title=Relaci\u00f3n entre los datos recolectados con la cantidad de datos almacenados en el Dataset. Informaci\u00f3n sobre porqu\u00e9 algunos items recolectados no se incluyeron en el dataset o si se debe proporcionar un fichero de datos concreto. + +file.dataFilesTab.terms.list.guestbook=Libro de Invitados +file.dataFilesTab.terms.list.guestbook.title=La informaci\u00f3n del usuario (e.d., nombre, e-mail, instituci\u00f3n y puesto) ser\u00e1 recogida cuando se descarguen los ficheros. +file.dataFilesTab.terms.list.guestbook.noSelected.tip=No hay un libro de invitados asignado a este dataset, no se le preguntar\u00e1 si quiere proporcionar informaci\u00f3n durante la descarga del fichero. +file.dataFilesTab.terms.list.guestbook.noSelected.admin.tip=No hay libros de invitados disponibles en el Dataverse {0} para asignar a este dataset. 
+file.dataFilesTab.terms.list.guestbook.inUse.tip=El siguiente libro de invitados pedir\u00e1 al usuario informaci\u00f3n adicional cuando descargue un fichero. +file.dataFilesTab.terms.list.guestbook.viewBtn=Previsualizaci\u00f3n del Libro de Invitados +file.dataFilesTab.terms.list.guestbook.select.tip=Seleccionar un libro de invitados para que el usuario proporcione informaci\u00f3n adicional cuando descargue un fichero. +file.dataFilesTab.terms.list.guestbook.noAvailable.tip=No hay libros de invitados habilitados en el Dataverse {0}. Para crear un libro de invitados, vuelva al Dataverse {0}, pulsa el bot\u00f3n "Editar" y seleccione la opci\u00f3n "Libros de Invitados del Dataset". +file.dataFilesTab.terms.list.guestbook.clearBtn=Limpiar Selecci\u00f3n + +file.dataFilesTab.versions=Versiones +file.dataFilesTab.versions.viewDiffBtn=Ver Diferencias +file.dataFilesTab.versions.citationMetadata=Metadatos de Cita: +file.dataFilesTab.versions.added=A\u00f1adido/a +file.dataFilesTab.versions.removed=Eliminado/a +file.dataFilesTab.versions.changed=Cambiado/a +file.dataFilesTab.versions.additionalCitationMetadata=Metadatos de Cita Adicionales: +file.dataFilesTab.versions.description.draft=Esta es una versi\u00f3n preliminar. +file.dataFilesTab.versions.description.deaccessioned=Dado que la versi\u00f3n anterior se ha retirado, no hay notas disponibles de las diferencias para la versi\u00f3n publicada. +file.dataFilesTab.versions.description.firstPublished=Esta es la primera versi\u00f3n publicada. +file.dataFilesTab.versions.description.deaccessionedReason=Raz\u00f3n para la retirada de Acceso: +file.dataFilesTab.versions.description.beAccessedAt=Ahora se puede acceder al dataset en: +file.dataFilesTab.versions.viewDetails.btn=Ver detalles +file.deleteDialog.tip=\u00bfEst\u00e1 seguro de que quiere eliminar el dataset?. No podr\u00e1 deshacer la operaci\u00f3n. 
+file.deleteDialog.header=Eliminar Dataset +file.deleteDraftDialog.tip=\u00bfEst\u00e1 seguro de que quiere eliminar esta versi\u00f3n preliminar? No podr\u00e1 deshacer la operaci\u00f3n. +file.deleteDraftDialog.header=Eliminar Versi\u00f3n Preliminar +file.deleteFileDialog.tip=El/Los fichero(s) ser\u00e1(n) borrado(s) cuando pulse el bot\u00f3n Guardar Cambios en la parte inferior de esta p\u00e1gina. +file.deleteFileDialog.header=Borrar Ficheros +file.deleteFileDialog.failed.tip=Los ficheros no se eliminar\u00e1n de las versiones publicadas previamente en el dataset. +file.deaccessionDialog.tip=Una vez haya retirado este dataset no ser\u00e1 visible para el p\u00fablico. +file.deaccessionDialog.version=Versi\u00f3n +file.deaccessionDialog.reason.question1=\u00bfQu\u00e9 versi\u00f3n(es) quiere retirar? +file.deaccessionDialog.reason.question2=\u00bfCu\u00e1l es la raz\u00f3n para la retirada de acceso? +file.deaccessionDialog.reason.selectItem.identifiable=Hay datos identificables en uno o m\u00e1s ficheros +file.deaccessionDialog.reason.selectItem.beRetracted=El art\u00edculo de investigaci\u00f3n se ha retirado +file.deaccessionDialog.reason.selectItem.beTransferred=Se ha transferido el dataset a otro repositorio +file.deaccessionDialog.reason.selectItem.IRB=Petici\u00f3n IRB +file.deaccessionDialog.reason.selectItem.legalIssue=Cuesti\u00f3n legal o Acuerdo de Uso de Datos +file.deaccessionDialog.reason.selectItem.notValid=No es un dataset v\u00e1lido +file.deaccessionDialog.reason.selectItem.other=Otra (Por favor, indique la raz\u00f3n en el siguiente espacio) +file.deaccessionDialog.enterInfo=Por favor, introduzca informaci\u00f3n adicional sobre la raz\u00f3n para la retirada de acceso. +file.deaccessionDialog.leaveURL=Si es aplicable, por favor, indique una URL desde la que se pueda acceder a este dataset tras la retirada de acceso. +file.deaccessionDialog.leaveURL.watermark=Sitio web opcional del dataset, http://... 
+file.deaccessionDialog.deaccession.tip=\u00bfEst\u00e1 seguro de que quiere realizar la retirada? La(s) versi\u00f3n(es) seleccionada(s) no volver\u00e1(n) a estar disponible(s) para el p\u00fablico. +file.deaccessionDialog.deaccessionDataset.tip=\u00bfEst\u00e1 seguro de que quiere retirar este dataset? No volver\u00e1 a estar disponible para el p\u00fablico. +file.deaccessionDialog.dialog.selectVersion.tip=Por favor, seleccione la(s) versi\u00f3n(ones) que ser\u00e1(n) retirada(s). +file.deaccessionDialog.dialog.selectVersion.header=Por Favor, Seleccionar la(s) Versi\u00f3n(es) +file.deaccessionDialog.dialog.reason.tip=Por Favor, seleccione la raz\u00f3n de la retirada de acceso. +file.deaccessionDialog.dialog.reason.header=Por Favor, Seleccionar una Raz\u00f3n +file.deaccessionDialog.dialog.url.tip=Por Favor, introduzca una URL v\u00e1lida. +file.deaccessionDialog.dialog.url.header=URL Inv\u00e1lida. +file.deaccessionDialog.dialog.textForReason.tip=Por Favor, introduzca la raz\u00f3n para la retirada de acceso. +file.deaccessionDialog.dialog.textForReason.header=Introduzca informaci\u00f3n adicional +file.deaccessionDialog.dialog.limitChar.tip=El texto para la raz\u00f3n de la retirada de acceso, no puede superar los 1000 caracteres. +file.deaccessionDialog.dialog.limitChar.header=L\u00edmite 1000 caracteres +file.viewDiffDialog.header=Detalles de las Diferencias de Versi\u00f3n +file.viewDiffDialog.dialog.warning=Por favor, seleccione dos versiones para ver sus diferencias.
+file.viewDiffDialog.version=Versi\u00f3n +file.viewDiffDialog.lastUpdated=\u00daltima Modificaci\u00f3n +file.viewDiffDialog.fileID=ID de Fichero +file.viewDiffDialog.fileName=Nombre +file.viewDiffDialog.fileType=Tipo +file.viewDiffDialog.fileSize=Tama\u00f1o +file.viewDiffDialog.category=Etiqueta(s) +file.viewDiffDialog.description=Descripci\u00f3n +file.metadataTip=Truco de Metadatos: Despu\u00e9s de a\u00f1adir el dataset, pulse el bot\u00f3n Editar Dataset para a\u00f1adir m\u00e1s metadatos. +file.addBtn=Guardar Dataset + +file.downloadDialog.header=Descargar Fichero +file.downloadDialog.tip=Por favor, confirme y/o complete la informaci\u00f3n necesaria para descargar los ficheros de este dataset. +file.downloadDialog.termsTip=Acepto las Condiciones de Uso. + +file.search.placeholder=Buscar en estos ficheros de datos... +file.results.btn.sort=Ordenar +file.results.btn.sort.option.nameAZ=Nombre (A-Z) +file.results.btn.sort.option.nameZA=Nombre (Z-A) +file.results.btn.sort.option.newest=M\u00e1s Nuevo +file.results.btn.sort.option.oldest=M\u00e1s Antiguo +file.results.btn.sort.option.size=Tama\u00f1o +file.results.btn.sort.option.type=Tipo + +# file.xhtml +file.title.label=T\u00edtulo +file.citation.label=Cita +file.general.metadata.label=Metadatos Generales +file.description.label=Descripci\u00f3n +file.tags.label=Etiquetas + +file.metadataTab.fileMetadata.header=Metadatos del Fichero +file.metadataTab.fileMetadata.persistentid.label=Fichero de datos con el ID Persistente +file.metadataTab.fileMetadata.md5.label=MD5 +file.metadataTab.fileMetadata.unf.label=UNF +file.metadataTab.fileMetadata.size.label=Tama\u00f1o +file.metadataTab.fileMetadata.type.label=Tipo +file.metadataTab.fileMetadata.description.label=Descripci\u00f3n +file.metadataTab.fitsMetadata.header=Metadatos FITS +file.metadataTab.provenance.header=Procedencia del Fichero +file.metadataTab.provenance.body=La Informaci\u00f3n sobre la Procedencia del Fichero se a\u00f1adir\u00e1 en una versi\u00f3n 
posterior... + +# 500.xhtml +error.500.page.title=500 Internal Server Error +error.500.message=Error Interno del Servidor - Ocurri\u00f3 un error inesperado, no hay disponible m\u00e1s informaci\u00f3n. + +# 404.xhtml +error.404.page.title=404 Not Found +error.404.message=Page Not Found - No encontramos la p\u00e1gina que busca. Para contactar con el soporte, por favor, pulse en el enlace Soporte. + +# 403.xhtml +error.403.page.title=403 Not Authorized +error.403.message=Sin Autorizaci\u00f3n - Usted no est\u00e1 autorizado a ver esta p\u00e1gina. Para contactar con soporte, por favor, pulse en el enlace Soporte m\u00e1s arriba. + +# nuevas etiquetas Dataverse 4.2 (2015-09-30) +file.count.selected={0} {0, choice, 0#Ficheros Seleccionados|1#Fichero Seleccionado|2#Ficheros Seleccionados} +dataverse.permissions.roles.roleList.authorizedUserOnly=Los permisos con el icono de informaci\u00f3n indican que las acciones se pueden realizar por parte de los usuarios no identificados en Dataverse. +dataset.manageTemplates.saveNewTerms=Guardar Plantilla de Dataset +dataset.versionUI.inReview=En Revisi\u00f3n +dataset.message.uploadFiles=Subir Ficheros de Dataset - Puede pulsar y arrastrar ficheros desde su escritorio directamente hasta el widget de subida de ficheros. +dataset.message.editFiles=Subir + Editar Ficheros de Dataset - Puede pulsar y arrastrar ficheros desde su escritorio directamente hasta el widget de subida de ficheros. +dataset.message.bulkFileUpdateSuccess=Se han actualizado los ficheros seleccionados. +dataset.template.tip=Al cambiar la plantilla se borrar\u00e1 el contenido de cualquier fichero en el que haya introducido datos. +file.editFiles=Editar Ficheros +file.bulkUpdate=Actualizaci\u00f3n en Bloque +file.uploadFiles=Subir Ficheros +file.uploadOrEdit=Subir + Editar Ficheros +file.noSelectedFiles.tip=No hay ficheros seleccionados para mostrar. +file.noUploadedFiles.tip=Los ficheros que suba aparecer\u00e1n aqu\u00ed. 
+file.tags=Etiquetas +file.deleteFileDialog.immediate=Se borrar\u00e1 el fichero despu\u00e9s de que pulse en el bot\u00f3n Borrar. +file.deleteFileDialog.multiple.immediate=Se borrar\u00e1/n el/los fichero/s despu\u00e9s de que pulse en el bot\u00f3n Borrar. +file.dataset.allFiles=Todos los Ficheros de este Dataset diff --git a/dataversedock/lang.properties/Bundle_es_ES.properties_utf b/dataversedock/lang.properties/Bundle_es_ES.properties_utf new file mode 100644 index 0000000..a935a46 --- /dev/null +++ b/dataversedock/lang.properties/Bundle_es_ES.properties_utf @@ -0,0 +1,1656 @@ +dataverse=Dataverse +newDataverse=Nuevo Dataverse +hostDataverse=Servidor Dataverse +dataverses=Dataverses +passwd=Contrase\u00f1a +dataset=Dataset +datasets=Datasets +newDataset=Nuevo Dataset +files=Ficheros +file=Fichero +restricted=Restringido +restrictedaccess=Acceso Restringido +find=Buscar +search=Buscar +unpublished=Sin publicar +cancel=Cancelar +ok=OK +saveChanges=Guardar Cambios +acceptTerms=Aceptar +submit=Enviar +signup=Crear Cuenta +login=Iniciar Sesi\u00f3n +email=E-mail +account=Cuenta +requiredField=Campo obligatorio +new=Nuevo/a +identifier=Identificador +description=Descripci\u00f3n +subject=Materia +close=Cerrar +preview=Preview +continue=Continuar +name=Nombre +institution=Instituci\u00f3n +position=Posici\u00f3n +affiliation=Afiliaci\u00f3n +createDataverse=Crear Dataverse +remove=Borrar +done=Hecho +editor=Editor +manager=Administrador +curator=Conservador/Revisor +explore=Explorar +download=Descargar +deaccession=Retirada +share=Share +link=Link +linked=Enlazado +harvested=Recolectado +apply=Apply +add=A\u00f1adir +delete=Borrar +yes=S\u00ed +no=No +previous=Anterior +next=Siguiente +first=First +last=Last +more=M\u00e1s... +less=Menos... +select=Seleccionar... +selectedFiles=Ficheros Seleccionados +htmlAllowedTitle=Etiquetas HTML Permitidas +htmlAllowedMsg=Este campo s\u00f3lo admite algunas etiquetas HTML. +htmlAllowedTags=, ,
                &lt;blockquote&gt;, &lt;br/&gt;, &lt;code&gt;, &lt;del&gt;, &lt;dd&gt;, &lt;dl&gt;, &lt;dt&gt;, &lt;em&gt;, &lt;hr&gt;, &lt;h1&gt;-&lt;h3&gt;, &lt;i&gt;, &lt;img&gt;, &lt;kbd&gt;, &lt;li&gt;, &lt;ol&gt;, &lt;p&gt;, &lt;pre&gt;, &lt;s&gt;, &lt;sup&gt;, &lt;sub&gt;, &lt;strong&gt;, &lt;strike&gt;, &lt;ul&gt;
                    +# dataverse_header.xhtml=# dataverse_header.xhtml +header.status.header=Estado +header.search.title=Buscar en todos los dataverses... +header.about=Acerca de +header.support=Soporte +header.guides=Gu\u00edas +header.guides.user=Gu\u00eda de Usuario +header.guides.developer=Developer Guide +header.guides.installation=Gu\u00eda de Instalaci\u00f3n +header.guides.api=Gu\u00eda del API +header.guides.admin=Admin Guide +header.signUp=Crear Cuenta +header.logOut=Salir +header.accountInfo=Informaci\u00f3n de la Cuenta +header.dashboard=Dashboard +header.user.selectTab.dataRelated=Mis Datos +header.user.selectTab.notifications=Notificaciones +header.user.selectTab.accountInfo=Account Information +header.user.selectTab.groupsAndRoles=Grupos + Roles +header.user.selectTab.apiToken=API Token +# dataverse_template.xhtml=# dataverse_template.xhtml +head.meta.description=El proyecto Dataverse es una aplicaci\u00f3n de software de c\u00f3digo abierto para compartir, citar y archivar datos. Dataverse proporciona a los administradores de datos una infraestructura robusta para administrar, alojar y archivar datos, a la vez que ofrece a los investigadores una forma sencilla de compartir y obtener reconocimiento por sus datos. +body.skip=Ir al contenido principal +# dataverse_footer.xhtml +footer.copyright=Copyright © 2015, The President & Fellows of Harvard College +footer.widget.datastored=Data is stored at {0}. +footer.widget.login=Log in to +footer.privacyPolicy=Pol\u00edtica de Privacidad +footer.poweredby=Powered by +footer.dataverseProject=The Dataverse Project +# messages.xhtml +messages.error=Error +messages.success=Success! +messages.info=Info +messages.validation=Validation Error +messages.validation.msg=Required fields were missed or there was a validation error. Please scroll down to see details. 
+# contactFormFragment.xhtml=# contactFormFragment.xhtml +contact.header=Contactar con el Soporte de Dataverse +contact.dataverse.header=Contactar con el Administrador de Dataverse +contact.dataset.header=Contactar con el Administrador de Dataset +contact.to=Para +contact.support=Soporte de Dataverse +contact.from=De +contact.from.required=El e-mail es obligatorio. +contact.from.invalid=El e-mail no es v\u00e1lido. +contact.subject=Asunto +contact.subject.required=El asunto es obligatorio. +contact.subject.selectTab.top=Seleccionar asunto... +contact.subject.selectTab.support=Pregunta de Soporte +contact.subject.selectTab.dataIssue=Problema con los datos +contact.msg=Mensaje +contact.msg.required=El texto del mensaje es obligatorio. +contact.send=Enviar Mensaje +contact.question=Por favor, rellena este campo para demostrar que no eres un robot. +contact.sum.required=El valor es obligatorio. +contact.sum.invalid=Suma err\u00f3nea. Por favor, prueba otra vez. +contact.sum.converterMessage=Por favor, introduce un n\u00famero. +contact.contact=Contacto +# dataverseuser.xhtml=# dataverseuser.xhtml +account.info=Informaci\u00f3n de la Cuenta +account.edit=Editar Cuenta +account.apiToken=API Token +user.isShibUser=Account information cannot be edited when logged in through an institutional account. +user.helpShibUserMigrateOffShibBeforeLink=Leaving your institution? Please contact +user.helpShibUserMigrateOffShibAfterLink=for assistance. +user.helpOAuthBeforeLink=Your Dataverse account uses {0} for login. If you are interested in changing login methods, please contact +user.helpOAuthAfterLink=for assistance. +user.lostPasswdTip=Si pierde u olvida su contrase\u00f1a, por favor, introduzca su identificador de usuario o e-mail m\u00e1s abajo y pulse en Enviar. Le enviaremos un e-mail con su nueva contrase\u00f1a. 
+user.dataRelatedToMe=Mis Datos +wasCreatedIn=, fue creado el +wasCreatedTo=, fue a\u00f1adido a +wasSubmittedForReview=, fue enviado a revisi\u00f3n para su publicaci\u00f3n el +wasPublished=, fue publicado el +wasReturnedByReviewer=, fue devuelto por el conservador/revisor de +# TODO: Confirm that "toReview" can be deleted. +toReview=\!No olvide publicarlo o devolverlo al colaborador\! +worldMap.added=el dataset ten\u00eda a\u00f1adida una capa de datos de WorldMap. +# Bundle file editors, please note that "notification.welcome" is used in a unit test. +notification.welcome=Bienvenido a {0} {1}! \u00bfNecesita ayuda? Revise la Gu\u00eda de Usuario o pregunte en el Grupo de Google Comunidad Dataverse (en ingl\u00e9s). +notification.demoSite=Demo Site +notification.requestFileAccess=Acceso a ficheros solicitado para el dataset: {1}. +notification.grantFileAccess=Acceso permitido para los ficheros del dataset: {1}. +notification.rejectFileAccess=Acceso denegado para los ficheros del dataset: {1}. +notification.createDataverse={1} creado en {3}. Para aprender m\u00e1s sobre lo que puede hacer con su dataverse, puede ver la Gu\u00eda de Usuario. +notification.dataverse.management.title=Dataverse Management - Dataverse User Guide +notification.createDataset={1} creado en {3}. Para saber m\u00e1s sobre lo que puede hacer con un dataset puede ver la Gu\u00eda de Usuario. +notification.dataset.management.title=Dataset Management - Dataset User Guide +notification.wasSubmittedForReview={1}, fue enviado a revisi\u00f3n para ser publicado en {3}. \!No olvide publicarlo o devolverlo a los colaboradores\! +notification.wasReturnedByReviewer={1}, fue devuelto por el conservador/revisor de {3}. +notification.wasPublished={1}, fue publicado en {3}. +notification.worldMap.added=El dataset {1} tiene una capa de datos WorldMap a\u00f1adida. +notification.maplayer.deletefailed=Failed to delete the map layer associated with the restricted file {0} from WorldMap. 
Please try again, or contact WorldMap and/or Dataverse support. (Dataset: {1}) +notification.generic.objectDeleted=El dataverse, dataset o fichero relacionado con esta notificaci\u00f3n se ha eliminado. +notification.access.granted.dataverse=Tienes asignado el rol {0} para {2}. +notification.access.granted.dataset=Tienes asignado el rol {0} para {2}. +notification.access.granted.datafile=Tienes asignado el rol {0} para acceso a ficheros en {2}. +notification.access.granted.fileDownloader.additionalDataverse={0} You now have access to all published restricted and unrestricted files in this dataverse. +notification.access.granted.fileDownloader.additionalDataset={0} You now have access to all published restricted and unrestricted files in this dataset. +notification.access.revoked.dataverse=Se le ha dado de baja de un rol en {2}. +notification.access.revoked.dataset=Se le ha dado de baja de un rol en {2}. +notification.access.revoked.datafile=Se le ha dado de baja de un rol en {2}. +notification.checksumfail=One or more files in your upload failed checksum validation for dataset {0}. Please re-run the upload script. If the problem persists, please contact support. +notification.mail.import.filesystem=Dataset {2} ({0}/dataset.xhtml?persistentId={1}) has been successfully uploaded and verified. +notification.import.filesystem=Dataset {1} has been successfully uploaded and verified. +notification.import.checksum={1}, dataset had file checksums added via a batch job. +removeNotification=Borrar Notificaci\u00f3n +groupAndRoles.manageTips=Aqu\u00ed puede acceder y administrar los grupos a los que pertenece y los roles que tiene asignados. +user.signup.tip=\u00bfPor qu\u00e9 tener una cuenta en Dataverse? Para crear su propio dataverse y personalizarlo, a\u00f1adir datasets o solicitar acceso a ficheros con acceso restringido. +user.signup.otherLogInOptions.tip=You can also create a Dataverse account with one of our other log in options. 
+user.username.illegal.tip=Entre 2 y 60 caracteres, puede usar "a-z", "0-9", "_" para su identificador de usuario. +user.username=Identificador de usuario +user.username.taken=Este identificador no est\u00e1 disponible. +user.username.invalid=This username contains an invalid character or is outside the length requirement (2-60 characters). +user.username.valid=Create a valid username of 2 to 60 characters in length containing letters (a-Z), numbers (0-9), dashes (-), underscores (_), and periods (.). +user.noPasswd=Sin Contrase\u00f1a +user.currentPasswd=Contrase\u00f1a Actual +user.currentPasswd.tip=Por favor, introduzca la contrase\u00f1a para esta cuenta. +user.passwd.illegal.tip=La contrase\u00f1a ha de tener al menos 6 caracteres e incluir una letra y un n\u00famero. Se pueden usar caracteres especiales. +user.rePasswd=Confirme su contrase\u00f1a +user.rePasswd.tip=Por favor, escriba de nuevo su contrase\u00f1a. +user.firstName=Nombre +user.firstName.tip=Su nombre o el nombre que le gustar\u00eda usar en esta cuenta. +user.lastName=Apellidos +user.lastName.tip=Los apellidos que le gustar\u00eda usar en esta cuenta. +user.email.tip=Una direcci\u00f3n v\u00e1lida de e-mail para poder contactar con usted. +user.email.taken=Esta direcci\u00f3n de e-mail ya est\u00e1 en uso. +user.affiliation.tip=La organizaci\u00f3n a la que pertenece. +user.position=Puesto +user.position.tip=Su puesto o t\u00edtulo en la organizaci\u00f3n a la que pertenece; por ejemplo personal, profesor, estudiante, etc. +user.acccountterms=Condiciones Generales de Uso +user.acccountterms.tip=Los t\u00e9rminos y condiciones para utilizar la aplicaci\u00f3n y sus servicios. +user.acccountterms.required=Por favor, seleccione la casilla de selecci\u00f3n para indicar que acepta las condiciones generales de uso. +user.acccountterms.iagree=He le\u00eddo y acepto las Condiciones Generales de Uso de Dataverse. 
+user.createBtn=Crear Cuenta +user.updatePassword.welcome=Bienvenido a Dataverse {0}, {1} +user.updatePassword.warning=Con la actualizaci\u00f3n a Dataverse 4.2, los requisitos de las contrase\u00f1as y las condiciones generales de uso se han actualizados. Al ser la primera vez que usa Dataverse desde la actualizaci\u00f3n, necesita crear una nueva contrase\u00f1a y aceptar las Condiciones Generales de Uso. +user.updatePassword.password=Cree una contrase\u00f1a con un m\u00ednimo de seis caracteres que contenga por lo menos una letra y un n\u00famero. +authenticationProvidersAvailable.tip={0}No hay proveedores de autentificaci\u00f3n activos{1}Si es usted el administrador del sistema, por favor, act\u00edvelos usando el API.{2}Si no lo es, por favor, contacte con el administrador de su instituci\u00f3n. +passwdVal.passwdReq.title=Your password must contain: +passwdVal.passwdReq.goodStrength =passwords of at least {0} characters are exempt from all other requirements +passwdVal.passwdReq.lengthReq =At least {0} characters +passwdVal.passwdReq.characteristicsReq =At least 1 character from {0} of the following types: +passwdVal.passwdReq.notInclude =It may not include: +passwdVal.passwdReq.consecutiveDigits =More than {0} numbers in a row +passwdVal.passwdReq.dictionaryWords =Dictionary words +passwdVal.passwdReq.unknownPasswordRule =Unknown, contact your administrator +#printf syntax used to pass to passay library +passwdVal.expireRule.errorCode =EXPIRED +passwdVal.expireRule.errorMsg =The password is over %1$s days old and has expired. +passwdVal.goodStrengthRule.errorMsg =Note: passwords are always valid with a %1$s or more character length regardless. 
+passwdVal.goodStrengthRule.errorCode =NO_GOODSTRENGTH +passwdVal.passwdReset.resetLinkTitle =Password Reset Link +passwdVal.passwdReset.resetLinkDesc =Your password reset link is not valid +passwdVal.passwdReset.valBlankLog =new password is blank +passwdVal.passwdReset.valFacesError =Password Error +passwdVal.passwdReset.valFacesErrorDesc =Please enter a new password for your account. +passwdVal.passwdValBean.warnDictionaryRead =Dictionary was set, but none was read in. +passwdVal.passwdValBean.warnDictionaryObj =PwDictionaries not set and no default password file found: +passwdVal.passwdValBean.warnSetStrength =The PwGoodStrength {0} value competes with the PwMinLength value of {1} and is added to {2} +#loginpage.xhtml +login.System=Iniciar Sesi\u00f3n +login.forgot.text=\u00bfOlvid\u00f3 su contrase\u00f1a? +login.builtin=Dataverse Account +login.institution=Inicio de Sesi\u00f3n con su Instituci\u00f3n +login.institution.blurb=Log in or sign up with your institutional account — learn more. +login.institution.support.beforeLink=Leaving your institution? Please contact +login.institution.support.afterLink=for assistance. +login.builtin.credential.usernameOrEmail=Username/Email +login.builtin.credential.password=Password +login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account? +# how do we exercise login.error? Via a password upgrade failure? See https://github.com/IQSS/dataverse/pull/2922 +login.error=Error en la validaci\u00f3n de su usuario y/o contrase\u00f1a. Int\u00e9ntelo de nuevo. Si el problema contin\u00faa, contacte con un administrador. +user.error.cannotChangePassword=Sorry, your password cannot be changed. Please contact your system administrator. +user.error.wrongPassword=Sorry, wrong password. 
+login.button=Log In with {0} +login.button.orcid=Create or Connect your ORCID +# authentication providers +auth.providers.title=Other options +auth.providers.tip=You can convert a Dataverse account to use one of the options above. Learn more. +auth.providers.title.builtin=Username/Email +auth.providers.title.shib=Your Institution +auth.providers.title.orcid=ORCID +auth.providers.title.google=Google +auth.providers.title.github=GitHub +auth.providers.blurb=Log in or sign up with your {0} account — learn more. Having trouble? Please contact {3} for assistance. +auth.providers.persistentUserIdName.orcid=ORCID iD +auth.providers.persistentUserIdName.github=ID +auth.providers.persistentUserIdTooltip.orcid=ORCID provides a persistent digital identifier that distinguishes you from other researchers. +auth.providers.persistentUserIdTooltip.github=GitHub assigns a unique number to every user. +auth.providers.orcid.insufficientScope=Dataverse was not granted the permission to read user data from ORCID. +# Friendly AuthenticationProvider names +authenticationProvider.name.builtin=Dataverse +authenticationProvider.name.null=(provider is unknown) +authenticationProvider.name.github=GitHub +authenticationProvider.name.google=Google +authenticationProvider.name.orcid=ORCiD +authenticationProvider.name.orcid-sandbox=ORCiD Sandbox +authenticationProvider.name.shib=Shibboleth +#confirmemail.xhtml +confirmEmail.pageTitle=Email Verification +confirmEmail.submitRequest=Verify Email +confirmEmail.submitRequest.success=A verification email has been sent to {0}. Note, the verify link will expire after {1}. +confirmEmail.details.success=Email address verified! +confirmEmail.details.failure=We were unable to verify your email address. Please navigate to your Account Information page and click the "Verify Email" button. 
+confirmEmail.details.goToAccountPageButton=Go to Account Information +confirmEmail.notVerified=Not Verified +confirmEmail.verified=Verified +#shib.xhtml=#shib.xhtml +shib.btn.convertAccount=Convert Account +shib.btn.createAccount=Create Account +shib.askToConvert=Would you like to convert your Dataverse account to always use your institutional log in? +# Bundle file editors, please note that "shib.welcomeExistingUserMessage" is used in a unit test +shib.welcomeExistingUserMessage=El e-mail proporcionado para la autentificaci\u00f3n de {0} coincide con una cuenta ya existente en Dataverse. Si quiere asociar su cuenta en Dataverse con la autentificaci\u00f3n de {0}, por favor, introduzca la contrase\u00f1a de su cuenta en Dataverse, revise las Condiciones Generales de Uso y, pulse el bot\u00f3n de Aceptar las Condiciones y Convertir la Cuenta. +# Bundle file editors, please note that "shib.welcomeExistingUserMessageDefaultInstitution" is used in a unit test +shib.welcomeExistingUserMessageDefaultInstitution=your institution +shib.dataverseUsername=Usuario de Dataverse +shib.currentDataversePassword=Contrase\u00f1a Actual de Dataverse +shib.accountInformation=Informaci\u00f3n de la Cuenta +shib.offerToCreateNewAccount=Por favor, acepte las Condiciones Generales de Uso de Dataverse para crear su cuenta. +shib.passwordRejected=Para convertir su cuenta es necesario indicar la contrase\u00f1a correcta de su cuenta anterior. +# oauth2/firstLogin.xhtml +oauth2.btn.convertAccount=Convert Existing Account +oauth2.btn.createAccount=Create New Account +oauth2.askToConvert=Would you like to convert your Dataverse account to always use your institutional log in? +oauth2.welcomeExistingUserMessage=Your institutional log in for {0} matches an email address already being used for a Dataverse account. By entering your current Dataverse password below, your existing Dataverse account can be converted to use your institutional log in. 
After converting, you will only need to use your institutional log in. +oauth2.welcomeExistingUserMessageDefaultInstitution=your institution +oauth2.dataverseUsername=Dataverse Username +oauth2.currentDataversePassword=Current Dataverse Password +oauth2.chooseUsername=Username: +oauth2.passwordRejected=Validation Error - Wrong username or password. +# oauth2.newAccount.title=Account Creation +oauth2.newAccount.welcomeWithName=Welcome to Dataverse, {0} +oauth2.newAccount.welcomeNoName=Welcome to Dataverse +# oauth2.newAccount.email=Email +# oauth2.newAccount.email.tip=Dataverse uses this email to notify you of issues regarding your data. +oauth2.newAccount.suggestedEmails=Suggested Email Addresses: +oauth2.newAccount.username=Username +oauth2.newAccount.username.tip=This username will be your unique identifier as a Dataverse user. +oauth2.newAccount.explanation=This information is provided by {0} and will be used to create your {1} account. To log in again, you will have to use the {0} log in option. +oauth2.newAccount.suggestConvertInsteadOfCreate=If you already have a {0} account, you will need to convert your account. +# oauth2.newAccount.tabs.convertAccount=Convert Existing Account +oauth2.newAccount.buttons.convertNewAccount=Convert Account +oauth2.newAccount.emailTaken=Email already taken. Consider merging the corresponding account instead. +oauth2.newAccount.emailOk=Email OK. +oauth2.newAccount.emailInvalid=Invalid email address. +# oauth2.newAccount.usernameTaken=Username already taken. +# oauth2.newAccount.usernameOk=Username OK. +# oauth2/convert.xhtml +# oauth2.convertAccount.title=Account Conversion +oauth2.convertAccount.explanation=Please enter your {0} account username or email and password to convert your account to the {1} log in option. Learn more about converting your account. 
+oauth2.convertAccount.username=Existing username +oauth2.convertAccount.password=Password +oauth2.convertAccount.authenticationFailed=Authentication failed - bad username or password. +oauth2.convertAccount.buttonTitle=Convert Account +oauth2.convertAccount.success=Your Dataverse account is now associated with your {0} account. +# oauth2/callback.xhtml +oauth2.callback.page.title=OAuth Callback +oauth2.callback.message=Authentication Error - Dataverse could not authenticate your ORCID login. Please make sure you authorize your ORCID account to connect with Dataverse. For more details about the information being requested, see the User Guide. +# tab on dataverseuser.xhtml +apitoken.title=Token del API +apitoken.message=Aqu\u00ed tiene su token del API. Revise nuestra {0}Gu\u00eda del API{1} para m\u00e1s informaci\u00f3n. +apitoken.notFound=API Token for {0} has not been created. +apitoken.generateBtn=Generar Token +apitoken.regenerateBtn=Regenerar Token +#dashboard.xhtml +dashboard.title=Dashboard +dashboard.card.harvestingclients.header=Harvesting Clients +dashboard.card.harvestingclients.btn.manage=Manage Clients +dashboard.card.harvestingclients.clients={0, choice, 0#Clients|1#Client|2#Clients} +dashboard.card.harvestingclients.datasets={0, choice, 0#Datasets|1#Dataset|2#Datasets} +dashboard.card.harvestingserver.header=Harvesting Server +dashboard.card.harvestingserver.enabled=OAI server enabled +dashboard.card.harvestingserver.disabled=OAI server disabled +dashboard.card.harvestingserver.status=Status +dashboard.card.harvestingserver.sets={0, choice, 0#Sets|1#Set|2#Sets} +dashboard.card.harvestingserver.btn.manage=Manage Server +dashboard.card.metadataexport.header=Metadata Export +dashboard.card.metadataexport.message=Dataset metadata export is only available through the {0} API. Learn more in the {0} {1}API Guide{2}. 
+#harvestclients.xhtml +harvestclients.title=Manage Harvesting Clients +harvestclients.toptip= - Harvesting can be scheduled to run at a specific time or on demand. Harvesting can be initiated here or via the REST API. +harvestclients.noClients.label=No clients are configured. +harvestclients.noClients.why.header=What is Harvesting? +harvestclients.noClients.why.reason1=Harvesting is a process of exchanging metadata with other repositories. As a harvesting client, your Dataverse gathers metadata records from remote sources. These can be other Dataverse instances, or other archives that support OAI-PMH, the standard harvesting protocol. +harvestclients.noClients.why.reason2=Harvested metadata records are searchable by users. Clicking on a harvested dataset in the search results takes the user to the original repository. Harvested datasets cannot be edited in your Dataverse installation. +harvestclients.noClients.how.header=How To Use Harvesting +harvestclients.noClients.how.tip1=To harvest metadata, a Harvesting Client is created and configured for each remote repository. Note that when creating a client you will need to select an existing local dataverse to host harvested datasets. +harvestclients.noClients.how.tip2=Harvested records can be kept in sync with the original repository through scheduled incremental updates, for example, daily or weekly. Alternatively, harvests can be run on demand, from this page or via the REST API. +harvestclients.noClients.getStarted=To get started, click on the Add Client button above. To learn more about Harvesting, visit the Harvesting section of the User Guide. 
+harvestclients.btn.add=Add Client +harvestclients.tab.header.name=Nickname +harvestclients.tab.header.url=URL +harvestclients.tab.header.lastrun=Last Run +harvestclients.tab.header.lastresults=Last Results +harvestclients.tab.header.action=Actions +harvestclients.tab.header.action.btn.run=Run Harvesting +harvestclients.tab.header.action.btn.edit=Edit +harvestclients.tab.header.action.btn.delete=Delete +harvestclients.tab.header.action.btn.delete.dialog.header=Delete Harvesting Client +harvestclients.tab.header.action.btn.delete.dialog.warning=Are you sure you want to delete the harvesting client "{0}"? Deleting the client will delete all datasets harvested from this remote server. +harvestclients.tab.header.action.btn.delete.dialog.tip=Note, this action may take a while to process, depending on the number of harvested datasets. +harvestclients.tab.header.action.delete.infomessage=Harvesting client is being deleted. Note, that this may take a while, depending on the amount of harvested content. +harvestclients.actions.runharvest.success=Successfully started an asynchronous harvest for client "{0}" . Please reload the page to check on the harvest results). +harvestclients.newClientDialog.step1=Step 1 of 4 - Client Information +harvestclients.newClientDialog.title.new=Create Harvesting Client +harvestclients.newClientDialog.help=Configure a client to harvest content from a remote server. +harvestclients.newClientDialog.nickname=Nickname +harvestclients.newClientDialog.nickname.helptext=Consists of letters, digits, underscores (_) and dashes (-). +harvestclients.newClientDialog.nickname.required=Client nickname cannot be empty! +harvestclients.newClientDialog.nickname.invalid=Client nickname can contain only letters, digits, underscores (_) and dashes (-); and must be at most 30 characters. +harvestclients.newClientDialog.nickname.alreadyused=This nickname is already used. 
+harvestclients.newClientDialog.type=Server Protocol +harvestclients.newClientDialog.type.helptext=Only the OAI server protocol is currently supported. +harvestclients.newClientDialog.type.OAI=OAI +harvestclients.newClientDialog.type.Nesstar=Nesstar +harvestclients.newClientDialog.url=Server URL +harvestclients.newClientDialog.url.tip=URL of a harvesting resource. +harvestclients.newClientDialog.url.watermark=Remote harvesting server, http://... +harvestclients.newClientDialog.url.helptext.notvalidated=URL of a harvesting resource. Once you click 'Next', we will try to establish a connection to the server in order to verify that it is working, and to obtain extra information about its capabilities. +harvestclients.newClientDialog.url.required=A valid harvesting server address is required. +harvestclients.newClientDialog.url.invalid=Invalid URL. Failed to establish connection and receive a valid server response. +harvestclients.newClientDialog.url.noresponse=Failed to establish connection to the server. +harvestclients.newClientDialog.url.badresponse=Invalid response from the server. +harvestclients.newClientDialog.dataverse=Local Dataverse +harvestclients.newClientDialog.dataverse.tip=Dataverse that will host the datasets harvested from this remote resource. +harvestclients.newClientDialog.dataverse.menu.enterName=Enter Dataverse Alias +harvestclients.newClientDialog.dataverse.menu.header=Dataverse Name (Affiliate), Alias +harvestclients.newClientDialog.dataverse.menu.invalidMsg=No matches found +harvestclients.newClientDialog.dataverse.required=You must select an existing dataverse for this harvesting client. +harvestclients.newClientDialog.step2=Step 2 of 4 - Format +harvestclients.newClientDialog.oaiSets=OAI Set +harvestclients.newClientDialog.oaiSets.tip=Harvesting sets offered by this OAI server. 
+harvestclients.newClientDialog.oaiSets.noset=None +harvestclients.newClientDialog.oaiSets.helptext=Selecting "none" will harvest the default set, as defined by the server. Often this will be the entire body of content across all sub-sets. +harvestclients.newClientDialog.oaiSets.helptext.noset=This OAI server does not support named sets. The entire body of content offered by the server will be harvested. +harvestclients.newClientDialog.oaiMetadataFormat=Metadata Format +harvestclients.newClientDialog.oaiMetadataFormat.tip=Metadata formats offered by the remote server. +harvestclients.newClientDialog.oaiMetadataFormat.required=Please select the metadata format to harvest from this archive. +harvestclients.newClientDialog.step3=Step 3 of 4 - Schedule +harvestclients.newClientDialog.schedule=Schedule +harvestclients.newClientDialog.schedule.tip=Schedule harvesting to run automatically daily or weekly. +harvestclients.newClientDialog.schedule.time.none.helptext=Leave harvesting unscheduled to run on demand only. +harvestclients.newClientDialog.schedule.none=None +harvestclients.newClientDialog.schedule.daily=Daily +harvestclients.newClientDialog.schedule.weekly=Weekly +harvestclients.newClientDialog.schedule.time=Time +harvestclients.newClientDialog.schedule.day=Day +harvestclients.newClientDialog.schedule.time.am=AM +harvestclients.newClientDialog.schedule.time.pm=PM +harvestclients.newClientDialog.schedule.time.helptext=Scheduled times are in your local time. +harvestclients.newClientDialog.btn.create=Create Client +harvestclients.newClientDialog.success=Successfully created harvesting client "{0}". +harvestclients.newClientDialog.step4=Step 4 of 4 - Display +harvestclients.newClientDialog.harvestingStyle=Archive Type +harvestclients.newClientDialog.harvestingStyle.tip=Type of remote archive. 
+harvestclients.newClientDialog.harvestingStyle.helptext=Select the archive type that best describes this remote server in order to properly apply formatting rules and styles to the harvested metadata as they are shown in the search results. Note that improperly selecting the type of the remote archive can result in incomplete entries in the search results, and a failure to redirect the user to the archival source of the data. +harvestclients.viewEditDialog.title=Edit Harvesting Client +harvestclients.viewEditDialog.archiveUrl=Archive URL +harvestclients.viewEditDialog.archiveUrl.tip=The URL of the archive that serves the data harvested by this client, which is used in search results for links to the original sources of the harvested content. +harvestclients.viewEditDialog.archiveUrl.helptext=Edit if this URL differs from the Server URL. +harvestclients.viewEditDialog.archiveDescription=Archive Description +harvestclients.viewEditDialog.archiveDescription.tip=Description of the archival source of the harvested content, displayed in search results. +harvestclients.viewEditDialog.archiveDescription.default.generic=This Dataset is harvested from our partners. Clicking the link will take you directly to the archival source of the data. +harvestclients.viewEditDialog.btn.save=Save Changes +harvestclients.newClientDialog.title.edit=Edit Group {0} +#harvestset.xhtml +harvestserver.title=Manage Harvesting Server +harvestserver.toptip= - Define sets of local datasets that will be available for harvesting by remote clients. +harvestserver.service.label=OAI Server +harvestserver.service.enabled=Enabled +harvestserver.service.disabled=Disabled +harvestserver.service.disabled.msg=Harvesting Server is currently disabled. +harvestserver.service.empty=No sets are configured. +harvestserver.service.enable.success=OAI Service has been successfully enabled. +harvestserver.noSets.why.header=What is a Harvesting Server? 
+harvestserver.noSets.why.reason1=Harvesting is a process of exchanging metadata with other repositories. As a harvesting server, your Dataverse can make some of the local dataset metadata available to remote harvesting clients. These can be other Dataverse instances, or any other clients that support OAI-PMH harvesting protocol. +harvestserver.noSets.why.reason2=Only the published, unrestricted datasets in your Dataverse can be harvested. Remote clients normally keep their records in sync through scheduled incremental updates, daily or weekly, thus minimizing the load on your server. Note that it is only the metadata that are harvested. Remote harvesters will generally not attempt to download the data files themselves. +harvestserver.noSets.how.header=How to run a Harvesting Server? +harvestserver.noSets.how.tip1=Harvesting server can be enabled or disabled on this page. +harvestserver.noSets.how.tip2=Once the service is enabled, you can define collections of local datasets that will be available to remote harvesters as OAI Sets. Sets are defined by search queries (for example, authorName:king; or parentId:1234 - to select all the datasets that belong to the dataverse specified; or dsPersistentId:"doi:1234/" to select all the datasets with the persistent identifier authority specified). Consult the Search API section of the Dataverse User Guide for more information on the search queries. +harvestserver.noSets.getStarted=To get started, enable the OAI server and click on the Add Set button. To learn more about Harvesting, visit the Harvesting section of the User Guide. 
+harvestserver.btn.add=Add Set +harvestserver.tab.header.spec=OAI setSpec +harvestserver.tab.header.description=Description +harvestserver.tab.header.definition=Definition Query +harvestserver.tab.header.stats=Datasets +harvestserver.tab.col.stats.empty=No records (empty set) +harvestserver.tab.col.stats.results={0} {0, choice, 0#datasets|1#dataset|2#datasets} ({1} {1, choice, 0#records|1#record|2#records} exported, {2} marked as deleted) +harvestserver.tab.header.action=Actions +harvestserver.tab.header.action.btn.export=Run Export +harvestserver.actions.runreexport.success=Successfully started an asynchronous re-export job for OAI set "{0}" (please reload the page to check on the export progress). +harvestserver.tab.header.action.btn.edit=Edit +harvestserver.tab.header.action.btn.delete=Delete +harvestserver.tab.header.action.btn.delete.dialog.header=Delete Harvesting Set +harvestserver.tab.header.action.btn.delete.dialog.tip=Are you sure you want to delete the OAI set "{0}"? You cannot undo a delete! +harvestserver.tab.header.action.delete.infomessage=Selected harvesting set is being deleted. (this may take a few moments) +harvestserver.newSetDialog.title.new=Create Harvesting Set +harvestserver.newSetDialog.help=Define a set of local datasets available for harvesting to remote clients. +harvestserver.newSetDialog.setspec=Name/OAI setSpec +harvestserver.newSetDialog.setspec.tip=A unique name (OAI setSpec) identifying this set. +harvestserver.newSetDialog.setspec.helptext=Consists of letters, digits, underscores (_) and dashes (-). +harvestserver.editSetDialog.setspec.helptext=The name can not be changed once the set has been created. +harvestserver.newSetDialog.setspec.required=Name (OAI setSpec) cannot be empty! +harvestserver.newSetDialog.setspec.invalid=Name (OAI setSpec) can contain only letters, digits, underscores (_) and dashes (-). +harvestserver.newSetDialog.setspec.alreadyused=This set name (OAI setSpec) is already used. 
+harvestserver.newSetDialog.setdescription=Description +harvestserver.newSetDialog.setdescription.tip=Provide a brief description for this OAI set. +harvestserver.newSetDialog.setdescription.required=Set description cannot be empty! +harvestserver.newSetDialog.setquery=Definition Query +harvestserver.newSetDialog.setquery.tip=Search query that defines the content of the dataset. +harvestserver.newSetDialog.setquery.helptext=Example query: authorName:king +harvestserver.newSetDialog.setquery.required=Search query cannot be left empty! +harvestserver.newSetDialog.setquery.results=Search query returned {0} datasets! +harvestserver.newSetDialog.setquery.empty=WARNING: Search query returned no results! +harvestserver.newSetDialog.btn.create=Create Set +harvestserver.newSetDialog.success=Successfully created harvesting set "{0}". +harvestserver.viewEditDialog.title=Edit Harvesting Set +harvestserver.viewEditDialog.btn.save=Save Changes +#dashboard-users.xhtml +dashboard.card.users=Users +dashboard.card.users.header=Dashboard - User List +dashboard.card.users.super=Superusers +dashboard.card.users.manage=Manage Users +dashboard.card.users.message=List and manage users. +dashboard.list_users.searchTerm.watermark=Search these users... 
+dashboard.list_users.tbl_header.userId=ID +dashboard.list_users.tbl_header.userIdentifier=Username +dashboard.list_users.tbl_header.name=Name +dashboard.list_users.tbl_header.lastName=Last Name +dashboard.list_users.tbl_header.firstName=First Name +dashboard.list_users.tbl_header.email=Email +dashboard.list_users.tbl_header.affiliation=Affiliation +dashboard.list_users.tbl_header.roles=Roles +dashboard.list_users.tbl_header.position=Position +dashboard.list_users.tbl_header.isSuperuser=Superuser +dashboard.list_users.tbl_header.authProviderFactoryAlias=Authentication +dashboard.list_users.tbl_header.createdTime=Created Time +dashboard.list_users.tbl_header.lastLoginTime=Last Login Time +dashboard.list_users.tbl_header.lastApiUseTime=Last API Use Time +dashboard.list_users.tbl_header.roles.removeAll=Remove All +dashboard.list_users.tbl_header.roles.removeAll.header=Remove All Roles +dashboard.list_users.tbl_header.roles.removeAll.confirmationText=Are you sure you want to remove all roles for user {0}? +dashboard.list_users.removeAll.message.success=All roles have been removed for user {0}. +dashboard.list_users.removeAll.message.failure=Failed to remove roles for user {0}. +dashboard.list_users.toggleSuperuser=Edit Superuser Status +dashboard.list_users.toggleSuperuser.confirmationText.add=Are you sure you want to enable superuser status for user {0}? +dashboard.list_users.toggleSuperuser.confirmationText.remove=Are you sure you want to disable superuser status for user {0}? +dashboard.list_users.toggleSuperuser.confirm=Continue +dashboard.list_users.api.auth.invalid_apikey=The API key is invalid. +dashboard.list_users.api.auth.not_superuser=Forbidden. You must be a superuser. 
+#MailServiceBean.java=#MailServiceBean.java +notification.email.create.dataverse.subject=Dataverse: Se ha creado su dataverse +notification.email.create.dataset.subject=Dataverse: Se ha creado su dataset +notification.email.request.file.access.subject=Dataverse: Se ha solicitado acceso a un fichero con acceso restringido +notification.email.grant.file.access.subject=Dataverse: Se le ha concedido acceso a un fichero con acceso restringido +notification.email.rejected.file.access.subject=Dataverse: Su solicitud de acceso a un fichero con acceso restringido ha sido denegada +notification.email.update.maplayer=Dataverse: Capa de WorldMap a\u00f1adida al dataset +notification.email.maplayer.deletefailed.subject={0}: Failed to delete WorldMap layer +notification.email.maplayer.deletefailed.text=We failed to delete the WorldMap layer associated with the restricted file {0}, and any related data that may still be publicly available on the WorldMap site. Please try again, or contact WorldMap and/or Dataverse support. (Dataset: {1}) +notification.email.submit.dataset.subject=Dataverse: Se ha enviado su dataset para su revisi\u00f3n +notification.email.publish.dataset.subject=Dataverse: Se ha publicado su dataset +notification.email.returned.dataset.subject=Dataverse: Se ha devuelto su dataset +notification.email.create.account.subject=Dataverse: Se ha creado su cuenta +notification.email.assign.role.subject=Dataverse: Ha sido asignado a un rol +notification.email.revoke.role.subject=Dataverse: Su rol ha sido rechazado +notification.email.verifyEmail.subject={0}: Verify your email address +notification.email.greeting=Hola, \n +# Bundle file editors, please note that "notification.email.welcome" is used in a unit test +notification.email.welcome=\u00a1Bienvenido a Dataverse 4.2! Por favor, eche un vistazo general, pruebe todo lo que quiera y contacte con webmaster@consorciomadrono.es si tiene dudas o comentarios. 
+notification.email.welcomeConfirmEmailAddOn=\n\nPlease verify your email address at {0} . Note, the verify link will expire after {1}. Send another verification email by visiting your account page. +notification.email.requestFileAccess=Es necesario tener acceso al fichero para el dataset: {0}. Administrar permisos en {1}. +notification.email.grantFileAccess=Acceso permitido a los ficheros en el dataset: {0} (ver en {1}). +notification.email.rejectFileAccess=Acceso denegado para los ficheros pedidos en el dataset: {0} (ver en {1}). +# Bundle file editors, please note that "notification.email.createDataverse" is used in a unit test=# Bundle file editors, please note that "notification.email.createDataverse" is used in a unit test +notification.email.createDataverse=Su nuevo dataverse llamado {0} (ver en {1} ) fue creado en {2} (ver en {3} ). Para saber m\u00e1s sobre lo que puedes hacer con su dataverse, revise la Gu\u00eda de Usuario en {4}/{5}/user/dataverse-management.html . +# Bundle file editors, please note that "notification.email.createDataset" is used in a unit test=# Bundle file editors, please note that "notification.email.createDataset" is used in a unit test +notification.email.createDataset=Su nuevo dataset llamado {0} (ver en {1} ) se ha creado en {2} (ver en {3} ). Para saber m\u00e1s sobre lo que puede hacer con un dataset, revise la Gu\u00eda de Usuario en {4}/{5}/user/dataset-management.html . +notification.email.wasSubmittedForReview={0} (ver en {1}) se ha enviado a revisi\u00f3n para ser publicado en {2} (ver en {3}). \u00a1No olvide publicarlo o devolverlo a los colaboradores! +notification.email.wasReturnedByReviewer={0} (ver en {1}) fue devuelto por el conservador/revisor de {2} (ver en {3}). +notification.email.wasPublished={0} (ver en {1}) fue publicado en {2} (ver en {3}). +notification.email.worldMap.added={0} (ver en {1}) ten\u00eda una capa de datos de WorldMap a\u00f1adida. 
+notification.email.closing=\n\nGracias,\nEl Proyecto Dataverse +notification.email.assignRole=Ahora eres {0} para el {1} "{2}" (ver en {3}). +notification.email.revokeRole=Uno de sus roles para el {0} "{1}" se ha eliminado (ver en {2}). +notification.email.changeEmail=Hello, {0}.{1}\n\nPlease contact us if you did not intend this change or if you need assistance. +hours=hours +hour=hour +minutes=minutes +minute=minute +notification.email.checksumfail.subject={0}: Your upload failed checksum validation +notification.email.import.filesystem.subject=Dataset {0} has been successfully uploaded and verified +notification.email.import.checksum.subject={0}: Your file checksum job has completed +# passwordreset.xhtml=# passwordreset.xhtml +pageTitle.passwdReset.pre=Reinicio de Contrase\u00f1a +passwdReset.token=token : +passwdReset.userLookedUp=usuario buscado : +passwdReset.emailSubmitted=e-mail enviado : +passwdReset.details={0} Reinicio de Contrase\u00f1a{1} - Para iniciar el proceso de reinicio de contrase\u00f1a, por favor, indique su direcci\u00f3n de e-mail. +passwdReset.submitRequest=Enviar Petici\u00f3n de Contrase\u00f1a +passwdReset.successSubmit.tip=Si este e-mail est\u00e1 asociada con una cuenta, se le enviar\u00e1 un mensaje con instrucciones adicionales a {0}. +passwdReset.debug=DEBUG +passwdReset.resetUrl=La URL de reinicio es +passwdReset.noEmail.tip=No se envi\u00f3 el e-mail porque no se encontr\u00f3 un usuario con esa direcci\u00f3n de e-mail {0}, pero no hemos avisado para no alertar a posibles usuarios maliciosos que usan este m\u00e9todo para averiguar si una cuenta est\u00e1 asociada con una direcci\u00f3n de e-mail. +passwdReset.illegalLink.tip=El enlace para el reinicio de tu contrase\u00f1a no es v\u00e1lido. Si necesitas reiniciar tu contrase\u00f1a, {0}pulsa aqu\u00ed{1} para solicitar que tu contrase\u00f1a sea reiniciada de nuevo. 
+passwdReset.newPasswd.details={0} Nueva Contrase\u00f1a{1} \u2013 Por favor, elija una contrase\u00f1a robusta que tenga por lo menos seis caracteres y que contenga al menos una letra y un n\u00famero. +passwdReset.newPasswd=Nueva Contrase\u00f1a +passwdReset.rePasswd=Re-escribir Contrase\u00f1a +passwdReset.resetBtn=Reiniciar Contrase\u00f1a +# dataverse.xhtml=# dataverse.xhtml +dataverse.title=El proyecto, departamento, universidad o profesor propietario de los datos de este dataverse. +dataverse.enterName=Introduzca el nombre... +dataverse.host.title=El dataverse que contiene estos datos. +dataverse.identifier.title=Nombre corto utilizado para la URL de este dataverse. +dataverse.affiliation.title=La organizaci\u00f3n a la que pertenece este dataverse. +dataverse.category=Categor\u00eda +dataverse.category.title=El tipo con el que mejor se identifica este dataverse. +dataverse.type.selectTab.top=Selecciona uno... +dataverse.type.selectTab.researchers=Investigador +dataverse.type.selectTab.researchProjects=Proyecto de Investigaci\u00f3n +dataverse.type.selectTab.journals=Revista +dataverse.type.selectTab.organizationsAndInsitutions=Organizaci\u00f3n o Instituci\u00f3n +dataverse.type.selectTab.teachingCourses=Curso +dataverse.type.selectTab.uncategorized=Sin Categorizar +dataverse.type.selectTab.researchGroup=Research Group +dataverse.type.selectTab.laboratory=Laboratory +dataverse.type.selectTab.department=Department +dataverse.description.title=Un resumen que describa el prop\u00f3sito, naturaleza o alcance de este dataverse. +dataverse.email=E-mail +dataverse.email.title=La(s) direcci\u00f3n(es) de e-mail de los contactos para el dataverse. +dataverse.share.dataverseShare=Compartir Dataverse +dataverse.share.dataverseShare.tip=Compartir este dataverse en sus redes sociales favoritas. +dataverse.share.dataverseShare.shareText=Ver este dataverse. +dataverse.subject.title=Materia(s) cubierta(s) por este dataverse. 
+dataverse.metadataElements=Campos de Metadatos +dataverse.metadataElements.tip=Elija un campo de metadatos para usar en las plantillas del dataset y cuando se a\u00f1ada un dataset a este dataverse. +dataverse.metadataElements.from.tip=Usar campos de metadatos desde {0} +dataverse.resetModifications=Reiniciar Modificaciones +dataverse.resetModifications.text=\u00bfEst\u00e1 seguro de que quiere reiniciar los campos de metadatos seleccionados?. Si lo hace, cualquier personalizaci\u00f3n (oculto, obligatorio, opcional) que haya hecho desaparecer\u00e1. +dataverse.field.required=(Obligatorio) +dataverse.field.example1= (Ejemplos: +dataverse.field.example2=) +dataverse.field.set.tip=[+] Ver campos + marcar como ocultos, obligatorios u opcionales +dataverse.field.set.view=[+] Ver Campos +dataverse.field.requiredByDataverse=Obligatorio para Dataverse +dataverse.facetPickList.text=Navegar/Buscar Facetas +dataverse.facetPickList.tip=Elija los campos de metadatos que se usar\u00e1n como facetas para navegar por datasets y dataverses en este dataverse. +dataverse.facetPickList.facetsFromHost.text=Use navegar/buscar facetas desde {0} +dataverse.facetPickList.metadataBlockList.all=Todos los Campos de Metadatos +dataverse.edit=Editar +dataverse.option.generalInfo=Informaci\u00f3n General +dataverse.option.themeAndWidgets=Tema + Widgets +dataverse.option.featuredDataverse=Dataverses Destacados +dataverse.option.permissions=Permisos +dataverse.option.dataverseGroups=Grupos +dataverse.option.datasetTemplates=Plantillas de Dataset +dataverse.option.datasetGuestbooks=Libros de Visitas del Dataset +dataverse.option.deleteDataverse=Eliminar Dataverse +dataverse.publish.btn=Publicar +dataverse.publish.header=Publicar Dataverse +dataverse.nopublished=Dataverses sin Publicar +dataverse.nopublished.tip=Para usar esta funcionalidad ha de tener publicado al menos un dataverse. 
+dataverse.contact=Enviar E-Mail al Contacto del Dataverse +dataset.link=Enlace al Dataset +dataverse.link=Enlace al Dataverse +dataverse.link.btn.tip=Enlace a su Dataverse +dataverse.link.yourDataverses=Su {0, choice, 1#Dataverse|2#Dataverses} +dataverse.link.save=Guardar el Dataverse Enlazado +dataset.link.save=Guardar el Dataset Enlazado +dataverse.link.dataverse.choose=Elija a cual de sus dataverses quiere enlazar este dataverse. +dataverse.link.dataset.choose=Elija a cual de tus dataverses quiere enlazar este dataset. +dataverse.link.no.choice=Tiene un dataverse al que puede a\u00f1adir dataverses y datasets enlazados. +dataverse.link.no.linkable=Para enlazar un dataverse o un dataset, necesita tener su propio dataverse. Pulse el bot\u00f3n A\u00f1adir Datos de la p\u00e1gina de inicio para comenzar. +dataverse.link.no.linkable.remaining=Todos sus dataverses seleccionables ya est\u00e1n enlazados. +dataverse.savedsearch.link=Buscar Enlaces +dataverse.savedsearch.searchquery=Buscar +dataverse.savedsearch.filterQueries=Facetas +dataverse.savedsearch.save=Guardar la B\u00fasqueda Enlazada +dataverse.savedsearch.dataverse.choose=Seleccione a cual de tus dataverses le gustar\u00eda enlazar esta b\u00fasqueda. +dataverse.savedsearch.no.choice=Tiene un dataverse al cual puede a\u00f1adir una b\u00fasqueda guardada. +# Bundle file editors, please note that "dataverse.savedsearch.save.success" is used in a unit test +dataverse.saved.search.success=The saved search has been successfully linked to {0}. +dataverse.saved.search.failure=The saved search was not able to be linked. +dataverse.linked.success= {0} se ha enlazado con \u00e9xito a {3}. +dataverse.linked.success.wait= {0} se ha enlazado con \u00e9xito a {3}. Por favor, espere para que su contenido sea visible. +dataverse.linked.internalerror={0} se ha enlazado con \u00e9xito a {3} pero el contenido no aparecer\u00e1 hasta que no se corrija un error interno. 
+dataverse.page.pre=Anterior +dataverse.page.next=Siguiente +dataverse.byCategory=Dataverses por Categor\u00eda +dataverse.displayFeatured=Mostrar los dataverses seleccionados abajo en la p\u00e1gina de inicio de este dataverse. +dataverse.selectToFeature=Seleccione los dataverses para presentar en la p\u00e1gina de inicio de este dataverse. +dataverse.publish.tip=\u00bfEst\u00e1 seguro de que quiere publicar su dataverse? Una vez hecho esto, deber\u00e1 permanecer publicado. +dataverse.publish.failed.tip=Este dataverse no puede publicarse porque el dataverse al que pertenece no ha sido publicado. +dataverse.publish.failed=No se puede publicar el dataverse. +dataverse.publish.success=Su dataverse es ahora p\u00fablico. +dataverse.publish.failure=No fue posible publicar este dataverse. +dataverse.delete.tip=\u00bfEst\u00e1 seguro de que quiere eliminar este dataverse? No podr\u00e1 recuperarlo. +dataverse.delete=Eliminar Dataverse +dataverse.delete.success=Su dataverse se ha eliminado. +dataverse.delete.failure=Este dataverse no puede ser eliminado. +# Bundle file editors, please note that "dataverse.create.success" is used in a unit test because it's so fancy with two parameters=# Bundle file editors, please note that "dataverse.create.success" is used in a unit test because it's so fancy with two parameters +dataverse.create.success=\u00a1Ha creado su dataverse con \u00e9xito!. Para saber que m\u00e1s cosas puede hacer con su dataverse, revise la Gu\u00eda de Usuario. +dataverse.create.failure=No se pudo crear este dataverse. +dataverse.create.authenticatedUsersOnly=S\u00f3lo pueden crear dataverses los usuarios autorizados. +dataverse.update.success=\u00a1Ha actualizado su dataverse con \u00e9xito! 
+dataverse.update.failure=Este dataverse no pudo ser actualizado +# rolesAndPermissionsFragment.xhtml=# rolesAndPermissionsFragment.xhtml +# advanced.xhtml=# advanced.xhtml +advanced.search.header.dataverses=Dataverses +advanced.search.dataverses.name.tip=El proyecto, departamento, universidad o profesor del que este Dataverse tiene datos. +advanced.search.dataverses.affiliation.tip=La organizaci\u00f3n con la que est\u00e1 afiliada este Dataverse. +advanced.search.dataverses.description.tip=Un resumen describiendo el prop\u00f3sito, naturaleza o \u00e1mbito de este Dataverse. +advanced.search.dataverses.subject.tip=Materias que son relevantes para este Dataverse en su dominio espec\u00edfico. +advanced.search.header.datasets=Datasets +advanced.search.header.files=Ficheros +advanced.search.files.name.tip=El nombre que identifica el fichero. +advanced.search.files.description.tip=Un resumen describiendo el fichero y sus variables. +advanced.search.files.fileType=Tipo de fichero +advanced.search.files.fileType.tip=Extensi\u00f3n de un fichero, p.e. CSV, zip, Stata, R, PDF, JPEG, etc. +advanced.search.files.variableName=Nombre de Variable +advanced.search.files.variableName.tip=El nombre de la columna de la variable en el marco de los datos. +advanced.search.files.variableLabel=Etiqueta de la variable +advanced.search.files.variableLabel.tip=Una descripci\u00f3n corta de la variable. +# search-include-fragment.xhtml=# search-include-fragment.xhtml +dataverse.search.advancedSearch=B\u00fasqueda Avanzada +dataverse.search.input.watermark=Buscar en este dataverse... +account.search.input.watermark=Buscar en estos datos... 
+dataverse.search.btn.find=Buscar +dataverse.results.btn.addData=A\u00f1adir datos +dataverse.results.btn.addData.newDataverse=Nuevo Dataverse +dataverse.results.btn.addData.newDataset=Nuevo Dataset +dataverse.results.dialog.addDataGuest.header=A\u00f1adir datos +dataverse.results.dialog.addDataGuest.msg=Necesita Identificarse para crear un dataverse o a\u00f1adir un dataset. +dataverse.results.dialog.addDataGuest.msg.signup=Necesita Crear una Cuenta o Identificarse para crear un dataverse o a\u00f1adir un dataset. +dataverse.results.types.dataverses=Dataverses +dataverse.results.types.datasets=Datasets +dataverse.results.types.files=Ficheros +# Bundle file editors, please note that "dataverse.results.empty.zero" is used in a unit test=# Bundle file editors, please note that "dataverse.results.empty.zero" is used in a unit test +dataverse.results.empty.zero=No hay dataverses, datasets o ficheros que concuerden con su b\u00fasqueda. Por favor, busque de nuevo usando otras palabras o t\u00e9rminos m\u00e1s generales. Tambi\u00e9n puede probar los consejos de la gu\u00eda de b\u00fasqueda. +# Bundle file editors, please note that "dataverse.results.empty.hidden" is used in a unit test=# Bundle file editors, please note that "dataverse.results.empty.hidden" is used in a unit test +dataverse.results.empty.hidden= No hay resultados de b\u00fasqueda de acuerdo a los criterios de su b\u00fasqueda. Puede probar los consejos de la gu\u00eda de b\u00fasqueda. +dataverse.results.empty.browse.guest.zero=Este dataverse no contiene dataverses, datasets, ni ficheros. Por favor identif\u00edquese para ver si puede a\u00f1adirlos. +dataverse.results.empty.browse.guest.hidden=Este dataverse no contiene dataverses. Por favor log in para ver si puede a\u00f1adirlos. +dataverse.results.empty.browse.loggedin.noperms.zero= Este dataverse no contiene dataverses, datasets, ni ficheros. 
Puede usar el bot\u00f3n Contactar con el Administrador del Dataverse para pedir m\u00e1s informaci\u00f3n o solicitar acceso a este dataverse. +dataverse.results.empty.browse.loggedin.noperms.hidden=Este dataverse no contiene dataverses. +dataverse.results.empty.browse.loggedin.perms.zero=Este dataverse no contiene dataverses, datasets, ni ficheros. Puede a\u00f1adirlos usando el bot\u00f3n A\u00f1adir Datos de esta p\u00e1gina. +account.results.empty.browse.loggedin.perms.zero=No hay dataverses, datasets, ni ficheros asociados a su cuenta. Puede a\u00f1adir un dataverse o un dataset usando el bot\u00f3n A\u00f1adir Datos m\u00e1s arriba. Tiene m\u00e1s informaci\u00f3n de como a\u00f1adir datos en la Gu\u00eda de Usuario. +dataverse.results.empty.browse.loggedin.perms.hidden=Este dataverse no contiene dataverses. Puede a\u00f1adirlos usando el bot\u00f3n A\u00f1adir Datos en esta p\u00e1gina. +dataverse.results.empty.link.technicalDetails=M\u00e1s detalles t\u00e9cnicos +dataverse.search.facet.error=There was an error with your search parameters. Please clear your search and try again. +dataverse.results.count.toofresults={0} a {1} de {2} {2, choice, 0#Resultados|1#Resultado|2#Resultados} +dataverse.results.paginator.current=(Actual) +dataverse.results.btn.sort=Ordenar +dataverse.results.btn.sort.option.nameAZ=Nombre (A-Z) +dataverse.results.btn.sort.option.nameZA=Nombre (Z-A) +dataverse.results.btn.sort.option.newest=M\u00e1s nuevos +dataverse.results.btn.sort.option.oldest=M\u00e1s antiguos +dataverse.results.btn.sort.option.relevance=Relevancia +dataverse.results.cards.foundInMetadata=Encontrados en Campos de Metadatos: +dataverse.results.cards.files.tabularData=Datos Tabulares +dataverse.results.solrIsDown=Nota: Debido a un error interno, la b\u00fasqueda y la navegaci\u00f3n no est\u00e1n disponibles. +dataverse.theme.title=Tema +dataverse.theme.inheritCustomization.title=Selecciona aqu\u00ed para usar el tema actual. 
+dataverse.theme.inheritCustomization.label=Heredar la personalizaci\u00f3n +dataverse.theme.inheritCustomization.checkbox=Heredar la personalizaci\u00f3n desde {0} +dataverse.theme.logo=Logo +dataverse.theme.logo.tip=No pueden subirse ficheros mayores de 500 kb. El \u00e1rea m\u00e1xima de visualizaci\u00f3n para los ficheros es de 940 pixels de ancho por 120 pixels de alto. +dataverse.theme.logo.format=Formato del Logo +dataverse.theme.logo.format.selectTab.square=Cuadrado +dataverse.theme.logo.format.selectTab.rectangle=Rect\u00e1ngulo +dataverse.theme.logo.alignment=Alineaci\u00f3n del Logo +dataverse.theme.logo.alignment.selectTab.left=Izquierda +dataverse.theme.logo.alignment.selectTab.center=Centro +dataverse.theme.logo.alignment.selectTab.right=Derecha +dataverse.theme.logo.backColor=Color de Fondo del Logo +dataverse.theme.logo.image.upload=Subir Imagen +dataverse.theme.tagline=Eslogan +dataverse.theme.website=Sitio Web +dataverse.theme.linkColor=Color del Enlace +dataverse.theme.txtColor=Color del Texto +dataverse.theme.backColor=Color de Fondo +dataverse.theme.success=\u00a1Has actualizado el tema de este dataverse con \u00e9xito! +dataverse.theme.failure=El tema del dataverse no se ha actualizado. +dataverse.theme.logo.image=Imagen del Logo +dataverse.theme.logo.image.title=El logo o fichero de imagen que quiere poner en la cabecera de este dataverse. +dataverse.theme.logo.image.uploadNewFile=Subir Fichero Nuevo +dataverse.theme.logo.image.invalidMsg=No se puede subir la imagen. Por favor, int\u00e9ntelo otra vez con un fichero jpeg, tiff, o png. +dataverse.theme.logo.image.uploadImgFile=Subir Fichero de Imagen +dataverse.theme.logo.format.title=La forma del logo o fichero de imagen que va a subir a este dataverse. +dataverse.theme.logo.format.selectTab.square2=Cuadrado +dataverse.theme.logo.format.selectTab.rectangle2=Rect\u00e1ngulo +dataverse.theme.logo.alignment.title=D\u00f3nde deber\u00eda mostrarse el logo o imagen en la cabecera. 
+dataverse.theme.logo.alignment.selectTab.left2=Izquierda +dataverse.theme.logo.alignment.selectTab.center2=Centro +dataverse.theme.logo.alignment.selectTab.right2=Derecha +dataverse.theme.logo.backColor.title=Elija un color para mostrar detr\u00e1s del logo de este dataverse. +dataverse.theme.headerColor=Colores de Cabecera +dataverse.theme.headerColor.tip=Los colores que seleccione para personalizar el estilo de cabecera de este dataverse. +dataverse.theme.backColor.title=Color para el \u00e1rea de cabecera que contiene la imagen, el eslogan, la URL y el texto. +dataverse.theme.linkColor.title=Color en que debe mostrarse el enlace. +dataverse.theme.txtColor.title=Color para el texto del eslogan y el nombre de este dataverse. +dataverse.theme.tagline.title=Frase o texto que describe este dataverse. +dataverse.theme.tagline.tip=Indique un eslogan de 140 caracteres como mucho. +dataverse.theme.website.title=URL de su web personal, instituci\u00f3n, o cualquier web relacionada con este dataverse. +dataverse.theme.website.tip=El portal web ser\u00e1 enlazado en el eslogan. Para que se muestre una web, debe elegir un eslogan. +dataverse.theme.website.watermark=Su sitio personal, http://... +dataverse.theme.website.invalidMsg=URL Inv\u00e1lida. +dataverse.theme.disabled=The theme for the root dataverse has been administratively disabled with the :DisableRootDataverseTheme database setting. +dataverse.widgets.title=Widgets +dataverse.widgets.notPublished.why.header=Why Use Widgets? +dataverse.widgets.notPublished.why.reason1=Increases the web visibility of your data by allowing you to embed your dataverse and datasets into your personal or project website. +dataverse.widgets.notPublished.why.reason2=Allows others to browse your dataverse and datasets without leaving your personal or project website. +dataverse.widgets.notPublished.how.header=How To Use Widgets +dataverse.widgets.notPublished.how.tip1=To use widgets, your dataverse and datasets need to be published. 
+dataverse.widgets.notPublished.how.tip2=After publishing, code will be available on this page for you to copy and add to your personal or project website. +dataverse.widgets.notPublished.how.tip3=Do you have an OpenScholar website? If so, learn more about adding the Dataverse widgets to your website here. +dataverse.widgets.notPublished.getStarted=To get started, publish your dataverse. To learn more about Widgets, visit the Theme + Widgets section of the User Guide. +dataverse.widgets.tip=Copie y pegue este c\u00f3digo en el HTML de su web. +dataverse.widgets.searchBox.txt=Cuadro de B\u00fasqueda de Dataverse. +dataverse.widgets.searchBox.tip=Proporcione un m\u00e9todo en el que los visitantes de su sitio web puedan buscar en Dataverse. +dataverse.widgets.dataverseListing.txt=Listado del Dataverse +dataverse.widgets.dataverseListing.tip=Proporcione un m\u00e9todo en el que los visitantes de su sitio web puedan ver sus dataverses y datasets, ordenar, o moverse por ellos. +dataverse.widgets.advanced.popup.header=Widget Advanced Options +dataverse.widgets.advanced.prompt=Forward dataset citation persistent URL's to your personal website. The page you submit as your Personal Website URL must contain the code snippet for the Dataverse Listing widget. +dataverse.widgets.advanced.url.label=Personal Website URL +dataverse.widgets.advanced.url.watermark=http://www.example.com/page-name +dataverse.widgets.advanced.invalid.message=Please enter a valid URL +dataverse.widgets.advanced.success.message=Successfully updated your Personal Website URL +dataverse.widgets.advanced.failure.message=The dataverse Personal Website URL has not been updated. 
+# permissions-manage.xhtml=# permissions-manage.xhtml +dataverse.permissions.title=Permisos +dataverse.permissions.dataset.title=Permisos del Dataset +dataverse.permissions.access.accessBtn=Editar Acceso +dataverse.permissions.usersOrGroups=Usuarios/Grupos +dataverse.permissions.usersOrGroups.assignBtn=Asignar Roles a Usuarios/Grupos +dataverse.permissions.usersOrGroups.createGroupBtn=Crear Grupo +dataverse.permissions.usersOrGroups.description=Aqu\u00ed tiene todos los usuarios y grupos que han accedido a su dataverse. +dataverse.permissions.usersOrGroups.tabHeader.userOrGroup=Nombre (Afiliaci\u00f3n) del Usuario/Grupo +dataverse.permissions.usersOrGroups.tabHeader.id=ID +dataverse.permissions.usersOrGroups.tabHeader.role=Rol +dataverse.permissions.usersOrGroups.tabHeader.action=Acci\u00f3n +dataverse.permissions.usersOrGroups.assignedAt=Rol Asignado a {0} +dataverse.permissions.usersOrGroups.removeBtn=Eliminar el Rol Asignado +dataverse.permissions.usersOrGroups.removeBtn.confirmation=\u00bfEst\u00e1 seguro de que quiere eliminar esta asignaci\u00f3n de rol? +dataverse.permissions.roles=Roles +dataverse.permissions.roles.add=A\u00f1adir un Nuevo Rol +dataverse.permissions.roles.description=Estos son todos los roles asignados a su dataverse a los que puede asignar usuarios y grupos. +dataverse.permissions.roles.edit=Editar Rol +dataverse.permissions.roles.copy=Copiar Rol +# permissions-manage-files.xhtml=# permissions-manage-files.xhtml +dataverse.permissionsFiles.title=Permisos del Fichero +dataverse.permissionsFiles.usersOrGroups=Usuarios/Grupos +dataverse.permissionsFiles.usersOrGroups.assignBtn=Dar Acceso a Usuarios/Grupos +dataverse.permissionsFiles.usersOrGroups.description=Estos son todos los usuarios y grupos que tienen acceso a ficheros en este dataset. 
+dataverse.permissionsFiles.usersOrGroups.tabHeader.userOrGroup=Nombre (Afiliaci\u00f3n) del Usuario/Grupo +dataverse.permissionsFiles.usersOrGroups.tabHeader.id=ID +dataverse.permissionsFiles.usersOrGroups.tabHeader.email=E-mail +dataverse.permissionsFiles.usersOrGroups.tabHeader.files=Ficheros +dataverse.permissionsFiles.usersOrGroups.tabHeader.access=Acceso +dataverse.permissionsFiles.usersOrGroups.file=Fichero +dataverse.permissionsFiles.usersOrGroups.files=Ficheros +dataverse.permissionsFiles.usersOrGroups.invalidMsg=No hay usuarios ni grupos con acceso a los ficheros con acceso restringido de este dataset. +dataverse.permissionsFiles.files=Ficheros +dataverse.permissionsFiles.files.label={0, choice, 0#Restricted Files|1#Restricted File|2#Restricted Files} +dataverse.permissionsFiles.files.description=Estos son todos los ficheros de acceso restringido en este dataset. +dataverse.permissionsFiles.files.tabHeader.fileName=Nombre del Fichero +dataverse.permissionsFiles.files.tabHeader.roleAssignees=Usuarios/Grupos +dataverse.permissionsFiles.files.tabHeader.access=Acceso +dataverse.permissionsFiles.files.tabHeader.publishedRestrictedState=Publicado +dataverse.permissionsFiles.files.tabHeader.draftRestrictedState=Preliminar +dataverse.permissionsFiles.files.deleted=Eliminado +dataverse.permissionsFiles.files.public=P\u00fablico +dataverse.permissionsFiles.files.restricted=Restringido +dataverse.permissionsFiles.files.roleAssignee=Usuario/Grupo +dataverse.permissionsFiles.files.roleAssignees=Usuarios/Grupos +dataverse.permissionsFiles.files.roleAssignees.label={0, choice, 0#Users/Groups|1#User/Group|2#Users/Groups} +dataverse.permissionsFiles.files.assignBtn=Asignar Accesos +dataverse.permissionsFiles.files.invalidMsg=No hay ficheros con acceso restringido en este dataset. 
+dataverse.permissionsFiles.files.requested=Requested Files +dataverse.permissionsFiles.files.selected=Selecting {0} of {1} {2} +dataverse.permissionsFiles.viewRemoveDialog.header=Acceso al Fichero +dataverse.permissionsFiles.viewRemoveDialog.removeBtn=Eliminar Acceso +dataverse.permissionsFiles.viewRemoveDialog.removeBtn.confirmation=\u00bfEst\u00e1 seguro de querer eliminar el acceso a este fichero?. Una vez eliminado, ni el usuario ni el grupo podr\u00e1n descargar este fichero. +dataverse.permissionsFiles.assignDialog.header=Permitir Acceso al Fichero +dataverse.permissionsFiles.assignDialog.description=Permitir Acceso al Fichero para usuarios y grupos. +dataverse.permissionsFiles.assignDialog.userOrGroup=Usuario/Grupo +dataverse.permissionsFiles.assignDialog.userOrGroup.enterName=Introduzca el nombre del Usuario/Grupo +dataverse.permissionsFiles.assignDialog.userOrGroup.invalidMsg=No se encuentran coincidencias. +dataverse.permissionsFiles.assignDialog.userOrGroup.requiredMsg=Por favor, seleccione al menos un usuario o grupo. +dataverse.permissionsFiles.assignDialog.fileName=File Name +dataverse.permissionsFiles.assignDialog.grantBtn=Permitir +dataverse.permissionsFiles.assignDialog.rejectBtn=Rechazar +# permissions-configure.xhtml=# permissions-configure.xhtml +dataverse.permissions.accessDialog.header=Editar Acceso +dataverse.permissions.description=Esta es la configuraci\u00f3n de acceso a su dataverse. +dataverse.permissions.tip=Select if all users or only certain users are able to add to this dataverse, by clicking the Edit Access button. +dataverse.permissions.Q1=\u00bfQui\u00e9n puede a\u00f1adir en este dataverse? 
+dataverse.permissions.Q1.answer1=Cualquiera que a\u00f1ada a este dataverse, necesita tener acceso +dataverse.permissions.Q1.answer2=Cualquiera con una cuenta en Dataverse puede a\u00f1adir sub dataverses +dataverse.permissions.Q1.answer3=Cualquiera con una cuenta en Dataverse puede a\u00f1adir datasets +dataverse.permissions.Q1.answer4=Cualquiera con una cuenta en Dataverse puede a\u00f1adir sub dataverses and datasets +dataverse.permissions.Q2=\u00bfCu\u00e1l deber\u00eda ser el rol por defecto para alguien que a\u00f1ada datasets a este dataverse? +dataverse.permissions.Q2.answer.editor.description=- Editar metadatos, subir ficheros y editar ficheros, editar Condiciones, Libro de Invitados, Enviar datasets a revisi\u00f3n +dataverse.permissions.Q2.answer.manager.description=- Editar metadatos, subir ficheros y editar ficheros, editar Condiciones, Libro de Invitados, Restricciones de Ficheros (Acceso a Ficheros + Uso) +dataverse.permissions.Q2.answer.curator.description=- Editar metadatos, subir ficheros y editar ficheros, editar Condiciones, Libro de Invitados, Restricciones de Ficheros (Acceso a Ficheros + Uso), Editar Permisos/Asignar Roles + Publicar +# roles-assign.xhtml=# roles-assign.xhtml +dataverse.permissions.usersOrGroups.assignDialog.header=Asignar Rol +dataverse.permissions.usersOrGroups.assignDialog.description=Conceder permisos a usuarios y grupos asign\u00e1ndoles a un rol. +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup=Usuario/Grupo +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.enterName=Introduzca el nombre del Usuario/Grupo +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.invalidMsg=No se encuentran coincidencias. +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.requiredMsg=Por favor, seleccione al menos un usuario o grupo. +dataverse.permissions.usersOrGroups.assignDialog.role.description=Estos son los permisos asociados con el rol seleccionado. 
+dataverse.permissions.usersOrGroups.assignDialog.role.warning=Assigning the {0} role means the user(s) will also have the {0} role applied to all {1} within this {2}. +dataverse.permissions.usersOrGroups.assignDialog.role.requiredMsg=Por favor, selecciona el rol que quiera asignar. +# roles-edit.xhtml=# roles-edit.xhtml +dataverse.permissions.roles.header=Editar Rol +dataverse.permissions.roles.name=Nombre del Rol +dataverse.permissions.roles.name.title=Introduzca un nombre para el rol. +dataverse.permissions.roles.id=Identificador +dataverse.permissions.roles.id.title=Introduzca un nombre para el alias. +dataverse.permissions.roles.description.title=Describa el rol (1000 caracteres m\u00e1x). +dataverse.permissions.roles.description.counter=Quedan {0} caracteres +dataverse.permissions.roles.roleList.header=Permisos del Rol +dataverse.permissions.roles.roleList.authorizedUserOnly=Los permisos con el icono de informaci\u00f3n indican que las acciones se pueden realizar por parte de los usuarios no identificados en Dataverse. +# explicitGroup-new-dialog.xhtml=# explicitGroup-new-dialog.xhtml +dataverse.permissions.explicitGroupEditDialog.title.new=Crear Grupo +dataverse.permissions.explicitGroupEditDialog.title.edit=Editar Grupo {0} +dataverse.permissions.explicitGroupEditDialog.help=A\u00f1adir usuarios u otros grupos a este grupo. +dataverse.permissions.explicitGroupEditDialog.groupIdentifier=Identificador de Grupo +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.tip=Short name used for the ID of this group. 
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.required=El Identificador de Grupo no puede estar vac\u00edo
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.invalid=El Identificador de Grupo s\u00f3lo puede tener letras, n\u00fameros, guiones bajos (_) y guiones (-)
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.helpText=Consiste en letras, n\u00fameros, guiones bajos (_) y guiones (-)
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.taken=El identificador de Grupo ya est\u00e1 en uso en este dataverse
+dataverse.permissions.explicitGroupEditDialog.groupName=Nombre del Grupo
+dataverse.permissions.explicitGroupEditDialog.groupName.required=El Nombre del grupo no puede estar vac\u00edo
+dataverse.permissions.explicitGroupEditDialog.groupDescription=Descripci\u00f3n
+dataverse.permissions.explicitGroupEditDialog.roleAssigneeName=Usuario/Grupo
+dataverse.permissions.explicitGroupEditDialog.roleAssigneeNames=Usuarios/Grupos
+dataverse.permissions.explicitGroupEditDialog.createGroup=Crear Grupo
+# manage-templates.xhtml=# manage-templates.xhtml
+dataset.manageTemplates.pageTitle=Administrar las Plantillas de Dataset
+dataset.manageTemplates.select.txt=Incluir Plantillas desde {0}
+dataset.manageTemplates.createBtn=Crear Plantilla de Dataset
+dataset.manageTemplates.saveNewTerms=Guardar Plantilla de Dataset
+dataset.manageTemplates.noTemplates.why.header=\u00bfPor qu\u00e9 Usar Plantillas?
+dataset.manageTemplates.noTemplates.why.reason1=Las plantillas son \u00fatiles cuando tiene varios datasets con la misma informaci\u00f3n en varios campos de metadatos y prefiere no tener que introducirlos de forma manual continuamente.
+dataset.manageTemplates.noTemplates.why.reason2=Las plantillas pueden usarse para a\u00f1adir instrucciones para aquellos que a\u00f1adan datasets en su dataverse si quiere que un campo de metadatos se rellene de una forma concreta.
+dataset.manageTemplates.noTemplates.how.header=C\u00f3mo Usar Plantillas
+dataset.manageTemplates.noTemplates.how.tip1=Las plantillas se crean a nivel de dataverse, pueden borrarse (de forma que no se muestre en futuros datasets), configuradas como por defecto (no es obligatorio) y pueden ser copiadas para que no tenga que crear desde cero una nueva plantilla con metadatos similares desde otra plantilla. Eliminar una plantilla no afecta a los datasets que ya la hayan utilizado.
+dataset.manageTemplates.noTemplates.how.tip2=Por favor, tenga en cuenta que la elecci\u00f3n de qu\u00e9 campos de metadatos est\u00e1n ocultos, son obligatorios u opcionales, se realiza en la p\u00e1gina de Informaci\u00f3n General de este dataverse.
+dataset.manageTemplates.noTemplates.getStarted=Para empezar, pulse el bot\u00f3n Crear Plantilla de Dataset. Para saber m\u00e1s sobre las plantillas, visita la secci\u00f3n Plantillas de Dataset de la Gu\u00eda de Usuario.
+dataset.manageTemplates.tab.header.templte=Nombre de Plantilla
+dataset.manageTemplates.tab.header.date=Fecha de Creaci\u00f3n
+dataset.manageTemplates.tab.header.usage=Uso
+dataset.manageTemplates.tab.header.action=Acci\u00f3n
+dataset.manageTemplates.tab.action.btn.makeDefault=Hacer por Defecto
+dataset.manageTemplates.tab.action.btn.default=Por Defecto
+dataset.manageTemplates.tab.action.btn.view=Ver
+dataset.manageTemplates.tab.action.btn.copy=Copiar
+dataset.manageTemplates.tab.action.btn.edit=Editar
+dataset.manageTemplates.tab.action.btn.edit.metadata=Metadatos
+dataset.manageTemplates.tab.action.btn.edit.terms=Condiciones
+dataset.manageTemplates.tab.action.btn.delete=Eliminar
+dataset.manageTemplates.tab.action.btn.delete.dialog.tip=\u00bfEst\u00e1 seguro de que quiere eliminar esta plantilla? Los nuevos datasets no podr\u00e1n usar esta plantilla.
+dataset.manageTemplates.tab.action.btn.delete.dialog.header=Eliminar Plantilla
+dataset.manageTemplates.tab.action.btn.view.dialog.header=Previsualizaci\u00f3n de la Plantilla del Dataset
+dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate=Plantilla del Dataset
+dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate.title=La plantilla del dataset que rellena informaci\u00f3n previamente en el formulario de forma autom\u00e1tica.
+dataset.manageTemplates.tab.action.noedit.createdin=Template created at {0}
+dataset.manageTemplates.delete.usedAsDefault=Esta plantilla es la plantilla por defecto para los siguientes dataverse/s. Tambi\u00e9n ser\u00e1 eliminada como plantilla por defecto.
+dataset.manageTemplates.info.message.notEmptyTable=Crear, clonar, editar, ver o borrar plantillas de dataset. Crear una plantilla de dataset para rellenar campos con valores est\u00e1ndar, como la afiliaci\u00f3n del autor, para ayudar a los usuarios a crear datasets en este dataverse. Tambi\u00e9n puede a\u00f1adir marcas de agua o texto de ayuda a los campos de metadatos para orientar a los usuarios sobre qu\u00e9 a\u00f1adir en estos campos de metadatos.
+# metadataFragment.xhtml=# metadataFragment.xhtml
+# template.xhtml=# template.xhtml
+dataset.template.name.tip=El nombre de la plantilla del dataset.
+dataset.template.returnBtn=Volver a Administrar Plantillas
+dataset.template.name.title=Introducir un nombre \u00fanico para la plantilla.
+template.asterisk.tip=Los asteriscos indican campos de metadatos obligatorios para que los rellenen los usuarios al a\u00f1adir un dataset a este dataverse.
+dataset.template.popup.create.title=Crear Plantilla
+dataset.template.popup.create.text=\u00bfQuiere a\u00f1adir Condiciones de Uso/Acceso por defecto?
+dataset.create.add.terms=Guardar y A\u00f1adir Condiciones
+# manage-groups.xhtml=# manage-groups.xhtml
+dataverse.manageGroups.pageTitle=Administrar Grupos de Dataverse
+dataverse.manageGroups.createBtn=Crear Grupo
+dataverse.manageGroups.noGroups.why.header=\u00bfPor qu\u00e9 usar Grupos?
+dataverse.manageGroups.noGroups.why.reason1=Los grupos le permiten asignar roles y permisos a muchos usuarios a la vez.
+dataverse.manageGroups.noGroups.why.reason2=Puede usar grupos para administrar distintos tipos de usuarios (estudiantes, colaboradores, etc.)
+dataverse.manageGroups.noGroups.how.header=C\u00f3mo Usar los Grupos
+dataverse.manageGroups.noGroups.how.tip1=Un grupo puede contener tanto usuarios como otros grupos.
+dataverse.manageGroups.noGroups.how.tip2=Puede asignar permisos a un grupo desde la vista "Permisos".
+dataverse.manageGroups.noGroups.getStarted=Para comenzar, pulse el bot\u00f3n Crear Grupo.
+dataverse.manageGroups.tab.header.name=Nombre del Grupo
+dataverse.manageGroups.tab.header.id=Id del Grupo
+dataverse.manageGroups.tab.header.membership=Afiliaci\u00f3n
+dataverse.manageGroups.tab.header.action=Acci\u00f3n
+dataverse.manageGroups.tab.action.btn.view=Vista
+dataverse.manageGroups.tab.action.btn.copy=Copiar
+dataverse.manageGroups.tab.action.btn.enable=Habilitar
+dataverse.manageGroups.tab.action.btn.disable=Deshabilitar
+dataverse.manageGroups.tab.action.btn.edit=Editar
+dataverse.manageGroups.tab.action.btn.viewCollectedData=Ver los Datos Recolectados
+dataverse.manageGroups.tab.action.btn.delete=Eliminar
+dataverse.manageGroups.tab.action.btn.delete.dialog.header=Eliminar Grupo
+dataverse.manageGroups.tab.action.btn.delete.dialog.tip=\u00bfEst\u00e1 seguro de que quiere eliminar este grupo? No es posible deshacer esta acci\u00f3n.
+dataverse.manageGroups.tab.action.btn.view.dialog.header=Grupo de Dataverse +dataverse.manageGroups.tab.action.btn.view.dialog.group=Nombre del Grupo +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.name=Nombre del Miembro +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.type=Tipo de Miembro +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.action=Acci\u00f3n +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.delete=Eliminar +dataverse.manageGroups.tab.action.btn.view.dialog.groupMembers=Miembros del Grupo +dataverse.manageGroups.tab.action.btn.view.dialog.enterName=Introduce el Nombre del Usuario/Grupo +dataverse.manageGroups.tab.action.btn.view.dialog.invalidMsg=No se encontraron coincidencias. +# manage-guestbooks.xhtml=# manage-guestbooks.xhtml +dataset.manageGuestbooks.pageTitle=Administrar Libros de Invitados del Dataset +dataset.manageGuestbooks.include=Incluir Libros de Invitados desde {0} +dataset.manageGuestbooks.createBtn=Crear Libros de Invitados del Dataset +dataset.manageGuestbooks.download.all.responses=Download All Responses +dataset.manageGuestbooks.download.responses=Download Responses +dataset.manageGuestbooks.noGuestbooks.why.header=\u00bfPor qu\u00e9 Usar Libros de Invitados? +dataset.manageGuestbooks.noGuestbooks.why.reason1=Los libros de invitados le permiten recoger datos sobre qui\u00e9n descarga ficheros de sus datasets. Puede decidir recoger informaci\u00f3n de la cuenta (nombre de usuario, propio y apellidos, afiliaci\u00f3n, etc.) o bien crear sus propias preguntas (ej., \u00bfC\u00f3mo piensa utilizar estos datos?). +dataset.manageGuestbooks.noGuestbooks.why.reason2=Puede descargar los datos recogidos desde los libros de invitados habilitados para almacenarlos fuera del Dataverse. 
+dataset.manageGuestbooks.noGuestbooks.how.header=C\u00f3mo Usar los Libros de Invitados
+dataset.manageGuestbooks.noGuestbooks.how.tip1=Se puede usar un libro de invitados en varios datasets, pero cada dataset s\u00f3lo puede tener un libro de invitados.
+dataset.manageGuestbooks.noGuestbooks.how.tip2=Las preguntas personalizadas pueden tener respuestas en formato de texto libre u ofrecer al usuario la posibilidad de elegir una respuesta entre varias opciones.
+dataset.manageGuestbooks.noGuestbooks.getStarted=Para comenzar, pulse el bot\u00f3n Crear Libro de Invitados del Dataset. Para saber m\u00e1s sobre los Libros de Invitados, visita la secci\u00f3n Libro de Invitados del Dataset de la Gu\u00eda de Usuario.
+dataset.manageGuestbooks.tab.header.name=Nombre del Libro de Invitados
+dataset.manageGuestbooks.tab.header.date=Fecha de Creaci\u00f3n
+dataset.manageGuestbooks.tab.header.usage=Uso
+dataset.manageGuestbooks.tab.header.responses=Respuestas
+dataset.manageGuestbooks.tab.header.action=Acci\u00f3n
+dataset.manageGuestbooks.tab.action.btn.view=Ver
+dataset.manageGuestbooks.tab.action.btn.copy=Copiar
+dataset.manageGuestbooks.tab.action.btn.enable=Habilitar
+dataset.manageGuestbooks.tab.action.btn.disable=Deshabilitar
+dataset.manageGuestbooks.tab.action.btn.edit=Editar
+dataset.manageGuestbooks.tab.action.btn.preview=Preview
+dataset.manageGuestbooks.tab.action.btn.viewCollectedData=Ver Datos Recogidos
+dataset.manageGuestbooks.tab.action.btn.delete=Eliminar
+dataset.manageGuestbooks.tab.action.btn.delete.dialog.header=Eliminar Libro de Invitados
+dataset.manageGuestbooks.tab.action.btn.delete.dialog.tip=\u00bfEst\u00e1 seguro de que quiere eliminar este libro de invitados? No podr\u00e1 deshacer la acci\u00f3n.
+dataset.manageGuestbooks.tab.action.btn.view.dialog.header=Libro de Invitado del Dataset +dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook.title=Tras descargar los ficheros del libro de invitados pregunta por la informaci\u00f3n siguiente. +dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook=Nombre del Libro de Invitados +dataset.manageGuestbooks.tab.action.btn.viewCollectedData.dialog.header=Datos Recogidos por Libro de Invitados del Dataset +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData.title=Datos del usuario recogidos por el libro de invitados. +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData=Datos Recogidos +dataset.manageGuestbooks.tab.action.noedit.createdin=Guestbook created at {0} +dataset.manageGuestbooks.message.deleteSuccess=El libro de invitados se ha eliminado. +dataset.manageGuestbooks.message.deleteFailure=El libro de invitados no se puede eliminar. +dataset.manageGuestbooks.message.editSuccess=El libro de invitados se ha actualizado. +dataset.manageGuestbooks.message.editFailure=El libro de invitados no se puede actualizar. +dataset.manageGuestbooks.message.enableSuccess=El libro de invitados se ha habilitado. +dataset.manageGuestbooks.message.enableFailure=El libro de invitados no se puede habilitar. +dataset.manageGuestbooks.message.disableSuccess=El libro de invitados se ha deshabilitado. +dataset.manageGuestbooks.message.disableFailure=El libro de invitados no se puede deshabilitar. +dataset.manageGuestbooks.tip.title=Manage Dataset Guestbooks +dataset.manageGuestbooks.tip.downloadascsv=Click \"Download All Responses\" to download all collected guestbook responses for this dataverse, as a CSV file. To navigate and analyze your collected responses, we recommend importing this CSV file into Excel, Google Sheets or similar software. 
+dataset.guestbooksResponses.dataset=Dataset
+dataset.guestbooksResponses.date=Fecha
+dataset.guestbooksResponses.type=Tipo
+dataset.guestbooksResponses.file=Fichero
+dataset.guestbooksResponses.tip.title=Guestbook Responses
+dataset.guestbooksResponses.count.responses={0} {0, choice, 0#Responses|1#Response|2#Responses}
+dataset.guestbooksResponses.count.toofresults={0} to {1} of {2} {2, choice, 0#Responses|1#Response|2#Responses}
+dataset.guestbooksResponses.tip.downloadascsv=Click \"Download Responses\" to download all collected responses for this guestbook, as a CSV file. To navigate and analyze your collected responses, we recommend importing this CSV file into Excel, Google Sheets or similar software.
+dataset.guestbooksResponses.tooManyResponses.message=Note: this guestbook has too many responses to display on this page. Only the most recent {0} responses are shown below. Click \"Download Responses\" to download all collected responses ({1} total) as a CSV file.
+# guestbook-responses.xhtml=# guestbook-responses.xhtml
+dataset.guestbookResponses.pageTitle=Ver Respuestas del Libro de Invitados
+# guestbook.xhtml=# guestbook.xhtml
+dataset.manageGuestbooks.guestbook.name=Nombre del Libro de Invitados
+dataset.manageGuestbooks.guestbook.name.tip=Introduzca un nombre \u00fanico para este Libro de Invitados.
+dataset.manageGuestbooks.guestbook.dataCollected=Datos Recogidos
+dataset.manageGuestbooks.guestbook.dataCollected.description=Informaci\u00f3n sobre la cuenta de Dataverse que ser\u00e1 recogida cuando un usuario descargue un fichero. Marque las que vayan a ser obligatorias.
+dataset.manageGuestbooks.guestbook.customQuestions=Preguntas Personalizadas +dataset.manageGuestbooks.guestbook.accountInformation=Account Information +dataset.manageGuestbooks.guestbook.required=(Required) +dataset.manageGuestbooks.guestbook.optional=(Optional) +dataset.manageGuestbooks.guestbook.customQuestions.description=Cree sus propias preguntas para que los usuarios la introduzcan aparte de su informaci\u00f3n de cuenta cuando descarguen un fichero. Las preguntas pueden ser obligatorias u opcionales y las respuestas pueden ser textuales o de selecci\u00f3n m\u00faltiple. +dataset.manageGuestbooks.guestbook.customQuestions.questionType=Tipo de Pregunta +dataset.manageGuestbooks.guestbook.customQuestions.questionText=Texto de la Pregunta +dataset.manageGuestbooks.guestbook.customQuestions.responseOptions=Opciones de Respuesta +dataset.manageGuestbooks.guestbook.customQuestions.questionType.text=Texto +dataset.manageGuestbooks.guestbook.customQuestions.questionType.multiple=Selecci\u00f3n M\u00faltiple +# guestbookResponseFragment.xhtml=# guestbookResponseFragment.xhtml +dataset.guestbookResponse.guestbook.additionalQuestions=Preguntas Adicionales +dataset.guestbookResponse.guestbook.responseTooLong=Please limit response to 255 characters +# dataset.xhtml=# dataset.xhtml +dataset.configureBtn=Configure +dataset.pageTitle=A\u00f1adir Nuevo Dataset +dataset.editBtn=Editar +dataset.editBtn.itemLabel.upload=Fichero (Subir) +dataset.editBtn.itemLabel.metadata=Metadatos +dataset.editBtn.itemLabel.terms=Condiciones +dataset.editBtn.itemLabel.permissions=Permisos +dataset.editBtn.itemLabel.thumbnailsAndWidgets=Thumbnails + Widgets +dataset.editBtn.itemLabel.privateUrl=Private URL +dataset.editBtn.itemLabel.permissionsDataset=Dataset +dataset.editBtn.itemLabel.permissionsFile=Restricted Files +dataset.editBtn.itemLabel.deleteDataset=Eliminar Dataset +dataset.editBtn.itemLabel.deleteDraft=Eliminar Versi\u00f3n Preliminar +dataset.editBtn.itemLabel.deaccession=Eliminar 
Acceso al Dataset +dataset.exportBtn=Export Metadata +dataset.exportBtn.itemLabel.ddi=DDI +dataset.exportBtn.itemLabel.dublinCore=Dublin Core +dataset.exportBtn.itemLabel.schemaDotOrg=Schema.org JSON-LD +dataset.exportBtn.itemLabel.json=JSON +metrics.title=Estad\u00edsticas +metrics.title.tip=View more metrics information +metrics.comingsoon=Pr\u00f3ximamente ... +metrics.views=Vistas +metrics.downloads={0, choice, 0#Descargas|1#Descarga|2#Descargas} +metrics.citations=Citas +metrics.shares=Compartido +dataset.publish.btn=Publicar +dataset.publish.header=Publicar Dataset +dataset.rejectBtn=Devolver al Autor +dataset.submitBtn=Enviar a Revisi\u00f3n +dataset.disabledSubmittedBtn=Enviado a Revisi\u00f3n +dataset.submitMessage=Enviar este dataset a revisi\u00f3n por el Conservador/Revisor de esta dataverse para su posible publicaci\u00f3n. +dataset.submit.success=Your dataset has been submitted for review. +dataset.inreview.infoMessage=\u2013 This dataset is currently under review prior to publication. +dataset.submit.failure=Dataset Submission Failed - {0} +dataset.submit.failure.null=Can't submit for review. Dataset is null. +dataset.submit.failure.isReleased=Latest version of dataset is already released. Only draft versions can be submitted for review. +dataset.submit.failure.inReview=You cannot submit this dataset for review because it is already in review. +dataset.rejectMessage=Enviar este dataset al colaborador para su modificaci\u00f3n. +dataset.rejectWatermark=Please enter a reason for returning this dataset to its author(s). +dataset.reject.enterReason=Reason for return to author is required +dataset.reject.enterReason.header=Required entry +dataset.reject.success=This dataset has been sent back to the contributor. +dataset.reject.failure=Dataset Submission Return Failed - {0} +dataset.reject.datasetNull=Cannot return the dataset to the author(s) because it is null. 
+dataset.reject.datasetNotInReview=This dataset cannot be returned to the author(s) because the latest version is not In Review. The author(s) needs to click Submit for Review first.
+dataset.publish.tip=\u00bfEst\u00e1 seguro de que quiere publicar este dataset? Una vez hecho esto, permanecer\u00e1 publicado.
+dataset.publishBoth.tip=Una vez publique el dataset, \u00e9ste quedar\u00e1 publicado.
+dataset.unregistered.tip= Este dataset no est\u00e1 registrado. Intentaremos registrarlo antes de publicarlo.
+dataset.republish.tip=\u00bfEst\u00e1 seguro de que quiere volver a publicar este dataset?
+dataset.selectVersionNumber=Indique si es una actualizaci\u00f3n de versi\u00f3n mayor o menor.
+dataset.majorRelease=Revisi\u00f3n Mayor
+dataset.minorRelease=Revisi\u00f3n Menor
+dataset.majorRelease.tip=Debido a la naturaleza de los cambios, la versi\u00f3n preliminar actual tendr\u00e1 una revisi\u00f3n mayor ({0})
+dataset.mayNotBePublished=No se puede publicar el dataset.
+dataset.mayNotPublish.administrator= Este dataset no se puede publicar hasta que {1} sea publicado por su administrador.
+dataset.mayNotPublish.both= Este dataset no se puede publicar hasta que {1} sea publicado. \u00bfQuiere publicar los dos ahora?
+dataset.mayNotPublish.twoGenerations= Este dataset no se puede publicar hasta que {1} y {3} sean publicados.
+dataset.mayNotBePublished.both.button=S\u00ed, Publicar Ambos
+dataset.viewVersion.unpublished=Ver la Versi\u00f3n sin Publicar
+dataset.viewVersion.published=Ver la Versi\u00f3n Publicada
+dataset.email.datasetContactBtn=Mandar E-mail al Contacto del Dataset
+dataset.email.hiddenMessage=
+dataset.email.messageSubject=Asunto: Prueba de Mensaje
+dataset.email.datasetLinkBtn.tip=Enlazar el Dataset a su Dataverse
+dataset.share.datasetShare=Compartir Dataset
+dataset.share.datasetShare.tip=Compartir este dataset en sus redes sociales favoritas.
+dataset.share.datasetShare.shareText=Ver este dataset.
+dataset.locked.message=Dataset Locked
+dataset.locked.inReview.message=Submitted for Review
+dataset.publish.error=This dataset may not be published because the {0} Service is currently inaccessible. Please try again. Does the issue continue to persist?
+dataset.publish.error.doi=This dataset may not be published because the DOI update failed.
+dataset.delete.error=Could not deaccession the dataset because the {0} update failed.
+dataset.publish.worldMap.deleteConfirm=Please note that your data and map on WorldMap will be removed due to restricted file access changes in this dataset version which you are publishing. Do you want to continue?
+dataset.publish.workflow.inprogress=Publish workflow in progress
+dataset.versionUI.draft=Versi\u00f3n Preliminar
+dataset.versionUI.inReview=En Revisi\u00f3n
+dataset.versionUI.unpublished=Sin Publicar
+dataset.versionUI.deaccessioned=Sin Acceso
+dataset.cite.title.released=VERSI\u00d3N PRELIMINAR que se reemplazar\u00e1 en la cita por V1 una vez el dataset haya sido publicado.
+dataset.cite.title.draft=VERSI\u00d3N PRELIMINAR que se reemplazar\u00e1 en la cita por la versi\u00f3n seleccionada una vez el dataset haya sido publicado.
+dataset.cite.title.deassessioned=La VERSI\u00d3N CON ACCESO RETIRADO se ha a\u00f1adido a la cita para esta versi\u00f3n, porque ya no est\u00e1 disponible.
+dataset.cite.standards.tip=Si quiere usar estos datos, por favor, a\u00f1ada esta cita a sus recursos acad\u00e9micos. M\u00e1s informaci\u00f3n en Est\u00e1ndares de Citas de Datos.
+dataset.cite.downloadBtn=Descargar Cita
+dataset.cite.downloadBtn.xml=XML de EndNote
+dataset.cite.downloadBtn.ris=Formato RIS
+dataset.cite.downloadBtn.bib=BibTeX
+dataset.create.authenticatedUsersOnly=S\u00f3lo los usuarios identificados pueden descargar datasets.
+dataset.deaccession.reason=Raz\u00f3n de Retirada de Acceso
+dataset.beAccessedAt=Ahora se puede acceder al dataset en:
+dataset.descriptionDisplay.title=Descripci\u00f3n
+dataset.keywordDisplay.title=Palabra Clave
+dataset.subjectDisplay.title=Materia
+dataset.contact.tip=Utilice el bot\u00f3n de e-mail de arriba para contactar.
+dataset.asterisk.tip=Los asteriscos indican campos obligatorios
+dataset.message.uploadFiles=Subir Ficheros de Dataset - Puede pulsar y arrastrar ficheros desde su escritorio directamente hasta el widget de subida de ficheros.
+dataset.message.editMetadata=Editar Metadatos del Dataset - A\u00f1ade m\u00e1s metadatos sobre este dataset para facilitar que sea encontrado f\u00e1cilmente.
+dataset.message.editTerms=Editar las Condiciones del Dataset - Actualizar las condiciones de uso de este dataset.
+dataset.message.locked.editNotAllowedInReview=Dataset cannot be edited due to In Review dataset lock.
+dataset.message.locked.downloadNotAllowedInReview=Dataset file(s) may not be downloaded due to In Review dataset lock.
+dataset.message.locked.downloadNotAllowed=Dataset file(s) may not be downloaded due to dataset lock.
+dataset.message.locked.editNotAllowed=Dataset cannot be edited due to dataset lock.
+dataset.message.createSuccess=Este dataset se ha creado.
+dataset.message.linkSuccess= {0} se ha enlazado con \u00e9xito a {3}.
+dataset.message.metadataSuccess=Los metadatos de este dataset se han actualizado.
+dataset.message.termsSuccess=Las condiciones de este dataset se han actualizado.
+dataset.message.filesSuccess=Los ficheros de este dataset se han actualizado.
+dataset.message.publishSuccess=Este dataset se ha publicado.
+dataset.message.only.authenticatedUsers=Only authenticated users may release Datasets.
+dataset.message.deleteSuccess=Este dataset se ha eliminado.
+dataset.message.bulkFileUpdateSuccess=Se han actualizado los ficheros seleccionados.
+dataset.message.bulkFileDeleteSuccess=The selected files have been deleted.
+datasetVersion.message.deleteSuccess=La versi\u00f3n preliminar de este dataset se ha eliminado. +datasetVersion.message.deaccessionSuccess=El/Las versi\u00f3n(es) se ha(n) retirado. +dataset.message.deaccessionSuccess=Este dataset se ha retirado. +dataset.message.files.ingestSuccess=El/Los fichero(s) se han a\u00f1adido con \u00e9xito. Ahora puedes explorarlos con TwoRavens o descargarlos en formatos alternativos. +dataset.message.validationError=Error de Validaci\u00f3n - Hab\u00eda campos obligatorios sin rellenar o hubo un error de validaci\u00f3n. Por favor, haga scroll para ver los detalles. +dataset.message.publishFailure=No se ha podido publicar el dataset. +dataset.message.metadataFailure=No se han podido actualizar los metadatos. +dataset.message.filesFailure=No se han podido actualizar los ficheros. +dataset.message.bulkFileDeleteFailure=The selected files could not be deleted. +dataset.message.files.ingestFailure=No se pudo/ieron subir el/los fichero(s). +dataset.message.deleteFailure=No se pudo eliminar la versi\u00f3n preliminar del dataset. +dataset.message.deaccessionFailure=No se puede retirar el acceso a este dataset. +dataset.message.createFailure=No pudo crearse el dataset. +dataset.message.termsFailure=No pudieron actualizarse las condiciones del dataset. +dataset.message.publicInstall=File Access - Files are stored on a publicly accessible storage server. +dataset.metadata.publicationDate=Fecha de Publicaci\u00f3n +dataset.metadata.publicationDate.tip=La fecha de publicaci\u00f3n del dataset. +dataset.metadata.persistentId=ID Persistente del Dataset +dataset.metadata.persistentId.tip=Identificador \u00fanico y persistente para un Dataset, el cu\u00e1l puede ser un Handle o un DOI en Dataverse. 
+dataset.versionDifferences.termsOfUseAccess=Condiciones de Uso y Acceso +dataset.versionDifferences.termsOfUseAccessChanged=Condiciones de Uso/Acceso Cambiadas +file.viewDiffDialog.restricted=Restringido +dataset.template.tip=Al cambiar la plantilla se borrar\u00e1 el contenido de cualquier fichero en el que haya introducido datos. +dataset.noTemplate.label=Ninguna +dataset.noSelectedFiles.header=Select File(s) +dataset.noSelectedFilesForDownload=Please select a file or files to be downloaded. +dataset.noSelectedFilesForRequestAccess=Please select a file or files for access request. +dataset.noSelectedFilesForDelete=Please select a file or files to be deleted. +dataset.noSelectedFilesForMetadataEdit=Please select a file or files to be edited. +dataset.noSelectedFilesForRestrict=Please select unrestricted file(s) to be restricted. +dataset.noSelectedFilesForUnRestrict=Please select restricted file(s) to be unrestricted. +dataset.inValidSelectedFilesForDownload=Restricted Files Selected +dataset.noValidSelectedFilesForDownload=The restricted file(s) selected may not be downloaded because you have not been granted access. +dataset.mixedSelectedFilesForDownload=The restricted file(s) selected may not be downloaded because you have not been granted access. +dataset.downloadUnrestricted=Click Continue to download the files you have access to download. +dataset.requestAccessToRestrictedFiles=You may request access to the restricted file(s) by clicking the Request Access button. +dataset.privateurl.infoMessageAuthor=Unpublished Dataset Private URL - Privately share this dataset before it is published: {0} +dataset.privateurl.infoMessageReviewer=Unpublished Dataset Private URL - This unpublished dataset is being privately shared. You will not be able to access it when logged into your Dataverse account. 
+dataset.privateurl.header=Unpublished Dataset Private URL +dataset.privateurl.tip=Use a Private URL to allow those without Dataverse accounts to access your unpublished dataset. For more information about the Private URL feature, please refer to the User Guide. +dataset.privateurl.absent=Private URL has not been created. +dataset.privateurl.createPrivateUrl=Create Private URL +dataset.privateurl.disablePrivateUrl=Disable Private URL +dataset.privateurl.disablePrivateUrlConfirm=Yes, Disable Private URL +dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Private URL? If you have shared the Private URL with others they will no longer be able to use it to access your unpublished dataset. +dataset.privateurl.cannotCreate=Private URL can only be used with unpublished versions of datasets. +dataset.privateurl.roleassigeeTitle=Private URL Enabled +dataset.privateurl.createdSuccess=Success! +dataset.privateurl.disabledSuccess=You have successfully disabled the Private URL for this unpublished dataset. +dataset.privateurl.noPermToCreate=To create a Private URL you must have the following permissions: {0}. +file.count={0} {0, choice, 0#Ficheros|1#Fichero|2#Ficheros} +file.count.selected={0} {0, choice, 0#Ficheros Seleccionados|1#Fichero Seleccionado|2#Ficheros Seleccionados} +file.selectToAddBtn=Selecciona los ficheros que quiera a\u00f1adir +file.selectToAdd.tipLimit=File upload limit is {0} bytes per file. +file.selectToAdd.tipMoreInformation=For more information about supported file formats, please refer to the User Guide. +file.selectToAdd.dragdropMsg=Drag and drop files here. +file.createUploadDisabled=Once you have saved your dataset, you can upload your data using the "Upload Files" button on the dataset page. For more information about supported file formats, please refer to the User Guide. +file.fromDropbox=Subir desde Dropbox +file.fromDropbox.tip=Los ficheros, tambi\u00e9n pueden ser actualizados directamente desde Dropbox. 
+file.replace.original=Original File +file.editFiles=Editar Ficheros +file.bulkUpdate=Actualizaci\u00f3n en Bloque +file.uploadFiles=Subir Ficheros +file.replaceFile=Replace File +file.notFound.tip=No hay ficheros en este dataset. +file.noSelectedFiles.tip=No hay ficheros seleccionados para mostrar. +file.noUploadedFiles.tip=Los ficheros que suba aparecer\u00e1n aqu\u00ed. +file.replace=Replace +file.replaced.warning.header=Edit File +file.replaced.warning.draft.warningMessage=You can not replace a file that has been replaced in a dataset draft. In order to replace it with a different file you must delete the dataset draft. Note that doing so will discard any other changes within this draft. +file.replaced.warning.previous.warningMessage=You can not edit a file that has been replaced in a previous dataset version. In order to edit it you must go to the most recently published version of the file. +file.alreadyDeleted.previous.warningMessage=This file has already been deleted in current version. It may not be edited. +file.delete=Eliminar +file.metadata=Metadata +file.deleted.success=Los ficheros {0} ser\u00e1n eliminados de forma permanente de esta versi\u00f3n de este dataset una vez pulses el bot\u00f3n Guardar Cambios. +file.deleted.replacement.success=The replacement file has been deleted. +file.editAccess=Edit Access +file.restrict=Restringido +file.unrestrict=Eliminar restricci\u00f3n +file.restricted.success=El/Los fichero(s) {0} ser\u00e1n de acceso restringido cuando pulse el bot\u00f3n Guardar Cambios al final de esta p\u00e1gina. +file.download.header=Descargar +file.download.subset.header=Download Data Subset +file.preview=Previsualizaci\u00f3n: +file.previewMap=Preview Map: +file.fileName=Nombre del Fichero +file.type.tabularData=Datos Tabulares +file.originalChecksumType=Original File {0} +file.checksum.exists.tip=A file with this checksum already exists in the dataset. 
+file.selectedThumbnail=Miniatura +file.selectedThumbnail.tip=La miniatura para este fichero se usa como miniatura para el dataset. Pulse el bot\u00f3n 'Opciones Avanzadas' de otro fichero para seleccionarlo. +file.cloudStorageAccess=Cloud Storage Access +file.cloudStorageAccess.tip=The container name for this dataset needed to access files in cloud storage. +file.cloudStorageAccess.help=To directly access this data in the {2} cloud environment, use the container name in the Cloud Storage Access box below. To learn more about the cloud environment, visit the Cloud Storage Access section of the User Guide. +file.copy=Copy +file.compute=Compute +file.rsyncUpload.info=Follow these steps to upload your data. To learn more about the upload process and how to prepare your data, please refer to the User Guide. +file.rsyncUpload.noScriptAvailable=Rsync script not available! +file.rsyncUpload.filesExist=You can not upload additional files to this dataset. +file.rsyncUpload.step1=Make sure your data is stored under a single directory. All files within this directory and its subdirectories will be uploaded to your dataset. +file.rsyncUpload.step2=Download this file upload script: +file.rsyncUpload.step2.downloadScriptButton=Download Script +file.rsyncUpload.step3=Open a terminal window in the same directory you saved the script and run this command: bash ./{0} +file.rsyncUpload.step4=Follow the instructions in the script. It will ask for a full path (beginning with "/") to the directory containing your data. Note: this script will expire after 7 days. +file.rsyncUpload.inProgressMessage.summary=DCM File Upload +file.rsyncUpload.inProgressMessage.details=This dataset is locked until the data files have been transferred and verified. +file.metaData.dataFile.dataTab.variables=Variables +file.metaData.dataFile.dataTab.observations=Observaciones +file.metaData.viewOnWorldMap=Ver en WorldMap +file.addDescription=A\u00f1adir descripci\u00f3n del fichero... 
+file.tags=Etiquetas +file.editTags=Editar Etiquetas +file.editTagsDialog.tip=Seleccionar etiquetas existentes o crear otras nuevas que describan sus ficheros. Cuando se crea una etiqueta nueva, \u00e9sta se a\u00f1ade como una opci\u00f3n de etiqueta para todos los ficheros de este dataset. Cada fichero puede tener m\u00e1s de una etiqueta. +file.editTagsDialog.select=Etiquetas de Fichero +file.editTagsDialog.selectedTags=Selected Tags +file.editTagsDialog.selectedTags.none=No tags selected +file.editTagsDialog.add=Etiqueta de Fichero Personalizada +file.editTagsDialog.add.tip=Creating a new tag will add it as a tag option for all files in this dataset. +file.editTagsDialog.newName=A\u00f1adir nueva etiqueta de fichero... +dataset.removeUnusedFileTags.label=Delete Tags +dataset.removeUnusedFileTags.tip=Select to delete Custom File Tags not used by the files in the dataset. +dataset.removeUnusedFileTags.check=Delete tags not being used +file.setThumbnail=Establecer Previsualizaci\u00f3n +file.setThumbnail.header=Establecer Previsualizaci\u00f3n para el Dataset +file.datasetThumbnail=Previsualizaci\u00f3n del Dataset +file.datasetThumbnail.tip=Seleccionar esta imagen para mostrarla como previsualizaci\u00f3n en los resultados de b\u00fasqueda para este dataset. +file.setThumbnail.confirmation=Are you sure you want to set this image as your dataset thumbnail? There is already an image uploaded to be the thumbnail and this action will remove it. +file.useThisIamge=Usar esta imagen como imagen de previsualizaci\u00f3n de este dataset +file.advancedOptions=Opciones Avanzadas +file.advancedIngestOptions=Opciones Avanzadas de Procesamiento +file.assignedDataverseImage.success={0} se ha guardado como la previsualizaci\u00f3n de este dataset. +file.assignedTabFileTags.success=La(s) etiqueta(s) se han a\u00f1adido con \u00e9xito a {0}. 
+file.tabularDataTags=Etiquetas de Datos Tabulares +file.tabularDataTags.tip=Etiquetas espec\u00edficas de ficheros de datos para identificar qu\u00e9 tipo(s) de datos tiene un fichero. +file.spss-savEncoding=Codificaci\u00f3n de Idioma +file.spss-savEncoding.title=Seleccione el idioma usado para codificar este fichero de datos SPSS (sav). +file.spss-savEncoding.current=Selecci\u00f3n actual: +file.spss-porExtraLabels=Etiquetas de Variables +file.spss-porExtraLabels.title=Subir un fichero adicional de texto con etiquetas de variable extra. +file.spss-porExtraLabels.selectToAddBtn=Seleccionar el Fichero a A\u00f1adir +file.ingestFailed.header=Upload Completed with Errors +file.ingestFailed.message=Tabular data ingest failed. +file.explore.twoRavens=TwoRavens +file.map=Map +file.mapData=Mapear Datos +file.mapData.worldMap=WorldMap +file.mapData.unpublished.header=Datos No Publicados +file.mapData.unpublished.message=Para situar tus datos en WorldMap, estos datos han de ser publicados. Por favor, publique este dataset y pruebe de nuevo la funcionalidad Mapa de Datos. +file.downloadBtn.format.all=Todos los Formatos de Ficheros + Informaci\u00f3n +file.downloadBtn.format.tab=Delimitados por Tabuladores +file.downloadBtn.format.original=Formato de Fichero Original ({0}) +file.downloadBtn.format.rdata=Formato RData +file.downloadBtn.format.var=Metadatos Variables +file.downloadBtn.format.citation=Citas de Fichero de Datos +file.more.information.link=Enlace a m\u00e1s informaci\u00f3n sobre el fichero sobre +file.requestAccess=Petici\u00f3n de acceso +file.requestAccess.dialog.msg=Tiene que Identificarse para solicitar acceso a este fichero. +file.requestAccess.dialog.msg.signup=Tiene que Crear una Cuenta o Identificarse para solicitar acceso a este fichero. +file.accessRequested=Acceso Pedido +file.restrictions=File Restrictions +file.restrictions.description=Limit access to published files by marking them as restricted. 
Provide users Terms of Access and allow them to request access. +file.restrictions.worldmap.warning=Please note, once your file access changes are published your map on WorldMap will be deleted and the Explore on WorldMap feature will be removed. +file.ingestInProgress=Ingest in progress... +file.dataFilesTab.metadata.header=Metadatos +file.dataFilesTab.metadata.addBtn=A\u00f1adir + Editar Metadatos +file.dataFilesTab.terms.header=Condiciones +file.dataFilesTab.terms.editTermsBtn=Editar los Requisitos de las Condiciones +file.dataFilesTab.terms.list.termsOfUse.header=Condiciones de Uso +file.dataFilesTab.terms.list.termsOfUse.waiver=Licencia +file.dataFilesTab.terms.list.termsOfUse.waiver.title=La licencia informa a quienes descargan datos sobre c\u00f3mo se puede usar este dataset. +file.dataFilesTab.terms.list.termsOfUse.waiver.txt=CC0 - "Dedicaci\u00f3n de Dominio P\u00fablico" +file.dataFilesTab.terms.list.termsOfUse.waiver.description=Los datasets tendr\u00e1n por defecto una licencia CC0 dedicaci\u00f3n de dominio p\u00fablico. La licencia CC0 facilita la reutilizaci\u00f3n y ampliaci\u00f3n de nuestros datos de investigaci\u00f3n. Tanto las Normas de la Comunidad como las buenas pr\u00e1cticas cient\u00edficas, esperan que se d\u00e9 cr\u00e9dito al material usado mediante citas. Si no puede dar a sus datasets una licencia CC0, puede introducir Condiciones de Uso personalizadas para tus datasets. +file.dataFilesTab.terms.list.termsOfUse.no.waiver.txt=No se ha seleccionado ninguna licencia para este dataset. +file.dataFilesTab.terms.list.termsOfUse.waiver.txt.description=Tanto nuestras Normas de la Comunidad como las buenas pr\u00e1cticas cient\u00edficas, esperan que se d\u00e9 cr\u00e9dito al material usado mediante citas. Por favor, usa la siguiente cita de datos generada por Dataverse. 
+file.dataFilesTab.terms.list.termsOfUse.waiver.select.CCO=S\u00ed, usar CC0 - "Dedicaci\u00f3n de Dominio P\u00fablico" +file.dataFilesTab.terms.list.termsOfUse.waiver.select.notCCO=No, no usar CC0 - "Dedicaci\u00f3n de Dominio P\u00fablico" +file.dataFilesTab.terms.list.termsOfUse.waiver.select.tip=Esto es lo que ver\u00e1n los usuarios finales en este dataset +file.dataFilesTab.terms.list.termsOfUse.termsOfUse=Condiciones de Uso +file.dataFilesTab.terms.list.termsOfUse.termsOfUse.title=Describe c\u00f3mo pueden usarse estos datos una vez descargados. +file.dataFilesTab.terms.list.termsOfUse.termsOfUse.description=Si no puede dar una licencia CC0 a sus datasets, puede introducir Condiciones de Uso personalizadas. \u00c9ste es un ejemplo de Condiciones de uso de Datos para datasets que tienen datos anonimizados relacionados con personas. +file.dataFilesTab.terms.list.termsOfUse.addInfo=Informaci\u00f3n Adicional +file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration=Declaraci\u00f3n de Confidencialidad +file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration.title=Indica si se necesita firmar una declaraci\u00f3n de confidencialidad para acceder a un recurso. +file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions=Permisos Especiales +file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions.title=Determina si son necesarios permisos especiales para acceder a un recurso (p.e., si es necesario rellenar un formulario y d\u00f3nde se accede a dicho formulario). +file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions=Restricciones +file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions.title=Todas las restricciones de acceso o uso de la colecci\u00f3n, tales como certificaciones de privacidad o restricciones en su distribuci\u00f3n, se deber\u00edan indicar aqu\u00ed. Estas restricciones pueden estar impuestas por el autor, el productor o el diseminador de la colecci\u00f3n de datos. 
Si la restricci\u00f3n de datos afecta s\u00f3lo a un tipo de usuarios, indique a qu\u00e9 tipo. +file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements=Requisitos de Citas +file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements.title=Incluya requisitos especiales/expl\u00edcitos para datos que necesiten ser citados de forma adecuada en art\u00edculos u otras publicaciones basadas en el an\u00e1lisis de datos. Para los requisitos de cita est\u00e1ndar, dir\u00edjase a las Normas de nuestra Comunidad. +file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements=Requisitos del Depositante +file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements.title=Informaci\u00f3n relacionada con la responsabilidad de los usuarios para Depositantes, Autores y Conservadores/Revisores sobre el uso de los datos, indicando las citas a los trabajos publicados o proporcionando las copias de los manuscritos. +file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions=Condiciones +file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions.title=Cualquier informaci\u00f3n adicional que ayude al usuario a entender las condiciones de uso y acceso del Dataset. +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer=Renuncia de Responsabilidad +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer.title=Informaci\u00f3n relacionada con la responsabilidad de uso del Dataset. +file.dataFilesTab.terms.list.termsOfAccess.header=Ficheros de Acceso Restringido + Condiciones de Acceso +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles=Ficheros de Acceso Restringido +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.title=El n\u00famero de ficheros de acceso restringido en este dataset. +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=Hay {0} {0, choice, 0#ficheros|1#fichero|2#ficheros} de acceso restringido en este dataset. 
+file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess=Condiciones de Acceso +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess.title=Informaci\u00f3n sobre c\u00f3mo conseguir acceso a los ficheros con acceso restringido en este dataset y si los usuarios pueden conseguirlo. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess=Pedir acceso +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title=Si est\u00e1 seleccionado, los usuarios pueden solicitar acceso a los ficheros de acceso restringido en este dataset. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.request=Los usuarios pueden solicitar acceso a los ficheros. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.notRequest=Los usuarios no pueden solicitar acceso a los ficheros. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.enableBtn=Habilitar la solicitud de acceso +file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace=Lugar de Acceso a los Datos +file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace.title=Si los datos no est\u00e1n s\u00f3lo en Dataverse, indique la(s) localizaci\u00f3n(es) donde los datos est\u00e1n almacenados actualmente. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive=Archivo Original +file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive.title=Archivo desde el que se obtuvieron los datos. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus=Estado de Disponibilidad +file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus.title=Disponibilidad del Dataset. Un depositante puede indicar que un Dataset no est\u00e1 disponible porque tiene un per\u00edodo de embargo, porque se ha sustituido, porque habr\u00e1 una nueva edici\u00f3n inminentemente, etc. 
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess=Contactar para Solicitar Acceso +file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess.title=Si es diferente del Contacto del Dataset, esta es la persona u organizaci\u00f3n de contacto (incluir el e-mail o la direcci\u00f3n completa, y el n\u00famero de tel\u00e9fono si est\u00e1 disponible) que controla el acceso a la colecci\u00f3n. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection=Tama\u00f1o de la Colecci\u00f3n +file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection.tip=Resumen del n\u00famero de ficheros f\u00edsicos que contiene el Dataset, indicando el n\u00famero de ficheros que contienen datos y haciendo notar si la colecci\u00f3n contiene documentaci\u00f3n legible por m\u00e1quinas y/o otra informaci\u00f3n adicional como c\u00f3digo, diccionarios de datos, declaraciones de definici\u00f3n de datos o instrumentos de recolecci\u00f3n de datos. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion=Conclusi\u00f3n del Estudio +file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion.title=Relaci\u00f3n entre los datos recolectados con la cantidad de datos almacenados en el Dataset. Informaci\u00f3n sobre porqu\u00e9 algunos items recolectados no se incluyeron en el dataset o si se debe proporcionar un fichero de datos concreto. +file.dataFilesTab.terms.list.guestbook=Libro de Invitados +file.dataFilesTab.terms.list.guestbook.title=La informaci\u00f3n del usuario (e.d., nombre, e-mail, instituci\u00f3n y puesto) ser\u00e1 recogida cuando se descarguen los ficheros. +file.dataFilesTab.terms.list.guestbook.noSelected.tip=No hay un libro de invitados asignado a este dataset, no se le preguntar\u00e1 si quiere proporcionar informaci\u00f3n durante la descarga del fichero. +file.dataFilesTab.terms.list.guestbook.noSelected.admin.tip=No hay libros de invitados disponibles en el Dataverse {0} para asignar a este dataset. 
+file.dataFilesTab.terms.list.guestbook.inUse.tip=El siguiente libro de invitados pedir\u00e1 al usuario informaci\u00f3n adicional cuando descargue un fichero. +file.dataFilesTab.terms.list.guestbook.viewBtn=Previsualizaci\u00f3n del Libro de Invitados +file.dataFilesTab.terms.list.guestbook.select.tip=Seleccionar un libro de invitados para que el usuario proporcione informaci\u00f3n adicional cuando descargue un fichero. +file.dataFilesTab.terms.list.guestbook.noAvailable.tip=No hay libros de invitados habilitados en el Dataverse {0}. Para crear un libro de invitados, vuelva al Dataverse {0}, pulsa el bot\u00f3n "Editar" y seleccione la opci\u00f3n "Libros de Invitados del Dataset". +file.dataFilesTab.terms.list.guestbook.clearBtn=Limpiar Selecci\u00f3n +file.dataFilesTab.dataAccess=Data Access +file.dataFilesTab.dataAccess.info=This data file can be accessed through a terminal window, using the commands below. For more information about downloading and verifying data, see our User Guide. +file.dataFilesTab.dataAccess.info.draft=Data files can not be accessed until the dataset draft has been published. For more information about downloading and verifying data, see our User Guide. +file.dataFilesTab.dataAccess.local.label=Local Access +file.dataFilesTab.dataAccess.download.label=Download Access +file.dataFilesTab.dataAccess.verify.label=Verify Data +file.dataFilesTab.dataAccess.local.tooltip=If this data is locally available to you, this is its file path. +file.dataFilesTab.dataAccess.download.tooltip=Download this data from your preferred mirror by running this command. +file.dataFilesTab.dataAccess.verify.tooltip=This command runs a checksum to verify the integrity of the data you have downloaded. 
+file.dataFilesTab.versions=Versiones +file.dataFilesTab.versions.headers.dataset=Dataset +file.dataFilesTab.versions.headers.summary=Summary +file.dataFilesTab.versions.headers.contributors=Contributors +file.dataFilesTab.versions.headers.published=Published +file.dataFilesTab.versions.viewDiffBtn=Ver Diferencias +file.dataFilesTab.versions.citationMetadata=Metadatos de Cita: +file.dataFilesTab.versions.added=A\u00f1adido/a +file.dataFilesTab.versions.removed=Eliminado/a +file.dataFilesTab.versions.changed=Cambiado/a +file.dataFilesTab.versions.replaced=Replaced +file.dataFilesTab.versions.original=Original +file.dataFilesTab.versions.replacment=Replacement +file.dataFilesTab.versions.additionalCitationMetadata=Metadatos de Cita Adicionales: +file.dataFilesTab.versions.description.draft=Esta es una versi\u00f3n preliminar. +file.dataFilesTab.versions.description.deaccessioned=Dado que la versi\u00f3n anterior se ha retirado, no hay notas disponibles de las diferencias para la versi\u00f3n publicada. +file.dataFilesTab.versions.description.firstPublished=Esta es la primera versi\u00f3n publicada. +file.dataFilesTab.versions.description.deaccessionedReason=Raz\u00f3n para la retirada de Acceso: +file.dataFilesTab.versions.description.beAccessedAt=Ahora se puede acceder al dataset en: +file.dataFilesTab.versions.viewDetails.btn=Ver detalles +file.dataFilesTab.versions.widget.viewMoreInfo=To view more information about the versions of this dataset, and to edit it if this is your dataset, please visit the full version of this dataset at the {2}. +file.deleteDialog.tip=\u00bfEst\u00e1 seguro de que quiere eliminar el dataset?. No podr\u00e1 deshacer la operaci\u00f3n. +file.deleteDialog.header=Eliminar Dataset +file.deleteDraftDialog.tip=\u00bfEst\u00e1 seguro de que quiere eliminar esta versi\u00f3n preliminar? No podr\u00e1 deshacer la operaci\u00f3n. 
+file.deleteDraftDialog.header=Eliminar Versi\u00f3n Preliminar +file.deleteFileDialog.tip=El/Los fichero(s) ser\u00e1(n) borrado(s) cuando pulse el bot\u00f3n Guardar Cambios en la parte inferior de esta p\u00e1gina. +file.deleteFileDialog.immediate=Se borrar\u00e1 el fichero despu\u00e9s de que pulse en el bot\u00f3n Borrar. +file.deleteFileDialog.multiple.immediate=Se borrar\u00e1/n el/los fichero/s despu\u00e9s de que pulse en el bot\u00f3n Borrar. +file.deleteFileDialog.header=Borrar Ficheros +file.deleteFileDialog.failed.tip=Los ficheros no se eliminar\u00e1n de las versiones publicadas previamente en el dataset. +file.deaccessionDialog.tip=Una vez haya retirado este dataset no ser\u00e1 visible para el p\u00fablico. +file.deaccessionDialog.version=Versi\u00f3n +file.deaccessionDialog.reason.question1=\u00bfQu\u00e9 versi\u00f3n(es) quiere retirar? +file.deaccessionDialog.reason.question2=\u00bfCu\u00e1l es la raz\u00f3n para la retirada de acceso? +file.deaccessionDialog.reason.selectItem.identifiable=Hay datos identificables en uno o m\u00e1s ficheros +file.deaccessionDialog.reason.selectItem.beRetracted=El art\u00edculo de investigaci\u00f3n se ha retirado +file.deaccessionDialog.reason.selectItem.beTransferred=Se ha transferido el dataset a otro repositorio +file.deaccessionDialog.reason.selectItem.IRB=Petici\u00f3n IRB +file.deaccessionDialog.reason.selectItem.legalIssue=Cuesti\u00f3n legal o Acuerdo de Uso de Datos +file.deaccessionDialog.reason.selectItem.notValid=No es un dataset v\u00e1lido +file.deaccessionDialog.reason.selectItem.other=Otra (Por favor, indique la raz\u00f3n en el siguiente espacio) +file.deaccessionDialog.enterInfo=Por favor, introduzca informaci\u00f3n adicional sobre la raz\u00f3n para la retirada de acceso. +file.deaccessionDialog.leaveURL=Si es aplicable, por favor, indique una URL desde la que se pueda acceder a este dataset tras la retirada de acceso. 
+file.deaccessionDialog.leaveURL.watermark=Sitio web opcional del dataset, http://... +file.deaccessionDialog.deaccession.tip=\u00bfEst\u00e1 seguro de que quiere realizar la retirada? La(s) versi\u00f3n(es) seleccionada(s) no volver\u00e1(n) a estar disponible(s) para el p\u00fablico. +file.deaccessionDialog.deaccessionDataset.tip=\u00bfEst\u00e1 seguro de que quiere retirar este dataset? No volver\u00e1 a estar disponible para el p\u00fablico. +file.deaccessionDialog.dialog.selectVersion.tip=Por favor, seleccione la(s) versi\u00f3n(ones) que ser\u00e1(n) retirada(s). +file.deaccessionDialog.dialog.selectVersion.header=Por Favor, Seleccionar la(s) Versi\u00f3n(es) +file.deaccessionDialog.dialog.reason.tip=Por Favor, seleccione la raz\u00f3n de la retirada de acceso. +file.deaccessionDialog.dialog.reason.header=Por Favor, Seleccionar una Raz\u00f3n +file.deaccessionDialog.dialog.url.tip=Por Favor, introduzca una URL v\u00e1lida. +file.deaccessionDialog.dialog.url.header=URL Inv\u00e1lida. +file.deaccessionDialog.dialog.textForReason.tip=Por Favor, introduzca el texto de la raz\u00f3n para la retirada de acceso. +file.deaccessionDialog.dialog.textForReason.header=Introduzca informaci\u00f3n adicional +file.deaccessionDialog.dialog.limitChar.tip=El texto para la raz\u00f3n de la retirada de acceso, no puede superar los 1000 caracteres. +file.deaccessionDialog.dialog.limitChar.header=L\u00edmite 1000 caracteres +file.viewDiffDialog.header=Detalles de las Diferencias de Versi\u00f3n +file.viewDiffDialog.dialog.warning=Por favor, seleccione dos versiones para ver sus diferencias. 
+file.viewDiffDialog.version=Versi\u00f3n +file.viewDiffDialog.lastUpdated=\u00daltima Modificaci\u00f3n +file.viewDiffDialog.fileID=ID de Fichero +file.viewDiffDialog.fileName=Nombre +file.viewDiffDialog.fileType=Tipo +file.viewDiffDialog.fileSize=Tama\u00f1o +file.viewDiffDialog.category=Etiqueta(s) +file.viewDiffDialog.description=Descripci\u00f3n +file.viewDiffDialog.fileReplaced=File Replaced +file.viewDiffDialog.filesReplaced=File(s) Replaced +file.viewDiffDialog.files.header=Files +file.viewDiffDialog.msg.draftFound= This is the "DRAFT" version. +file.viewDiffDialog.msg.draftNotFound=The "DRAFT" version was not found. +file.viewDiffDialog.msg.versionFound= This is version "{0}". +file.viewDiffDialog.msg.versionNotFound=Version "{0}" was not found. +file.metadataTip=Truco de Metadatos: Despu\u00e9s de a\u00f1adir el dataset, pulse el bot\u00f3n Editar Dataset para a\u00f1adir m\u00e1s metadatos. +file.addBtn=Guardar Dataset +file.dataset.allFiles=Todos los Ficheros de este Dataset +file.downloadDialog.header=Descargar Fichero +file.downloadDialog.tip=Por favor, confirme y/o complete la informaci\u00f3n necesaria para descargar los ficheros de este dataset. +file.downloadDialog.termsTip=Acepto las Condiciones de Uso. +file.requestAccessTermsDialog.tip=Please confirm and/or complete the information needed below in order to request access to files in this dataset. +file.search.placeholder=Buscar en estos ficheros de datos... +file.results.btn.sort=Ordenar +file.results.btn.sort.option.nameAZ=Nombre (A-Z) +file.results.btn.sort.option.nameZA=Nombre (Z-A) +file.results.btn.sort.option.newest=M\u00e1s Nuevo +file.results.btn.sort.option.oldest=M\u00e1s Antiguo +file.results.btn.sort.option.size=Tama\u00f1o +file.results.btn.sort.option.type=Tipo +file.compute.fileRestricted=File Restricted +file.compute.fileAccessDenied=You cannot compute on this restricted file because you do not have permission to access it. 
+file.configure.Button=Configure +file.configure.launchMessage.details=Please refresh this page once you have finished configuring your +dataset.compute.datasetCompute=Dataset Compute Not Supported +dataset.compute.datasetAccessDenied=You cannot compute on this dataset because you do not have permission to access all of the restricted files. +dataset.compute.datasetComputeDisabled=You cannot compute on this dataset because this functionality is not enabled yet. Please click on a file to access computing features. +# dataset-widgets.xhtml +dataset.widgets.title=Dataset Thumbnail + Widgets +dataset.widgets.notPublished.why.header=Why Use Widgets? +dataset.widgets.notPublished.why.reason1=Increases the web visibility of your data by allowing you to embed your dataverse and datasets into your personal or project website. +dataset.widgets.notPublished.why.reason2=Allows others to browse your dataverse and datasets without leaving your personal or project website. +dataset.widgets.notPublished.how.header=How To Use Widgets +dataset.widgets.notPublished.how.tip1=To use widgets, your dataverse and datasets need to be published. +dataset.widgets.notPublished.how.tip2=After publishing, code will be available on this page for you to copy and add to your personal or project website. +dataset.widgets.notPublished.how.tip3=Do you have an OpenScholar website? If so, learn more about adding the Dataverse widgets to your website here. +dataset.widgets.notPublished.getStarted=To get started, publish your dataset. To learn more about Widgets, visit the Widgets section of the User Guide. +dataset.widgets.editAdvanced=Edit Advanced Options +dataset.widgets.editAdvanced.tip=Advanced Options – Additional options for configuring your widget on your personal or project website. +dataset.widgets.tip=Copy and paste this code into the HTML on your site. To learn more about Widgets, visit the Widgets section of the User Guide. 
+dataset.widgets.citation.txt=Dataset Citation +dataset.widgets.citation.tip=Add a citation for your dataset to your personal or project website. +dataset.widgets.datasetFull.txt=Dataset +dataset.widgets.datasetFull.tip=Add a way for visitors on your website to be able to view your datasets, download files, etc. +dataset.widgets.advanced.popup.header=Widget Advanced Options +dataset.widgets.advanced.prompt=Forward persistent URL's in your dataset citation to your personal website. +dataset.widgets.advanced.url.label=Personal Website URL +dataset.widgets.advanced.url.watermark=http://www.example.com/page-name +dataset.widgets.advanced.invalid.message=Please enter a valid URL +dataset.widgets.advanced.success.message=Successfully updated your Personal Website URL +dataset.widgets.advanced.failure.message=The dataverse Personal Website URL has not been updated. +dataset.thumbnailsAndWidget.breadcrumbs.title=Thumbnail + Widgets +dataset.thumbnailsAndWidget.thumbnails.title=Thumbnail +dataset.thumbnailsAndWidget.widgets.title=Widgets +dataset.thumbnailsAndWidget.thumbnailImage=Thumbnail Image +dataset.thumbnailsAndWidget.thumbnailImage.title=The logo or image file you wish to display as the thumbnail of this dataset. +dataset.thumbnailsAndWidget.thumbnailImage.tip=Supported image types are JPG, TIF, or PNG and should be no larger than {0} KB. The maximum display size for an image file as a dataset thumbnail is 48 pixels wide by 48 pixels high. +dataset.thumbnailsAndWidget.thumbnailImage.default=Default Icon +dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable=Select Available File +dataset.thumbnailsAndWidget.thumbnailImage.selectThumbnail=Select Thumbnail +dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable.title=Select a thumbnail from those available as image data files that belong to your dataset. 
+dataset.thumbnailsAndWidget.thumbnailImage.uploadNew=Upload New File +dataset.thumbnailsAndWidget.thumbnailImage.uploadNew.title=Upload an image file as your dataset thumbnail, which will be stored separately from the data files that belong to your dataset. +dataset.thumbnailsAndWidget.thumbnailImage.upload=Upload Image +dataset.thumbnailsAndWidget.thumbnailImage.upload.invalidMsg=The image could not be uploaded. Please try again with a JPG, TIF, or PNG file. +dataset.thumbnailsAndWidget.success=Dataset thumbnail updated. +dataset.thumbnailsAndWidget.removeThumbnail=Remove Thumbnail +dataset.thumbnailsAndWidget.removeThumbnail.tip=You are only removing this image as the dataset thumbnail, not removing it from your dataset. To do that, go to the Edit Files page. +dataset.thumbnailsAndWidget.availableThumbnails=Available Thumbnails +dataset.thumbnailsAndWidget.availableThumbnails.tip=Select a thumbnail from the data files that belong to your dataset. Continue back to the Thumbnail + Widgets page to save your changes. +# file.xhtml=# file.xhtml +file.share.fileShare=Share File +file.share.fileShare.tip=Share this file on your favorite social media networks. +file.share.fileShare.shareText=View this file. 
+file.title.label=T\u00edtulo +file.citation.label=Cita +file.cite.downloadBtn=Cite Data File +file.general.metadata.label=Metadatos Generales +file.description.label=Descripci\u00f3n +file.tags.label=Etiquetas +file.lastupdated.label=Last Updated +file.DatasetVersion=Version +file.metadataTab.fileMetadata.header=Metadatos del Fichero +file.metadataTab.fileMetadata.persistentid.label=Fichero de datos con el ID Persistente +file.metadataTab.fileMetadata.downloadUrl.label=Download URL +file.metadataTab.fileMetadata.unf.label=UNF +file.metadataTab.fileMetadata.size.label=Tama\u00f1o +file.metadataTab.fileMetadata.type.label=Tipo +file.metadataTab.fileMetadata.description.label=Descripci\u00f3n +file.metadataTab.fileMetadata.publicationDate.label=Publication Date +file.metadataTab.fileMetadata.depositDate.label=Deposit Date +file.metadataTab.fitsMetadata.header=Metadatos FITS +file.metadataTab.provenance.header=Procedencia del Fichero +file.metadataTab.provenance.body=La Informaci\u00f3n sobre la Procedencia del Fichero se a\u00f1adir\u00e1 en una versi\u00f3n posterior... +file.versionDifferences.noChanges=No changes associated with this version +file.versionDifferences.fileNotInVersion=File not included in this version +file.versionDifferences.actionChanged=Changed +file.versionDifferences.actionAdded=Added +file.versionDifferences.actionRemoved=Removed +file.versionDifferences.actionReplaced=Replaced +file.versionDifferences.fileMetadataGroupTitle=File Metadata +file.versionDifferences.fileTagsGroupTitle=File Tags +file.versionDifferences.descriptionDetailTitle=Description +file.versionDifferences.fileNameDetailTitle=File Name +file.versionDifferences.fileAccessTitle=File Access +file.versionDifferences.fileRestricted=Restricted +file.versionDifferences.fileUnrestricted=Unrestricted +file.versionDifferences.fileGroupTitle=File +# File Ingest +ingest.csv.invalidHeader=Invalid header row. One of the cells is empty. 
+ingest.csv.lineMismatch=Mismatch between line counts in first and final passes!, {0} found on first pass, but {1} found on second. +ingest.csv.recordMismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found. +ingest.csv.nullStream=Stream can't be null. +# editdatafile.xhtml +# editFilesFragment.xhtml +file.edit.error.file_exceeds_limit=This file exceeds the size limit. +# File metadata error +file.metadata.datafiletag.not_tabular=You cannot add Tabular Data Tags to a non-tabular file. +# File Edit Success +file.message.editSuccess=This file has been updated. +file.message.deleteSuccess=The file has been deleted. +file.message.replaceSuccess=This file has been replaced. +# File Add/Replace operation messages +file.addreplace.file_size_ok=File size is in range. +file.addreplace.error.file_exceeds_limit=This file size ({0}) exceeds the size limit of {1} bytes. +file.addreplace.error.dataset_is_null=The dataset cannot be null. +file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null. +find.dataset.error.dataset_id_is_null=When accessing a dataset based on Persistent ID, a {0} query parameter must be present. +find.dataset.error.dataset.not.found.persistentId=Dataset with Persistent ID {0} not found. +find.dataset.error.dataset.not.found.id=Dataset with ID {0} not found. +find.dataset.error.dataset.not.found.bad.id=Bad dataset ID number: {0}. +file.addreplace.error.dataset_id_not_found=There was no dataset found for ID: +file.addreplace.error.no_edit_dataset_permission=You do not have permission to edit this dataset. +file.addreplace.error.filename_undetermined=The file name cannot be determined. +file.addreplace.error.file_content_type_undetermined=The file content type cannot be determined. +file.addreplace.error.file_upload_failed=The file upload failed. +file.addreplace.error.duplicate_file=This file already exists in the dataset. 
+file.addreplace.error.existing_file_to_replace_id_is_null=The ID of the existing file to replace must be provided. +file.addreplace.error.existing_file_to_replace_not_found_by_id=Replacement file not found. There was no file found for ID: {0} +file.addreplace.error.existing_file_to_replace_is_null=The file to replace cannot be null. +file.addreplace.error.existing_file_to_replace_not_in_dataset=The file to replace does not belong to this dataset. +file.addreplace.error.existing_file_not_in_latest_published_version=You cannot replace a file that is not in the most recently published dataset. (The file is unpublished or was deleted from a previous version.) +file.addreplace.content_type.header=File Type Different +file.addreplace.error.replace.new_file_has_different_content_type=The original file ({0}) and replacement file ({1}) are different file types. +file.addreplace.error.replace.new_file_same_as_replacement=You cannot replace a file with the exact same file. +file.addreplace.error.unpublished_file_cannot_be_replaced=You cannot replace an unpublished file. Please delete it instead of replacing it. +file.addreplace.error.ingest_create_file_err=There was an error when trying to add the new file. +file.addreplace.error.initial_file_list_empty=An error occurred and the new file was not added. +file.addreplace.error.initial_file_list_more_than_one=You cannot replace a single file with multiple files. The file you uploaded was ingested into multiple files. +file.addreplace.error.final_file_list_empty=There are no files to add. (This error should not happen if steps called in sequence.) +file.addreplace.error.only_replace_operation=This should only be called for file replace operations! +file.addreplace.error.failed_to_remove_old_file_from_dataset=Unable to remove old file from new DatasetVersion. +file.addreplace.error.add.add_file_error=Failed to add file to dataset. 
+file.addreplace.error.phase2_called_early_no_new_files=There was an error saving the dataset - no new files found. +file.addreplace.success.add=File successfully added! +file.addreplace.success.replace=File successfully replaced! +file.addreplace.error.auth=The API key is invalid. +file.addreplace.error.invalid_datafile_tag=Not a valid Tabular Data Tag: +# 500.xhtml=# 500.xhtml +error.500.page.title=500 Internal Server Error +error.500.message=Error Interno del Servidor - Ocurri\u00f3 un error inesperado, no hay disponible m\u00e1s informaci\u00f3n. +# 404.xhtml=# 404.xhtml +error.404.page.title=404 Not Found +error.404.message=Page Not Found - No encontramos la p\u00e1gina que buscas. Para contactar con el soporte, por favor, pulsa en el enlace Soporte. +# 403.xhtml=# 403.xhtml +error.403.page.title=403 Not Authorized +error.403.message=Sin Autorizaci\u00f3n - Usted no est\u00e1 autorizado a ver esta p\u00e1gina. Para contactar con soporte, por favor, pulsa en el enlace Soporte m\u00e1s arriba. +# general error - support message +error.support.message= If you believe this is an error, please contact {0} for assistance. +# citation-frame.xhtml +citationFrame.banner.message=If the site below does not load, the archived data can be found in the {0} {1}. {2} +citationFrame.banner.message.here=here +citationFrame.banner.closeIcon=Close this message, go to dataset +citationFrame.banner.countdownMessage= This message will close in +citationFrame.banner.countdownMessage.seconds=seconds +# Friendly AuthenticationProvider names +authenticationProvider.name.builtin=Dataverse +authenticationProvider.name.null=(provider is unknown) +authenticationProvider.name.github=GitHub +authenticationProvider.name.google=Google +authenticationProvider.name.orcid=ORCiD +authenticationProvider.name.orcid-sandbox=ORCiD Sandbox +authenticationProvider.name.shib=Shibboleth +ingest.csv.invalidHeader=Invalid header row. One of the cells is empty. 
+ingest.csv.lineMismatch=Mismatch between line counts in first and final passes!, {0} found on first pass, but {1} found on second. +ingest.csv.recordMismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found. +ingest.csv.nullStream=Stream can't be null. +Campo obligatorio diff --git a/dataversedock/lang.properties/Bundle_fr.properties b/dataversedock/lang.properties/Bundle_fr.properties new file mode 100644 index 0000000..b756de5 --- /dev/null +++ b/dataversedock/lang.properties/Bundle_fr.properties @@ -0,0 +1,2061 @@ +dataverse=Dataverse +newDataverse=Nouveau dataverse +hostDataverse=Dataverse hte +dataverses=Dataverses +passwd=Mot de passe +dataset=Ensemble de donnes +datasets=Ensembles de donnes +newDataset=Nouvel ensemble de donnes +files=Fichiers +file=Fichier +restricted=En accs rserv +restrictedaccess=Accs rserv +find=Trouver +search=Recherche +unpublished=Non publi +cancel=Annuler +saveChanges=Enregistrer les modifications +acceptTerms=Accepter +submit=Soumettre +signup=S'inscrire +login=Se connecter +email=Courriel +account=Compte +requiredField=Champ obligatoire +new=Nouveau +identifier=Identifiant +description=Description +subject=Sujet +close=Fermer +preview=Aperu +continue=Continuer +name=Nom +institution=tablissement +position=Poste +affiliation=Affiliation +createDataverse=Crer un dataverse +remove=Supprimer +done=Termin +editor=Collaborateur +manager=Gestionnaire +curator=Intendant des donnes +explore=Explorer +download=Tlcharger +deaccession=Retrait +share=Partager +link=Lien +linked=Li +harvested=Moissonn +apply=Appliquer +add=Ajouter +delete=Supprimer +yes=Oui +no=Non +previous=Prcdent +next=Suivant +first=Premier +last=Dernier +more=Plus\u2026 +less=Moins\u2026 +select=Slectionner\u2026 +selectedFiles=Fichiers slectionns +htmlAllowedTitle=Balises HTML permises +htmlAllowedMsg=Ce champ prend seulement en charge certaines balises HTML. +htmlAllowedTags=, ,
                    <blockquote>, <br>, <code>, <del>, <dd>, <dl>, <dt>, <em>, <hr>, <h1>-<h3>, <i>, <img>, <kbd>, <li>, <ol>, <p>, <pre>, <s>, <sup>, <sub>, <strong>, <strike>, <u>, <ul> 
                        + +# dataverse_header.xhtml= + +header.status.header=tat +header.search.title=Chercher dans tous les dataverses\u2026 +header.about= propos +header.support=Soutien +header.guides=Guides +header.guides.user=Guide d'utilisation +header.guides.developer=Guide du dveloppeur +header.guides.installation=Guide d'installation +header.guides.api=Guide API +header.guides.admin=Guide de l'administrateur +header.signUp=S'inscrire +header.logOut=Se dconnecter +header.accountInfo=Renseignements sur le compte +header.dashboard=Tableau de bord +header.user.selectTab.dataRelated=Mes donnes +header.user.selectTab.notifications=Avis +header.user.selectTab.accountInfo=Renseignements sur le compte +header.user.selectTab.groupsAndRoles=Groupes + rles +header.user.selectTab.apiToken=Jeton API + +# dataverse_template.xhtml= + +head.meta.description=Dataverse est un logiciel libre pour le partage, la citation et l'archivage de donnes. Dataverse fournit aux gestionnaires de donnes une infrastructure solide pour hberger et archiver des donnes et offre aux chercheurs une solution pour partager facilement leurs donnes et en obtenir le crdit. +body.skip=Passer au contenu principal + +# dataverse_footer.xhtml= + +footer.codeAvailable=Code disponible +footer.dataverseOnGitHub=Dataverse sur GitHub +footer.dataverseProjectOn=Projet Dataverse sur +footer.Twitter=Twitter +footer.dataScienceIQSS=Dvelopp au Institute for Quantitative Social Science +footer.copyright=Droits rservs © {0} +footer.widget.datastored=Les donnes sont archives par\u00A0: {0}. +footer.widget.login=Se connecter +footer.privacyPolicy=Politique de protection de la vie prive +footer.poweredby=Fourni par +footer.dataverseProject=Le projet Dataverse + +# messages.xhtml + +messages.error=Erreur +messages.success=Opration russie! +messages.info=Info +messages.validation=Erreur de validation +messages.validation.msg=Des champs requis sont manquants ou encore une erreur de validation est survenue. 
Faites dfiler vers le bas pour voir les dtails. + +# contactFormFragment.xhtml= + + +contact.header=Communiquer avec le service de soutien Scholars Portal +contact.dataverse.header=Communiquer avec la personne-ressource pour ce dataverse +contact.dataset.header=Communiquer avec la personne-ressource pour cet ensemble de donnes +contact.to=Destinataire +contact.support=Service de soutien de Scholars Portal + +contact.from=Expditeur +contact.from.required=L'adresse courriel de l'utilisateur est requise. +contact.from.invalid=L'adresse courriel est invalide. +contact.subject=Objet +contact.subject.required=Il faut indiquer un objet +contact.subject.selectTab.top=Slectionner l'objet\u2026 +contact.subject.selectTab.support=Question de soutien +contact.subject.selectTab.dataIssue=Problme concernant les donnes +contact.msg=Message +contact.msg.required=Un message doit tre rdig. +contact.send=Envoyer le message +contact.question=Veuillez remplir cet espace afin de prouver que vous n'tes pas un robot. +contact.sum.required=Vous devez entrer une valeur. +contact.sum.invalid=Somme incorrecte, veuillez ressayer. +contact.sum.converterMessage=Veuillez entrer un chiffre. +contact.contact=Personne-ressource + +# dataverseuser.xhtml= + +account.info=Renseignements sur le compte +account.edit=Modifier le compte +apiTaken=Jeton API +user.isShibUser=Les renseignements sur le compte ne peuvent tre modifis lorsque connect via un compte institutionnel. +user.helpShibUserMigrateOffShibBeforeLink=Vous quittez votre tablissement? Prire de contacter +user.helpShibUserMigrateOffShibAfterLink=pour obtenir de l'aide. +user.helpOAuthBeforeLink=Votre compte Dataverse utilise {0} pour pouvoir se connecter. Si vous souhaitez modifier vos modes de connexion, prire de contacter +user.helpOAuthAfterLink=pour obtenir du soutien. 
+user.lostPasswdTip=Si vous avez perdu ou oubli votre mot de passe, indiquez votre nom d'utilisateur ou votre adresse courriel dans l'espace ci-dessous et cliquez sur \u00A0Soumettre\u00A0. Nous vous enverrons votre nouveau mot de passe par courriel. +user.dataRelatedToMe=Mes donnes +wasCreatedIn=a t cr dans +wasCreatedTo=a t ajout +wasSubmittedForReview=a t soumis pour fin d'examen en vue d'tre publi dans +wasPublished=a t publi dans +wasReturnedByReviewer=a t retourn par l'intendant des donnes de +toReview=N'oubliez pas de les publier ou de les renvoyer au collaborateur! +worldMap.added=Les donnes d'une couche WorldMap ont t ajoutes l'ensemble de donnes. +# Bundle file editors, please note that "notification.welcome" is used in a unit test.= +notification.welcome=Bienvenue dans le {0}! Commencez ds maintenant en ajoutant ou encore en recherchant des donnes. Des questions? Consultez le\u00A0: {1}. Vous voulez faire l'essai des composantes de Dataverse? Essayez notre {2}. N'oubliez pas de vrifier que vous avez bien reu votre courriel d'invitation afin que nous puissions valider votre adresse. +notification.demoSite=Site de dmonstration +notification.requestFileAccess=Demande d'accs pour l'ensemble de donnes\u00A0: {0}. +notification.grantFileAccess=Accs accord pour les fichiers de l'ensemble de donnes\u00A0: {0}. +notification.rejectFileAccess=Demande d'accs refuse pour les fichiers de l'ensemble de donnes\u00A0: {0}. +notification.createDataverse={0} a t cr dans {1}. Pour savoir ce que vous pouvez faire avec votre dataverse, consultez le {2}. +notification.dataverse.management.title=Administration de Dataverse - Guide d'utilisation Dataverse +notification.createDataset={0} a t cr dans {1}. Pour savoir ce que vous pouvez faire avec votre ensemble de donnes, consultez le {2}. 
+notification.dataset.management.title=Administration des ensembles de donnes - Guide d'utilisation pour les ensembles de donnes +notification.wasSubmittedForReview={0} a t soumis pour vrification avant publication dans {1}. N'oubliez pas de le publier ou de le renvoyer au collaborateur\! +notification.wasReturnedByReviewer={0} a t retourn par l'intendant des donnes de {1}. +notification.wasPublished={0} a t publi dans {1}. +notification.worldMap.added={0}, cet ensemble de donnes dispose maintenant d'une couche de donnes WorldMap. +notification.maplayer.deletefailed=Impossible de supprimer la couche cartographique associe au fichier accs restreint {0} provenant de WorldMap. Essayez de nouveau, ou contactez le soutien WorldMap et/ou Dataverse. (Ensemble de donnes: {1}) +notification.generic.objectDeleted=Le dataverse, l'ensemble de donnes ou le fichier vis par cet avis a t supprim. +notification.access.granted.dataverse=Le rle {0} vous a t accord pour {1}. +notification.access.granted.dataset=Le rle {0} vous a t accord pour {1}. +notification.access.granted.datafile=Le rle {0} vous a t accord pour un fichier dans {1}. +notification.access.granted.fileDownloader.additionalDataverse={0} Vous avez maintenant accs tous les fichiers en accs rserv ou non rserv publis dans ce dataverse. +notification.access.granted.fileDownloader.additionalDataset={0} Vous avez maintenant accs tous les fichiers rservs ou non rservs qui ont t publis dans cet ensemble de donnes. +notification.access.revoked.dataverse=Votre rle dans {0} a t retir. +notification.access.revoked.dataset=Votre rle dans {0} a t retir. +notification.access.revoked.datafile=Votre rle dans {0} a t retir. +notification.checksumfail=Votre tlchargement dans l'ensemble de donnes "{0}" a chou la validation de la somme de contrle. +notification.import.filesystem={2}, l'ensemble de donnes a import des fichiers partir du systme de fichiers par l'entremise d'un traitement en lot. 
+notification.import.checksum={1}, l'ensemble de donnes a ajout les sommes de contle des fichiers par l'entremise d'un traitement en lot. +removeNotification=Supprimer l'avis +groupAndRoles.manageTips=Vous pouvez grer tous les groupes dont vous tes membre et les rles qui vous ont t confis et y avoir accs. +user.signup.tip=Pourquoi se crer un compte Dataverse? De faon pouvoir crer votre propre dataverse, le personnaliser, y ajouter des ensembles de donnes, ou encore pour demander l'accs des fichiers accs rserv. +user.signup.otherLogInOptions.tip=Voir les autres options de connexion. +user.username.illegal.tip=Votre nom d'utilisateur doit compter entre 2 et 60\u00A0caractres et vous pouvez utiliser les lettres a z, les chiffres 0 9 et le caractre soulign \u00A0_\u00A0. +user.username=Nom d'utilisateur +user.username.taken=Ce nom d'utilisateur est dj pris. +user.username.invalid=Ce nom d'utilisateur contient un caractre invalide ou enfreint la limite de longueur (2 60 caractres). +user.username.valid=Crez un nom d'utilisateur valide de 2 60 caractres contenant des lettres (a-Z), des chiffres (0-9), des tirets (-), des traits de soulignements (_) et des points (.). +user.noPasswd=Aucun mot de passe +user.currentPasswd=Mot de passe actuel +user.currentPasswd.tip=Veuillez entrer le mot de passe actuel pour ce compte. +user.passwd.illegal.tip=Le mot de passe doit compter au moins 6 caractres, y compris une lettre et un chiffre, et des caractres spciaux peuvent tre utiliss. +user.rePasswd=Confirmer le mot de passe +user.rePasswd.tip=Veuillez confirmer le mot de passe indiqu ci-dessus. +user.firstName=Prnom +user.firstName.tip=Le prnom ou le nom que vous utiliserez pour ce compte. +user.lastName=Nom +user.lastName.tip=Le nom que vous utiliserez pour ce compte. +user.email.tip=Une adresse courriel valide permettant de communiquer avec vous. +user.email.taken=Cette adresse courriel est dj prise. +user.affiliation.tip=L'organisation avec laquelle vous tes affili(e). 
+user.position=Poste +user.position.tip=Votre rle ou titre au sein de l'organisation avec laquelle vous tes affili(e), par exemple\u00A0: employ(e), membre du corps professoral, tudiant(e), etc. +user.acccountterms=Conditions gnrales d'utilisation +user.acccountterms.tip=Les conditions d'utilisation de l'application et des services. +user.acccountterms.required=Veuillez cocher la case pour indiquer que vous acceptez les conditions gnrales d'utilisation. +user.acccountterms.iagree=J'ai lu et j'accepte les conditions gnrales d'utilisation de Dataverse susmentionnes. +user.createBtn=Crer un compte +user.updatePassword.welcome=Bienvenue dans Dataverse, {1} +user.updatePassword.warning=Les exigences relatives au mot de passe et les conditions gnrales d'utilisation ont t mises jour lors de la publication de notre nouvelle version de Dataverse 4.0. Comme c'est la premire fois que vous utilisez Dataverse depuis la mise jour, vous devez crer un nouveau mot de passe et accepter les nouvelles conditions gnrales d'utilisation. +user.updatePassword.password=Choisir un mot de passe d'au minimum six caractres comportant au moins une lettre et un chiffre. +authenticationProvidersAvailable.tip={0}Il n'y a aucun systme d'authentification actif{1}Si vous tes administrateur systme, veuillez en autoriser un au moyen de l'API.{2}Si vous n'tes pas administrateur systme, veuillez communiquer avec celui de votre tablissement. + +#loginpage.xhtml + +login.System=Systme d'authentification +login.forgot.text=Mot de passe oubli? +login.builtin=Compte Dataverse +login.institution=Compte institutionnel +login.institution.blurb=Connectez-vous ou inscrivez-vous avec votre compte institutionnel — en apprendre davantage. +login.institution.support.beforeLink=Vous quittez votre tablissement? Prire de contacter +login.institution.support.afterLink=pour obtenir de l'aide. 
+login.builtin.credential.usernameOrEmail=Nom d'utilisateur/courriel +login.builtin.credential.password=Mot de passe +login.builtin.invalidUsernameEmailOrPassword=Le nom d'utilisateur, le courriel ou le mot de passe indiqu n'est pas valide. Avez-vous besoin d'aide pour accder votre compte? +# how do we exercise login.error? Via a password upgrade failure? See https://github.com/IQSS/dataverse/pull/2922= +login.error=Une erreur s'est produite au moment de la validation du nom d'utilisateur ou du mot de passe. Veuillez essayer nouveau. Si le problme persiste, communiquez avec un administrateur. +user.error.cannotChangePassword=Dsol, votre mot de passe ne peut pas tre modifi. Veuillez contacter votre administrateur systme. +user.error.wrongPassword=Dsol, mot de passe erronn. +login.button=Connectez-vous avec {0} +# authentication providers +auth.providers.title=Autres options +auth.providers.tip=Vous pouvez convertir un compte Dataverse pour utiliser l'une des options ci-dessus. En apprendre davantage. +auth.providers.title.builtin=Nom d'utilisateur/Courriel +auth.providers.title.shib=Votre tablissement +auth.providers.title.orcid=ORCID +auth.providers.title.google=Google +auth.providers.title.github=GitHub +auth.providers.blurb=Connectez-vous ou inscrivez-vous avec votre compte {0} — en apprendre davantage. Vous prouvez des problmes? Veuillez contacter {3} pour obtenir de l'aide. +auth.providers.persistentUserIdName.orcid=Identifiant ORCID +auth.providers.persistentUserIdName.github=Identifiant GitHub +auth.providers.persistentUserIdTooltip.orcid=ORCID fournit un identifiant numrique prenne qui vous distingue des autres chercheurs. +auth.providers.persistentUserIdTooltip.github=GitHub attribue un identifiant unique chaque utilisateur. + +#confirmemail.xhtml= +confirmEmail.pageTitle=Validation du courriel +confirmEmail.submitRequest=Valider le courriel +confirmEmail.submitRequest.success=Un courriel de validation a t envoy {0}. 
Veuillez noter que le lien de validation expirera aprs un dlai de {1}. +confirmEmail.details.success=L'adresse courriel est bien valide! +confirmEmail.details.failure=Nous n'avons pas t en mesure de valider votre adresse courriel. Merci de cliquer sur le bouton Valider le courriel depuis la page comportant les renseignements sur votre compte. +confirmEmail.details.goToAccountPageButton=Aller la page comportant les renseignements du compte +confirmEmail.notVerified=Non valid +confirmEmail.verified=Valid + +#shib.xhtml= +shib.btn.convertAccount=Convertir le compte +shib.btn.createAccount=Crer le compte +shib.askToConvert=Dsirez-vous convertir votre compte Dataverse de faon utiliser dornavant vos informations de connexion institutionnelle afin de vous connecter? +# Bundle file editors, please note that "shib.welcomeExistingUserMessage" is used in a unit test= +shib.welcomeExistingUserMessage=Vos informations de connection institutionnelle pour {0} comprennent une adresse courriel dj utilise pour un compte Dataverse existant. En entrant ici-bas votre mot de passe Dataverse actuel, votre compte Dataverse pourra tre converti de faon ce que vous puissiez dornavant utiliser votre compte institutionnel. Suite cette conversion, vous n'aurez plus qu' utiliser votre compte institutionnel pour pouvoir vous connecter. +# Bundle file editors, please note that "shib.welcomeExistingUserMessageDefaultInstitution" is used in a unit test= +shib.welcomeExistingUserMessageDefaultInstitution=votre tablissement +shib.dataverseUsername=Nom d'utilisateur Dataverse +shib.currentDataversePassword=Mot de passe Dataverse actuel +shib.accountInformation=Renseignements sur le compte +shib.offerToCreateNewAccount=Cette information est fournie par votre tablissement et sera employe pour crer votre compte Dataverse. +shib.passwordRejected=Erreur de validation - Votre compte peut uniquement tre converti si vous indiquez le bon mot de passe pour votre compte existant. 
+ +# oauth2/firstLogin.xhtml +oauth2.btn.convertAccount=Convertir le compte existant +oauth2.btn.createAccount=Crer un nouveau compte +oauth2.askToConvert=Dsirez-vous convertir votre compte Dataverse de faon toujours utiliser votre compte institutionnel? +oauth2.welcomeExistingUserMessage=Votre compte institutionnel {0} correspond une adresse courriel dj utilise pour un compte Dataverse. En entrant votre mot de passe actuel de Dataverse ci-dessous, votre compte Dataverse existant peut tre converti pour utiliser votre compte institutionnel la place. Suite la conversion vous n'aurez plus qu' utiliser votre compte institutionnel. +oauth2.welcomeExistingUserMessageDefaultInstitution=votre tablissement +oauth2.dataverseUsername=Nom d'utilisateur Dataverse +oauth2.currentDataversePassword=Mot de passe Dataverse actuel +oauth2.chooseUsername=Nom d'utilisateur\u00A0: +oauth2.passwordRejected=Erreur de validation - Nom d'utilisateur ou mot de passe incorrect. +# oauth2.newAccount.title=Cration de compte +oauth2.newAccount.welcomeWithName=Bienvenue dans Dataverse, {0} +oauth2.newAccount.welcomeNoName=Bienvenue dans Dataverse +# oauth2.newAccount.email=Courriel +# oauth2.newAccount.email.tip=Dataverse utilise ce courriel pour vous informer des problmes lis vos donnes. +oauth2.newAccount.suggestedEmails=Adresses de courriel suggres\u00A0: +oauth2.newAccount.username=Nom d'utilisateur +oauth2.newAccount.username.tip=Ce nom d'utilisateur sera votre identifiant unique en tant qu'utilisateur Dataverse. +oauth2.newAccount.explanation=Cette information est fournie par {0} et sera utilise pour crer votre compte {1}. Pour vous connecter nouveau, vous devrez utiliser l'option de connexion {0}. +oauth2.newAccount.suggestConvertInsteadOfCreate=Si vous avez dj un compte {0}, vous devrez convertir votre compte. 
+# oauth2.newAccount.tabs.convertAccount=Convertir un compte existant +oauth2.newAccount.buttons.convertNewAccount=Convertir un compte +oauth2.newAccount.emailTaken=Adresse courriel dj prise. Envisagez plutt de de fusionner le compte correspondant. +oauth2.newAccount.emailOk=Adresse courriel valide. +oauth2.newAccount.emailInvalid=Adresse courriel non valide. +# oauth2.newAccount.usernameTaken=Nom d'utilisateur dj pris. +# oauth2.newAccount.usernameOk=Nom d'utilisateur valid. + +# oauth2/convert.xhtml +# oauth2.convertAccount.title=Conversion de compte +oauth2.convertAccount.explanation=Entrez votre nom d'utilisateur {0} ou encore votre nom d'utilisateur et votre mot de passe pour convertir votre compte l'option de connexion {1}. En apprendre davantage propos de la conversion de votre compte. +oauth2.convertAccount.username=Nom d'utilisateur existant +oauth2.convertAccount.password=Mot de passe +oauth2.convertAccount.authenticationFailed=Authentification choue - Nom d'utilisateur ou mot de passe incorrect. +oauth2.convertAccount.buttonTitle=Convertir un compte +oauth2.convertAccount.success=Votre compte Dataverse est maintenant associ votre compte {0}. + +# oauth2/callback.xhtml +oauth2.callback.page.title=Rappel OAuth +oauth2.callback.message=Erreur OAuth2 - Dsol, le processus d'identification n'a pas russi. + +# tab on dataverseuser.xhtml= +apitoken.title=Jeton API +apitoken.message=Votre jeton API sera affich ci-aprs une fois qu'il aura t cr. Consultez notre {0}guide API{1} pour obtenir plus de dtails sur comment utiliser votre jeton API avec les API de Dataverse +apitoken.notFound=Le jeton API pour {0} n'a pas t cr. 
+apitoken.generateBtn=Crer le jeton +apitoken.regenerateBtn=Crer de nouveau le jeton + +#dashboard.xhtml= +dashboard.title=Tableau de bord +dashboard.card.harvestingclients.header=Clients de moissonnage +dashboard.card.harvestingclients.btn.manage=Gestion des clients +dashboard.card.harvestingclients.clients={0, choice, 0#Clients|1#Client|2#Clients} +dashboard.card.harvestingclients.datasets={0, choice, 0#Ensembles de donnes|1#Ensemble de donnest|2#Ensembles de donnes} +dashboard.card.harvestingserver.header=Serveur de moissonnage +dashboard.card.harvestingserver.enabled=Serveur OAI activ +dashboard.card.harvestingserver.disabled=Serveur OAI dsactiv +dashboard.card.harvestingserver.status=Statut +dashboard.card.harvestingserver.sets={0, choice, 0#Ensembles|1#Ensemble|2#Ensembles} +dashboard.card.harvestingserver.btn.manage=Gestion du serveur +dashboard.card.metadataexport.header=Exportation des mtadonnes +dashboard.card.metadataexport.message=L'exportation des mtadonnes de l'ensemble de donnes n'est disponible que via l'API de {0}. Pour en savoir davantage, consultez le {1}Guide API{2} du {0}. + +#harvestclients.xhtml= +harvestclients.title=Administration du moissonnage de clients +harvestclients.toptip=- Le moissonnage peut tre planifi pour s'excuter selon un horaire spcifique ou la demande. Le moissonnage peut tre lanc ici ou partir de l'API REST. + +harvestclients.noClients.label=Aucun client n'est configur. +harvestclients.noClients.why.header=Qu'est-ce que le moissonnage? +harvestclients.noClients.why.reason1=Le moissonnage consiste changer des mtadonnes avec d'autres dpts. En tant que client de moissonnage, votre Dataverse peut recueillir les mtadonnes de notices provenant de sources distantes. Il peut s'agir d'autres instances de Dataverse, ou encore de dpts compatibles avec le protocole OAI-PMH, soit le protocole standard de moissonnage. +harvestclients.noClients.why.reason2=Les notices de mtadonnes moissonnes sont interrogeables par les usagers. 
En cliquant sur un ensemble de donnes moissonn dans la liste des rsultats de recherche, l'usager peut accder au dpt d'origine. Les ensembles de donnes moissonns ne peuvent cependant pas tre modifis dans votre instance de Dataverse. +harvestclients.noClients.how.header=Comment effectuer le moissonnage +harvestclients.noClients.how.tip1=Afin de pouvoir moissonner des mtadonnes, un client de moissonnage doit tre dfini et paramtr pour chacun des dpts distants. Veuillez noter que pour dfinir un client, vous devrez slectionner un dataverse local dj existant, lequel hbergera les ensembles de donnes moissonns. + +harvestclients.noClients.how.tip2=Les notices rcoltes peuvent tre synchronises avec le dpt d'origine l'aide de mises jour incrmentielles programmes, par exemple, quotidiennes ou hebdomadaires. Alternativement, les moissonnages peuvent tre excuts la demande, partir de cette page ou via l'API REST. +harvestclients.noClients.getStarted=Pour commencer, cliquez sur le bouton Ajouter un client ci-dessus. Pour en savoir davantage sur le moissonnage, consultez la section moissonnage du guide d'utilisation + +harvestclients.btn.add=Ajouter un client +harvestclients.tab.header.name=Alias +harvestclients.tab.header.url=Adresse URL +harvestclients.tab.header.lastrun=Dernire excution +harvestclients.tab.header.lastresults=Derniers rsultats +harvestclients.tab.header.action=Oprations +harvestclients.tab.header.action.btn.run=Lancer le moissonnage +harvestclients.tab.header.action.btn.edit=Modifier +harvestclients.tab.header.action.btn.delete=Supprimer +harvestclients.tab.header.action.btn.delete.dialog.header=Supprimer le client de moissonnage +harvestclients.tab.header.action.btn.delete.dialog.warning=Voulez-vous vraiment supprimer le client de moissonnage "{0}"? La suppression du client supprimera tous les jeux de donnes rcolts partir de ce serveur distant. 
+harvestclients.tab.header.action.btn.delete.dialog.tip=Veuillez noter que cette opration peut prendre un certain temps effectuer en fonction du nombre d'ensembles de donnes rcolts. +harvestclients.tab.header.action.delete.infomessage=La suppression du client de moissonnage est lance. Notez que cela peut prendre un certain temps en fonction de la quantit de contenu rcolt. + +harvestclients.actions.runharvest.success=Lancement russi d'un moissonnage asynchrone pour le client "{0}". Veuillez recharger la page pour vrifier les rsultats de la rcolte. + +harvestclients.newClientDialog.step1=tape 1 de 4 - Renseignements au sujet du client +harvestclients.newClientDialog.title.new=Dfinir un client de moissonnage +harvestclients.newClientDialog.help=Configurer un client pour moissonner le contenu d'un serveur distant +harvestclients.newClientDialog.nickname=Alias +harvestclients.newClientDialog.nickname.helptext=Doit tre compos de lettres, de chiffres, de traits de soulignement (_) et de tirets (-). +harvestclients.newClientDialog.nickname.required=L'alias du client ne peut pas tre vide! +harvestclients.newClientDialog.nickname.invalid=L'alias du client ne peut contenir que des lettres, des chiffres, des traits de soulignement (_), des tirets (-) , et ne peut excder 30 caractres. +harvestclients.newClientDialog.nickname.alreadyused=Cet alias est dj utilis. + +harvestclients.newClientDialog.type=Protocole du serveur +harvestclients.newClientDialog.type.helptext=Seul le protocole serveur OAI est actuellement pris en charge. +harvestclients.newClientDialog.type.OAI=OAI +harvestclients.newClientDialog.type.Nesstar=Nesstar + +harvestclients.newClientDialog.url=URL du serveur +harvestclients.newClientDialog.url.tip=URL d'une source moissonne. +harvestclients.newClientDialog.url.watermark=URL du serveur moissonn distant, http://\u2026 +harvestclients.newClientDialog.url.helptext.notvalidated=URL d'une source moissonne. 
Une fois le bouton Suivant cliqu, nous tenterons d'tablir une connexion avec le serveur afin de vrifier qu'il fonctionne bien et obtenir des informations supplmentaires sur ses caractristiques. +harvestclients.newClientDialog.url.required=Une adresse valide de serveur moissonner est requise. +harvestclients.newClientDialog.url.invalid=URL non valide. Impossible d'tablir une connexion et recevoir une rponse valide du serveur. +harvestclients.newClientDialog.url.noresponse=Impossible d'tablir la connexion avec le serveur. +harvestclients.newClientDialog.url.badresponse=Rponse non valide de la part du serveur. + +harvestclients.newClientDialog.dataverse=Dataverse local +harvestclients.newClientDialog.dataverse.tip=Dataverse qui hbergera les ensembles de donnes rcolts partir de cette ressource distante. +harvestclients.newClientDialog.dataverse.menu.enterName=Saisir l'alias du dataverse +harvestclients.newClientDialog.dataverse.menu.header=Nom du dataverse (affiliation), alias +harvestclients.newClientDialog.dataverse.menu.invalidMsg=Aucun rsultat +harvestclients.newClientDialog.dataverse.required=Vous devez slectionner un dataverse existant pour ce client de moissonnage. + +harvestclients.newClientDialog.step2=tape 2 de 4 - Format +harvestclients.newClientDialog.oaiSets=Ensemble OAI +harvestclients.newClientDialog.oaiSets.tip=Ensembles moissonnables offerts par ce serveur OAI. +harvestclients.newClientDialog.oaiSets.noset=Aucun +harvestclients.newClientDialog.oaiSets.helptext=En slectionnant Aucun le moissonnage se fera sur l'ensemble par dfaut dfini par le serveur. Frquemment il s'agit de l'entiret du contenu de tous les sous-ensembles. +harvestclients.newClientDialog.oaiSets.helptext.noset=Ce serveur OAI ne prend pas en charge les ensembles slectionns. L'ensemble du contenu propos par le serveur sera moissonn. 
+ +harvestclients.newClientDialog.oaiMetadataFormat=Format des mtadonnes +harvestclients.newClientDialog.oaiMetadataFormat.tip=Formats de mtadonnes offerts par le serveur distant. +harvestclients.newClientDialog.oaiMetadataFormat.required=Veuillez slectionner le format de mtadonnes utiliser pour le moissonnage de ce dpt. + +harvestclients.newClientDialog.step3=tape 3 de 4 - Planifier +harvestclients.newClientDialog.schedule=Priodicit +harvestclients.newClientDialog.schedule.tip=Programmer le moissonnage pour qu'il s'excute automatiquement de faon quotidienne ou hebdomadaire. +harvestclients.newClientDialog.schedule.time.none.helptext=Ne pas spcifier de priodicit de moissonnage de sorte que l'excution se fera sur demande seulement. +harvestclients.newClientDialog.schedule.none=Aucun +harvestclients.newClientDialog.schedule.daily=Quotidiennement +harvestclients.newClientDialog.schedule.weekly=Hebdomadairement +harvestclients.newClientDialog.schedule.time=Horaire +harvestclients.newClientDialog.schedule.day=Jour +harvestclients.newClientDialog.schedule.time.am=a.m. +harvestclients.newClientDialog.schedule.time.pm=p.m. +harvestclients.newClientDialog.schedule.time.helptext=L'horaire programm se rfre votre heure locale. +harvestclients.newClientDialog.btn.create=Crer un client +harvestclients.newClientDialog.success=Le client de moissonnage "{0}" a bien t cr. + +harvestclients.newClientDialog.step4=tape 4 de 4 - Affichage +harvestclients.newClientDialog.harvestingStyle=Type de dpt +harvestclients.newClientDialog.harvestingStyle.tip=Type du dpt distant. +harvestclients.newClientDialog.harvestingStyle.helptext=Slectionnez le type de dpt qui dcrit le mieux ce serveur distant afin d'appliquer correctement les rgles de formatage et de style aux mtadonnes rcoltes lors de leur affichage dans les rsultats de recherche. 
Notez qu'une slection incorrecte du type de dpt distant peut entraner l'affichage incomplet des entres dans les rsultats de recherche causant ainsi une impossibilit de rediriger l'utilisateur vers le dpt source des donnes. + +harvestclients.viewEditDialog.title=Modifier le client de moissonnage +harvestclients.viewEditDialog.archiveUrl=URL du dpt +harvestclients.viewEditDialog.archiveUrl.tip=L'URL du dpt qui fournit les donnes moissonnes par ce client, laquelle est utilise dans les rsultats de recherche pour les liens vers les sources originales du contenu moissonn. +harvestclients.viewEditDialog.archiveUrl.helptext=Modifier si cet URL est diffrent de l'URL du serveur. +harvestclients.viewEditDialog.archiveDescription=Description du dpt +harvestclients.viewEditDialog.archiveDescription.tip=Description du dpt source du contenu moissonn et affich dans les rsultats de recherche. +harvestclients.viewEditDialog.archiveDescription.default.generic=Cet ensemble de donnes est moissonn auprs de nos partenaires. En cliquant sur le lien, vous accdez directement au dpt source des donnes. + +harvestclients.viewEditDialog.btn.save=Sauvegarder les modifications +harvestclients.newClientDialog.title.edit=Modifier le groupe {0} + +#harvestset.xhtml= +harvestserver.title=Administration du serveur de moissonnage +harvestserver.toptip=- Dfinir les collections d'ensembles de donnes locaux qui seront disponibles pour le moissonnage par les clients distants. +harvestserver.service.label=Serveur OAI +harvestserver.service.enabled=Activ +harvestserver.service.disabled=Dsactiv +harvestserver.service.disabled.msg=Le serveur de moissonnage est actuellement dsactiv. +harvestserver.service.empty=Aucun lot n'est configur. +harvestserver.service.enable.success=Le service OAI a bien t activ. + +harvestserver.noSets.why.header=Qu'est-ce qu'un serveur de moissonnage? +harvestserver.noSets.why.reason1=Le moissonnage consiste changer des mtadonnes avec d'autres dpts. 
En tant que serveur de moissonnage, votre dataverse peut rendre disponible certains ensembles de donnes locaux des clients de moissonnage distants. Il peut s'agir d'autres instances de Dataverse, ou encore de clients compatibles avec le protocole de moissonnage OAI-PMH. +harvestserver.noSets.why.reason2=Seuls les ensembles de donnes publis et non restreints de votre Dataverse peuvent tre moissonns. Les clients distants maintiennent normalement leurs enregistrements synchroniss grce des mises jour incrmentielles programmes, quotidiennes ou hebdomadaires, rduisant ainsi la charge sur votre serveur. Notez que seules les mtadonnes sont moissonnes. Les moissonneurs distants ne tentent gnralement pas de tlcharger eux-mmes les fichiers de donnes. + +harvestserver.noSets.how.header=Comment activer un serveur de moissonnage? +harvestserver.noSets.how.tip1=Le serveur de moissonnage peut tre activ ou dsactiv partir de cette page. +harvestserver.noSets.how.tip2=Une fois le service activ, vous pouvez dfinir des collections d'ensembles de donnes locaux qui seront disponibles pour les moissonneurs distants sous Ensembles OAI. Les ensembles sont dfinis par des requtes de recherche (par exemple, authorName:king; ou parentId:1234 - pour slectionner tous les ensembles de donnes appartenant au dataverse spcifi; ou dsPersistentId: "doi:1234/" pour slectionner tous les ensembles de donnes avec l'identifiant perenne spcifi). Consultez la section sur l'API de recherche du guide d'utilisation de Dataverse pour plus d'informations sur les requtes de recherche. + +harvestserver.noSets.getStarted=Pour commencer, activez le serveur OAI et cliquez sur le bouton Ajouter un ensemble (set) . Pour en savoir plus sur le moissonnage, consultez la section moissonnage du guide d'utilisation. 
+ +harvestserver.btn.add=Ajouter un ensemble (set) +harvestserver.tab.header.spec=setSpec OAI (identifiant OAI de l'ensemble) +harvestserver.tab.header.description=Description +harvestserver.tab.header.definition=Dfinition de la requte +harvestserver.tab.header.stats=Ensembles de donnes +harvestserver.tab.col.stats.empty=Aucun enregistrement (ensemble vide) +harvestserver.tab.col.stats.results={0} {0, choice, 0#Ensembles de donnes|1#Ensemble de donnes|2#Ensembles de donnes} ({1} {1, choice, 0#enregistrements|1#enregistrement|2#enregistrements} export(s), {2} marqu(s) comme supprim(s)) +harvestserver.tab.header.action=Oprations +harvestserver.tab.header.action.btn.export=Lancer l'exportation +harvestserver.actions.runreexport.success=La tche asynchrone de rexportation de l'ensemble OAI "{0}" a bien t lance (veuillez recharger la page pour suivre la progression de l'exportation). +harvestserver.tab.header.action.btn.edit=Modifier +harvestserver.tab.header.action.btn.delete=Supprimer +harvestserver.tab.header.action.btn.delete.dialog.header=Supprimer l'ensemble moissonner +harvestserver.tab.header.action.btn.delete.dialog.tip=Voulez-vous vraiment supprimer l'ensemble OAI "{0}"? Vous ne pouvez pas annuler une suppression! +harvestserver.tab.header.action.delete.infomessage=L'ensemble moissonner est supprim. (Ceci peut prendre un certain temps) + +harvestserver.newSetDialog.title.new=Dfinir un ensemble moissonner +harvestserver.newSetDialog.help=Dfinir une collection d'ensembles de donnes locaux qui seront disponibles pour le moissonnage par les clients distants. + +harvestserver.newSetDialog.setspec=Nom/setSpec OAI +harvestserver.newSetDialog.setspec.tip=Un nom unique (OAI setSpec) identifiant cet ensemble. +harvestserver.newSetDialog.setspec.helptext=Se compose de lettres, de chiffres, de traits de soulignement (_) et de tirets (-). +harvestserver.editSetDialog.setspec.helptext=Le nom ne peut pas tre modifi une fois l'ensemble cr. 
+harvestserver.newSetDialog.setspec.required=Le nom (setSpec OAI) ne peut tre vide! +harvestserver.newSetDialog.setspec.invalid=Le nom (setSpec OAI) ne peut contenir que des lettres, des chiffres, des traits de soulignement (_) et des tirets (-). +harvestserver.newSetDialog.setspec.alreadyused=Ce nom d'ensemble (setSpec OAI) est dj utilis. + +harvestserver.newSetDialog.setdescription=Description +harvestserver.newSetDialog.setdescription.tip=Fournir une brve description de cet ensemble OAI. +harvestserver.newSetDialog.setdescription.required=La description de l'ensemble ne peut tre vide! + +harvestserver.newSetDialog.setquery=Requte de recherche +harvestserver.newSetDialog.setquery.tip=Requte de recherche qui dfinit le contenu de l'ensemble de donnes. +harvestserver.newSetDialog.setquery.helptext=Exemple de requte\u00A0: authorName:king +harvestserver.newSetDialog.setquery.required=La requte de recherche ne peut tre vide! +harvestserver.newSetDialog.setquery.results=La requte de recherche a retourn {0} ensemble(s) de donnes! +harvestserver.newSetDialog.setquery.empty=AVERTISSEMENT\u00A0: la requte de recherche n'a retourn aucun rsultat! + +harvestserver.newSetDialog.btn.create=Crer l'ensemble +harvestserver.newSetDialog.success=L'ensemble de donnes "{0}" a bien t cr. + +harvestserver.viewEditDialog.title=Modifier l'ensemble moissonn. +harvestserver.viewEditDialog.btn.save=Sauvegarder les modifications + +#dashboard-users.xhtml +dashboard.card.users=Utilisateurs +dashboard.card.users.header=Tableau de bord - Liste des utilisateurs +dashboard.card.users.super=Super-utilisateurs +dashboard.card.users.manage=Grer les utilisateurs +dashboard.card.users.message=Lister et grer les utilisateurs. 
+dashboard.list_users.searchTerm.watermark=Rechercher ces utilisateurs\u2026 + +dashboard.list_users.tbl_header.userId=Identifiant +dashboard.list_users.tbl_header.userIdentifier=Nom d'utilisateur +dashboard.list_users.tbl_header.name=Nom +dashboard.list_users.tbl_header.lastName=Nom de famille +dashboard.list_users.tbl_header.firstName=Prnom +dashboard.list_users.tbl_header.email=Courriel +dashboard.list_users.tbl_header.affiliation=Affiliation +dashboard.list_users.tbl_header.roles=Roles +dashboard.list_users.tbl_header.position=Poste +dashboard.list_users.tbl_header.isSuperuser=Super-utilisateur +dashboard.list_users.tbl_header.authProviderFactoryAlias=Authentification +dashboard.list_users.tbl_header.createdTime=Date de cration +dashboard.list_users.tbl_header.lastLoginTime=Dernire connexion +dashboard.list_users.tbl_header.lastApiUseTime=Dernire utilisation de l'API + + +dashboard.list_users.toggleSuperuser=Modifier le statut de super-utilisateur +dashboard.list_users.toggleSuperuser.confirmationText.add=tes-vous certain de vouloir activer le statut de super-utilisateur pour l'utilisateur {0}? +dashboard.list_users.toggleSuperuser.confirmationText.remove=tes-vous certain de vouloir dsactiver le statut de super-utilisateur pour l'utilisateur {0}? +dashboard.list_users.toggleSuperuser.confirm=Poursuivre + +dashboard.list_users.api.auth.invalid_apikey=La cl API n'est pas valide. +dashboard.list_users.api.auth.not_superuser=Action Interdite. Vous devez tre un super-utilisateur. + +#MailServiceBean.java= + +notification.email.create.dataverse.subject={0}\u00A0: Votre dataverse a t cr. +notification.email.create.dataset.subject={0}\u00A0: Votre ensemble de donnes a t cr. +notification.email.request.file.access.subject={0}\u00A0: Vous avez prsent une demande d'accs un fichier en accs rserv. +notification.email.grant.file.access.subject={0}\u00A0: L'accs un fichier rserv vous a t accord. 
+notification.email.rejected.file.access.subject={0}\u00A0: Votre demande d'accs un fichier en accs rserv a t refuse. +notification.email.update.maplayer={0}\u00A0: Une couche WorldMap a t ajoute l'ensemble de donnes. +notification.email.maplayer.deletefailed.subject={0}: Impossible de supprimer la couche WorldMap. +notification.email.maplayer.deletefailed.text=Impossible de supprimer la couche WorldMap associe au fichier accs restreint {0}, ainsi que toutes les donnes connexes qui peuvent encore tre publiquement disponibles sur le site de WorldMap. Essayez de nouveau, ou contactez le soutien WorldMap et/ou Dataverse. (Ensemble de donnes: {1}) +notification.email.submit.dataset.subject={0}\u00A0: Votre ensemble de donnes a t soumis aux fins d'examen. +notification.email.publish.dataset.subject={0}\u00A0: Votre ensemble de donnes a t publi. +notification.email.returned.dataset.subject={0}\u00A0: Votre ensemble de donnes a t retourn. +notification.email.create.account.subject={0}\u00A0: Votre compte a t cr. +notification.email.assign.role.subject={0}\u00A0: Un rle vous a t attribu +notification.email.revoke.role.subject={0}\u00A0: Votre rle a t rvoqu +notification.email.verifyEmail.subject={0}\u00A0: Valider votre adresse courriel + +notification.email.greeting=Bonjour, \n + +notification.email.welcome=Bienvenue dans Dataverse! Commencez ds maintenant en ajoutant ou encore en recherchant des donnes. Des questions? Consultez le Guide d'utilisation ici {0}/{1}/user/ ou contactez le service de soutien de Scholars Portal pour de l'aide. Vous voulez faire l'essai des composantes de Dataverse? Essayez notre site de dmonstration https://demodv.scholarsportal.info +# Bundle file editors, please note that "notification.email.welcome" is used in a unit test= + +notification.email.welcomeConfirmEmailAddOn=\n\nVeuillez vrifier votre adresse courriel {0}. Notez que le lien de validation expirera aprs {1}. 
Envoyez de nouveau un courriel de validation en vous rendant la page de votre compte. + +notification.email.requestFileAccess=Accs au fichier demand pour l'ensemble de donnes\u00A0: {0}. Grer les autorisations {1}. +notification.email.grantFileAccess=Accs accord aux fichiers de l'ensemble de donnes\u00A0: {0} (voir {1}). +notification.email.rejectFileAccess=Accs refus aux fichiers demands de l'ensemble de donnes \u00A0: {0} (voir {1}). +# Bundle file editors, please note that "notification.email.createDataverse" is used in a unit test= +notification.email.createDataverse=Votre nouveau dataverse intitul {0} (voir {1}) a t cr dans {2} (voir {3}). Pour en savoir plus sur ce que vous pouvez faire avec votre dataverse, consultez le Guide de l'utilisateur Dataverse l'adresse suivante\u00A0: {4}/{5}/user/dataverse-management.html . +# Bundle file editors, please note that "notification.email.createDataset" is used in a unit test= +notification.email.createDataset=Votre nouvel ensemble de donnes intitul {0} (voir {1}) a t cr dans {2} (voir {3}). Pour en savoir plus sur ce que vous pouvez faire avec un ensemble de donnes, consultez le Guide Dataverse sur la gestion d'un ensemble de donnes l'adresse suivante\u00A0: {4}/{5}/user/dataset-management.html . +notification.email.wasSubmittedForReview={0} (voir {1}) a t soumis aux fins d'examen en vue de sa publication dans {2} (voir {3}). N'oubliez pas de le publier ou de le renvoyer au collaborateur\! +notification.email.wasReturnedByReviewer={0} (voir {1}) a t retourn par l'intendant des donnes de {2} (voir {3}). +notification.email.wasPublished={0} (voir {1}) a t publi dans {2} (voir {3}). +notification.email.worldMap.added=Les donnes d'une couche WorldMap ont t ajoutes {0} (voir {1}). +notification.email.closing=\n\nMerci,\nScholars Portal Dataverse +notification.email.assignRole=Vous tes maintenant {0} pour\u00A0: {1} "{2}" (voir {3}). +notification.email.revokeRole=Un de vos rles pour {0} "{1}" a t rvoqu (voir {2}). 
+notification.email.changeEmail=Bonjour, {0}.{1}\n\nVeuillez nous contacter si vous n'aviez pas l'intention de faire cette modification ou si vous avez besoin d'aide. +hours=heures +hour=heures +minutes=minutes +minute=minute +notification.email.checksumfail.subject={0}\u00A0: votre validation de somme de contrle a choue. +notification.email.import.filesystem.subject={0}\u00A0: Votre tche d'importation de fichier est complte. +notification.email.import.checksum.subject={0}\u00A0: Votre tche de somme de contrle de fichier est complte. + +# passwordreset.xhtml= + +pageTitle.passwdReset.pre=Rinitialisation du mot de passe du compte +passwdReset.token=Jeton\u00A0: +passwdReset.userLookedUp=Utilisateur recherch\u00A0: +passwdReset.emailSubmitted=Courriel soumis\u00A0: +passwdReset.details={0} Rinitialisation du mot de passe{1} - Pour dbuter le processus de rinitialisation du mot de passe, veuillez indiquer votre adresse courriel. +passwdReset.submitRequest=Soumettre la demande de mot de passe +passwdReset.successSubmit.tip=Si cette adresse courriel est associe un compte, un courriel sera envoy avec des instructions supplmentaires {0}. +passwdReset.debug=DBOGUER +passwdReset.resetUrl=L'adresse URL rinitialis est +passwdReset.noEmail.tip=Aucun courriel n'a t envoy tant donn qu'aucun utilisateur n'a t trouv au moyen de l'adresse fournie {0}. Nous ne le mentionnons pas, car nous voulons viter que des utilisateurs malveillants se servent du formulaire pour dterminer si un compte est associ une adresse courriel. +passwdReset.illegalLink.tip=Le lien pour rinitialiser votre mot de passe n'est pas valide. Si vous devez rinitialiser votre mot de passe, {0}cliquez ici{1} pour demander ce que votre mot de passe soit rinitialis. +passwdReset.newPasswd.details={0} Nouveau mot de passe{1} \u2013 Veuillez choisir un mot de passe solide comptant au moins six caractres et au moins une lettre et un chiffre. 
+passwdReset.newPasswd=Nouveau mot de passe +passwdReset.rePasswd=Confirmer le mot de passe +passwdReset.resetBtn=Rinitialiser le mot de passe + +# dataverse.xhtml= + +dataverse.title=Le projet, le dpartement, l'universit ou le chercheur vis par les donnes contenues dans le dataverse. +dataverse.enterName=Entrer le nom\u2026 +dataverse.host.title=Le dataverse qui contient ces donnes. +dataverse.identifier.title=Nom abrg utilis pour l'adresse URL de ce dataverse. +dataverse.affiliation.title=L'organisation avec laquelle ce dataverse est affili. + +dataverse.category=Catgorie +dataverse.category.title=Le type correspondant le mieux ce dataverse. +dataverse.type.selectTab.top=Slectionner\u2026 +dataverse.type.selectTab.researchers=Chercheur +dataverse.type.selectTab.researchProjects=Projet de recherche +dataverse.type.selectTab.journals=Revue +dataverse.type.selectTab.organizationsAndInsitutions=Organisation ou tablissement +dataverse.type.selectTab.teachingCourses=Cours +dataverse.type.selectTab.uncategorized=Sans catgorie +dataverse.type.selectTab.researchGroup=Groupe de recherche +dataverse.type.selectTab.laboratory=Laboratoire +dataverse.type.selectTab.department=Dpartement + +dataverse.description.title=Un rsum dcrivant l'objet, la nature ou la porte de ce dataverse. +dataverse.email=Courriel +dataverse.email.title=L'adresse courriel du ou des personnes-ressources pour ce dataverse. +dataverse.share.dataverseShare=Partager le dataverse +dataverse.share.dataverseShare.tip=Partager ce dataverse sur vos mdias sociaux prfrs. +dataverse.share.dataverseShare.shareText=Consulter ce dataverse + +dataverse.subject.title=Sujet(s) traits(s) dans ce dataverse. + +dataverse.metadataElements=Champs de mtadonnes +dataverse.metadataElements.tip=Slectionner les champs de mtadonnes utiliser dans les modles d'ensembles de donnes ainsi qu'au moment d'ajouter un ensemble de donnes ce dataverse. 
+dataverse.metadataElements.from.tip=Utiliser les champs de mtadonnes de {0} +dataverse.resetModifications=Rinitialiser les modifications +dataverse.resetModifications.text=tes-vous sr(e) de vouloir rinitialiser les champs de mtadonnes slectionns? Si vous les rinitialisez, toute personnalisation effectue (information cache, obligatoire, facultative) sera annule. +dataverse.field.required=(Obligatoire) +dataverse.field.example1=(Exemples\u00A0: +dataverse.field.example2=) +dataverse.field.set.tip=[+] Voir les champs + les dfinir comme champs cachs, obligatoires ou facultatifs +dataverse.field.set.view=[+] Voir les champs +dataverse.field.requiredByDataverse=Requis par Dataverse +dataverse.facetPickList.text=Parcourir/Recherche par facettes +dataverse.facetPickList.tip=Choisir les champs de mtadonnes utiliser comme facettes pour consulter les ensembles de donnes ainsi que les dataverses contenus dans ce dataverse. +dataverse.facetPickList.facetsFromHost.text=Utiliser la fonction Parcourir/Recherche par facettes de {0} +dataverse.facetPickList.metadataBlockList.all=Tous les champs de mtadonnes + +dataverse.edit=Modifier +dataverse.option.generalInfo=Renseignements gnraux +dataverse.option.themeAndWidgets=Thme + widgets +dataverse.option.featuredDataverse=Dataverses en vedette +dataverse.option.permissions=Permissions +dataverse.option.dataverseGroups=Groupes +dataverse.option.datasetTemplates=Modles d'ensembles de donnes +dataverse.option.datasetGuestbooks=Registre des visiteurs pour l'ensemble de donnes +dataverse.option.deleteDataverse=Supprimer le dataverse +dataverse.publish.btn=Publier +dataverse.publish.header=Publier le dataverse +dataverse.nopublished=Aucun dataverse publi +dataverse.nopublished.tip=Vous devez avoir au moins un dataverse publi pour utiliser cette fonction. 
+dataverse.contact=Communiquer par courriel avec Dataverse +dataset.link=Lier l'ensemble de donnes +dataverse.link=Lier le dataverse +dataverse.link.btn.tip=Lier votre dataverse +dataverse.link.yourDataverses=Votre {0, choice, 1#Dataverse|2#Dataverses} +dataverse.link.save=Enregistrer le dataverse li +dataset.link.save=Enregistrer l'ensemble de donnes li +dataverse.link.dataverse.choose=Dterminez avec lequel de vos dataverses vous souhaitez lier ce dataverse. +dataverse.link.dataset.choose=Dterminez avec lequel de vos dataverses vous souhaitez lier cet ensemble de donnes. +dataverse.link.no.choice=Vous avez un dataverse dans lequel vous pouvez ajouter des ensembles de donnes et des dataverses lis. +dataverse.link.no.linkable=Vous devez possder votre propre dataverse pour pouvoir lier un dataverse ou un ensemble de donnes. Cliquer sur le bouton \u00A0Ajouter des donnes\u00A0 la page d'accueil pour commencer. +dataverse.link.no.linkable.remaining=Vous avez dj li tous vos dataverses admissibles. +dataverse.savedsearch.link=Recherche de liens +dataverse.savedsearch.searchquery=Recherche +dataverse.savedsearch.filterQueries=Facettes +dataverse.savedsearch.save=Enregistrer la recherche de liens +dataverse.savedsearch.dataverse.choose=Dterminez avec lequel de vos dataverses vous souhaitez lier cette recherche. +dataverse.savedsearch.no.choice=Vous avez un dataverse pour lequel vous pouvez ajouter une recherche sauvegarde. +# Bundle file editors, please note that "dataverse.savedsearch.save.success" is used in a unit test= +dataverse.saved.search.success=La recherche sauvegarde est maintenant lie {0}. +dataverse.saved.search.failure=La recherche sauvegarde n'a pas pu tre lie. +dataverse.linked.success={0} est maintenant li {1}. +dataverse.linked.success.wait={0} a bien t li {1}. Veuillez attendre que le contenu s'affiche. +dataverse.linked.internalerror={0} a bien t li {1} mais le contenu ne s'affichera pas avant qu'une erreur interne ne soit rsolue. 
+dataverse.page.pre=Prcdent +dataverse.page.next=Suivant +dataverse.byCategory=Dataverses par catgorie +dataverse.displayFeatured=Afficher les dataverses slectionns ci-dessous sur la page d'accueil pour ce dataverse. +dataverse.selectToFeature=Slectionner les dataverses afficher sur la page d'accueil de ce dataverse. +dataverse.publish.tip=tes-vous sr(e) de vouloir publier votre dataverse? Une fois publi, il doit le demeurer. +dataverse.publish.failed.tip=Ce dataverse ne peut pas tre publi, car le dataverse dans lequel il se trouve n'a pas t publi. +dataverse.publish.failed=Impossible de publier le dataverse +dataverse.publish.success=Votre dataverse est maintenant public. +dataverse.publish.failure=Ce dataverse n'a pas pu tre publi. +dataverse.delete.tip=tes-vous sr(e) de vouloir supprimer votre dataverse? Vous ne pouvez pas annuler la suppression de ce dataverse. +dataverse.delete=Supprimer le dataverse +dataverse.delete.success=Votre dataverse a t supprim. +dataverse.delete.failure=Ce dataverse n'a pas pu tre supprim. +# Bundle file editors, please note that "dataverse.create.success" is used in a unit test because it's so fancy with two parameters= +dataverse.create.success=Vous avez bien russi crer votre dataverse! Pour en savoir davantage sur ce que vous pouvez faire avec votre dataverse, consultez le guide d'utilisation. +dataverse.create.failure=Ce dataverse n'a pas pu tre cr. +dataverse.create.authenticatedUsersOnly=Seuls les utilisateurs authentifis peuvent crer des dataverses. +dataverse.update.success=Vous avez bien mis jour votre dataverse! +dataverse.update.failure=Ce dataverse n'a pas pu tre mis jour. + +# rolesAndPermissionsFragment.xhtml= + +# advanced.xhtml= +advanced.search.header.dataverses=Dataverses +advanced.search.dataverses.name.tip=Le projet, le dpartement, l'universit ou le professeur vis par les donnes contenues dans ce dataverse. +advanced.search.dataverses.affiliation.tip=L'organisation avec laquelle ce dataverse est affili. 
+advanced.search.dataverses.description.tip=Un rsum dcrivant l'objet, la nature ou la porte de ce dataverse. +advanced.search.dataverses.subject.tip=Catgories de sujets propres aux domaines qui sont pertinents du point de vue du sujet pour ce dataverse. +advanced.search.header.datasets=Ensembles de donnes +advanced.search.header.files=Fichiers +advanced.search.files.name.tip=Le nom donn au fichier. +advanced.search.files.description.tip=Un rsum dcrivant le fichier et ses variables. +advanced.search.files.fileType=Type de fichier +advanced.search.files.fileType.tip=L'extension d'un fichier, p.\u00A0ex. CSV, zip, Stata, R, PDF, JPEG, etc. +advanced.search.files.variableName=Nom de la variable +advanced.search.files.variableName.tip=Le titre de la colonne pour cette variable dans la table de donnes. +advanced.search.files.variableLabel=Libell de variable +advanced.search.files.variableLabel.tip=Une courte description de la variable. + +# search-include-fragment.xhtml= + +dataverse.search.advancedSearch=Recherche avance +dataverse.search.input.watermark=Chercher dans ce dataverse\u2026 +account.search.input.watermark=Chercher ces donnes\u2026 +dataverse.search.btn.find=Trouver + +dataverse.results.btn.addData=Ajouter des donnes +dataverse.results.btn.addData.newDataverse=Nouveau dataverse +dataverse.results.btn.addData.newDataset=Nouvel ensemble de donnes +dataverse.results.dialog.addDataGuest.header=Ajouter des donnes +dataverse.results.dialog.addDataGuest.msg=Vous devez vous authentifier pour crer un dataverse ou ajouter un ensemble de donnes. +dataverse.results.dialog.addDataGuest.msg.signup=Vous devez crer un compte ou vous connecter pour crer un dataverse ou ajouter un ensemble de donnes. +dataverse.results.types.dataverses=Dataverses +dataverse.results.types.datasets=Ensembles de donnes +dataverse.results.types.files=Fichiers + +dataverse.results.empty.zero=Aucun dataverse, ensemble de donnes ou fichier ne correspond votre recherche. 
Veuillez effectuer une nouvelle recherche en utilisant d'autres termes ou des termes plus gnraux. Vous pouvez galement consulter le guide de recherche pour des astuces. + +dataverse.results.empty.hidden=Il n'y a pas de rsultats rpondants vos critres de recherche. Vous pouvez consulter le guide de recherche pour des conseils. +dataverse.results.empty.browse.guest.zero=Ce dataverse ne contient actuellement aucun dataverse, ensemble de donnes ou fichier. Veuillez vous authentifier pour voir si vous pouvez y ajouter du contenu. +dataverse.results.empty.browse.guest.hidden=Ce dataverse ne contient aucun dataverse. Veuillez vous authentifier pour voir si vous pouvez y ajouter du contenu. +dataverse.results.empty.browse.loggedin.noperms.zero=Ce dataverse ne contient actuellement aucun dataverse, ensemble de donnes ou fichier. Vous pouvez utiliser le bouton \u00A0Envoyer un courriel au contact du dataverse\u00A0 ci-dessus pour toute question sur ce dataverse ou pour effectuer une demande d'accs ce dataverse. +dataverse.results.empty.browse.loggedin.noperms.hidden=Il n'y a aucun dataverse dans ce dataverse. +dataverse.results.empty.browse.loggedin.perms.zero=Ce dataverse ne contient actuellement aucun dataverse, ensemble de donnes ou fichier. Vous pouvez en ajouter l'aide du bouton \u00A0Ajouter des donnes\u00A0 qui se trouve sur cette page. +account.results.empty.browse.loggedin.perms.zero=Il n'y a aucun dataverse, ensemble de donnes ou fichier associ votre compte. Vous pouvez ajouter un dataverse ou un ensemble de donnes en cliquant sur le bouton \u00A0Ajouter des donnes\u00A0 ci-dessus. Pour en savoir plus sur l'ajout de donnes, consultez le guide d'utilisation +dataverse.results.empty.browse.loggedin.perms.hidden=Il n'y a aucun dataverse dans ce dataverse. Vous pouvez en ajouter l'aide du bouton \u00A0Ajouter des donnes\u00A0 qui se trouve sur cette page. 
+dataverse.results.empty.link.technicalDetails=Plus de dtails techniques +dataverse.search.facet.error=Une erreur s'est produite avec vos paramtres de recherche. Veuillez effacer votre recherche et essayer de nouveau. +dataverse.results.count.toofresults={0} {1} de {2} {2, choice, 0#rsultats|1#rsultat|2#rsultats} +dataverse.results.paginator.current=(Actuel) +dataverse.results.btn.sort=Tri +dataverse.results.btn.sort.option.nameAZ=Nom (A-Z) +dataverse.results.btn.sort.option.nameZA=Nom (Z-A) +dataverse.results.btn.sort.option.newest=Plus rcent +dataverse.results.btn.sort.option.oldest=Plus ancien +dataverse.results.btn.sort.option.relevance=Pertinence + +dataverse.results.cards.foundInMetadata=Trouv(s) dans les champs de mtadonnes\u00A0: +dataverse.results.cards.files.tabularData=Donnes tabulaires + +dataverse.results.solrIsDown=Veuillez noter qu'en raison d'une erreur interne, les fonctions de consultation et de recherche ne sont pas disponibles. + +dataverse.theme.title=Thme +dataverse.theme.inheritCustomization.title=Cochez cette case pour utiliser le thme existant. +dataverse.theme.inheritCustomization.label=Personnalisation hrite +dataverse.theme.inheritCustomization.checkbox=Personnalisation hrite de {0} +dataverse.theme.logo=Logo +dataverse.theme.logo.tip=Les formats d'image pris en charge sont JPG, TIF ou PNG et les fichiers ne doivent pas dpasser 500 Ko. La taille d'affichage maximale d'un fichier image dans le thme d'un dataverse est de 940 pixels de large par 120 pixels de hauteur. 
+dataverse.theme.logo.format=Format du logo +dataverse.theme.logo.format.selectTab.square=Carr +dataverse.theme.logo.format.selectTab.rectangle=Rectangle +dataverse.theme.logo.alignment=Alignement du logo +dataverse.theme.logo.alignment.selectTab.left=Gauche +dataverse.theme.logo.alignment.selectTab.center=Centre +dataverse.theme.logo.alignment.selectTab.right=Droite +dataverse.theme.logo.backColor=Couleur du fond du logo +dataverse.theme.logo.image.upload=Tlverser l'image +dataverse.theme.tagline=Titre d'appel +dataverse.theme.website=Site Web +dataverse.theme.linkColor=Couleur du lien +dataverse.theme.txtColor=Couleur du texte +dataverse.theme.backColor=Couleur du fond +dataverse.theme.success=Vous avez bien mis jour le thme de ce dataverse! +dataverse.theme.failure=Le thme de ce dataverse n'a pas t mis jour. +dataverse.theme.logo.image=Image du logo +dataverse.theme.logo.image.title=Le logo ou le fichier image que vous dsirez afficher dans l'en-tte de ce dataverse. +dataverse.theme.logo.image.uploadNewFile=Tlverser un nouveau fichier +dataverse.theme.logo.image.invalidMsg=L'image ne peut pas tre tlverse. Veuillez essayer nouveau en utilisant un fichier JPG, TIF ou PNG. +dataverse.theme.logo.image.uploadImgFile=Tlverser le fichier image +dataverse.theme.logo.format.title=La forme du logo ou le fichier image que vous tlversez pour ce dataverse. +dataverse.theme.logo.format.selectTab.square2=Carr +dataverse.theme.logo.format.selectTab.rectangle2=Rectangle +dataverse.theme.logo.alignment.title=L'endroit o le logo ou l'image devrait apparatre dans l'en-tte +dataverse.theme.logo.alignment.selectTab.left2=Gauche +dataverse.theme.logo.alignment.selectTab.center2=Centre +dataverse.theme.logo.alignment.selectTab.right2=Droite +dataverse.theme.logo.backColor.title=Slectionner une couleur afficher derrire le logo de ce dataverse. 
+dataverse.theme.headerColor=Couleurs de l'en-tte +dataverse.theme.headerColor.tip=Couleurs que vous slectionnez pour crer l'en-tte de ce dataverse. +dataverse.theme.backColor.title=Couleur de la zone de l'en-tte qui contient l'image, le titre d'appel, l'adresse URL et le texte. +dataverse.theme.linkColor.title=Couleur du lien afficher +dataverse.theme.txtColor.title=Couleur du texte du titre d'appel et le nom de ce dataverse. +dataverse.theme.tagline.title=Une phrase qui dcrit ce dataverse. +dataverse.theme.tagline.tip=Fournir un titre d'appel de 140 caractres ou moins. +dataverse.theme.website.title=L'adresse URL de votre site Web personnel ou de votre tablissement ou de tout site Web associ ce dataverse. +dataverse.theme.website.tip=Le lien pour le site Web se trouvera derrire le titre d'appel. Pour qu'un site Web apparaisse dans la liste, vous devez galement fournir un titre d'appel +dataverse.theme.website.watermark=Votre site personnel, http://\u2026 +dataverse.theme.website.invalidMsg=Adresse URL non valide. + +dataverse.widgets.title=Widgets +dataverse.widgets.notPublished.why.header=Pourquoi faire appel aux widgets? +dataverse.widgets.notPublished.why.reason1=Augmente la visibilit de vos donnes sur le Web en vous permettant d'intgrer votre dataverse et les ensembles de donnes dans votre site web personnel ou de projet. +dataverse.widgets.notPublished.why.reason2=Permet aux autres de parcourir votre dataverse ainsi que vos ensembles de donnes sans quitter votre site personnel ou de projet. +dataverse.widgets.notPublished.how.header=Comment utiliser les widgets +dataverse.widgets.notPublished.how.tip1=Pour pouvoir utiliser des widgets, votre dataverse et vos ensembles de donnes doivent tre publis. +dataverse.widgets.notPublished.how.tip2=Suite la publication, le code sera disponible sur cette page pour que vous puissiez le copier et l'ajouter votre site web personnel ou de projet. +dataverse.widgets.notPublished.how.tip3=Avez-vous un site web OpenScholar? 
Si oui, apprenez-en davantage sur l'ajout de widgets Dataverse dans votre site web ici. +dataverse.widgets.notPublished.getStarted=Pour dbuter, publiez votre dataverse. Pour en savoir davantage sur les widgets, consultez la section thme et widgets du guide d'utilisation. +dataverse.widgets.tip=Copiez et collez ce code dans le code HTML de votre site web. Pour en savoir davantage sur les widgets, consultez la section Thme et widgets du guide d'utilisation. +dataverse.widgets.searchBox.txt=Bote de recherche Dataverse +dataverse.widgets.searchBox.tip=Permet aux visiteurs de votre site Web d'effectuer une recherche dans Dataverse. +dataverse.widgets.dataverseListing.txt=Liste des dataverses +dataverse.widgets.dataverseListing.tip=Permet aux visiteurs de votre site Web de voir vos dataverses et vos ensembles de donnes, de les trier ou de les parcourir en revue. +dataverse.widgets.advanced.popup.header=Widgets\u00A0: Options avances +dataverse.widgets.advanced.prompt=Expdier vers votre site web personnel l'URL prenne de la rfrence bibliographique de l'ensemble de donnes. La page que vous rferrez comme tant l'URL de votre site web personnel doit contenir l'extrait de code utilis par le widget Listing de Dataverse. +dataverse.widgets.advanced.url.label=URL de votre site web personnel +dataverse.widgets.advanced.url.watermark=http://www.exemple.com/nom-de-la-page +dataverse.widgets.advanced.invalid.message=Veuillez saisir un URL valide +dataverse.widgets.advanced.success.message=Mise jour russie de l'URL de votre site web personnel +dataverse.widgets.advanced.failure.message=L'URL du site web personnel associ ce dataverse n'a pas t mis jour. 
+ +# permissions-manage.xhtml= + +dataverse.permissions.title=Permissions +dataverse.permissions.dataset.title=Permissions pour l'ensemble de donnes +dataverse.permissions.access.accessBtn=Modifier l'accs +dataverse.permissions.usersOrGroups=Utilisateurs/Groupes +dataverse.permissions.usersOrGroups.assignBtn=Assigner les rles aux utilisateurs/groupes +dataverse.permissions.usersOrGroups.createGroupBtn=Crer un groupe +dataverse.permissions.usersOrGroups.description=Tous les utilisateurs et les groupes qui ont accs votre dataverse. +dataverse.permissions.usersOrGroups.tabHeader.userOrGroup=Nom de l'utilisateur ou du groupe (affiliation) +dataverse.permissions.usersOrGroups.tabHeader.id=Identifiant +dataverse.permissions.usersOrGroups.tabHeader.role=Rle +dataverse.permissions.usersOrGroups.tabHeader.action=Action +dataverse.permissions.usersOrGroups.assignedAt=Rle assign {0} +dataverse.permissions.usersOrGroups.removeBtn=Supprimer le rle assign +dataverse.permissions.usersOrGroups.removeBtn.confirmation=tes-vous sr(e) de vouloir supprimer cette attribution des rles? + +dataverse.permissions.roles=Rles +dataverse.permissions.roles.add=Ajouter un nouveau rle +dataverse.permissions.roles.description=Tous les rles tablis dans votre dataverse que vous pouvez attribuer aux utilisateurs et aux groupes. +dataverse.permissions.roles.edit=Modifier le rle +dataverse.permissions.roles.copy=Copier le rle + +# permissions-manage-files.xhtml= + +dataverse.permissionsFiles.title=Permissions des fichiers accs rserv + +dataverse.permissionsFiles.usersOrGroups=Utilisateurs/Groupes +dataverse.permissionsFiles.usersOrGroups.assignBtn=Accorder l'accs aux utilisateurs/groupes +dataverse.permissionsFiles.usersOrGroups.description=Tous les utilisateurs et les groupes qui ont accs aux fichiers accs rserv de cet ensemble de donnes. 
+dataverse.permissionsFiles.usersOrGroups.tabHeader.userOrGroup=Nom de l'utilisateur ou du groupe (affiliation) +dataverse.permissionsFiles.usersOrGroups.tabHeader.id=Identification +dataverse.permissionsFiles.usersOrGroups.tabHeader.email=Courriel +dataverse.permissionsFiles.usersOrGroups.tabHeader.files=Fichiers +dataverse.permissionsFiles.usersOrGroups.tabHeader.access=Accs +dataverse.permissionsFiles.usersOrGroups.file=Fichier +dataverse.permissionsFiles.usersOrGroups.files=Fichiers +dataverse.permissionsFiles.usersOrGroups.invalidMsg=Aucun utilisateur ou groupe n'a accs aux fichiers rservs de cet ensemble de donnes. + +dataverse.permissionsFiles.files=Fichiers accs rserv +dataverse.permissionsFiles.files.label={0, choice, 0#Fichiers accs rserv|1#Fichier accs rserv|2#Fichiers accs rserv} +dataverse.permissionsFiles.files.description=Tous les fichiers en accs rserv dans cet ensemble de donnes. +dataverse.permissionsFiles.files.tabHeader.fileName=Nom du fichier +dataverse.permissionsFiles.files.tabHeader.roleAssignees=Utilisateurs/Groupes +dataverse.permissionsFiles.files.tabHeader.access=Accs +dataverse.permissionsFiles.files.tabHeader.publishedRestrictedState=Publi +dataverse.permissionsFiles.files.tabHeader.draftRestrictedState=Version provisoire +dataverse.permissionsFiles.files.deleted=Supprim +dataverse.permissionsFiles.files.public=Public +dataverse.permissionsFiles.files.restricted=Accs rserv +dataverse.permissionsFiles.files.roleAssignee=Utilisateur/Groupe +dataverse.permissionsFiles.files.roleAssignees=Utilisateurs/Groupes +dataverse.permissionsFiles.files.roleAssignees.label={0, choice, 0#Utilisateurs/Groupes|1#Utilisateur/Groupe|2#Utilisateurs/Groupes} +dataverse.permissionsFiles.files.assignBtn=Accorder l'accs +dataverse.permissionsFiles.files.invalidMsg=Cet ensemble de donnes ne contient aucun fichier en accs rserv. 
+dataverse.permissionsFiles.files.requested=Fichiers demands +dataverse.permissionsFiles.files.selected=Slection\u00A0: {0} de {1} {2} + +dataverse.permissionsFiles.viewRemoveDialog.header=Accs au fichier +dataverse.permissionsFiles.viewRemoveDialog.removeBtn=Supprimer l'accs +dataverse.permissionsFiles.viewRemoveDialog.removeBtn.confirmation=tes-vous sr(e) de vouloir supprimer l'accs ce fichier? Une fois l'accs supprim, l'utilisateur ou le groupe ne sera plus en mesure de tlcharger ce fichier. + +dataverse.permissionsFiles.assignDialog.header=Accorder l'accs au fichier +dataverse.permissionsFiles.assignDialog.description=Accorder l'accs au fichier aux utilisateurs et aux groupes +dataverse.permissionsFiles.assignDialog.userOrGroup=Utilisateurs/Groupes +dataverse.permissionsFiles.assignDialog.userOrGroup.enterName=Indiquer le nom de l'utilisateur ou du groupe +dataverse.permissionsFiles.assignDialog.userOrGroup.invalidMsg=Aucun rsultat +dataverse.permissionsFiles.assignDialog.userOrGroup.requiredMsg=Veuillez slectionner au moins un utilisateur ou groupe. +dataverse.permissionsFiles.assignDialog.fileName=Nom du fichier +dataverse.permissionsFiles.assignDialog.grantBtn=Accorder +dataverse.permissionsFiles.assignDialog.rejectBtn=Rejeter + +# permissions-configure.xhtml= +dataverse.permissions.accessDialog.header=Modifier l'accs +dataverse.permissions.description=Configuration actuelle de l'accs votre dataverse. +dataverse.permissions.tip=Slectionnez, en cliquant sur le bouton Modifier l'accs , si tous les utilisateurs ou seulement certains sont en mesure d'ajouter des donnes ce dataverse. +dataverse.permissions.Q1=Qui peut ajouter des donnes ce dataverse? +dataverse.permissions.Q1.answer1=Toute personne qui ajoute des donnes ce dataverse doit y avoir accs. +dataverse.permissions.Q1.answer2=Toute personne possdant un compte Dataverse peut ajouter des sous-dataverses. 
+dataverse.permissions.Q1.answer3=Toute personne possdant un compte Dataverse peut ajouter des ensembles de donnes. +dataverse.permissions.Q1.answer4=Toute personne possdant un compte Dataverse peut ajouter des sous-dataverses et des ensembles de donnes. +dataverse.permissions.Q2=Quel devrait tre le rle par dfaut d'une personne qui ajoute des ensembles de donnes ce dataverse? +dataverse.permissions.Q2.answer.editor.description=\u2219 Modifier les mtadonnes, tlverser les fichiers et modifier les fichiers, modifier les conditions, le registre des visiteurs, soumettre les ensembles de donnes aux fins d'examen. +dataverse.permissions.Q2.answer.manager.description=\u2219 Modifier les mtadonnes, tlverser les fichiers et modifier les fichiers, modifier les conditions, le registre des visiteurs, les restrictions relatives aux fichiers (accs aux fichiers + utilisation) +dataverse.permissions.Q2.answer.curator.description=\u2219 Modifier les mtadonnes, tlverser les fichiers et modifier les fichiers, modifier les conditions, le registre des visiteurs, les restrictions relatives aux fichiers (accs aux fichiers + utilisation), modifier les permissions/assigner les rles + publier + +# roles-assign.xhtml= + +dataverse.permissions.usersOrGroups.assignDialog.header=Assigner le rle +dataverse.permissions.usersOrGroups.assignDialog.description=Accorder les permissions aux utilisateurs et aux groupes en leur attribuant un rle. +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup=Utilisateurs/groupes +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.enterName=Indiquer le nom de l'utilisateur ou du groupe. +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.invalidMsg=Aucun rsultat +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.requiredMsg=Veuillez slectionner au moins un utilisateur ou un groupe. +dataverse.permissions.usersOrGroups.assignDialog.role.description=Voici les permissions associes au rle slectionn. 
+dataverse.permissions.usersOrGroups.assignDialog.role.warning=L'attribution du rle {0} signifie que le ou les utilisateurs auront galement le rle {0} qui s'applique tous les {1} dans ce {2}. +dataverse.permissions.usersOrGroups.assignDialog.role.requiredMsg=Veuillez slectionner un rle attribuer. + +# roles-edit.xhtml= + +dataverse.permissions.roles.header=Modifier le rle +dataverse.permissions.roles.name=Nom du rle +dataverse.permissions.roles.name.title=Indiquer un nom pour le rle. +dataverse.permissions.roles.id=Identifiant +dataverse.permissions.roles.id.title=Indiquer un nom pour l'alias. +dataverse.permissions.roles.description.title=Dcrire le rle (1000 caractres maximum). +dataverse.permissions.roles.description.counter={0} caractre(s) restant(s) +dataverse.permissions.roles.roleList.header=Permissions du rle +dataverse.permissions.roles.roleList.authorizedUserOnly=Les permissions comportant l'icne Information indiquent que les actions peuvent tre faites par des utilisateurs non authentifis dans Dataverse. + +# explicitGroup-new-dialog.xhtml= + +dataverse.permissions.explicitGroupEditDialog.title.new=Crer un groupe +dataverse.permissions.explicitGroupEditDialog.title.edit=Modifier le groupe {0} +dataverse.permissions.explicitGroupEditDialog.help=Ajouter des utilisateurs ou d'autres groupes ce groupe +dataverse.permissions.explicitGroupEditDialog.groupIdentifier=Identifiant du groupe +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.tip=Nom abrg utilis pour l'identifiant de ce groupe. +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.required=L'identifiant du groupe ne doit pas tre vide. +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.invalid=L'identifiant du groupe peut seulement contenir des lettres, des chiffres, des traits de soulignement (_) et des tirets (-). +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.helpText=Comprend des lettres, des chiffres, des traits de soulignement (_) et des tirets. 
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.taken=Identifiant du groupe dj utilis dans ce dataverse. +dataverse.permissions.explicitGroupEditDialog.groupName=Nom du groupe +dataverse.permissions.explicitGroupEditDialog.groupName.required=Le nom du groupe ne peut pas tre vide. +dataverse.permissions.explicitGroupEditDialog.groupDescription=Description +dataverse.permissions.explicitGroupEditDialog.roleAssigneeName=Utilisateur/Groupe +dataverse.permissions.explicitGroupEditDialog.roleAssigneeNames=Utilisateurs/Groupes +dataverse.permissions.explicitGroupEditDialog.createGroup=Crer un groupe + +# manage-templates.xhtml= + +dataset.manageTemplates.pageTitle=Grer les modles d'ensembles de donnes +dataset.manageTemplates.select.txt=Intgrer des modles provenant de {0} +dataset.manageTemplates.createBtn=Crer un modle d'ensemble de donnes +dataset.manageTemplates.saveNewTerms=Sauvegarder le modle d'ensemble de donnes +dataset.manageTemplates.noTemplates.why.header=Pourquoi utiliser des modles? +dataset.manageTemplates.noTemplates.why.reason1=Les modles sont utiles lorsque vous possdez plusieurs ensembles de donnes pour lesquels les mmes renseignements s'appliquent et que vous ne voulez pas avoir les saisir manuellement chaque fois. +dataset.manageTemplates.noTemplates.why.reason2=Les modles peuvent tre utiliss pour entrer des directives l'intention des personnes qui tlversent des ensembles de donnes dans votre dataverse si vous dsirez qu'un champ de mtadonnes soit rempli d'une faon particulire. +dataset.manageTemplates.noTemplates.how.header=Comment utiliser les modles +dataset.manageTemplates.noTemplates.how.tip1=Les modles sont crs au niveau du dataverse, peuvent tre supprims (si on ne veut pas qu'ils paraissent dans les futurs ensembles de donnes), sont activs par dfaut (non requis) et peuvent tre copis de faon ce que vous n'ayez pas recommencer du dbut lorsque vous crez un nouveau modle contenant des mtadonnes similaires un autre modle. 
Lorsqu'un modle est supprim, il n'y a aucune incidence sur les ensembles de donnes qui ont dj utilis le modle. +dataset.manageTemplates.noTemplates.how.tip2=Veuillez noter que la possibilit de choisir les champs de mtadonnes qui seront cachs, obligatoires ou facultatifs est disponible sur la page Renseignements gnraux de ce dataverse. +dataset.manageTemplates.noTemplates.getStarted=Pour commencer, cliquez sur le bouton \u00A0Crer un modle d'ensemble de donnes\u00A0 ci-dessus. Pour en savoir plus au sujet des modles, consultez la section Modles d'ensembles de donnesdu guide d'utilisation. +dataset.manageTemplates.tab.header.templte=Nom du modle +dataset.manageTemplates.tab.header.date=Date de cration +dataset.manageTemplates.tab.header.usage=Usage +dataset.manageTemplates.tab.header.action=Action +dataset.manageTemplates.tab.action.btn.makeDefault=Dfinir par dfaut +dataset.manageTemplates.tab.action.btn.default=Par dfaut +dataset.manageTemplates.tab.action.btn.view=Consulter +dataset.manageTemplates.tab.action.btn.copy=Copier +dataset.manageTemplates.tab.action.btn.edit=Modifier +dataset.manageTemplates.tab.action.btn.edit.metadata=Mtadonnes +dataset.manageTemplates.tab.action.btn.edit.terms=Conditions +dataset.manageTemplates.tab.action.btn.delete=Supprimer +dataset.manageTemplates.tab.action.btn.delete.dialog.tip=tes-vous sr(e) de vouloir supprimer ce modle? Un nouvel ensemble de donnes ne pourra utiliser ce modle. +dataset.manageTemplates.tab.action.btn.delete.dialog.header=Supprimer le modle +dataset.manageTemplates.tab.action.btn.view.dialog.header=Aperu du modle d'ensemble de donnes +dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate=Modle d'ensemble de donnes +dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate.title=Le modle d'ensemble de donnes qui permet d'intgrer au pralable des valeurs standards dans des champs de mtadonnes. 
+dataset.manageTemplates.tab.action.noedit.createdin=Modle cr pour {0} +dataset.manageTemplates.delete.usedAsDefault=Ce modle est le modle par dfaut du ou des dataverses suivants. Il sera galement supprim comme modle par dfaut. +dataset.manageTemplates.info.message.notEmptyTable=Crer, cloner, modifier, voir ou supprimer des modles d'ensembles de donnes. Crer un modle d'ensemble de donnes o sont remplis au pralable certains champs de mtadonnes au moyen de valeurs standards, comme l'affiliation de l'auteur, afin de faciliter l'ajout d'ensembles de donnes dans ce dataverse. Vous pouvez galement ajouter du texte d'aide aux champs de mtadonnes afin d'orienter les utilisateurs sur les lments ajouter ces champs. + +# metadataFragment.xhtml= + +# template.xhtml= + +dataset.template.name.tip=Le nom du modle d'ensemble de donnes. +dataset.template.returnBtn=Revenir Grer les modles +dataset.template.name.title=Indiquer un nom unique pour l'ensemble de donnes +template.asterisk.tip=Les astrisques indiquent les champs de mtadonnes que les utilisateurs doivent obligatoirement remplir lors de l'ajout d'un ensemble de donnes ce dataverse. +dataset.template.popup.create.title=Crer un modle +dataset.template.popup.create.text=Dsirez-vous ajouter des conditions d'utilisation et d'accs par dfaut? +dataset.create.add.terms=Sauvegarder et ajouter des conditions d'utilisation + +# manage-groups.xhtml= + +dataverse.manageGroups.createBtn=Crer un groupe +dataverse.manageGroups.noGroups.getStarted=Pour dbuter, cliquez sur le bouton Crer un groupe ci-dessus. +dataverse.manageGroups.noGroups.how.header=Comment utiliser les groupes +dataverse.manageGroups.noGroups.how.tip1=Un groupe peut comprendre la fois des individus et d'autres groupes. +dataverse.manageGroups.noGroups.how.tip2=Vous pouvez attribuer des permissions un groupe dans le volet Permissions . +dataverse.manageGroups.noGroups.why.header=Pourquoi faire appel aux groupes? 
+dataverse.manageGroups.noGroups.why.reason1=Les groupes vous permettent d'attribuer des rles et permissions plusieurs personnes la fois. +dataverse.manageGroups.noGroups.why.reason2=Vous pouvez faire appel aux groupes pour la gestion de diffrents types d'utilisateurs (tudiants, collaborateurs, etc.). +dataverse.manageGroups.pageTitle=Grer des groupes Dataverse +dataverse.manageGroups.tab.action.btn.copy=Copier +dataverse.manageGroups.tab.action.btn.delete.dialog.header=Supprimer un groupe +dataverse.manageGroups.tab.action.btn.delete.dialog.tip=tes-vous sr de vouloir supprimer ce groupe? Vous ne pouvez annuler la suppression d'un groupe. +dataverse.manageGroups.tab.action.btn.delete=Supprimer +dataverse.manageGroups.tab.action.btn.disable=Dsactiver +dataverse.manageGroups.tab.action.btn.edit=Modifier +dataverse.manageGroups.tab.action.btn.enable=Activer +dataverse.manageGroups.tab.action.btn.view.dialog.enterName=Entrer le nom d'utilisateur ou de groupe +dataverse.manageGroups.tab.action.btn.view.dialog.group=Nom du groupe +dataverse.manageGroups.tab.action.btn.view.dialog.groupMembers=Membres du groupe +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.action=Action +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.delete=Supprimer +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.name=Nom du membre +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.type=Catgorie de membre +dataverse.manageGroups.tab.action.btn.view.dialog.header=Groupe Dataverse +dataverse.manageGroups.tab.action.btn.view.dialog.invalidMsg=Aucun rsultat. 
+dataverse.manageGroups.tab.action.btn.view=Voir
+dataverse.manageGroups.tab.action.btn.viewCollectedData=Voir les données colligées
+dataverse.manageGroups.tab.header.action=Action
+dataverse.manageGroups.tab.header.id=Identifiant du groupe
+dataverse.manageGroups.tab.header.membership=Adhésion
+dataverse.manageGroups.tab.header.name=Nom du groupe
+# manage-guestbooks.xhtml=
+
+dataset.manageGuestbooks.pageTitle=Gérer les registres de visiteurs pour les ensembles de données
+dataset.manageGuestbooks.include=Comprend les registres de visiteurs de {0}.
+dataset.manageGuestbooks.createBtn=Créer un registre des visiteurs pour l'ensemble de données
+dataset.manageGuestbooks.download.all.responses=Télécharger toutes les entrées
+dataset.manageGuestbooks.download.responses=Télécharger les entrées
+dataset.manageGuestbooks.noGuestbooks.why.header=Pourquoi utiliser des registres de visiteurs?
+dataset.manageGuestbooks.noGuestbooks.why.reason1=Les registres de visiteurs vous permettent de recueillir des données au sujet des personnes qui téléchargent les fichiers de vos ensembles de données. Vous pouvez recueillir des renseignements issus du compte (nom d'utilisateur, prénom et nom, affiliation, etc.) et créer des questions personnalisées (p.\u00A0ex. à quoi serviront les données?)
+dataset.manageGuestbooks.noGuestbooks.why.reason2=Vous pouvez télécharger les données recueillies dans les registres de visiteurs activés afin de pouvoir les enregistrer en dehors de Dataverse.
+dataset.manageGuestbooks.noGuestbooks.how.header=Comment utiliser les registres de visiteurs
+dataset.manageGuestbooks.noGuestbooks.how.tip1=Un registre des visiteurs peut être utilisé pour plusieurs ensembles de données, mais un seul registre des visiteurs peut être utilisé pour un ensemble de données.
+dataset.manageGuestbooks.noGuestbooks.how.tip2=Les questions personnalisées peuvent comprendre des réponses en texte libre ou des questions à choix de réponses. 
+dataset.manageGuestbooks.noGuestbooks.getStarted=Pour commencer, cliquer ci-dessus sur le bouton \u00A0Crer un registre des visiteurs pour l'ensemble de donnes\u00A0. Pour en savoir plus sur les registres de visiteurs, visitez la section Registre des visiteurs du guide d'utilisation. +dataset.manageGuestbooks.tab.header.name=Nom du registre des visiteurs +dataset.manageGuestbooks.tab.header.date=Date de cration +dataset.manageGuestbooks.tab.header.usage=Usage +dataset.manageGuestbooks.tab.header.responses=Rponses +dataset.manageGuestbooks.tab.header.action=Action +dataset.manageGuestbooks.tab.action.btn.view=Consulter +dataset.manageGuestbooks.tab.action.btn.copy=Copier +dataset.manageGuestbooks.tab.action.btn.enable=Activer +dataset.manageGuestbooks.tab.action.btn.disable=Dsactiver +dataset.manageGuestbooks.tab.action.btn.edit=Modifier +dataset.manageGuestbooks.tab.action.btn.viewCollectedData=Consulter les donnes recueillies +dataset.manageGuestbooks.tab.action.btn.delete=Supprimer +dataset.manageGuestbooks.tab.action.btn.delete.dialog.header=Supprimer le registre des visiteurs +dataset.manageGuestbooks.tab.action.btn.delete.dialog.tip=tes-vous sr(e) de vouloir supprimer ce registre des visiteurs? Vous ne pouvez pas annuler la suppression d'un registre des visiteurs. +dataset.manageGuestbooks.tab.action.btn.view.dialog.header=Registre des visiteurs pour l'ensemble de donnes +dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook.title=Au moment de tlcharger les fichiers, le registre des visiteurs demande les renseignements suivants. 
+dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook=Nom du registre des visiteurs +dataset.manageGuestbooks.tab.action.btn.viewCollectedData.dialog.header=Donnes recueillies dans le registre des visiteurs pour l'ensemble des donnes +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData.title=Donnes sur les utilisateurs recueillies par le registre des visiteurs +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData=Donnes recueillies +dataset.manageGuestbooks.tab.action.noedit.createdin=Registre des visiteurs cr pour {0} +dataset.manageGuestbooks.message.deleteSuccess=Le registre des visiteurs a t supprim. +dataset.manageGuestbooks.message.deleteFailure=Le registre des visiteurs ne peut pas tre supprim. +dataset.manageGuestbooks.message.editSuccess=Le registre des visiteurs a t mis jour. +dataset.manageGuestbooks.message.editFailure=Le registre des visiteurs ne peut pas tre mis jour. +dataset.manageGuestbooks.message.enableSuccess=Le registre des visiteurs a t activ. +dataset.manageGuestbooks.message.enableFailure=Le registre des visiteurs ne peut pas tre activ. +dataset.manageGuestbooks.message.disableSuccess=Le registre des visiteurs a t dsactiv. +dataset.manageGuestbooks.message.disableFailure=Le registre des visiteurs ne peut pas tre dsactiv. +dataset.guestbooksResponses.dataset=Ensemble de donnes +dataset.guestbooksResponses.date=Date +dataset.guestbooksResponses.type=Type +dataset.guestbooksResponses.file=Fichier + +# guestbook-responses.xhtml= +dataset.guestbookResponses.pageTitle=Consulter les rponses dans le registre des visiteurs + +# guestbook.xhtml= + +dataset.manageGuestbooks.guestbook.name=Nom du registre des visiteurs +dataset.manageGuestbooks.guestbook.name.tip=Indiquer un nom unique pour ce registre des visiteurs. 
+dataset.manageGuestbooks.guestbook.dataCollected=Données recueillies
+dataset.manageGuestbooks.guestbook.dataCollected.description=Renseignements sur le compte Dataverse qui seront recueillis lorsqu'un utilisateur télécharge un fichier. Veuillez cocher les éléments qui seront requis.
+dataset.manageGuestbooks.guestbook.customQuestions=Questions personnalisées
+dataset.manageGuestbooks.guestbook.requiredCustomQuestions=Questions personnalisées requises
+dataset.manageGuestbooks.guestbook.optionalCustomQuestions=Questions personnalisées facultatives
+dataset.manageGuestbooks.guestbook.requiredAccountInformation=Renseignements sur le compte requis
+dataset.manageGuestbooks.guestbook.optionalAccountInformation=Renseignements sur le compte facultatifs
+dataset.manageGuestbooks.guestbook.customQuestions.description=Créez vos propres questions afin que les utilisateurs fournissent plus d'information que ce qui se trouve dans leur compte lorsqu'ils téléchargent un fichier. Les questions peuvent être obligatoires ou facultatives et les réponses peuvent être en format texte ou à choix multiples. 
+dataset.manageGuestbooks.guestbook.customQuestions.questionType=Type de question
+dataset.manageGuestbooks.guestbook.customQuestions.questionText=Texte de la question
+dataset.manageGuestbooks.guestbook.customQuestions.responseOptions=Choix de réponse
+dataset.manageGuestbooks.guestbook.customQuestions.questionType.text=Texte
+dataset.manageGuestbooks.guestbook.customQuestions.questionType.multiple=Choix multiples
+
+# guestbookResponseFragment.xhtml=
+
+dataset.guestbookResponse.guestbook.additionalQuestions=Autres questions
+dataset.guestbookResponse.guestbook.responseTooLong=Veuillez limiter votre réponse à 255 caractères
+
+# dataset.xhtml=
+
+dataset.pageTitle=Ajouter un nouvel ensemble de données
+dataset.editBtn=Modifier
+dataset.editBtn.itemLabel.upload=Fichiers (téléverser)
+dataset.editBtn.itemLabel.metadata=Métadonnées
+dataset.editBtn.itemLabel.terms=Conditions d'utilisation
+dataset.editBtn.itemLabel.permissions=Permissions
+dataset.editBtn.itemLabel.thumbnailsAndWidgets=Vignettes + Widgets
+dataset.editBtn.itemLabel.privateUrl=URL privé
+dataset.editBtn.itemLabel.permissionsDataset=Ensemble de données
+dataset.editBtn.itemLabel.permissionsFile=Fichiers à accès réservé
+dataset.editBtn.itemLabel.deleteDataset=Supprimer l'ensemble de données
+dataset.editBtn.itemLabel.deleteDraft=Supprimer la version provisoire
+dataset.editBtn.itemLabel.deaccession=Retirer l'ensemble de données de la diffusion
+dataset.exportBtn=Exporter les métadonnées
+dataset.exportBtn.itemLabel.ddi=DDI
+dataset.exportBtn.itemLabel.dublinCore=Dublin Core
+dataset.exportBtn.itemLabel.json=JSON
+metrics.title=Statistiques
+metrics.title.tip=Afficher plus d'informations sur les statistiques d'utilisation
+metrics.comingsoon=Bientôt disponible\u2026
+metrics.views=Pages consultées
+metrics.downloads={0, choice, 0#téléchargements|1#téléchargement|2#téléchargements}
+metrics.citations=Citations
+metrics.shares=Partages
+dataset.publish.btn=Publier
+dataset.publish.header=Publier l'ensemble de données
+dataset.rejectBtn=Retourner l'auteur +dataset.submitBtn=Soumettre aux fins d'examen +dataset.disabledSubmittedBtn=Soumis aux fins d'examen +dataset.submitMessage=Soumettre cet ensemble de donnes pour examen par l'intendant des donnes de ce dataverse en vue de publier. +dataset.rejectMessage=Retourner cet ensemble de donnes au collaborateur aux fins de modification. +dataset.publish.tip=tes-vous sr(e) de vouloir publier cet ensemble de donnes? Une fois publi, il doit demeurer publi. +dataset.publishBoth.tip=Une fois que vous publiez cet ensemble de donnes, il doit demeurer publi. +dataset.unregistered.tip=Cet ensemble de donnes n'est pas enregistr. Nous tenterons de l'enregistrer avant de le publier. +dataset.republish.tip=tes-vous sr(e) de vouloir publier nouveau cet ensemble de donnes? +dataset.selectVersionNumber=Indiquer s'il s'agit d'une mise jour mineure ou majeure de la version. +dataset.majorRelease=Version majeure +dataset.minorRelease=Version mineure +dataset.majorRelease.tip=En raison de la nature des modifications apportes la version provisoire actuelle, il s'agira d'une version majeure({0}). +dataset.mayNotBePublished=Impossible de publier l'ensemble de donnes +dataset.mayNotPublish.administrator=Cet ensemble de donnes ne peut tre publi tant que {0} n'est pas publi par son administrateur. +dataset.mayNotPublish.both=Cet ensemble de donnes ne peut tre publi tant que {0} n'est pas publi. Voulez-vous publier les deux immdiatement? +dataset.mayNotPublish.twoGenerations=Cet ensemble de donnes ne peut tre publi tant que {0} et {1} ne sont pas publis. +dataset.mayNotBePublished.both.button=Oui, publier les deux. 
+dataset.viewVersion.unpublished=Voir la version non publie +dataset.viewVersion.published=Voir la version publie +dataset.email.datasetContactBtn=Envoyer un courriel la personne-ressource pour l'ensemble de donnes +dataset.email.hiddenMessage= +dataset.email.messageSubject=Objet du message +dataset.email.datasetLinkBtn.tip=Lier l'ensemble de donnes votre dataverse +dataset.share.datasetShare=Partager l'ensemble de donnes +dataset.share.datasetShare.tip=Partager cet ensemble de donnes sur vos mdias sociaux prfrs. +dataset.share.datasetShare.shareText=Consulter cet ensemble de donnes +dataset.publish.error=L'ensemble de donnes ne peut tre publi, car le service de {0} est actuellement inaccessible. Veuillez essayer nouveau. Le problme persiste-il? +dataset.publish.error.doi=L'ensemble de donnes ne peut tre retir, car la mise jour DOI a chou. +dataset.delete.error=L'ensemble de donnes ne peut tre publi, car la mise jour {0} a chou. +dataset.publish.worldMap.deleteConfirm=Prenez note que vos donnes et votre carte sur WorldMap seront supprimes en raison de modifications des restrictions d'accs aux fichiers dans cette version de l'ensemble de donnes que vous publiez. Voulez-vous continuer? + +dataset.versionUI.draft=Version provisoire +dataset.versionUI.inReview=En rvision +dataset.versionUI.unpublished=Non publi +dataset.versionUI.deaccessioned=Retir +dataset.cite.title.released=La VERSION PROVISOIRE sera remplace dans la rfrence bibliographique par la V1 une fois l'ensemble de donnes publi. +dataset.cite.title.draft=La VERSION PROVISOIRE sera remplace dans la rfrence bibliographique par la version slectionne une fois l'ensemble de donnes publi. +dataset.cite.title.deassessioned=La mention VERSION RETIRE a t ajoute la rfrence bibliographique pour cette version tant donn qu'elle n'est plus disponible. +dataset.cite.standards.tip=Pour en apprendre sur le sujet, consultez le document Data Citation Standards. 
+dataset.cite.downloadBtn=Citer l'ensemble de donnes +dataset.cite.downloadBtn.xml=EndNote XML +dataset.cite.downloadBtn.ris=RIS +dataset.cite.downloadBtn.bib=BibTeX +dataset.create.authenticatedUsersOnly=Seuls les utilisateurs authentifis peuvent crer des ensembles de donnes. +dataset.deaccession.reason=Raison du retrait +dataset.beAccessedAt=L'ensemble de donnes peut maintenant tre consult \u00A0: +dataset.descriptionDisplay.title=Description +dataset.keywordDisplay.title=Mot-cl +dataset.subjectDisplay.title=Sujet +dataset.contact.tip=Utiliser le bouton de courriel ci-dessus pour communiquer avec cette personne. +dataset.asterisk.tip=Les astrisques indiquent les champs obligatoires. +dataset.message.uploadFiles=Tlverser les fichiers de l'ensemble de donnes - Vous pouvez glisser-dplacer les fichiers partir de votre ordinateur vers le widget de tlversement. +dataset.message.editMetadata=Modifier les mtadonnes de l'ensemble de donnes\u00A0: ajouter plus de mtadonnes afin de faciliter le reprage de cet ensemble. +dataset.message.editTerms=Modifier les conditions de l'ensemble de donnes\u00A0: mettre jour les conditions d'utilisation de cet ensemble de donnes. +dataset.message.createSuccess=Cet ensemble de donnes a t cr. +dataset.message.linkSuccess=Cet ensemble de donnes est maintenant li {1}. +dataset.message.metadataSuccess=Les mtadonnes pour cet ensemble de donnes ont t mises jour. +dataset.message.termsSuccess=Les conditions pour cet ensemble de donnes ont t mises jour. +dataset.message.filesSuccess=Les fichiers pour cet ensemble de donnes ont t mis jour. +dataset.message.publishSuccess=Cet ensemble de donnes a t publi. +dataset.message.only.authenticatedUsers=Seuls les utilisateurs authentifis peuvent publier des ensembles de donnes. +dataset.message.deleteSuccess=Cet ensemble de donnes a t supprim. +dataset.message.bulkFileUpdateSuccess=Les fichiers slectionns ont t mis jour. 
+datasetVersion.message.deleteSuccess=La version provisoire de cet ensemble de donnes a t supprime. +datasetVersion.message.deaccessionSuccess=La ou les versions slectionnes ont t retires. +dataset.message.deaccessionSuccess=Cet ensemble de donnes a t retir. +dataset.message.files.ingestSuccess=Le(s) fichier(s) a(ont) bien t charg(s). Vous pouvez maintenant les consulter l'aide de TwoRavens ou les tlcharger en d'autres formats. +dataset.message.validationError=Erreur de validation - Les champs obligatoires ont t omis ou il y a eu une erreur de validation. Veuillez dfiler le menu vers le bas pour voir les dtails. +dataset.message.publishFailure=L'ensemble de donnes n'a pas pu tre publi. +dataset.message.metadataFailure=Les mtadonnes n'ont pas pu tre mises jour. +dataset.message.filesFailure=Les fichiers n'ont pas pu tre tlchargs. +dataset.message.files.ingestFailure=Le ou les fichiers n'ont pas pu tre chargs. +dataset.message.deleteFailure=La version provisoire de cet ensemble de donnes n'a pas pu tre supprime. +dataset.message.deaccessionFailure=Cet ensemble de donnes n'a pas pu tre retir. +dataset.message.createFailure=L'ensemble de donnes n'a pas pu tre cr. +dataset.message.termsFailure=Les conditions de cet ensemble de donnes n'ont pas pu tre mises jour. +dataset.metadata.publicationDate=Date de publication +dataset.metadata.publicationDate.tip=La date de publication d'un ensemble de donnes. +dataset.metadata.persistentId=Identifiant prenne de l'ensemble de donnes +dataset.metadata.persistentId.tip=L'identifiant prenne unique d'un ensemble de donnes, qui peut tre un Handle ou un DOI dans Dataverse. +dataset.versionDifferences.termsOfUseAccess=Conditions d'utilisation et d'accs +dataset.versionDifferences.termsOfUseAccessChanged=Conditions d'utilisation et d'accs modifies +file.viewDiffDialog.restricted=Accs rserv + +dataset.template.tip=Modifier le modle effacera tous les champs dans lesquels vous auriez entr des donnes. 
+dataset.noTemplate.label=Aucun + +dataset.noSelectedFiles.header=Slectionner un ou des fichiers +dataset.noSelectedFilesForDownload=Veuillez slectionner le ou les fichiers tlcharger. +dataset.noSelectedFilesForRequestAccess=Veuillez slectionner le ou les fichiers pour la demande d'accs. +dataset.noSelectedFilesForDelete=Veuillez slectionner le ou les fichiers supprimer. +dataset.noSelectedFilesForMetadataEdit=Veuillez slectionner le ou les fichiers modifier. +dataset.noSelectedFilesForRestrict=Veuillez slectionner le ou les fichiers non rservs mettre en accs rserv. +dataset.noSelectedFilesForUnRestrict=Veuillez slectionner le ou les fichiers rservs mettre en accs non rserv. +dataset.inValidSelectedFilesForDownload=Fichiers rservs slectionns +dataset.noValidSelectedFilesForDownload=Le ou les fichiers rservs slectionns ne peuvent tre tlchargs, car les accs ne vous ont pas t accords. +dataset.mixedSelectedFilesForDownload=Le ou les fichiers rservs slectionns ne peuvent tre tlchargs, car les accs ne vous ont pas t accords. +dataset.downloadUnrestricted=Cliquez sur Continuer pour tlcharger les fichiers pour lesquels vous avez un accs. +dataset.requestAccessToRestrictedFiles=Vous pouvez demander l'accs un ou des fichiers rservs en cliquant sur le bouton Demander l'accs . + +dataset.privateurl.infoMessageAuthor=URL priv de l'ensemble de donnes non publi - Partager en priv cet ensemble de donnes avant sa publication\u00A0: {0} +dataset.privateurl.infoMessageReviewer=URL priv de l'ensemble de donnes non publi - Cet ensemble de donnes non publi est partag en priv. Vous ne pourrez pas y accder lorsque connect votre compte Dataverse. +dataset.privateurl.header=URL prive de l'ensemble de donnes non publi +dataset.privateurl.tip=Utilisez une adresse URL prive pour permettre ceux qui n'ont pas de compte Dataverse d'accder votre ensemble de donnes non publi. Pour plus d'informations sur la fonctionnalit d'URL priv, reportez-vous au guide d'utilisation. 
+dataset.privateurl.absent=L'adresse URL prive n'a pas t cre. +dataset.privateurl.createPrivateUrl=Crer une adresse URL prive +dataset.privateurl.disablePrivateUrl=Dsactiver l'URL priv +dataset.privateurl.disablePrivateUrlConfirm=Confirmer la dsactivation de l'URL priv +dataset.privateurl.disableConfirmationText=Voulez-vous vraiment dsactiver l'URL priv? Si vous avez partag l'URL priv avec d'autres utilisateurs, ceux-ci ne pourront plus l'utiliser pour accder votre ensemble de donnes non publi. +dataset.privateurl.cannotCreate=L'URL priv ne peut tre utilis qu'avec des versions non publies d'ensembles de donnes. +dataset.privateurl.roleassigeeTitle=URL priv activ +dataset.privateurl.createdSuccess=Opration russie! +dataset.privateurl.disabledSuccess=Vous avez bien dsactiv l'URL priv de cet ensemble de donnes non publi. +dataset.privateurl.noPermToCreate=Pour crer une adresse URL priv, vous devez disposer des autorisations suivantes\u00A0: {0}. +file.count={0} {0, choice, 0#Fichiers|1#Fichiers|2#Fichiers} +file.count.selected={0} {0, choice, 0#Fichiers slectionns|1#Fichier slectionn|2#Fichiers slectionns} +file.selectToAddBtn=Slectionner les fichiers ajouter +file.selectToAdd.tipLimit=La limite de tlversement est de {0} octets par fichier. +file.selectToAdd.tipMoreInformation=Pour plus d'informations sur les formats de fichiers pris en charge, reportez-vous au guide d'utilisation. +file.fromDropbox=Tlverser partir de Dropbox +file.fromDropbox.tip=Les fichiers peuvent aussi tre tlverser directement de Dropbox. +file.fromDropbox.description=Glisser et dposer les fichiers ici. +file.replace.original=Original File + +file.editFiles=Modifier les fichiers +file.bulkUpdate=Chargement en lot +file.uploadFiles=Tlverser des fichiers +file.replaceFile=Remplacer le fichier +file.notFound.tip=Cet ensemble de donnes ne contient aucun fichier. +file.noSelectedFiles.tip=Aucun fichier n'a t slectionn pour affichage. +file.noUploadedFiles.tip=Les fichiers tlverss paratront ici. 
+file.replace=Remplacer +file.replaced.warning.header=Modifier le fichier +file.replaced.warning.draft.warningMessage=Vous ne pouvez pas remplacer un fichier qui a t remplac dans un ensemble de donnes provisoire. Pour le remplacer par un fichier diffrent, vous devez d'abord suppprimer l'ensemble de donnes provisoire. Notez que ce faisant, toute autre modification apporte cet ensemble provisoire sera annule. +file.replaced.warning.previous.warningMessage=Vous ne pouvez pas diter un fichier qui a t remplac dans une version prcdente d'un ensemble de donnes. Pour pouvoir l'diter, vous devez accder la dernire version publie du fichier. +file.alreadyDeleted.previous.warningMessage=Ce fichier a dj t supprim dans la version actuelle. Il peut ne pas tre modifi. +file.delete=Supprimer +file.metadata=Mtadonnes +file.deleted.success=En cliquant sur \u00A0Enregistrer les modifications\u00A0, les fichiers \u201C{0}\u201D seront supprims de faon permanente de cette version de l'ensemble de donnes. +file.deleted.replacement.success=Le fichier de remplacement a t supprim. +file.editAccess=Modifier les accs +file.restrict=Restreindre +file.unrestrict=Sans restriction +file.restricted.success=L'accs aux fichiers \u201C{0}\u201D sera restreint du moment o vous cliquerez sur le bouton \u00A0Enregistrer les modifications\u00A0 au bas de la page. +file.download.header=Tlcharger +file.preview=Aperu\u00A0: +file.previewMap=Aperu de la carte\u00A0: +file.fileName=Nom du fichier +file.type.tabularData=Donnes tabulaires +file.originalChecksumType=Fichier original {0} +file.checksum.exists.tip=Un fichier avec cette somme de contrle existe dj dans l'ensemble de donnes. +file.selectedThumbnail=Vignette +file.selectedThumbnail.tip=La vignette associe au fichier est utilise comme vignette par dfaut pour l'ensemble de donnes. Cliquez sur le bouton \u00A0Options avances\u00A0 d'un autre fichier pour slectionner ce fichier. 
+file.cloudStorageAccess=Accs au stockage infonuagique +file.cloudStorageAccess.tip=Le nom du conteneur pour cet ensemble de donnes doit accder aux fichiers dans le stockage infonuagique. +file.cloudStorageAccess.help=Pour accder directement ces donnes dans l'environnement infonuagique {2}, utilisez le nom du conteneur dans la case d'accs au stockage infonuagique ci-dessous. Pour en savoir plus sur l'environnement infonuagique, consultez la section Accs au stockage infonuagique du Guide d'utilisation. +file.copy=Copier +file.compute=Calculer + +file.metaData.dataFile.dataTab.variables=Variables, +file.metaData.dataFile.dataTab.observations=Observations +file.metaData.viewOnWorldMap=Explorer sur WorldMap +file.addDescription=Ajouter une description du fichier\u2026 +file.tags=Libells +file.editTags=Libells +file.editTagsDialog.tip=Slectionner les libells de fichier existants ou crer de nouveaux libells pour dcrire vos fichiers. Chaque fichier peut avoir plus d'un libell. +file.editTagsDialog.select=Libells de fichier +file.editTagsDialog.selectedTags=Libells slectionns +file.editTagsDialog.selectedTags.none=Aucun libell slectionn +file.editTagsDialog.add=Personnaliser le libell du fichier +file.editTagsDialog.add.tip=Crer un nouveau libell l'ajoutera comme option de libell pour tous les fichiers de cet ensemble de donnes. +file.editTagsDialog.newName=Ajouter un nouveau libell de fichier\u2026 +dataset.removeUnusedFileTags.label=Supprimer les libells +dataset.removeUnusedFileTags.tip=Slectionner pour supprimer les libells personnaliss non utiliss par les fichiers de l'ensemble de donnes. 
+dataset.removeUnusedFileTags.check=Supprimer les libells non utiliss + +file.setThumbnail=Slectionner la vignette +file.setThumbnail.header=Slectionner la vignette de l'ensemble de donnes +file.datasetThumbnail=Vignette de l'ensemble de donnes +file.datasetThumbnail.tip=Slectionner cette option_pour slectionner cette image en tant que vignette affiche dans la page des rsultats de recherche pour cet ensemble de donnes. +file.setThumbnail.confirmation=tes-vous certain de vouloir choisir cette image comme vignette pour votre ensemble de donnes? Il y a dj une image tlverse en tant que vignette et cette action l'enlvera. +file.useThisIamge=Utiliser cette image comme vignette pour l'ensemble de donnes. +file.advancedOptions=Options avances +file.advancedIngestOptions=Options de chargement avances +file.assignedDataverseImage.success={0} a t sauvegarde comme vignette pour cet ensemble de donnes. +file.assignedTabFileTags.success=Les libells ont bien t ajouts pour {0}. +file.tabularDataTags=Libells des donnes tabulaires +file.tabularDataTags.tip=Slectionner un ou plusieurs libells dcrivant le type de fichier de donnes. +file.spss-savEncoding=Encodage linguistique +file.spss-savEncoding.title=Slectionner la langue utilise pour encoder ce fichier de donnes SPSS (sav). +file.spss-savEncoding.current=Slection actuelle\u00A0: +file.spss-porExtraLabels=Libells de variable +file.spss-porExtraLabels.title=Tlverser un fichier texte supplmentaire contenant des libells de variable supplmentaires. +file.spss-porExtraLabels.selectToAddBtn=Slectionner le fichier ajouter +file.ingestFailed=Le chargement des donnes tabulaires a chou. +file.explore.twoRavens=TwoRavens +file.map=Carte +file.mapData=Golocaliser les donnes +file.mapData.worldMap=WorldMap +file.mapData.unpublished.header=Donnes non publies +file.mapData.unpublished.message=Pour golocaliser vos donnes avec WorldMap, vos donnes doivent tre publies. Veuillez publier cet ensemble de donnes et essayer nouveau. 
+file.downloadBtn.format.all=Tous les formats de fichier + renseignements +file.downloadBtn.format.tab=Spar par des tabulateurs +file.downloadBtn.format.original=Format du fichier original ({0}) +file.downloadBtn.format.rdata=Format RData +file.downloadBtn.format.var=Mtadonnes des variables +file.downloadBtn.format.citation=Rfrence bibliographique du fichier de donnes +file.more.information.link=Mettre un lien vers plus d'information sur le fichier\u00A0: + +file.requestAccess=Demander l'accs +file.requestAccess.dialog.msg=Vous devez vous authentifier pour demander un accs ce fichier. +file.requestAccess.dialog.msg.signup=Vous devez vous crer un compte ou vous authentifier pour demander un accs ce fichier. +file.accessRequested=Accs demand +file.restrictions=Restrictions d'accs aux fichiers +file.restrictions.description=Limiter l'accs aux fichiers publis en les indiquant comme tant restreints. Fournir aux utilisateurs les Conditions d'accs et leur permettre de demander l'accs. +file.restrictions.worldmap.warning=Veuillez noter que, une fois vos modifications d'accs au fichier publies, votre carte sur WorldMap sera supprime et la fonction Explorer sur WorldMap sera retire. + +file.ingestInProgress=Chargement en cours\u2026 + +file.dataFilesTab.metadata.header=Mtadonnes +file.dataFilesTab.metadata.addBtn=Ajouter + Modifier les mtadonnes +file.dataFilesTab.terms.header=Conditions +file.dataFilesTab.terms.editTermsBtn=Modifier les conditions +file.dataFilesTab.terms.list.termsOfUse.header=Conditions d'utilisation +file.dataFilesTab.terms.list.termsOfUse.waiver=Licence accorde +file.dataFilesTab.terms.list.termsOfUse.waiver.title=La licence permet d'informer les utilisateurs de ce qui leur est permis de faire avec ces donnes tlcharges. 
+file.dataFilesTab.terms.list.termsOfUse.waiver.txt=licence CC0 - \u201CTransfert dans le domaine public\u201D +file.dataFilesTab.terms.list.termsOfUse.waiver.description=Les ensembles de donnes se verront attribuer par dfaut une licence CC0 - Transfert dans le domaine public. CC0 facilite la rutilisation des donnes de recherche. Les normes de la communaut Dataverse de mme que les bonnes pratiques scientifiques exigent que toute source utilise soit cite correctement. Si vous ne pouvez accorder une licence CC0, vous pouvez tablir des conditions d'utilisation personnalises pour vos ensembles de donnes. +file.dataFilesTab.terms.list.termsOfUse.no.waiver.txt=Aucune licence n'a t slectionne pour cet ensemble de donnes. +file.dataFilesTab.terms.list.termsOfUse.waiver.txt.description=Les normes de la communaut Dataverse de mme que les bonnes pratiques scientifiques exigent que toute source utilise soit cite correctement. Veuillez utiliser la rfrence bibliographique ci-dessus gnre par Dataverse. +file.dataFilesTab.terms.list.termsOfUse.waiver.select.CCO=Oui, appliquer la licence CC0 - \u00A0Transfert dans le domaine public\u00A0. +file.dataFilesTab.terms.list.termsOfUse.waiver.select.notCCO=Non, ne pas appliquer la licence CC0 - \u00A0Transfert dans le domaine public\u00A0. +file.dataFilesTab.terms.list.termsOfUse.waiver.select.tip=Voici ce que les utilisateurs finaux verront affich pour cet ensemble de donnes. +file.dataFilesTab.terms.list.termsOfUse.termsOfUse=Conditions d'utilisation +file.dataFilesTab.terms.list.termsOfUse.termsOfUse.title=Indique la faon dont ces donnes peuvent tre utilises une fois tlcharges. +file.dataFilesTab.terms.list.termsOfUse.termsOfUse.description=Si vous n'tes pas en mesure d'utiliser la licence CC0 pour les ensembles de donnes, vous pouvez tablir des conditions d'utilisation personnalises. Voici un exemple d'une Licence de donnes pour des ensembles de donnes qui comportent des donnes anonymises de sujets humains. 
+file.dataFilesTab.terms.list.termsOfUse.addInfo=Renseignements supplmentaires +file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration=Dclaration de confidentialit +file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration.title=Indique s'il faut signer une dclaration de confidentialit pour avoir accs une ressource. +file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions=Permissions spciales +file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions.title=Dterminer si des permissions spciales sont requises pour avoir accs une ressource (p.\u00A0ex. si un formulaire est ncessaire et o obtenir le formulaire). +file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions=Restrictions +file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions.title=Toute restriction s'appliquant l'accs l'ensemble de donnes et son utilisation, comme la certification relative la vie prive ou les restrictions concernant la diffusion, doit tre indique cet endroit. Il peut s'agir de restrictions tablies selon l'auteur, le producteur ou le diffuseur des donnes. Si l'accs aux donnes est limit une certaine catgorie d'utilisateurs, veuillez le prciser. +file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements=Exigences de citation +file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements.title=lments spciaux requis pour citer adquatement ces donnes dans les articles ou autres publications qui s'appuient sur ces donnes. Pour les exigences standards de citation, se reporter aux Normes de la communaut Dataverse. +file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements=Exigences du dposant +file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements.title=Renseignements concernant la responsabilit de l'utilisateur d'informer les dposants, les auteurs ou les intendants des donnes de l'utilisation faite des donnes en leur fournissant les rfrences aux travaux publis ou les copies des manuscrits. 
+file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions=Conditions +file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions.title=Tout renseignement supplmentaire qui aidera l'utilisateur comprendre les conditions d'accs et d'utilisation de l'ensemble de donnes. +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer=Avis de non-responsabilit +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer.title=Renseignements concernant les responsabilits lies l'utilisation de l'ensemble de donnes. + +file.dataFilesTab.terms.list.termsOfAccess.header=Fichiers en accs rserv + Conditions d'accs +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles=Fichiers en accs rserv +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.title=Nombre de fichiers en accs rserv dans cet ensemble de donnes +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=Il y a {0} {0, choice, 0#fichiers|1#fichier|2#fichiers} en accs rserv dans cet ensemble de donnes. +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess=Conditions d'accs +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess.title=Renseignements sur la faon dont les utilisateurs peuvent avoir accs aux fichiers en accs rserv de cet ensemble de donnes. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess=Demander l'accs +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title=Si la case est slectionne, les utilisateurs peuvent demander l'accs aux fichiers en accs rserv de cet ensemble de donnes. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.request=Les utilisateurs peuvent demander l'accs aux fichiers. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.notRequest=Les utilisateurs ne peuvent pas demander l'accs aux fichiers. 
+file.dataFilesTab.terms.list.termsOfAccess.requestAccess.enableBtn=Autoriser la demande d'accès
+
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace=Emplacement des données
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace.title=Si les données ne se trouvent pas uniquement dans Dataverse, indiquer le ou les emplacements où les données sont actuellement conservées.
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive=Dépôt original
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive.title=Dépôt duquel les données ont été obtenues.
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus=État de disponibilité
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus.title=Énoncé concernant la disponibilité de l'ensemble de données. Un déposant pourrait devoir indiquer qu'un ensemble de données n'est pas disponible parce que faisant l'objet d'un embargo temporaire, parce qu'il a été remplacé, parce qu'une nouvelle édition est imminente, etc.
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess=Personne-ressource pour les demandes d'accès
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess.title=Si les coordonnées diffèrent des coordonnées pour l'ensemble de données, il s'agit de la personne-ressource ou de l'organisation (indiquer l'adresse courriel ou l'adresse complète et le numéro de téléphone, si disponibles) qui contrôle l'accès à une collection de données.
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection=Taille de la collection de données
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection.tip=Sommaire du nombre de fichiers dans l'ensemble de données, du nombre de fichiers contenant des données; informations sur la disponibilité de documentation lisible par machine dans la collection et d'autres fichiers, comme manuels de codes, des dictionnaires de données, des énoncés sur la définition des données ou des instruments pour la collecte de données.
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion=lments achevs de l'tude +file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion.title=Lien entre les donnes recueillies et la quantit de donnes codes et enregistres dans l'ensemble de donnes. De l'information devrait tre fournie ici sur les raisons pour lesquelles certaine donnes recueillies, ou un fichier de donnes prcis, n'ont pas t inclus dans l'ensemble de donnes. + +file.dataFilesTab.terms.list.guestbook=Registre des visiteurs +file.dataFilesTab.terms.list.guestbook.title=Des renseignements sur l'utilisateur (c.--d. le nom, l'adresse courriel, l'tablissement et le poste) seront recueillis lors du tlchargement des fichiers. +file.dataFilesTab.terms.list.guestbook.noSelected.tip=Aucun registre des visiteurs n'est associ cet ensemble de donnes donc aucun renseignement ne vous sera demand concernant le tlchargement des fichiers. +file.dataFilesTab.terms.list.guestbook.noSelected.admin.tip=Aucun registre des visiteurs n'est disponible dans le {0} pour tre assign cet ensemble de donnes. +file.dataFilesTab.terms.list.guestbook.inUse.tip=Le registre des visiteurs suivant demandera un utilisateur de fournir des renseignements supplmentaires au moment du tlchargement d'un fichier. +file.dataFilesTab.terms.list.guestbook.viewBtn=Prvisualiser le registre des visiteurs +file.dataFilesTab.terms.list.guestbook.select.tip=Slectionner un registre des visiteurs afin qu'un utilisateur fournisse des renseignements supplmentaires lorsqu'il tlcharge un fichier. +file.dataFilesTab.terms.list.guestbook.noAvailable.tip=Aucun registre des visiteurs n'est activ dans le {0}. Pour crer un registre des visiteurs, retourner dans le {0}, cliquer sur le bouton \u00A0Modifier\u00A0 et slectionner \u00A0Registres de visiteurs pour l'ensemble de donnes\u00A0. 
+file.dataFilesTab.terms.list.guestbook.clearBtn=Effacer la slection + +file.dataFilesTab.versions=Versions +file.dataFilesTab.versions.headers.dataset=Ensemble de donnes +file.dataFilesTab.versions.headers.summary=Rsum +file.dataFilesTab.versions.headers.contributors=Contributeurs +file.dataFilesTab.versions.headers.published=Publi +file.dataFilesTab.versions.viewDiffBtn=Voir les diffrences +file.dataFilesTab.versions.citationMetadata=Mtadonnes bibliographiques\u00A0: +file.dataFilesTab.versions.added=Ajout +file.dataFilesTab.versions.removed=Supprim +file.dataFilesTab.versions.changed=Modifi +file.dataFilesTab.versions.replaced=Remplac +file.dataFilesTab.versions.original=Original +file.dataFilesTab.versions.replacment=Remplacement + +file.dataFilesTab.versions.additionalCitationMetadata=Mtadonnes bibliographiques additionnelles\u00A0: +file.dataFilesTab.versions.description.draft=Il s'agit d'une version provisoire. +file.dataFilesTab.versions.description.deaccessioned=tant donn que la version prcdente a t retire de la diffusion, aucune note sur les diffrences n'est disponible pour cette version publie. +file.dataFilesTab.versions.description.firstPublished=Il s'agit de la premire version publie. +file.dataFilesTab.versions.description.deaccessionedReason=Raison du retrait\u00A0: +file.dataFilesTab.versions.description.beAccessedAt=L'ensemble de donnes peut maintenant tre consult \u00A0: +file.dataFilesTab.versions.viewDetails.btn=Voir les renseignements +file.dataFilesTab.versions.widget.viewMoreInfo=Pour afficher plus d'informations sur les versions de cet ensemble de donnes et pour le modifier s'il s'agit de votre ensemble de donnes, consultez la version complte de cet ensemble {2}. +file.deleteDialog.tip=tes-vous sr(e) de vouloir supprimer cet ensemble de donnes? Vous ne pourrez pas annuler la suppression. +file.deleteDialog.header=Supprimer l'ensemble de donnes +file.deleteDraftDialog.tip=tes-vous sr(e) de vouloir supprimer cette version provisoire? 
Vous ne pourrez pas annuler la suppression de cette version. +file.deleteDraftDialog.header=Supprimer la version provisoire +file.deleteFileDialog.tip=Le ou les fichiers seront supprims ds que vous cliquerez sur le bouton \u00A0Enregistrer les modifications\u00A0 au bas de cette page. +file.deleteFileDialog.immediate=Le fichier sera supprim une fois que vous aurez cliqu sur le bouton Supprimer . +file.deleteFileDialog.multiple.immediate=Le ou les fichiers seront supprims lorsque vous aurez cliqu sur le bouton Supprimer . +file.deleteFileDialog.header=Supprimer les fichiers +file.deleteFileDialog.failed.tip=Les fichiers ne seront pas supprims des versions de l'ensemble de donnes publies prcdemment. +file.deaccessionDialog.tip=Si vous retirez de la diffusion un ensemble de donnes, le public ne pourra plus le consulter. +file.deaccessionDialog.version=Version +file.deaccessionDialog.reason.question1=Quelles versions dsirez-vous retirer de la diffusion? +file.deaccessionDialog.reason.question2=Quel est la raison du retrait? +file.deaccessionDialog.reason.selectItem.identifiable=Il y a des donnes permettant l'identification de personnes dans un ou plusieurs fichiers. +file.deaccessionDialog.reason.selectItem.beRetracted=L'article de recherche a t retir. +file.deaccessionDialog.reason.selectItem.beTransferred=L'ensemble de donnes a t transfr dans un autre dpt. +file.deaccessionDialog.reason.selectItem.IRB=Demande du Comit d'thique de la recherche +file.deaccessionDialog.reason.selectItem.legalIssue=Convention d'utilisation des donnes et question de droit +file.deaccessionDialog.reason.selectItem.notValid=Ensemble de donnes non valide +file.deaccessionDialog.reason.selectItem.other=Autre (Veuillez indiquer la raison dans l'espace fourni ci-dessous.) +file.deaccessionDialog.enterInfo=Veuillez entrer des renseignements supplmentaires sur la raison du retrait. 
+file.deaccessionDialog.leaveURL=S'il y a lieu, veuillez indiquer une adresse URL o cet ensemble de donnes peut tre consult aprs son retrait. +file.deaccessionDialog.leaveURL.watermark=Site facultatif pour l'ensemble de donnes, http://\u2026 +file.deaccessionDialog.deaccession.tip=tes-vous sr(e) de vouloir procder au retrait? La ou les versions slectionnes ne pourront plus tre consultes par le public. +file.deaccessionDialog.deaccessionDataset.tip=tes-vous sr(e) de vouloir retir cet ensemble de donnes? Le public ne pourra plus le consulter. +file.deaccessionDialog.dialog.selectVersion.tip=Veuillez slectionner la ou les versions retirer de la diffusion. +file.deaccessionDialog.dialog.selectVersion.header=Veuillez slectionner la ou les versions. +file.deaccessionDialog.dialog.reason.tip=Veuillez slectionner la raison du retrait. +file.deaccessionDialog.dialog.reason.header=Veuillez slectionner la raison. +file.deaccessionDialog.dialog.url.tip=Veuillez indiquer une adresse URL de redirection valide. +file.deaccessionDialog.dialog.url.header=Adresse URL non valide +file.deaccessionDialog.dialog.textForReason.tip=Veuillez indiquer la raison du retrait. +file.deaccessionDialog.dialog.textForReason.header=Entrer des renseignements supplmentaires +file.deaccessionDialog.dialog.limitChar.tip=Le texte pour indiquer la raison du retrait ne peut dpasser 1000 caractres. +file.deaccessionDialog.dialog.limitChar.header=Limite de 1000 caractres +file.viewDiffDialog.header=Renseignements sur les diffrences de version +file.viewDiffDialog.dialog.warning=Veuillez slectionner deux versions pour voir les diffrences. 
+file.viewDiffDialog.version=Version
+file.viewDiffDialog.lastUpdated=Dernière mise à jour
+file.viewDiffDialog.fileID=Identifiant du fichier
+file.viewDiffDialog.fileName=Nom
+file.viewDiffDialog.fileType=Type
+file.viewDiffDialog.fileSize=Taille
+file.viewDiffDialog.category=Libellés
+file.viewDiffDialog.description=Description
+file.viewDiffDialog.fileReplaced=Fichier remplacé
+file.viewDiffDialog.filesReplaced=Fichier(s) remplacé(s)
+file.viewDiffDialog.files.header=Fichiers
+file.viewDiffDialog.msg.draftFound= Ceci est la version provisoire.
+file.viewDiffDialog.msg.draftNotFound=La version provisoire n'a pas été trouvée.
+file.viewDiffDialog.msg.versionFound= Ceci est la version "{0}".
+file.viewDiffDialog.msg.versionNotFound=La version "{0}" n'a pas été trouvée.
+file.metadataTip=Conseil sur les métadonnées\u00A0: après avoir ajouté l'ensemble de données, cliquer sur le bouton \u00A0Modifier l'ensemble de données\u00A0 pour ajouter plus de métadonnées.
+file.addBtn=Sauvegarder l'ensemble de données
+file.dataset.allFiles=Tous les fichiers de cet ensemble de données
+
+file.downloadDialog.header=Télécharger le fichier
+file.downloadDialog.tip=Veuillez confirmer ou remplir l'information requise ci-dessous afin de télécharger les fichiers de cet ensemble de données.
+file.downloadDialog.termsTip=J'accepte ces conditions d'utilisation.
+
+file.search.placeholder=Chercher dans cet ensemble de données\u2026
+file.results.btn.sort=Trier
+file.results.btn.sort.option.nameAZ=Nom (A-Z)
+file.results.btn.sort.option.nameZA=Nom (Z-A)
+file.results.btn.sort.option.newest=Plus récent
+file.results.btn.sort.option.oldest=Plus ancien
+file.results.btn.sort.option.size=Taille
+file.results.btn.sort.option.type=Catégorie
+
+# dataset-widgets.xhtml=
+dataset.widgets.title=Vignette de l'ensemble de données + Widgets
+dataset.widgets.notPublished.why.header=Pourquoi faire appel aux widgets?
+dataset.widgets.notPublished.why.reason1=Augmente la visibilit de vos donnes en vous permettant d'intgrer votre dataverse et les ensembles de donnes dans votre site web personnel ou de projet. +dataset.widgets.notPublished.why.reason2=Permet aux autres de parcourir votre dataverse ainsi que vos ensembles de donnes sans quitter votre site personnel ou de projet. +dataset.widgets.notPublished.how.header=Comment utiliser les widgets +dataset.widgets.notPublished.how.tip1=Pour pouvoir utiliser des widgets, votre dataverse et vos ensembles de donnes doivent tre publis. +dataset.widgets.notPublished.how.tip2=Suite la publication, le code sera disponible sur cette page pour que vous puissiez le copier et l'ajouter votre site web personnel ou de projet. +dataset.widgets.notPublished.how.tip3=Avez-vous un site web OpenScholar? Si oui, apprenez-en davantage sur l'ajout de widgets Dataverse dans votre site web ici. +dataset.widgets.notPublished.getStarted=Pour dbuter, publiez votre dataverse. Pour en savoir davantage sur les widgets, consultez la section thme et widgets du guide d'utilisation. +dataset.widgets.editAdvanced=Modifier les options avances +dataset.widgets.editAdvanced.tip=Options avances – Options supplmentaires pour configurer votre widget sur votre site personnel ou de projet. +dataset.widgets.tip=Copiez et collez ce code dans le code HTML de votre site web. Pour en savoir davantage sur les widgets, consultez la section Thme et widgets du guide d'utilisation. +dataset.widgets.citation.txt=Citation de l'ensemble de donnes +dataset.widgets.citation.tip=Ajoutez la rfrence de votre ensemble de donnes votre site personnel ou de projet. +dataset.widgets.datasetFull.txt=Ensemble de donnes +dataset.widgets.datasetFull.tip=Permet aux visiteurs de votre site web d'tre en mesure d'afficher vos jeux de donnes, de tlcharger des fichiers, etc. 
+dataset.widgets.advanced.popup.header=Widgets\u00A0: Options avances +dataset.widgets.advanced.prompt=Expdier vers votre site web personnel l'URL prenne de la rfrence bibliographique de l'ensemble de donnes. +dataset.widgets.advanced.url.label=URL de votre site web personnel +dataset.widgets.advanced.url.watermark=http://www.exemple.com/nom-de-la-page +dataset.widgets.advanced.invalid.message=Veuillez saisir un URL valide +dataset.widgets.advanced.success.message=Mise jour russie de l'URL de votre site web personnel +dataset.widgets.advanced.failure.message=L'URL du site web personnel n'a pas t mis jour dans dataverse. +dataset.thumbnailsAndWidget.breadcrumbs.title=Vignette + Widgets +dataset.thumbnailsAndWidget.thumbnails.title=Vignette +dataset.thumbnailsAndWidget.widgets.title=Widgets +dataset.thumbnailsAndWidget.thumbnailImage=Image de la vignette +dataset.thumbnailsAndWidget.thumbnailImage.title=Le logo ou le fichier d'image que vous souhaitez voir afficher comme vignette pour cet ensemble de donnes. +dataset.thumbnailsAndWidget.thumbnailImage.tip=Les types d'images prises en charge sont JPG, TIF ou PNG et ne doivent pas tre suprieurs {0} Ko. La taille d'affichage maximale pour un fichier image en tant que vignette d'un ensemble de donnes est de 48 pixels de largeur par 48 pixels de haut. +dataset.thumbnailsAndWidget.thumbnailImage.default=Vignette par dfaut +dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable=Slectionnez le fichier disponible +dataset.thumbnailsAndWidget.thumbnailImage.selectThumbnail=Slectionnez la vignette +dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable.title=Slectionnez une vignette parmi les fichiers de donnes d'image disponibles provenant de votre ensemble de donnes. 
+dataset.thumbnailsAndWidget.thumbnailImage.uploadNew=Tlverser un nouveau fichier +dataset.thumbnailsAndWidget.thumbnailImage.uploadNew.title=Tlverser un fichier image en tant que vignette de votre ensemble de donnes, qui sera stock sparment des fichiers de donnes appartenant votre ensemble de donnes +dataset.thumbnailsAndWidget.thumbnailImage.upload=Tlverser une image +dataset.thumbnailsAndWidget.thumbnailImage.upload.invalidMsg=L'image n'a pas pu tre tlverse. Veuillez ressayer avec un fichier JPG, TIF ou PNG. +dataset.thumbnailsAndWidget.success=Vignette de l'ensemble de donnes mise jour. +dataset.thumbnailsAndWidget.removeThumbnail=Supprimer la vignette +dataset.thumbnailsAndWidget.removeThumbnail.tip=Vous ne supprimez que la vignette de l'ensemble de donnes et non pas le fichier d'image inclus dans votre ensemble de donnes. Pour ce faire, accdez la page Modifier les fichiers. +dataset.thumbnailsAndWidget.availableThumbnails=Vignettes disponibles +dataset.thumbnailsAndWidget.availableThumbnails.tip=Slectionnez une vignette partir d'un fichier de donnes provenant de votre ensemble de donnes. Revenir ensuite la page Vignette + Widgets pour enregistrer vos modifications. + +# file.xhtml +file.share.fileShare=Partager le fichier +file.share.fileShare.tip=Partager ce fichier sur vos mdias sociaux prfrs. +file.share.fileShare.shareText=Afficher ce fichier. 
+file.title.label=Titre +file.citation.label=Rfrence bibliographique +file.cite.downloadBtn=Citer le fichier de donnes +file.general.metadata.label=Mtadonnes gnrales +file.description.label=Description +file.tags.label=Libells +file.lastupdated.label=Dernire mise jour +file.DatasetVersion=Version + +file.metadataTab.fileMetadata.header=Mtadonnes du fichier +file.metadataTab.fileMetadata.persistentid.label=Identifiant prenne du fichier +file.metadataTab.fileMetadata.downloadUrl.label=URL de tlchargement +file.metadataTab.fileMetadata.unf.label=UNF +file.metadataTab.fileMetadata.size.label=Taille +file.metadataTab.fileMetadata.type.label=Catgorie +file.metadataTab.fileMetadata.description.label=Description +file.metadataTab.fileMetadata.publicationDate.label=Date de publication +file.metadataTab.fileMetadata.depositDate.label=Date de dpt +file.metadataTab.fitsMetadata.header=Mtadonnes FITS +file.metadataTab.provenance.header=Provenance du fichier +file.metadataTab.provenance.body=Information sur la provenance des fichiers venir dans une version ultrieure\u2026 + +file.versionDifferences.noChanges=Aucun changement associ cette version +file.versionDifferences.fileNotInVersion=Fichier non inclus dans cette version +file.versionDifferences.actionChanged=Chang +file.versionDifferences.actionAdded=Ajout +file.versionDifferences.actionRemoved=Supprim +file.versionDifferences.actionReplaced=Remplac +file.versionDifferences.fileMetadataGroupTitle=Mtadonnes du fichier +file.versionDifferences.fileTagsGroupTitle=Libells du fichier +file.versionDifferences.descriptionDetailTitle=Description +file.versionDifferences.fileNameDetailTitle=Nom du fichier +file.versionDifferences.fileAccessTitle=Accs aux fichiers +file.versionDifferences.fileRestricted=Accs rserv +file.versionDifferences.fileUnrestricted=Accs sans restrictions +file.versionDifferences.fileGroupTitle=Fichier + +# editdatafile.xhtml + +# editFilesFragment.xhtml +file.edit.error.file_exceeds_limit=Ce fichier dpasse la 
taille limite. +# File metadata error +file.metadata.datafiletag.not_tabular=Vous ne pouvez pas ajouter de libells de donnes tabulaires un fichier non tabulaire. + +# File Edit Success +file.message.editSuccess=Ce fichier a t mis jour. +file.message.replaceSuccess=Ce fichier a t remplac. + +# File Add/Replace operation messages +file.addreplace.file_size_ok=La taille du fichier est approprie. +file.addreplace.error.file_exceeds_limit=La taille de ce fichier ({0}) dpasse la limite de taille de {1} octet(s). +file.addreplace.error.dataset_is_null=L'ensemble de donnes ne peut tre nul. +file.addreplace.error.dataset_id_is_null=L'identifiant de l'ensemble de donnes ne peut tre nul. +find.dataset.error.dataset_id_is_null=L'accs un ensemble de donnes bas sur un identifiant prenne requiert qu'un paramtre de requte {0} soit prsent. +find.dataset.error.dataset.not.found.persistentId=L'ensemble de donnes bas sur l'identifiant prenne {0} est introuvable. +find.dataset.error.dataset.not.found.id=L'ensemble de donnes avec l'identifiant {0} est introuvable. +find.dataset.error.dataset.not.found.bad.id=Numro d'identifiant de l'ensemble de donnes incorrect\u00A0: {0}. +file.addreplace.error.dataset_id_not_found=Aucun ensemble de donnes n'a t trouv pour l'identifiant\u00A0: +file.addreplace.error.no_edit_dataset_permission=Vous n'avez pas la permission de modifier cet ensemble de donnes. +file.addreplace.error.filename_undetermined=Le nom du fichier ne peut tre tabli. +file.addreplace.error.file_content_type_undetermined=Le type de contenu du fichier ne peut tre tabli. +file.addreplace.error.file_upload_failed=Le tlversement du fichier a chou. +file.addreplace.error.duplicate_file=Ce fichier existe dj dans l'ensemble de donnes. +file.addreplace.error.existing_file_to_replace_id_is_null=L'identifiant du fichier existant remplacer doit tre fourni. +file.addreplace.error.existing_file_to_replace_not_found_by_id=Fichier de remplacement non trouv. 
Aucun fichier n'a t trouv pour l'identifiant\u00A0: {0} +file.addreplace.error.existing_file_to_replace_is_null=Le fichier remplacer ne peut tre nul. +file.addreplace.error.existing_file_to_replace_not_in_dataset=Le fichier remplacer n'appartient pas cet ensemble de donnes. +file.addreplace.error.existing_file_not_in_latest_published_version=Vous ne pouvez pas remplacer un fichier qui n'est pas dans le dernier ensemble de donnes publi. (Le fichier est non publi ou a t supprim d'une version prcdente.) +file.addreplace.content_type.header=Type de fichier diffrent +file.addreplace.error.replace.new_file_has_different_content_type=Le fichier d'origine ({0}) et le fichier de remplacement ({1}) sont des types de fichiers diffrents. +file.addreplace.error.replace.new_file_same_as_replacement=Vous ne pouvez pas remplacer un fichier avec exactement le mme fichier. +file.addreplace.error.unpublished_file_cannot_be_replaced=Vous ne pouvez pas remplacer un fichier non publi. Supprimez-le au lieu de le remplacer. +file.addreplace.error.ingest_create_file_err=Une erreur s'est produite lors de l'ajout du nouveau fichier. +file.addreplace.error.initial_file_list_empty=Une erreur s'est produite et le nouveau fichier n'a pas t ajout. +file.addreplace.error.initial_file_list_more_than_one=Vous ne pouvez pas remplacer un seul fichier par plusieurs fichiers. Le fichier que vous avez tlvers a t ingr dans plusieurs fichiers. +file.addreplace.error.final_file_list_empty=Il n'y a pas de fichiers ajouter. (Cette erreur ne devrait pas se produire si la squence des tapes a t respecte.) +file.addreplace.error.only_replace_operation=Ceci ne devrait tre appel que pour les oprations de remplacement de fichier! +file.addreplace.error.failed_to_remove_old_file_from_dataset=Impossible de retirer un ancien fichier du nouvel ensemble de donnes versionn. +file.addreplace.error.add.add_file_error=Impossible d'ajouter un fichier l'ensemble de donnes. 
+file.addreplace.error.phase2_called_early_no_new_files=Une erreur s'est produite lors de l'enregistrement de l'ensemble de donnes. Aucun nouveau fichier n'a t trouv. +file.addreplace.success.add=Le fichier a bien t ajout! +file.addreplace.success.replace=Le fichier a bien t remplac! +file.addreplace.error.auth=La cl API n'est pas valide. +file.addreplace.error.invalid_datafile_tag=Libell de donnes tabulaires non valide\u00A0: + +# 500.xhtml +error.500.page.title=500 - Erreur interne du serveur +error.500.message=Erreur interne du serveur - Une erreur inattendue s'est produite, aucune information supplmentaire n'est disponible. + +# 404.xhtml= +error.404.page.title=404 - Page non trouve +error.404.message=Page non trouve - La page que vous cherchez n'a pas t trouve. + +# 403.xhtml +error.403.page.title=403 - Non autoris +error.403.message=Non autoris - Vous n'tes pas autoris voir cette page. + +# general error - support message= +error.support.message=Si vous pensez qu'il s'agit d'une erreur, veuillez contacter {0} pour obtenir de l'aide. + +# citation-frame.xhtml= +citationFrame.banner.message=Si le site ci-dessous ne se charge pas, les donnes archives sont disponibles dans {0} {1}. 
{2} +citationFrame.banner.message.here=ici +citationFrame.banner.closeIcon=Fermer ce message, aller dans l'ensemble de donnes +citationFrame.banner.countdownMessage=Ce message se fermera dans +citationFrame.banner.countdownMessage.seconds=secondes + +# EditDatafilesPage.java +file.edit.duplicate.message=Le fichier suivant est un doublon d'un fichier dj tlvers\u00A0: +file.edit.duplicates.message=Les fichiers suivants sont des doublons d'un (de) fichier(s) dj tlvers(s)\u00A0: +file.edit.exist.message=Les fichiers suivants font dj partie de l'ensemble de donnes\u00A0: +file.edit.skip.message=(ignor) + +# DTA117FileReader.java +dta.datafile.error=ce module d'extension (plugin) ne prend pas en charge les fichiers externes de donnes brutes +dta.readheader.error=Libell de version inattendue trouv\u00A0: {0}; valeur attendue\u00A0: 117. +dta.datasettimestamp.error=Longueur invalide/inattendue de l'horodatage dans l'en-tte du DTA117. +dta.variabletype.label.error=Libell de type non reconnu\u00A0: {0} pour le type de valeur Stata (court) {1}. +dta.variabletype.value.error=valeur de type de variable inconnue rencontre\u00A0: {0} +dta.vartype.error=Type de variable indfini rencontr dans readData() +dta.datasection.error=Type de variable inconnu rencontr lors de la lecture de la section de donnes\u00A0: {0} +dta.byteoffset.error=Nombre inattendu d'octets lus pour la range de donnes {0}; {1} attendu(s), {2} lu(s). 
+dta.readSTRLs.failure=Impossible de lire une paire intermdiaire v,o pour la variable {0}, observation {1} +dta.readSTRLs.illegal=Valeur v,o interdite\u00A0: {0} pour la variable {1}, observation {2} +dta.readGSO.failure=Impossible de lire la valeur GSO pour {0} +dta.readGSO.string.error=Chane GSO indisponible pour la valeur v,o {0} +dta.readlabels.total.error=incompatibilit de lecture dans readLabels() +dta.readlabels.value.error=incompatibilit de lecture dans readLabels() 2 +dta.calculatebytes.length.error=Le tableau des variables internes de dcalages d'octets n'est pas correctement configur +dta.calculatebytes.offset.error=variable de dcalage d'octet incorrect\u00A0: {0} +dta.variabletypes.error=Les types de variables internes ne sont pas correctement configurs +dta.variabletype.error=Type de variable vide dans la tentative de recherche sur la longueur d'octet. +dta.invalidstrf.error=STRF invalide rencontr\u00A0: {0} +dta.unknown.variable=Type de variable inconnue ou invalide\u00A0: {0} +dta.readbytes.error=DataReader.readBytes appel lire un nombre d'octets ngatifs ou gal 0. +dta.readbytes.buffer=TD - Surcharge de mmoire tampon +dta.readbytes.premature=a atteint la fin du flux de donnes prmaturment. +dta.readdoubleInt.error=Ordre d'octet non dtermin pour la lecture de valeurs numriques. +dta.bytestoint.error=Nombre d'octets non pris en charge dans un entier\u00A0: {0} +dta.bytestosignedInt.error=Nombre d'octets non pris en charge pour entier sign\u00A0: {0} +dta.bytestolong.error=Nombre erron d'octets dans bytesToLong(). +dta.characterlimit.error=limite atteinte pour le nombre de caractres dans la balise de section +dta.negativenumber.error=Nombre d'octets ngatifs dans skipDefinedSection(balise) +dta.opentag.error=la balise d'ouverture doit tre une chane non vide. +dta.sectiontag.error=Vrification des balises de section sur les tampons d'octets non encore implmente. 
+dta.opentag.missing=Impossible de lire la balise d'ouverture {0} +dta.closetag.error=la balise de fermeture doit tre une chane non vide. +dta.closetag.missing=Impossible de lire la balise de fermeture {0} +dta.bufferoverflow.error=Surcharge de mmoire tampon dans le DataReader. + + +# New labels + +main_dataverse_create=Crer mon Dataverse +main_or=ou +main_explore=Explorer +main_search=Chercher +main_dataverse_slogan=Plate-forme des donnes de recherche de Scholars Portal. Publiez et suivez vos donnes, dcouvrez et rutilisez les donnes des autres! + +#EMailValidator +emailvalidator.notValid={0} n'est pas une adresse courriel valide. + +#dataset.xhtml +Abbreviation=Abrviation +Actions\u0020to\u0020Minimize\u0020Losses=Mesures visant minimiser les pertes +Actual=Relle +Affiliation=Affiliation +Agency=Organisme +Alternative\u0020Title=Autre titre +Author=Auteur +Bandpass=Largeur de bande +Cell\u0020Type=Type de cellule +Central\u0020Wavelength\u0020(m)=Longueur d'onde centrale (m) +Characteristic\u0020of\u0020Sources\u0020Noted=Caractristiques des sources notes +Characteristics\u0020of\u0020Data\u0020Collection\u0020Situation=Caractristiques de la collecte de donnes +Citation=Rfrence +City=Ville +Cleaning\u0020Operations=Oprations de nettoyage +Collection\u0020Mode=Mode de collecte +Collector\u0020Training=Formation du responsable de la collecte de donnes +Contact=Personne-ressource +Control\u0020Operations=Oprations de contrle +Country\u0020/\u0020Nation=Pays / Nation +Data\u0020Collector=Responsable de la collecte de donnes +Data\u0020Sources=Sources de donnes +Dataset\u0020Date\u0020Range=Priode de l'ensemble de donnes +Date=Date +Date\u0020of\u0020Collection=Date de la collecte +Depositor=Dposant +Depth\u0020Coverage=tendue +Description=Description +Design\u0020Type=Type de modle +Distributor=Diffuseur +Documentation\u0020and\u0020Access\u0020to\u0020Sources=Documentation et accs aux sources +E-mail=Courriel +East\u0020Longitude=Longitude est +End=Fin 
+Estimates\u0020of\u0020Sampling\u0020Error=Estimation de l'erreur d'chantillonnage +Facility=Installation +Factor\u0020Type=Type de facteur +Formula=Formule +Fraction\u0020of\u0020Sky=Fraction de ciel +Frequency=Frquence +Geographic\u0020Bounding\u0020Box=Zone de dlimitation gographique +Geographic\u0020Coverage=Couverture gographique +Geographic\u0020Unit=Unit gographique +Grant\u0020Agency=Organisme subventionnaire +Grant\u0020Information=Renseignements sur la subvention +Grant\u0020Number=Numro de la subvention +ID\u0020Number=Numro d'identification +ID\u0020Type=Type d'identifiant +Identifier=Identifiant +Identifier\u0020Scheme=Schma de l'identifiant +Information=Renseignements +Instrument=Instrument +Issue=Numro +Journal=Revue +Keyword=Mot-cl +Logo\u0020URL=Adresse URL du logo +Major\u0020Deviations\u0020for\u0020Sample\u0020Design=carts importants pour le plan d'chantillonnage +Maximum=Maximum +Maximum\u0020(m)=Maximum (m) +Measurement\u0020Type=Type de mesure +Minimum=Minimum +Minimum\u0020(m)=Minimum (m) +Name=Nom +North\u0020Latitude=Latitude nord +Notes=Remarques +Object=Objet +Object\u0020Count=Nombre d'objets +Object\u0020Density=Densit de l'objet +Organism=Organisme +Origin\u0020of\u0020Sources=Origine des sources +Other=Autre +Other\u0020Forms\u0020of\u0020Data\u0020Appraisal=Autres formes d'valuation des donnes +Other\u0020ID=Autre identifiant +Other\u0020Measurement\u0020Type=Autre type de mesure +Other\u0020Organism=Autre organisme +Other\u0020References=Autres rfrences +Polarization=Polarisation +Producer=Producteur +Production\u0020Place=Endroit de production +Redshift\u0020Resolution=Rsolution du dcalage vers le rouge +Redshift\u0020Value=Valeur du dcalage vers le rouge +RedshiftType=TypeDeDcalageVersLeRouge +Related\u0020Datasets=Ensembles de donnes connexes +Related\u0020Material=Document connexe +Related\u0020Publication=Publication connexe +Response\u0020Rate=Taux de rponse +Sampling\u0020Procedure=Mthode d\u2019chantillonnage +Series=Srie 
+Sky\u0020Coverage=Partie du ciel couverte +Software=Logiciel +South\u0020Latitude=Latitude sud +Spatial\u0020Resolution=Rsolution spatiale +Spectral\u0020Resolution=Rsolution spectrale +Start=Dbut +State\u0020/\u0020Province=tat / Province +Study\u0020Level\u0020Error\u0020Notes=Remarques gnrales d'erreur +Subtitle=Sous-titre +Target\u0020Sample\u0020Size=Taille de l'chantillon cible +Technology\u0020Platform=Plateforme technologique +Technology\u0020Type=Type de technologie +Term=Terme +Text=Texte +Time\u0020Method=Mthode temporelle +Time\u0020Period\u0020Covered=Priode couverte +Time\u0020Resolution=Rsolution temporelle +Topic\u0020Classification=Classification des sujets +Type=Type +Type\u0020of\u0020Article=Type d'article +Type\u0020of\u0020Research\u0020Instrument=Type d'instrument de recherche +Unit\u0020of\u0020Analysis=Unit d'analyse +Universe=Univers +URL=Adresse URL +Version=Version +Vocabulary=Vocabulaire +Vocabulary\u0020URL=Adresse URL du vocabulaire +Volume=Volume +Wavelength\u0020Range=Gamme de longueurs d'onde +Weighting=Pondration +West\u0020Longitude=Longitude ouest + + +Citation\u0020Metadata=Mtadonnes bibliographiques +Geospatial\u0020Metadata=Mtadonnes gospatiales +Social\u0020Science\u0020and\u0020Humanities\u0020Metadata=Mtadonnes de sciences sociales et humaines +Astronomy\u0020and\u0020Astrophysics\u0020Metadata=Mtadonnes d'astronomie et d'astrophysique +Life\u0020Sciences\u0020Metadata=Mtadonnes des sciences de la vie +Journal\u0020Metadata=Mtadonnes de la revue + +file.dataFilesTab.fileRestrictions=Restrictions relatives aux fichiers +datset.replicationDataFor=Donnes de rplication pour\u00A0: + +#Permission.java +permission.addDataverseDataverse=Ajouter un dataverse l'intrieur d'un autre dataverse +permission.deleteDataset=Supprimer la version provisoire de l'ensemble de donnes +permission.deleteDataverse=Supprimer un dataverse non publi +permission.publishDataset=Publier un ensemble de donnes +permission.publishDataverse=Publier un 
dataverse +permission.managePermissionsDataset=Grer les autorisations pour un ensemble de donnes +permission.managePermissionsDataverse=Grer les autorisations pour un dataverse +permission.editDataset=diter les mtadonnes d'un ensemble de donnes +permission.editDataverse=diter les mtadonnes, les facettes, la personnalisation et les modles +permission.downloadFile=Tlcharger un fichier +permission.viewUnpublishedDataset=Consulter un ensemble de donnes non publi et ses fichiers +permission.viewUnpublishedDataverse=Consulter un dataverse non publi +permission.addDatasetDataverse=Ajouter un ensemble de donnes un dataverse + +#ManagePermissionsPage and ManageFilePermissionsPage +permission.roleWasRemoved=Le rle {0} associ {1} a t supprim. +permission.roleNotAbleToBeRemoved=L'attribution du rle n'a pu tre supprime. +permission.permissionsMissing=Les autorisations pour {0} sont manquantes. +permission.fileAccessGranted=L'accs aux fichiers demand par {0} a t accord. +permission.fileAccessRejected=L'accs aux fichiers demand par {0} a t refus. +permission.roleAssignedToFor=Rle {0} attribu {1} pour {2}. +permission.roleNotAbleToBeAssigned=II a t impossible d'attribuer le rle. +permission.defaultPermissionDataverseUpdated=Les autorisations par dfaut pour ce dataverse ont t mises jour. +permission.CannotAssigntDefaultPermissions=Impossible d'attribuer des autorisations par dfaut. +permission.errorAssigningRole=Erreur dans l'attribution du rle\u00A0: {0} +permission.updated=mis jour +permission.created=cr +permission.roleWas=Le rle tait {0}. Pour l'attribuer un utilisateur et/ou un groupe, cliquez sur le bouton Attribuer un rle un utilisateur/un groupe dans la section Utilisateurs/Groupes de cette page. +permission.roleNotSaved=Il a t impossible de sauvegarder le rle. 
+permission.anyoneWithAccount=Toute personne possédant un compte Dataverse
+
+#datasetFieldForEditFragment.xhtml
+dataset.AddReplication=Ajouter Données de réplication pour au Titre
+
+#result_message_only.html
+result.status=Statut
+result.role=Rôle
+result.to=À
+result.of=De
+result.result=Résultat
+result.results=Résultats
+
+#loginpage.xhtml bundle [fc.credential.title]. Username and Password translation
+Username=Nom d'utilisateur
+Password=Mot de passe
+
+#search-include-fragment.xhtml bundle[facetCategory.friendlyName]
+Dataverse\u0020Category=Catégorie Dataverse
+Publication\u0020Date=Date de publication
+Author-Name=Nom \u2014 Auteur
+Subject=Sujet
+Deposit\u0020Date=Date de dépôt
+File\u0020Type=Type de fichier
+File\u0020Tag=Libellé de fichier
+Access=Accès
+Keyword-Term=Mot-clé \u2014 Terme
+Author\u0020Affiliation=Affiliation de l'auteur
+Language=Langue
+Kind\u0020of\u0020Data=Type de données
+Publication\u0020Status=Statut de publication
+
+#dataverseuser.xhtml bundle [DataverseUserPage.editMode=='CREATE' ? 'Password' : 'New Password']
+user.password=Mot de passe
+user.newPassword=Nouveau mot de passe
+
+#mydata_fragment.xhtml
+mydataFragment.infoAccess=Voici tous les dataverses, ensembles de données et fichiers pour lesquels vous avez un accès. Vous pouvez filtrer la liste par statut de publication et par rôle.
+mydataFragment.moreResults=Voir plus de rsultats +mydataFragment.publicacionStatus=Statut de publication +mydataFragment.roles=Rles +mydataFragment.resultsByUserName=Rsultats par utilisateur +mydataFragment.search=Chercher dans mes donnes& + +Published=Publi +Unpublished=Non publi +Draft=Version provisoire +In\u0020Review=En rvision +Deaccessioned=Retir + +Admin=Administrateur systme +File\u0020Downloader=Utilisateur avec droits de tlchargement +Dataverse\u0020+\u0020Dataset\u0020Creator=Crateur de dataverses et d'ensembles de donnes +Dataverse\u0020Creator=Crateur de dataverses +Dataset\u0020Creator=Crateur d'ensembles de donnes +Contributor=Collaborateur +Curator=Intendant des donnes +Member=Membre + +#webapp/search/advanced.xhtml #{bundle[item.displayName] +Title=Titre +Description\u0020Text=Texte de description +Topic\u0020Classification\u0020Term=Classification sujet +Related\u0020Publication\u0020Citation=Rfrence bibliographique de la publication connexe +Related\u0020Publication\u0020ID\u0020Type=Type d'identifiant de la publication connexe +Related\u0020Publication\u0020ID\u0020Number=Identifiant de la publication connexe +Producer\u0020Name=Nom du producteur +Production\u0020Date=Date de production +Contributor\u0020Type=Type de collaborateur +Contributor\u0020Name=Nom du collaborateur +Distributor\u0020Name=Nom du diffuseur +Distribution\u0020Date=Date de diffusion +Time\u0020Period\u0020Covered\u0020Start=Dbut de la priode couverte +Time\u0020Period\u0020Covered\u0020End=Fin de la priode couverte +Series\u0020Name=Nom de la srie + +#SystemConfig +system.app.terms=Il n'y a pas de conditions d'utilisation associes cette installation de Dataverse. +system.api.terms=Il n'y a pas de conditions d'utilisation des API associes cette installation de Dataverse. + +#messages.xhtml +iqbs.message.validationErrorStrong=Erreur de validation - Les champs obligatoires ont t omis ou il y a eu une erreur de validation. 
Veuillez dfiler le menu vers le bas pour voir les dtails. +iqbs.message.success=Russi! +iqbs.message.info=Information +iqbs.message.error=Erreur + +#iqbs/messages.xhtml +Please=SVP +contact\u0020support=communiquer avec le service de soutien + +#LoginPage.java +login.UserName=Veuillez entrer un nom d'utilisateur. +login.Password=Veuillez entrer un mot de passe. + +# PasswordResetPage.java +passwordReset.initiated=Rinitialisation du mot de passe amorce + +# BuiltinUserPage +userPage.informationUpdated=L'information associe votre compte a bien t mise jour. +userPage.passwordChanged=Votre mot de passe a bien t modifi. +userPage.usernameIncorrect=Erreur dans le nom d'utilisateur ou l'adresse courriel. +userPage.passwordStillNull=Aucune valeur associe InputPassword +userPage.passwordNotComplex=Le mot de passe n'est pas suffisamment complexe. Le mot de passe doit comprendre au minimum une lettre, un chiffre et contenir au moins {0} caractres. +userPage.newPasswordNotBlank=le nouveau mot de passe n'est pas vide +userPage.newPasswordBlankRetype=Le nouveau mot de passe est vide\u00A0: saisir nouveau. +userPage.newPasswordBlank=le nouveau mot de passe est vide +userPage.passwordIncorrect=Le mot de passe est incorrect. +userPage.passwordNotBlank=le mot de passe actuel n'est pas vide +userPage.passwordBlankRetype=Le mot de passe est vide\u00A0: saisir nouveau. +userPage.passwordError=Erreur de mot de passe +userPage.passwordBlank=le mot de passe actuel est vide + +# DataRetrieverAPI +noResultsFound=Dsol, aucun rsultat ne correspond votre recherche. 
+ +dataverse.selected=Sélectionnés +dataverse.item.required=Obligatoire +dataverse.item.optional=Facultatif +dataverse.item.hidden=Information cachée + +dataset.category.documentation=Documentation +dataset.category.data=Données +dataset.category.code=Code + +Researcher=Chercheur +Research\u0020Project=Projet de recherche +Journal=Revue +Organizations\u0020or\u0020Institutions=Organisation ou établissement +Teaching\u0020Course=Cours +Uncategorized=Sans catégorie +Research\u0020Group=Groupe de recherche +Laboratory=Laboratoire + +Agricultural\u0020Sciences=Sciences de l\u2019agriculture +Arts\u0020and\u0020Humanities=Arts et sciences humaines +Astronomy\u0020and\u0020Astrophysics=Astronomie et astrophysique +Business\u0020and\u0020Management=Affaires et gestion +Chemistry=Chimie +Earth\u0020and\u0020Environmental\u0020Sciences=Sciences de la terre et de l'environnement +Engineering=Génie +Medicine,\u0020Health\u0020and\u0020Life\u0020Sciences=Médecine, santé et sciences de la vie +Computer\u0020and\u0020Information\u0020Science=Informatique et science de l'information +Law=Droit +Mathematical\u0020Sciences=Sciences mathématiques +Physics=Physique +Social\u0020Sciences=Sciences sociales +Other=Autre + + +# End of new labels + + +# Friendly AuthenticationProvider names +authenticationProvider.name.builtin=Dataverse +authenticationProvider.name.null=(Le fournisseur est inconnu) +authenticationProvider.name.github=GitHub +authenticationProvider.name.google=Google +authenticationProvider.name.orcid=ORCiD +authenticationProvider.name.orcid-sandbox=Bac à sable ORCiD +authenticationProvider.name.shib=Shibboleth + + + + \ No newline at end of file diff --git a/dataversedock/lang.properties/Bundle_fr.properties_utf b/dataversedock/lang.properties/Bundle_fr.properties_utf new file mode 100644 index 0000000..303cee4 --- /dev/null +++ b/dataversedock/lang.properties/Bundle_fr.properties_utf @@ -0,0 +1,1656 @@ +dataverse=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 
+newDataverse=\u004e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +hostDataverse=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0068\u00f4\u0074\u0065 +dataverses=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073 +passwd=\u004d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065 +dataset=\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +datasets=\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +newDataset=\u004e\u006f\u0075\u0076\u0065\u006c\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +files=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073 +file=\u0046\u0069\u0063\u0068\u0069\u0065\u0072 +restricted=\u0045\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9 +restrictedaccess=\u0041\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9 +find=\u0054\u0072\u006f\u0075\u0076\u0065\u0072 +search=\u0052\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065 +unpublished=\u004e\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9 +cancel=\u0041\u006e\u006e\u0075\u006c\u0065\u0072 +ok=OK +saveChanges=\u0045\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0073 +acceptTerms=\u0041\u0063\u0063\u0065\u0070\u0074\u0065\u0072 +submit=\u0053\u006f\u0075\u006d\u0065\u0074\u0074\u0072\u0065 +signup=\u0053\u0027\u0069\u006e\u0073\u0063\u0072\u0069\u0072\u0065 +login=\u0053\u0065\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u0072 +email=\u0043\u006f\u0075\u0072\u0072\u0069\u0065\u006c +account=\u0043\u006f\u006d\u0070\u0074\u0065 
+requiredField=\u0043\u0068\u0061\u006d\u0070\u0020\u006f\u0062\u006c\u0069\u0067\u0061\u0074\u006f\u0069\u0072\u0065 +new=\u004e\u006f\u0075\u0076\u0065\u0061\u0075 +identifier=\u0049\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074 +description=\u0044\u0065\u0073\u0063\u0072\u0069\u0070\u0074\u0069\u006f\u006e +subject=\u0053\u0075\u006a\u0065\u0074 +close=\u0046\u0065\u0072\u006d\u0065\u0072 +preview=\u0041\u0070\u0065\u0072\u00e7\u0075 +continue=\u0043\u006f\u006e\u0074\u0069\u006e\u0075\u0065\u0072 +name=\u004e\u006f\u006d +institution=\u00c9\u0074\u0061\u0062\u006c\u0069\u0073\u0073\u0065\u006d\u0065\u006e\u0074 +position=\u0050\u006f\u0073\u0074\u0065 +affiliation=\u0041\u0066\u0066\u0069\u006c\u0069\u0061\u0074\u0069\u006f\u006e +createDataverse=\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +remove=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072 +done=\u0054\u0065\u0072\u006d\u0069\u006e\u00e9 +editor=\u0043\u006f\u006c\u006c\u0061\u0062\u006f\u0072\u0061\u0074\u0065\u0075\u0072 +manager=\u0047\u0065\u0073\u0074\u0069\u006f\u006e\u006e\u0061\u0069\u0072\u0065 +curator=\u0049\u006e\u0074\u0065\u006e\u0064\u0061\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +explore=\u0045\u0078\u0070\u006c\u006f\u0072\u0065\u0072 +download=\u0054\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u0072 +deaccession=\u0052\u0065\u0074\u0072\u0061\u0069\u0074 +share=\u0050\u0061\u0072\u0074\u0061\u0067\u0065\u0072 +link=\u004c\u0069\u0065\u006e +linked=\u004c\u0069\u00e9 +harvested=\u004d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u00e9 +apply=\u0041\u0070\u0070\u006c\u0069\u0071\u0075\u0065\u0072 +add=\u0041\u006a\u006f\u0075\u0074\u0065\u0072 +delete=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072 +yes=\u004f\u0075\u0069 +no=\u004e\u006f\u006e +previous=\u0050\u0072\u00e9\u0063\u00e9\u0064\u0065\u006e\u0074 
+next=\u0053\u0075\u0069\u0076\u0061\u006e\u0074 +first=\u0050\u0072\u0065\u006d\u0069\u0065\u0072 +last=\u0044\u0065\u0072\u006e\u0069\u0065\u0072 +more=\u0050\u006c\u0075\u0073\u005c\u0075\u0032\u0030\u0032\u0036 +less=\u004d\u006f\u0069\u006e\u0073\u005c\u0075\u0032\u0030\u0032\u0036 +select=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u005c\u0075\u0032\u0030\u0032\u0036 +selectedFiles=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0073 +htmlAllowedTitle=\u0042\u0061\u006c\u0069\u0073\u0065\u0073\u0020\u0048\u0054\u004d\u004c\u0020\u0070\u0065\u0072\u006d\u0069\u0073\u0065\u0073 +htmlAllowedMsg=\u0043\u0065\u0020\u0063\u0068\u0061\u006d\u0070\u0020\u0070\u0072\u0065\u006e\u0064\u0020\u0073\u0065\u0075\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u0065\u006e\u0020\u0063\u0068\u0061\u0072\u0067\u0065\u0020\u0063\u0065\u0072\u0074\u0061\u0069\u006e\u0065\u0073\u0020\u003c\u0073\u0070\u0061\u006e\u0020\u0063\u006c\u0061\u0073\u0073\u003d\u0022\u0074\u0065\u0078\u0074\u002d\u0069\u006e\u0066\u006f\u0020\u0070\u006f\u0070\u006f\u0076\u0065\u0072\u0048\u0054\u004d\u004c\u0022\u003e\u0062\u0061\u006c\u0069\u0073\u0065\u0073\u0020\u0048\u0054\u004d\u004c\u003c\u002f\u0073\u0070\u0061\u006e\u003e\u002e 
+htmlAllowedTags=\u003c\u0061\u003e\u002c\u0020\u003c\u0062\u003e\u002c\u0020\u003c\u0062\u006c\u006f\u0063\u006b\u0071\u0075\u006f\u0074\u0065\u003e\u002c\u0020\u003c\u0062\u0072\u003e\u002c\u0020\u003c\u0063\u006f\u0064\u0065\u003e\u002c\u0020\u003c\u0064\u0065\u006c\u003e\u002c\u0020\u003c\u0064\u0064\u003e\u002c\u0020\u003c\u0064\u006c\u003e\u002c\u0020\u003c\u0064\u0074\u003e\u002c\u0020\u003c\u0065\u006d\u003e\u002c\u0020\u003c\u0068\u0072\u003e\u002c\u0020\u003c\u0068\u0031\u003e\u002d\u003c\u0068\u0033\u003e\u002c\u0020\u003c\u0069\u003e\u002c\u0020\u003c\u0069\u006d\u0067\u003e\u002c\u0020\u003c\u006b\u0062\u0064\u003e\u002c\u0020\u003c\u006c\u0069\u003e\u002c\u0020\u003c\u006f\u006c\u003e\u002c\u0020\u003c\u0070\u003e\u002c\u0020\u003c\u0070\u0072\u0065\u003e\u002c\u0020\u003c\u0073\u003e\u002c\u0020\u003c\u0073\u0075\u0070\u003e\u002c\u0020\u003c\u0073\u0075\u0062\u003e\u002c\u0020\u003c\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u002c\u0020\u003c\u0073\u0074\u0072\u0069\u006b\u0065\u003e\u002c\u0020\u003c\u0075\u006c\u003e +# dataverse_header.xhtml= +header.status.header=\u00c9\u0074\u0061\u0074 +header.search.title=\u0043\u0068\u0065\u0072\u0063\u0068\u0065\u0072\u0020\u0064\u0061\u006e\u0073\u0020\u0074\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u005c\u0075\u0032\u0030\u0032\u0036 +header.about=\u00c0\u0020\u0070\u0072\u006f\u0070\u006f\u0073 +header.support=\u0053\u006f\u0075\u0074\u0069\u0065\u006e +header.guides=\u0047\u0075\u0069\u0064\u0065\u0073 +header.guides.user=\u0047\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e +header.guides.developer=\u0047\u0075\u0069\u0064\u0065\u0020\u0064\u0075\u0020\u0064\u00e9\u0076\u0065\u006c\u006f\u0070\u0070\u0065\u0075\u0072 +header.guides.installation=\u0047\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0069\u006e\u0073\u0074\u0061\u006c\u006c\u0061\u0074\u0069\u006f\u006e 
+header.guides.api=\u0047\u0075\u0069\u0064\u0065\u0020\u0041\u0050\u0049 +header.guides.admin=\u0047\u0075\u0069\u0064\u0065\u0020\u0064\u0065\u0020\u006c\u0027\u0061\u0064\u006d\u0069\u006e\u0069\u0073\u0074\u0072\u0061\u0074\u0065\u0075\u0072 +header.signUp=\u0053\u0027\u0069\u006e\u0073\u0063\u0072\u0069\u0072\u0065 +header.logOut=\u0053\u0065\u0020\u0064\u00e9\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u0072 +header.accountInfo=\u0052\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065 +header.dashboard=\u0054\u0061\u0062\u006c\u0065\u0061\u0075\u0020\u0064\u0065\u0020\u0062\u006f\u0072\u0064 +header.user.selectTab.dataRelated=\u004d\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +header.user.selectTab.notifications=\u0041\u0076\u0069\u0073 +header.user.selectTab.accountInfo=\u0052\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065 +header.user.selectTab.groupsAndRoles=\u0047\u0072\u006f\u0075\u0070\u0065\u0073\u0020\u002b\u0020\u0072\u00f4\u006c\u0065\u0073 +header.user.selectTab.apiToken=\u004a\u0065\u0074\u006f\u006e\u0020\u0041\u0050\u0049 +# dataverse_template.xhtml= 
+head.meta.description=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0065\u0073\u0074\u0020\u0075\u006e\u0020\u006c\u006f\u0067\u0069\u0063\u0069\u0065\u006c\u0020\u006c\u0069\u0062\u0072\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0020\u0070\u0061\u0072\u0074\u0061\u0067\u0065\u002c\u0020\u006c\u0061\u0020\u0063\u0069\u0074\u0061\u0074\u0069\u006f\u006e\u0020\u0065\u0074\u0020\u006c\u0027\u0061\u0072\u0063\u0068\u0069\u0076\u0061\u0067\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0066\u006f\u0075\u0072\u006e\u0069\u0074\u0020\u0061\u0075\u0078\u0020\u0067\u0065\u0073\u0074\u0069\u006f\u006e\u006e\u0061\u0069\u0072\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0075\u006e\u0065\u0020\u0069\u006e\u0066\u0072\u0061\u0073\u0074\u0072\u0075\u0063\u0074\u0075\u0072\u0065\u0020\u0073\u006f\u006c\u0069\u0064\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0068\u00e9\u0062\u0065\u0072\u0067\u0065\u0072\u0020\u0065\u0074\u0020\u0061\u0072\u0063\u0068\u0069\u0076\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0065\u0074\u0020\u006f\u0066\u0066\u0072\u0065\u0020\u0061\u0075\u0078\u0020\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0075\u0072\u0073\u0020\u0075\u006e\u0065\u0020\u0073\u006f\u006c\u0075\u0074\u0069\u006f\u006e\u0020\u0070\u006f\u0075\u0072\u0020\u0070\u0061\u0072\u0074\u0061\u0067\u0065\u0072\u0020\u0066\u0061\u0063\u0069\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u006c\u0065\u0075\u0072\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0065\u0074\u0020\u0065\u006e\u0020\u006f\u0062\u0074\u0065\u006e\u0069\u0072\u0020\u006c\u0065\u0020\u0063\u0072\u00e9\u0064\u0069\u0074\u002e +body.skip=\u0050\u0061\u0073\u0073\u0065\u0072\u0020\u0061\u0075\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0075\u0020\u0070\u0072\u0069\u006e\u0063\u0069\u0070\u0061\u006c +# 
dataverse_footer.xhtml= +footer.copyright=\u0044\u0072\u006f\u0069\u0074\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0073\u0020\u0026\u0023\u0031\u0036\u0039\u003b\u0020\u007b\u0030\u007d +footer.widget.datastored=\u004c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0073\u006f\u006e\u0074\u0020\u0061\u0072\u0063\u0068\u0069\u0076\u00e9\u0065\u0073\u0020\u0070\u0061\u0072\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u007b\u0030\u007d\u002e +footer.widget.login=\u0053\u0065\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u0072\u0020\u00e0 +footer.privacyPolicy=\u0050\u006f\u006c\u0069\u0074\u0069\u0071\u0075\u0065\u0020\u0064\u0065\u0020\u0070\u0072\u006f\u0074\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0076\u0069\u0065\u0020\u0070\u0072\u0069\u0076\u00e9\u0065 +footer.poweredby=\u0046\u006f\u0075\u0072\u006e\u0069\u0020\u0070\u0061\u0072 +footer.dataverseProject=\u004c\u0065\u0020\u0070\u0072\u006f\u006a\u0065\u0074\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +# messages.xhtml=\u0023\u0020\u006d\u0065\u0073\u0073\u0061\u0067\u0065\u0073\u002e\u0078\u0068\u0074\u006d\u006c +messages.error=\u0045\u0072\u0072\u0065\u0075\u0072 +messages.success=\u004f\u0070\u00e9\u0072\u0061\u0074\u0069\u006f\u006e\u0020\u0072\u00e9\u0075\u0073\u0073\u0069\u0065\u0021 +messages.info=\u0049\u006e\u0066\u006f +messages.validation=\u0045\u0072\u0072\u0065\u0075\u0072\u0020\u0064\u0065\u0020\u0076\u0061\u006c\u0069\u0064\u0061\u0074\u0069\u006f\u006e 
+messages.validation.msg=\u0044\u0065\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u0072\u0065\u0071\u0075\u0069\u0073\u0020\u0073\u006f\u006e\u0074\u0020\u006d\u0061\u006e\u0071\u0075\u0061\u006e\u0074\u0073\u0020\u006f\u0075\u0020\u0065\u006e\u0063\u006f\u0072\u0065\u0020\u0075\u006e\u0065\u0020\u0065\u0072\u0072\u0065\u0075\u0072\u0020\u0064\u0065\u0020\u0076\u0061\u006c\u0069\u0064\u0061\u0074\u0069\u006f\u006e\u0020\u0065\u0073\u0074\u0020\u0073\u0075\u0072\u0076\u0065\u006e\u0075\u0065\u002e\u0020\u0046\u0061\u0069\u0074\u0065\u0073\u0020\u0064\u00e9\u0066\u0069\u006c\u0065\u0072\u0020\u0076\u0065\u0072\u0073\u0020\u006c\u0065\u0020\u0062\u0061\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0076\u006f\u0069\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u00e9\u0074\u0061\u0069\u006c\u0073\u002e +# contactFormFragment.xhtml= +contact.header=\u0043\u006f\u006d\u006d\u0075\u006e\u0069\u0071\u0075\u0065\u0072\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0065\u0020\u0073\u0065\u0072\u0076\u0069\u0063\u0065\u0020\u0064\u0065\u0020\u0073\u006f\u0075\u0074\u0069\u0065\u006e\u0020\u0053\u0063\u0068\u006f\u006c\u0061\u0072\u0073\u0020\u0050\u006f\u0072\u0074\u0061\u006c +contact.dataverse.header=\u0043\u006f\u006d\u006d\u0075\u006e\u0069\u0071\u0075\u0065\u0072\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0061\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u002d\u0072\u0065\u0073\u0073\u006f\u0075\u0072\u0063\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +contact.dataset.header=\u0043\u006f\u006d\u006d\u0075\u006e\u0069\u0071\u0075\u0065\u0072\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0061\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u002d\u0072\u0065\u0073\u0073\u006f\u0075\u0072\u0063\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 
+contact.to=\u0044\u0065\u0073\u0074\u0069\u006e\u0061\u0074\u0061\u0069\u0072\u0065 +contact.support=\u0053\u0065\u0072\u0076\u0069\u0063\u0065\u0020\u0064\u0065\u0020\u0073\u006f\u0075\u0074\u0069\u0065\u006e\u0020\u0064\u0065\u0020\u0053\u0063\u0068\u006f\u006c\u0061\u0072\u0073\u0020\u0050\u006f\u0072\u0074\u0061\u006c +contact.from=\u0045\u0078\u0070\u00e9\u0064\u0069\u0074\u0065\u0075\u0072 +contact.from.required=\u004c\u0027\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0064\u0065\u0020\u006c\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0065\u0073\u0074\u0020\u0072\u0065\u0071\u0075\u0069\u0073\u0065\u002e +contact.from.invalid=\u004c\u0027\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0065\u0073\u0074\u0020\u0069\u006e\u0076\u0061\u006c\u0069\u0064\u0065\u002e +contact.subject=\u004f\u0062\u006a\u0065\u0074 +contact.subject.required=\u0049\u006c\u0020\u0066\u0061\u0075\u0074\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u0075\u006e\u0020\u006f\u0062\u006a\u0065\u0074 +contact.subject.selectTab.top=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0027\u006f\u0062\u006a\u0065\u0074\u005c\u0075\u0032\u0030\u0032\u0036 +contact.subject.selectTab.support=\u0051\u0075\u0065\u0073\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0073\u006f\u0075\u0074\u0069\u0065\u006e +contact.subject.selectTab.dataIssue=\u0050\u0072\u006f\u0062\u006c\u00e8\u006d\u0065\u0020\u0063\u006f\u006e\u0063\u0065\u0072\u006e\u0061\u006e\u0074\u0020\u006c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +contact.msg=\u004d\u0065\u0073\u0073\u0061\u0067\u0065 +contact.msg.required=\u0055\u006e\u0020\u006d\u0065\u0073\u0073\u0061\u0067\u0065\u0020\u0064\u006f\u0069\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0072\u00e9\u0064\u0069\u0067\u00e9\u002e 
+contact.send=\u0045\u006e\u0076\u006f\u0079\u0065\u0072\u0020\u006c\u0065\u0020\u006d\u0065\u0073\u0073\u0061\u0067\u0065 +contact.question=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0072\u0065\u006d\u0070\u006c\u0069\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u0073\u0070\u0061\u0063\u0065\u0020\u0061\u0066\u0069\u006e\u0020\u0064\u0065\u0020\u0070\u0072\u006f\u0075\u0076\u0065\u0072\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u006e\u0027\u00ea\u0074\u0065\u0073\u0020\u0070\u0061\u0073\u0020\u0075\u006e\u0020\u0072\u006f\u0062\u006f\u0074\u002e +contact.sum.required=\u0056\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0065\u007a\u0020\u0065\u006e\u0074\u0072\u0065\u0072\u0020\u0075\u006e\u0065\u0020\u0076\u0061\u006c\u0065\u0075\u0072\u002e +contact.sum.invalid=\u0053\u006f\u006d\u006d\u0065\u0020\u0069\u006e\u0063\u006f\u0072\u0072\u0065\u0063\u0074\u0065\u002c\u0020\u0076\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0072\u00e9\u0065\u0073\u0073\u0061\u0079\u0065\u0072\u002e +contact.sum.converterMessage=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0065\u006e\u0074\u0072\u0065\u0072\u0020\u0075\u006e\u0020\u0063\u0068\u0069\u0066\u0066\u0072\u0065\u002e +contact.contact=\u0050\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u002d\u0072\u0065\u0073\u0073\u006f\u0075\u0072\u0063\u0065 +# dataverseuser.xhtml= +account.info=\u0052\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065 +account.edit=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065 +account.apiToken=API Token 
+user.isShibUser=\u004c\u0065\u0073\u0020\u0072\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u00e9\u0073\u0020\u006c\u006f\u0072\u0073\u0071\u0075\u0065\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u00e9\u0020\u0076\u0069\u0061\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0069\u006e\u0073\u0074\u0069\u0074\u0075\u0074\u0069\u006f\u006e\u006e\u0065\u006c\u002e +user.helpShibUserMigrateOffShibBeforeLink=\u0056\u006f\u0075\u0073\u0020\u0071\u0075\u0069\u0074\u0074\u0065\u007a\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u00e9\u0074\u0061\u0062\u006c\u0069\u0073\u0073\u0065\u006d\u0065\u006e\u0074\u003f\u0020\u0050\u0072\u0069\u00e8\u0072\u0065\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u0074\u0061\u0063\u0074\u0065\u0072 +user.helpShibUserMigrateOffShibAfterLink=\u0070\u006f\u0075\u0072\u0020\u006f\u0062\u0074\u0065\u006e\u0069\u0072\u0020\u0064\u0065\u0020\u006c\u0027\u0061\u0069\u0064\u0065\u002e 
+user.helpOAuthBeforeLink=\u0056\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0020\u007b\u0030\u007d\u0020\u0070\u006f\u0075\u0072\u0020\u0070\u006f\u0075\u0076\u006f\u0069\u0072\u0020\u0073\u0065\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u0072\u002e\u0020\u0053\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u0073\u006f\u0075\u0068\u0061\u0069\u0074\u0065\u007a\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u0076\u006f\u0073\u0020\u006d\u006f\u0064\u0065\u0073\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u006e\u0065\u0078\u0069\u006f\u006e\u002c\u0020\u0070\u0072\u0069\u00e8\u0072\u0065\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u0074\u0061\u0063\u0074\u0065\u0072 +user.helpOAuthAfterLink=\u0070\u006f\u0075\u0072\u0020\u006f\u0062\u0074\u0065\u006e\u0069\u0072\u0020\u0064\u0075\u0020\u0073\u006f\u0075\u0074\u0069\u0065\u006e\u002e +user.lostPasswdTip=\u0053\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u0070\u0065\u0072\u0064\u0075\u0020\u006f\u0075\u0020\u006f\u0075\u0062\u006c\u0069\u00e9\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u002c\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u0065\u007a\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u006f\u0075\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0027\u0065\u0073\u0070\u0061\u0063\u0065\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u006f\u0075\u0073\u0020\u0065\u0074\u0020\u0063\u006c\u0069\u0071\u0075\u0065\u007a\u0020\u0073\u0075\u0072\u0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u0053\u006f\u0075\u006d\u0065\u0074\u0074\u0072\u0065\u005c\
u0075\u0030\u0030\u0041\u0030\u00bb\u002e\u0020\u004e\u006f\u0075\u0073\u0020\u0076\u006f\u0075\u0073\u0020\u0065\u006e\u0076\u0065\u0072\u0072\u006f\u006e\u0073\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0070\u0061\u0072\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u002e +user.dataRelatedToMe=\u004d\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +wasCreatedIn=\u0061\u0020\u00e9\u0074\u00e9\u0020\u0063\u0072\u00e9\u00e9\u0020\u0064\u0061\u006e\u0073 +wasCreatedTo=\u0061\u0020\u00e9\u0074\u00e9\u0020\u0061\u006a\u006f\u0075\u0074\u00e9\u0020\u00e0 +wasSubmittedForReview=\u0061\u0020\u00e9\u0074\u00e9\u0020\u0073\u006f\u0075\u006d\u0069\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0066\u0069\u006e\u0020\u0064\u0027\u0065\u0078\u0061\u006d\u0065\u006e\u0020\u0065\u006e\u0020\u0076\u0075\u0065\u0020\u0064\u0027\u00ea\u0074\u0072\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0020\u0064\u0061\u006e\u0073 +wasPublished=\u0061\u0020\u00e9\u0074\u00e9\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0020\u0064\u0061\u006e\u0073 +wasReturnedByReviewer=\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u0074\u006f\u0075\u0072\u006e\u00e9\u0020\u0070\u0061\u0072\u0020\u006c\u0027\u0069\u006e\u0074\u0065\u006e\u0064\u0061\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0065 +# TODO: Confirm that "toReview" can be deleted. 
+toReview=\u004e\u0027\u006f\u0075\u0062\u006c\u0069\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0064\u0065\u0020\u006c\u0065\u0073\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u0072\u0020\u006f\u0075\u0020\u0064\u0065\u0020\u006c\u0065\u0073\u0020\u0072\u0065\u006e\u0076\u006f\u0079\u0065\u0072\u0020\u0061\u0075\u0020\u0063\u006f\u006c\u006c\u0061\u0062\u006f\u0072\u0061\u0074\u0065\u0075\u0072\u0021 +worldMap.added=\u004c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0027\u0075\u006e\u0065\u0020\u0063\u006f\u0075\u0063\u0068\u0065\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u0020\u006f\u006e\u0074\u0020\u00e9\u0074\u00e9\u0020\u0061\u006a\u006f\u0075\u0074\u00e9\u0065\u0073\u0020\u00e0\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +# Bundle file editors, please note that "notification.welcome" is used in a unit test.= +notification.welcome=\u0042\u0069\u0065\u006e\u0076\u0065\u006e\u0075\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u007b\u0030\u007d\u0021\u0020\u0043\u006f\u006d\u006d\u0065\u006e\u0063\u0065\u007a\u0020\u0064\u00e8\u0073\u0020\u006d\u0061\u0069\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u0065\u006e\u0020\u0061\u006a\u006f\u0075\u0074\u0061\u006e\u0074\u0020\u006f\u0075\u0020\u0065\u006e\u0063\u006f\u0072\u0065\u0020\u0065\u006e\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0061\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u0044\u0065\u0073\u0020\u0071\u0075\u0065\u0073\u0074\u0069\u006f\u006e\u0073\u003f\u0020\u0043\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0065\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u007b\u0031\u007d\u002e\u0020\u0056\u006f\u0075\u0073\u0020\u0076\u006f\u0075\u006c\u0065\u007a\u0020\u0066\u0061\u0069\u0072\u0065\u0020\u006c\u0027\u0065\u0073\u0073\u0061\u0069\u0020\u0064\u0065\u0073\u0020\u0063\u006f\
u006d\u0070\u006f\u0073\u0061\u006e\u0074\u0065\u0073\u0020\u0064\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u003f\u0020\u0045\u0073\u0073\u0061\u0079\u0065\u007a\u0020\u006e\u006f\u0074\u0072\u0065\u0020\u007b\u0032\u007d\u002e\u0020\u0020\u004e\u0027\u006f\u0075\u0062\u006c\u0069\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0064\u0065\u0020\u0076\u00e9\u0072\u0069\u0066\u0069\u0065\u0072\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u0062\u0069\u0065\u006e\u0020\u0072\u0065\u00e7\u0075\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0064\u0027\u0069\u006e\u0076\u0069\u0074\u0061\u0074\u0069\u006f\u006e\u0020\u0061\u0066\u0069\u006e\u0020\u0071\u0075\u0065\u0020\u006e\u006f\u0075\u0073\u0020\u0070\u0075\u0069\u0073\u0073\u0069\u006f\u006e\u0073\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u002e +notification.demoSite=\u0053\u0069\u0074\u0065\u0020\u0064\u0065\u0020\u0064\u00e9\u006d\u006f\u006e\u0073\u0074\u0072\u0061\u0074\u0069\u006f\u006e +notification.requestFileAccess=\u0044\u0065\u006d\u0061\u006e\u0064\u0065\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u007b\u0030\u007d\u002e +notification.grantFileAccess=\u0041\u0063\u0063\u00e8\u0073\u0020\u0061\u0063\u0063\u006f\u0072\u0064\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u007b\u0030\u007d\u002e 
+notification.rejectFileAccess=\u0044\u0065\u006d\u0061\u006e\u0064\u0065\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u0065\u0066\u0075\u0073\u00e9\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u007b\u0030\u007d\u002e +notification.createDataverse=\u007b\u0030\u007d\u0020\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0063\u0072\u00e9\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u007b\u0031\u007d\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0073\u0061\u0076\u006f\u0069\u0072\u0020\u0063\u0065\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0066\u0061\u0069\u0072\u0065\u0020\u0061\u0076\u0065\u0063\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0065\u0020\u007b\u0032\u007d\u002e +notification.dataverse.management.title=\u0041\u0064\u006d\u0069\u006e\u0069\u0073\u0074\u0072\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u002d\u0020\u0047\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 
+notification.createDataset=\u007b\u0030\u007d\u0020\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0063\u0072\u00e9\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u007b\u0031\u007d\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0073\u0061\u0076\u006f\u0069\u0072\u0020\u0063\u0065\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0066\u0061\u0069\u0072\u0065\u0020\u0061\u0076\u0065\u0063\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0065\u0020\u007b\u0032\u007d\u002e +notification.dataset.management.title=\u0041\u0064\u006d\u0069\u006e\u0069\u0073\u0074\u0072\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u002d\u0020\u0047\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 
+notification.wasSubmittedForReview=\u007b\u0030\u007d\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0073\u006f\u0075\u006d\u0069\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0076\u00e9\u0072\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0020\u0061\u0076\u0061\u006e\u0074\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0061\u006e\u0073\u0020\u007b\u0031\u007d\u002e\u0020\u004e\u0027\u006f\u0075\u0062\u006c\u0069\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0064\u0065\u0020\u006c\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u0072\u0020\u006f\u0075\u0020\u0064\u0065\u0020\u006c\u0065\u0020\u0072\u0065\u006e\u0076\u006f\u0079\u0065\u0072\u0020\u0061\u0075\u0020\u0063\u006f\u006c\u006c\u0061\u0062\u006f\u0072\u0061\u0074\u0065\u0075\u0072\u005c\u0021 +notification.wasReturnedByReviewer=\u007b\u0030\u007d\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u0074\u006f\u0075\u0072\u006e\u00e9\u0020\u0070\u0061\u0072\u0020\u006c\u0027\u0069\u006e\u0074\u0065\u006e\u0064\u0061\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0065\u0020\u007b\u0031\u007d\u002e +notification.wasPublished=\u007b\u0030\u007d\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u007b\u0031\u007d\u002e +notification.worldMap.added=\u007b\u0030\u007d\u002c\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0069\u0073\u0070\u006f\u0073\u0065\u0020\u006d\u0061\u0069\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u0064\u0027\u0075\u006e\u0065\u0020\u0063\u006f\u0075\u0063\u0068\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u002e 
+notification.maplayer.deletefailed=\u0049\u006d\u0070\u006f\u0073\u0073\u0069\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0061\u0020\u0063\u006f\u0075\u0063\u0068\u0065\u0020\u0063\u0061\u0072\u0074\u006f\u0067\u0072\u0061\u0070\u0068\u0069\u0071\u0075\u0065\u0020\u0061\u0073\u0073\u006f\u0063\u0069\u00e9\u0065\u0020\u0061\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u00e0\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u0065\u0073\u0074\u0072\u0065\u0069\u006e\u0074\u0020\u007b\u0030\u007d\u0020\u0070\u0072\u006f\u0076\u0065\u006e\u0061\u006e\u0074\u0020\u0064\u0065\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u002e\u0020\u0045\u0073\u0073\u0061\u0079\u0065\u007a\u0020\u0064\u0065\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u002c\u0020\u006f\u0075\u0020\u0063\u006f\u006e\u0074\u0061\u0063\u0074\u0065\u007a\u0020\u006c\u0065\u0020\u0073\u006f\u0075\u0074\u0069\u0065\u006e\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u0020\u0065\u0074\u002f\u006f\u0075\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0020\u0028\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u003a\u0020\u007b\u0031\u007d\u0029 +notification.generic.objectDeleted=\u004c\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006f\u0075\u0020\u006c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0076\u0069\u0073\u00e9\u0020\u0070\u0061\u0072\u0020\u0063\u0065\u0074\u0020\u0061\u0076\u0069\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u002e\u0020 
+notification.access.granted.dataverse=\u004c\u0065\u0020\u0072\u00f4\u006c\u0065\u0020\u007b\u0030\u007d\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0061\u0063\u0063\u006f\u0072\u0064\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u007b\u0031\u007d\u002e +notification.access.granted.dataset=\u004c\u0065\u0020\u0072\u00f4\u006c\u0065\u0020\u007b\u0030\u007d\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0061\u0063\u0063\u006f\u0072\u0064\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u007b\u0031\u007d\u002e +notification.access.granted.datafile=\u004c\u0065\u0020\u0072\u00f4\u006c\u0065\u0020\u007b\u0030\u007d\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0061\u0063\u0063\u006f\u0072\u0064\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0061\u006e\u0073\u0020\u007b\u0031\u007d\u002e +notification.access.granted.fileDownloader.additionalDataverse=\u007b\u0030\u007d\u0020\u0056\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u006d\u0061\u0069\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0074\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0065\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0020\u006f\u0075\u0020\u006e\u006f\u006e\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0020 
+notification.access.granted.fileDownloader.additionalDataset=\u007b\u0030\u007d\u0020\u0056\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u006d\u0061\u0069\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0074\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0073\u0020\u006f\u0075\u0020\u006e\u006f\u006e\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0073\u0020\u0071\u0075\u0069\u0020\u006f\u006e\u0074\u0020\u00e9\u0074\u00e9\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +notification.access.revoked.dataverse=\u0056\u006f\u0074\u0072\u0065\u0020\u0072\u00f4\u006c\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u007b\u0030\u007d\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u0074\u0069\u0072\u00e9\u002e +notification.access.revoked.dataset=\u0056\u006f\u0074\u0072\u0065\u0020\u0072\u00f4\u006c\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u007b\u0030\u007d\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u0074\u0069\u0072\u00e9\u002e +notification.access.revoked.datafile=\u0056\u006f\u0074\u0072\u0065\u0020\u0072\u00f4\u006c\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u007b\u0030\u007d\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u0074\u0069\u0072\u00e9\u002e 
+notification.checksumfail=\u0056\u006f\u0074\u0072\u0065\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u006d\u0065\u006e\u0074\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0022\u007b\u0030\u007d\u0022\u0020\u0061\u0020\u00e9\u0063\u0068\u006f\u0075\u00e9\u0020\u006c\u0061\u0020\u0076\u0061\u006c\u0069\u0064\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0073\u006f\u006d\u006d\u0065\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u0074\u0072\u00f4\u006c\u0065\u002e +notification.mail.import.filesystem=Dataset {2} ({0}/dataset.xhtml?persistentId={1}) has been successfully uploaded and verified. +notification.import.filesystem=\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u0064\u0061\u0074\u0061\u0073\u0065\u0074\u002e\u0078\u0068\u0074\u006d\u006c\u003f\u0070\u0065\u0072\u0073\u0069\u0073\u0074\u0065\u006e\u0074\u0049\u0064\u003d\u007b\u0031\u007d\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u007b\u0032\u007d\u0022\u003e\u007b\u0032\u007d\u003c\u002f\u0061\u003e\u002c\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0020\u0069\u006d\u0070\u006f\u0072\u0074\u00e9\u0020\u0064\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u00e0\u0020\u0070\u0061\u0072\u0074\u0069\u0072\u0020\u0064\u0075\u0020\u0073\u0079\u0073\u0074\u00e8\u006d\u0065\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0070\u0061\u0072\u0020\u006c\u0027\u0065\u006e\u0074\u0072\u0065\u006d\u0069\u0073\u0065\u0020\u0064\u0027\u0075\u006e\u0020\u0074\u0072\u0061\u0069\u0074\u0065\u006d\u0065\u006e\u0074\u0020\u0065\u006e\u0020\u006c\u006f\u0074\u002e 
+notification.import.checksum=\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u0064\u0061\u0074\u0061\u0073\u0065\u0074\u002e\u0078\u0068\u0074\u006d\u006c\u003f\u0070\u0065\u0072\u0073\u0069\u0073\u0074\u0065\u006e\u0074\u0049\u0064\u003d\u007b\u0030\u007d\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u007b\u0031\u007d\u0022\u003e\u007b\u0031\u007d\u003c\u002f\u0061\u003e\u002c\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0020\u0061\u006a\u006f\u0075\u0074\u00e9\u0020\u006c\u0065\u0073\u0020\u0073\u006f\u006d\u006d\u0065\u0073\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u0074\u0072\u00f4\u006c\u0065\u0020\u0064\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0070\u0061\u0072\u0020\u006c\u0027\u0065\u006e\u0074\u0072\u0065\u006d\u0069\u0073\u0065\u0020\u0064\u0027\u0075\u006e\u0020\u0074\u0072\u0061\u0069\u0074\u0065\u006d\u0065\u006e\u0074\u0020\u0065\u006e\u0020\u006c\u006f\u0074\u002e +removeNotification=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0027\u0061\u0076\u0069\u0073 +groupAndRoles.manageTips=\u0056\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0067\u00e9\u0072\u0065\u0072\u0020\u0074\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0073\u0020\u0064\u006f\u006e\u0074\u0020\u0076\u006f\u0075\u0073\u0020\u00ea\u0074\u0065\u0073\u0020\u006d\u0065\u006d\u0062\u0072\u0065\u0020\u0065\u0074\u0020\u006c\u0065\u0073\u0020\u0072\u00f4\u006c\u0065\u0073\u0020\u0071\u0075\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u006f\u006e\u0074\u0020\u00e9\u0074\u00e9\u0020\u0063\u006f\u006e\u0066\u0069\u00e9\u0073\u0020\u0065\u0074\u0020\u0079\u0020\u0061\u0076\u006f\u0069\u0072\u0020\u0061\u0063\u0063\u00e8\u0073\u002e 
+user.signup.tip=\u0050\u006f\u0075\u0072\u0071\u0075\u006f\u0069\u0020\u0073\u0065\u0020\u0063\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u003f\u0020\u0044\u0065\u0020\u0066\u0061\u00e7\u006f\u006e\u0020\u00e0\u0020\u0070\u006f\u0075\u0076\u006f\u0069\u0072\u0020\u0063\u0072\u00e9\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0070\u0072\u006f\u0070\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u006c\u0065\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0061\u006c\u0069\u0073\u0065\u0072\u002c\u0020\u0079\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u006f\u0075\u0020\u0065\u006e\u0063\u006f\u0072\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0064\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u00e0\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u002e\u0020 +user.signup.otherLogInOptions.tip=\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u006c\u006f\u0067\u0069\u006e\u0070\u0061\u0067\u0065\u002e\u0078\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u002d\u0020\u0053\u0065\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u0072\u0022\u003e\u0056\u006f\u0069\u0072\u0020\u006c\u0065\u0073\u0020\u0061\u0075\u0074\u0072\u0065\u0073\u0020\u006f\u0070\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u006e\u0065\u0078\u0069\u006f\u006e\u002e\u003c\u002f\u0061\u003e 
+user.username.illegal.tip=\u0056\u006f\u0074\u0072\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0064\u006f\u0069\u0074\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0072\u0020\u0065\u006e\u0074\u0072\u0065\u0020\u0032\u0020\u0065\u0074\u0020\u0036\u0030\u00a0\u0063\u0061\u0072\u0061\u0063\u0074\u00e8\u0072\u0065\u0073\u0020\u0065\u0074\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006c\u0065\u0074\u0074\u0072\u0065\u0073\u0020\u0061\u0020\u00e0\u0020\u007a\u002c\u0020\u006c\u0065\u0073\u0020\u0063\u0068\u0069\u0066\u0066\u0072\u0065\u0073\u0020\u0030\u0020\u00e0\u0020\u0039\u0020\u0065\u0074\u0020\u006c\u0065\u0020\u0063\u0061\u0072\u0061\u0063\u0074\u00e8\u0072\u0065\u0020\u0073\u006f\u0075\u006c\u0069\u0067\u006e\u00e9\u0020\u00ab\u00a0\u005f\u00a0\u00bb\u002e +user.username=\u004e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072 +user.username.taken=\u0043\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0065\u0073\u0074\u0020\u0064\u00e9\u006a\u00e0\u0020\u0070\u0072\u0069\u0073\u002e 
+user.username.invalid=\u0043\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0063\u006f\u006e\u0074\u0069\u0065\u006e\u0074\u0020\u0075\u006e\u0020\u0063\u0061\u0072\u0061\u0063\u0074\u00e8\u0072\u0065\u0020\u0069\u006e\u0076\u0061\u006c\u0069\u0064\u0065\u0020\u006f\u0075\u0020\u0065\u006e\u0066\u0072\u0065\u0069\u006e\u0074\u0020\u006c\u0061\u0020\u006c\u0069\u006d\u0069\u0074\u0065\u0020\u0064\u0065\u0020\u006c\u006f\u006e\u0067\u0075\u0065\u0075\u0072\u0020\u0028\u0032\u0020\u00e0\u0020\u0036\u0030\u0020\u0063\u0061\u0072\u0061\u0063\u0074\u00e8\u0072\u0065\u0073\u0029\u002e +user.username.valid=\u0043\u0072\u00e9\u0065\u007a\u0020\u0075\u006e\u0020\u006e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u0020\u0064\u0065\u0020\u0032\u0020\u00e0\u0020\u0036\u0030\u0020\u0063\u0061\u0072\u0061\u0063\u0074\u00e8\u0072\u0065\u0073\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u006c\u0065\u0074\u0074\u0072\u0065\u0073\u0020\u0028\u0061\u002d\u005a\u0029\u002c\u0020\u0064\u0065\u0073\u0020\u0063\u0068\u0069\u0066\u0066\u0072\u0065\u0073\u0020\u0028\u0030\u002d\u0039\u0029\u002c\u0020\u0064\u0065\u0073\u0020\u0074\u0069\u0072\u0065\u0074\u0073\u0020\u0028\u002d\u0029\u002c\u0020\u0064\u0065\u0073\u0020\u0074\u0072\u0061\u0069\u0074\u0073\u0020\u0064\u0065\u0020\u0073\u006f\u0075\u006c\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0028\u005f\u0029\u0020\u0065\u0074\u0020\u0064\u0065\u0073\u0020\u0070\u006f\u0069\u006e\u0074\u0073\u0020\u0028\u002e\u0029\u002e +user.noPasswd=\u0041\u0075\u0063\u0075\u006e\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065 +user.currentPasswd=\u004d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0061\u0063\u0074\u0075\u0065\u006c 
+user.currentPasswd.tip=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0065\u006e\u0074\u0072\u0065\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0061\u0063\u0074\u0075\u0065\u006c\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u002e +user.passwd.illegal.tip=\u004c\u0065\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0064\u006f\u0069\u0074\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0072\u0020\u0061\u0075\u0020\u006d\u006f\u0069\u006e\u0073\u0020\u0036\u0020\u0063\u0061\u0072\u0061\u0063\u0074\u00e8\u0072\u0065\u0073\u002c\u0020\u0079\u0020\u0063\u006f\u006d\u0070\u0072\u0069\u0073\u0020\u0075\u006e\u0065\u0020\u006c\u0065\u0074\u0074\u0072\u0065\u0020\u0065\u0074\u0020\u0075\u006e\u0020\u0063\u0068\u0069\u0066\u0066\u0072\u0065\u002c\u0020\u0065\u0074\u0020\u0064\u0065\u0073\u0020\u0063\u0061\u0072\u0061\u0063\u0074\u00e8\u0072\u0065\u0073\u0020\u0073\u0070\u00e9\u0063\u0069\u0061\u0075\u0078\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0073\u002e +user.rePasswd=\u0043\u006f\u006e\u0066\u0069\u0072\u006d\u0065\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0020\u0020 +user.rePasswd.tip=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0063\u006f\u006e\u0066\u0069\u0072\u006d\u0065\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u00e9\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u0075\u0073\u002e +user.firstName=\u0050\u0072\u00e9\u006e\u006f\u006d 
+user.firstName.tip=\u004c\u0065\u0020\u0070\u0072\u00e9\u006e\u006f\u006d\u0020\u006f\u0075\u0020\u006c\u0065\u0020\u006e\u006f\u006d\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0065\u007a\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u002e +user.lastName=\u004e\u006f\u006d +user.lastName.tip=\u004c\u0065\u0020\u006e\u006f\u006d\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0065\u007a\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u002e +user.email.tip=\u0055\u006e\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u0020\u0070\u0065\u0072\u006d\u0065\u0074\u0074\u0061\u006e\u0074\u0020\u0064\u0065\u0020\u0063\u006f\u006d\u006d\u0075\u006e\u0069\u0071\u0075\u0065\u0072\u0020\u0061\u0076\u0065\u0063\u0020\u0076\u006f\u0075\u0073\u002e +user.email.taken=\u0043\u0065\u0074\u0074\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0065\u0073\u0074\u0020\u0064\u00e9\u006a\u00e0\u0020\u0070\u0072\u0069\u0073\u0065\u002e +user.affiliation.tip=\u004c\u0027\u006f\u0072\u0067\u0061\u006e\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0061\u0071\u0075\u0065\u006c\u006c\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u00ea\u0074\u0065\u0073\u0020\u0061\u0066\u0066\u0069\u006c\u0069\u00e9\u0028\u0065\u0029\u002e +user.position=\u0050\u006f\u0073\u0074\u0065 
+user.position.tip=\u0056\u006f\u0074\u0072\u0065\u0020\u0072\u00f4\u006c\u0065\u0020\u006f\u0075\u0020\u0074\u0069\u0074\u0072\u0065\u0020\u0061\u0075\u0020\u0073\u0065\u0069\u006e\u0020\u0064\u0065\u0020\u006c\u0027\u006f\u0072\u0067\u0061\u006e\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0061\u0071\u0075\u0065\u006c\u006c\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u00ea\u0074\u0065\u0073\u0020\u0061\u0066\u0066\u0069\u006c\u0069\u00e9\u0028\u0065\u0029\u002c\u0020\u0070\u0061\u0072\u0020\u0065\u0078\u0065\u006d\u0070\u006c\u0065\u00a0\u003a\u0020\u0065\u006d\u0070\u006c\u006f\u0079\u00e9\u0028\u0065\u0029\u002c\u0020\u006d\u0065\u006d\u0062\u0072\u0065\u0020\u0064\u0075\u0020\u0063\u006f\u0072\u0070\u0073\u0020\u0070\u0072\u006f\u0066\u0065\u0073\u0073\u006f\u0072\u0061\u006c\u002c\u0020\u00e9\u0074\u0075\u0064\u0069\u0061\u006e\u0074\u0028\u0065\u0029\u002c\u0020\u0065\u0074\u0063\u002e\u0020 +user.acccountterms=\u0043\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0067\u00e9\u006e\u00e9\u0072\u0061\u006c\u0065\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e +user.acccountterms.tip=\u004c\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0027\u0061\u0070\u0070\u006c\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0020\u0065\u0074\u0020\u0064\u0065\u0073\u0020\u0073\u0065\u0072\u0076\u0069\u0063\u0065\u0073\u002e 
+user.acccountterms.required=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0063\u006f\u0063\u0068\u0065\u0072\u0020\u006c\u0061\u0020\u0063\u0061\u0073\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0063\u0063\u0065\u0070\u0074\u0065\u007a\u0020\u006c\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0067\u00e9\u006e\u00e9\u0072\u0061\u006c\u0065\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u002e +user.acccountterms.iagree=\u004a\u0027\u0061\u0069\u0020\u006c\u0075\u0020\u0065\u0074\u0020\u006a\u0027\u0061\u0063\u0063\u0065\u0070\u0074\u0065\u0020\u006c\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0067\u00e9\u006e\u00e9\u0072\u0061\u006c\u0065\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0073\u0075\u0073\u006d\u0065\u006e\u0074\u0069\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +user.createBtn=\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065 +user.updatePassword.welcome=\u0042\u0069\u0065\u006e\u0076\u0065\u006e\u0075\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u007b\u0031\u007d 
+user.updatePassword.warning=\u004c\u0065\u0073\u0020\u0065\u0078\u0069\u0067\u0065\u006e\u0063\u0065\u0073\u0020\u0072\u0065\u006c\u0061\u0074\u0069\u0076\u0065\u0073\u0020\u0061\u0075\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0065\u0074\u0020\u006c\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0067\u00e9\u006e\u00e9\u0072\u0061\u006c\u0065\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u006f\u006e\u0074\u0020\u00e9\u0074\u00e9\u0020\u006d\u0069\u0073\u0065\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u0020\u006c\u006f\u0072\u0073\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006e\u006f\u0074\u0072\u0065\u0020\u006e\u006f\u0075\u0076\u0065\u006c\u006c\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0034\u002e\u0030\u002e\u0020\u0043\u006f\u006d\u006d\u0065\u0020\u0063\u0027\u0065\u0073\u0074\u0020\u006c\u0061\u0020\u0070\u0072\u0065\u006d\u0069\u00e8\u0072\u0065\u0020\u0066\u006f\u0069\u0073\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u007a\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0064\u0065\u0070\u0075\u0069\u0073\u0020\u006c\u0061\u0020\u006d\u0069\u0073\u0065\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u002c\u0020\u0076\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0065\u007a\u0020\u0063\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0065\u0074\u0020\u0061\u0063\u0063\u0065\u0070\u0074\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006e\u006f\u0075\u0076\u0065\u006c\u006c\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0
067\u00e9\u006e\u00e9\u0072\u0061\u006c\u0065\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u002e +user.updatePassword.password=\u0043\u0068\u006f\u0069\u0073\u0069\u0072\u0020\u0075\u006e\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0064\u0027\u0061\u0075\u0020\u006d\u0069\u006e\u0069\u006d\u0075\u006d\u0020\u0073\u0069\u0078\u0020\u0063\u0061\u0072\u0061\u0063\u0074\u00e8\u0072\u0065\u0073\u0020\u0063\u006f\u006d\u0070\u006f\u0072\u0074\u0061\u006e\u0074\u0020\u0061\u0075\u0020\u006d\u006f\u0069\u006e\u0073\u0020\u0075\u006e\u0065\u0020\u006c\u0065\u0074\u0074\u0072\u0065\u0020\u0065\u0074\u0020\u0075\u006e\u0020\u0063\u0068\u0069\u0066\u0066\u0072\u0065\u002e +authenticationProvidersAvailable.tip=\u007b\u0030\u007d\u0049\u006c\u0020\u006e\u0027\u0079\u0020\u0061\u0020\u0061\u0075\u0063\u0075\u006e\u0020\u0073\u0079\u0073\u0074\u00e8\u006d\u0065\u0020\u0064\u0027\u0061\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0020\u0061\u0063\u0074\u0069\u0066\u007b\u0031\u007d\u0053\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u00ea\u0074\u0065\u0073\u0020\u0061\u0064\u006d\u0069\u006e\u0069\u0073\u0074\u0072\u0061\u0074\u0065\u0075\u0072\u0020\u0073\u0079\u0073\u0074\u00e8\u006d\u0065\u002c\u0020\u0076\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0065\u006e\u0020\u0061\u0075\u0074\u006f\u0072\u0069\u0073\u0065\u0072\u0020\u0075\u006e\u0020\u0061\u0075\u0020\u006d\u006f\u0079\u0065\u006e\u0020\u0064\u0065\u0020\u006c\u0027\u0041\u0050\u0049\u002e\u007b\u0032\u007d\u0053\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u006e\u0027\u00ea\u0074\u0065\u0073\u0020\u0070\u0061\u0073\u0020\u0061\u0064\u006d\u0069\u006e\u0069\u0073\u0074\u0072\u0061\u0074\u0065\u0075\u0072\u0020\u0073\u0079\u0073\u0074\u00e8\u006d\u0065\u002c\u0020\u0076\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0063\u006f\u006d\u006d\u0075\u006e\u0069\u0071\u0075\u0065\u0072\u0020\u0061\
u0076\u0065\u0063\u0020\u0063\u0065\u006c\u0075\u0069\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u00e9\u0074\u0061\u0062\u006c\u0069\u0073\u0073\u0065\u006d\u0065\u006e\u0074\u002e\u0020 +passwdVal.passwdReq.title=Your password must contain: +passwdVal.passwdReq.goodStrength =passwords of at least {0} characters are exempt from all other requirements +passwdVal.passwdReq.lengthReq =At least {0} characters +passwdVal.passwdReq.characteristicsReq =At least 1 character from {0} of the following types: +passwdVal.passwdReq.notInclude =It may not include: +passwdVal.passwdReq.consecutiveDigits =More than {0} numbers in a row +passwdVal.passwdReq.dictionaryWords =Dictionary words +passwdVal.passwdReq.unknownPasswordRule =Unknown, contact your administrator +#printf syntax used to pass to passay library +passwdVal.expireRule.errorCode =EXPIRED +passwdVal.expireRule.errorMsg =The password is over %1$s days old and has expired. +passwdVal.goodStrengthRule.errorMsg =Note: passwords are always valid with a %1$s or more character length regardless. +passwdVal.goodStrengthRule.errorCode =NO_GOODSTRENGTH +passwdVal.passwdReset.resetLinkTitle =Password Reset Link +passwdVal.passwdReset.resetLinkDesc =Your password reset link is not valid +passwdVal.passwdReset.valBlankLog =new password is blank +passwdVal.passwdReset.valFacesError =Password Error +passwdVal.passwdReset.valFacesErrorDesc =Please enter a new password for your account. +passwdVal.passwdValBean.warnDictionaryRead =Dictionary was set, but none was read in. 
+passwdVal.passwdValBean.warnDictionaryObj =PwDictionaries not set and no default password file found: +passwdVal.passwdValBean.warnSetStrength =The PwGoodStrength {0} value competes with the PwMinLength value of {1} and is added to {2} +#loginpage.xhtml=\u0023\u006c\u006f\u0067\u0069\u006e\u0070\u0061\u0067\u0065\u002e\u0078\u0068\u0074\u006d\u006c +login.System=\u0053\u0079\u0073\u0074\u00e8\u006d\u0065\u0020\u0064\u0027\u0061\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e +login.forgot.text=\u004d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u006f\u0075\u0062\u006c\u0069\u00e9\u003f +login.builtin=\u0043\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +login.institution=\u0043\u006f\u006d\u0070\u0074\u0065\u0020\u0069\u006e\u0073\u0074\u0069\u0074\u0075\u0074\u0069\u006f\u006e\u006e\u0065\u006c +login.institution.blurb=\u0043\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u006f\u0075\u0020\u0069\u006e\u0073\u0063\u0072\u0069\u0076\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u0063\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0069\u006e\u0073\u0074\u0069\u0074\u0075\u0074\u0069\u006f\u006e\u006e\u0065\u006c\u0020\u0026\u006d\u0064\u0061\u0073\u0068\u003b\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0061\u0063\u0063\u006f\u0075\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0065\u006e\u0020\u0061\u0070\u0070\u0072\u0065\u006e\u0064\u0072\u0065\u0020\u0064\u0061\u0076\u0061\u006e\u0074\u0061\u0067\u0065\u003c\u002f\u0061\u003e\u002e 
+login.institution.support.beforeLink=\u0056\u006f\u0075\u0073\u0020\u0071\u0075\u0069\u0074\u0074\u0065\u007a\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u00e9\u0074\u0061\u0062\u006c\u0069\u0073\u0073\u0065\u006d\u0065\u006e\u0074\u003f\u0020\u0050\u0072\u0069\u00e8\u0072\u0065\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u0074\u0061\u0063\u0074\u0065\u0072 +login.institution.support.afterLink=\u0070\u006f\u0075\u0072\u0020\u006f\u0062\u0074\u0065\u006e\u0069\u0072\u0020\u0064\u0065\u0020\u006c\u0027\u0061\u0069\u0064\u0065\u002e +login.builtin.credential.usernameOrEmail=\u004e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u002f\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c +login.builtin.credential.password=\u004d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065 +login.builtin.invalidUsernameEmailOrPassword=\u004c\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u002c\u0020\u006c\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u006f\u0075\u0020\u006c\u0065\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u00e9\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0070\u0061\u0073\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u002e\u0020\u0041\u0076\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0062\u0065\u0073\u006f\u0069\u006e\u0020\u0064\u0027\u0061\u0069\u0064\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0061\u0063\u0063\u00e9\u0064\u0065\u0072\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u003f +# how do we exercise login.error? Via a password upgrade failure? 
See https://github.com/IQSS/dataverse/pull/2922= +login.error=\u0055\u006e\u0065\u0020\u0065\u0072\u0072\u0065\u0075\u0072\u0020\u0073\u0027\u0065\u0073\u0074\u0020\u0070\u0072\u006f\u0064\u0075\u0069\u0074\u0065\u0020\u0061\u0075\u0020\u006d\u006f\u006d\u0065\u006e\u0074\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0076\u0061\u006c\u0069\u0064\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0075\u0020\u006e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u006f\u0075\u0020\u0064\u0075\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0065\u0073\u0073\u0061\u0079\u0065\u0072\u0020\u00e0\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u002e\u0020\u0053\u0069\u0020\u006c\u0065\u0020\u0070\u0072\u006f\u0062\u006c\u00e8\u006d\u0065\u0020\u0070\u0065\u0072\u0073\u0069\u0073\u0074\u0065\u002c\u0020\u0063\u006f\u006d\u006d\u0075\u006e\u0069\u0071\u0075\u0065\u007a\u0020\u0061\u0076\u0065\u0063\u0020\u0075\u006e\u0020\u0061\u0064\u006d\u0069\u006e\u0069\u0073\u0074\u0072\u0061\u0074\u0065\u0075\u0072\u002e +user.error.cannotChangePassword=\u0044\u00e9\u0073\u006f\u006c\u00e9\u002c\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0070\u0061\u0073\u0020\u00ea\u0074\u0072\u0065\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u00e9\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0063\u006f\u006e\u0074\u0061\u0063\u0074\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0061\u0064\u006d\u0069\u006e\u0069\u0073\u0074\u0072\u0061\u0074\u0065\u0075\u0072\u0020\u0073\u0079\u0073\u0074\u00e8\u006d\u0065\u002e +user.error.wrongPassword=\u0044\u00e9\u0073\u006f\u006c\u00e9\u002c\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0065\u0072\u0072\u006f\u006e\u006e\u00e9\u002e 
+login.button=\u0043\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u0063\u0020\u007b\u0030\u007d +login.button.orcid=Create or Connect your ORCID +# authentication providers=\u0023\u0020\u0061\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0020\u0070\u0072\u006f\u0076\u0069\u0064\u0065\u0072\u0073 +auth.providers.title=\u0041\u0075\u0074\u0072\u0065\u0073\u0020\u006f\u0070\u0074\u0069\u006f\u006e\u0073 +auth.providers.tip=\u0056\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0063\u006f\u006e\u0076\u0065\u0072\u0074\u0069\u0072\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0027\u0075\u006e\u0065\u0020\u0064\u0065\u0073\u0020\u006f\u0070\u0074\u0069\u006f\u006e\u0073\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u0075\u0073\u002e\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0061\u0063\u0063\u006f\u0075\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0045\u006e\u0020\u0061\u0070\u0070\u0072\u0065\u006e\u0064\u0072\u0065\u0020\u0064\u0061\u0076\u0061\u006e\u0074\u0061\u0067\u0065\u003c\u002f\u0061\u003e\u002e +auth.providers.title.builtin=\u004e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u002f\u0043\u006f\u0075\u0072\u0072\u0069\u0065\u006c +auth.providers.title.shib=\u0056\u006f\u0074\u0072\u0065\u0020\u00e9\u0074\u0061\u0062\u006c\u0069\u0073\u0073\u0065\u006d\u0065\u006e\u0074 +auth.providers.title.orcid=\u004f\u0052\u0043\u0049\u0044 +auth.providers.title.google=\u0047\u006f\u006f\u0067\u006c\u0065 
+auth.providers.title.github=\u0047\u0069\u0074\u0048\u0075\u0062 +auth.providers.blurb=\u0043\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u006f\u0075\u0020\u0069\u006e\u0073\u0063\u0072\u0069\u0076\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u0063\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u007b\u0030\u007d\u0020\u0026\u006d\u0064\u0061\u0073\u0068\u003b\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0031\u007d\u002f\u007b\u0032\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0061\u0063\u0063\u006f\u0075\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0065\u006e\u0020\u0061\u0070\u0070\u0072\u0065\u006e\u0064\u0072\u0065\u0020\u0064\u0061\u0076\u0061\u006e\u0074\u0061\u0067\u0065\u003c\u002f\u0061\u003e\u002e\u0020\u0056\u006f\u0075\u0073\u0020\u00e9\u0070\u0072\u006f\u0075\u0076\u0065\u007a\u0020\u0064\u0065\u0073\u0020\u0070\u0072\u006f\u0062\u006c\u00e8\u006d\u0065\u0073\u003f\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0063\u006f\u006e\u0074\u0061\u0063\u0074\u0065\u0072\u0020\u007b\u0033\u007d\u0020\u0070\u006f\u0075\u0072\u0020\u006f\u0062\u0074\u0065\u006e\u0069\u0072\u0020\u0064\u0065\u0020\u006c\u0027\u0061\u0069\u0064\u0065\u002e +auth.providers.persistentUserIdName.orcid=\u0049\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u004f\u0052\u0043\u0049\u0044 +auth.providers.persistentUserIdName.github=\u0049\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0047\u0069\u0074\u0048\u0075\u0062 
+auth.providers.persistentUserIdTooltip.orcid=\u004f\u0052\u0043\u0049\u0044\u0020\u0066\u006f\u0075\u0072\u006e\u0069\u0074\u0020\u0075\u006e\u0020\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u006e\u0075\u006d\u00e9\u0072\u0069\u0071\u0075\u0065\u0020\u0070\u00e9\u0072\u0065\u006e\u006e\u0065\u0020\u0071\u0075\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u0064\u0069\u0073\u0074\u0069\u006e\u0067\u0075\u0065\u0020\u0064\u0065\u0073\u0020\u0061\u0075\u0074\u0072\u0065\u0073\u0020\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0075\u0072\u0073\u002e +auth.providers.persistentUserIdTooltip.github=\u0047\u0069\u0074\u0048\u0075\u0062\u0020\u0061\u0074\u0074\u0072\u0069\u0062\u0075\u0065\u0020\u0075\u006e\u0020\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0075\u006e\u0069\u0071\u0075\u0065\u0020\u00e0\u0020\u0063\u0068\u0061\u0071\u0075\u0065\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u002e +auth.providers.orcid.insufficientScope=Dataverse was not granted the permission to read user data from ORCID. 
+# Friendly AuthenticationProvider names=\u0023\u0020\u0046\u0072\u0069\u0065\u006e\u0064\u006c\u0079\u0020\u0041\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0050\u0072\u006f\u0076\u0069\u0064\u0065\u0072\u0020\u006e\u0061\u006d\u0065\u0073 +authenticationProvider.name.builtin=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +authenticationProvider.name.null=\u0028\u004c\u0065\u0020\u0066\u006f\u0075\u0072\u006e\u0069\u0073\u0073\u0065\u0075\u0072\u0020\u0065\u0073\u0074\u0020\u0069\u006e\u0063\u006f\u006e\u006e\u0075\u0029 +authenticationProvider.name.github=\u0047\u0069\u0074\u0048\u0075\u0062 +authenticationProvider.name.google=\u0047\u006f\u006f\u0067\u006c\u0065 +authenticationProvider.name.orcid=\u004f\u0052\u0043\u0069\u0044 +authenticationProvider.name.orcid-sandbox=\u0042\u0061\u0063\u0020\u00e0\u0020\u0073\u0061\u0062\u006c\u0065\u0020\u004f\u0052\u0043\u0069\u0044 +authenticationProvider.name.shib=\u0053\u0068\u0069\u0062\u0062\u006f\u006c\u0065\u0074\u0068 +#confirmemail.xhtml= +confirmEmail.pageTitle=\u0056\u0061\u006c\u0069\u0064\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0075\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c +confirmEmail.submitRequest=\u0056\u0061\u006c\u0069\u0064\u0065\u0072\u0020\u006c\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c 
+confirmEmail.submitRequest.success=\u0055\u006e\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0064\u0065\u0020\u0076\u0061\u006c\u0069\u0064\u0061\u0074\u0069\u006f\u006e\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0065\u006e\u0076\u006f\u0079\u00e9\u0020\u00e0\u0020\u007b\u0030\u007d\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u006e\u006f\u0074\u0065\u0072\u0020\u0071\u0075\u0065\u0020\u006c\u0065\u0020\u006c\u0069\u0065\u006e\u0020\u0064\u0065\u0020\u0076\u0061\u006c\u0069\u0064\u0061\u0074\u0069\u006f\u006e\u0020\u0065\u0078\u0070\u0069\u0072\u0065\u0072\u0061\u0020\u0061\u0070\u0072\u00e8\u0073\u0020\u0075\u006e\u0020\u0064\u00e9\u006c\u0061\u0069\u0020\u0064\u0065\u0020\u007b\u0031\u007d\u002e +confirmEmail.details.success=\u004c\u0027\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0065\u0073\u0074\u0020\u0062\u0069\u0065\u006e\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u0021 +confirmEmail.details.failure=\u004e\u006f\u0075\u0073\u0020\u006e\u0027\u0061\u0076\u006f\u006e\u0073\u0020\u0070\u0061\u0073\u0020\u00e9\u0074\u00e9\u0020\u0065\u006e\u0020\u006d\u0065\u0073\u0075\u0072\u0065\u0020\u0064\u0065\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u002e\u0020\u004d\u0065\u0072\u0063\u0069\u0020\u0064\u0065\u0020\u0063\u006c\u0069\u0071\u0075\u0065\u0072\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u0020\u0056\u0061\u006c\u0069\u0064\u0065\u0072\u0020\u006c\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u00bb\u0020\u0064\u0065\u0070\u0075\u0069\u0073\u0020\u006c\u0061\u0020\u0070\u0061\u0067\u0065\u0020\u0063\u006f\u006d\u0070\u006f\u0072\u0074\u0061\u006e\u0074\u0020\u006c\u0065\u0073\u0020\u0072\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074
\u0073\u0020\u0073\u0075\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u002e +confirmEmail.details.goToAccountPageButton=\u0041\u006c\u006c\u0065\u0072\u0020\u00e0\u0020\u006c\u0061\u0020\u0070\u0061\u0067\u0065\u0020\u0063\u006f\u006d\u0070\u006f\u0072\u0074\u0061\u006e\u0074\u0020\u006c\u0065\u0073\u0020\u0072\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0064\u0075\u0020\u0063\u006f\u006d\u0070\u0074\u0065 +confirmEmail.notVerified=\u004e\u006f\u006e\u0020\u0076\u0061\u006c\u0069\u0064\u00e9 +confirmEmail.verified=\u0056\u0061\u006c\u0069\u0064\u00e9 +#shib.xhtml= +shib.btn.convertAccount=\u0043\u006f\u006e\u0076\u0065\u0072\u0074\u0069\u0072\u0020\u006c\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065 +shib.btn.createAccount=\u0043\u0072\u00e9\u0065\u0072\u0020\u006c\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065 +shib.askToConvert=\u0044\u00e9\u0073\u0069\u0072\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0063\u006f\u006e\u0076\u0065\u0072\u0074\u0069\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0064\u0065\u0020\u0066\u0061\u00e7\u006f\u006e\u0020\u00e0\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0064\u006f\u0072\u00e9\u006e\u0061\u0076\u0061\u006e\u0074\u0020\u0076\u006f\u0073\u0020\u0069\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u006e\u0065\u0078\u0069\u006f\u006e\u0020\u0069\u006e\u0073\u0074\u0069\u0074\u0075\u0074\u0069\u006f\u006e\u006e\u0065\u006c\u006c\u0065\u0020\u0061\u0066\u0069\u006e\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u0072\u003f +# Bundle file editors, please note that "shib.welcomeExistingUserMessage" is used in a unit test= 
+shib.welcomeExistingUserMessage=\u0056\u006f\u0073\u0020\u0069\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u0069\u006e\u0073\u0074\u0069\u0074\u0075\u0074\u0069\u006f\u006e\u006e\u0065\u006c\u006c\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u007b\u0030\u007d\u0020\u0063\u006f\u006d\u0070\u0072\u0065\u006e\u006e\u0065\u006e\u0074\u0020\u0075\u006e\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0064\u00e9\u006a\u00e0\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0065\u0078\u0069\u0073\u0074\u0061\u006e\u0074\u002e\u0020\u0045\u006e\u0020\u0065\u006e\u0074\u0072\u0061\u006e\u0074\u0020\u0069\u0063\u0069\u002d\u0062\u0061\u0073\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0061\u0063\u0074\u0075\u0065\u006c\u002c\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0070\u006f\u0075\u0072\u0072\u0061\u0020\u00ea\u0074\u0072\u0065\u0020\u0063\u006f\u006e\u0076\u0065\u0072\u0074\u0069\u0020\u0064\u0065\u0020\u0066\u0061\u00e7\u006f\u006e\u0020\u00e0\u0020\u0063\u0065\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u0075\u0069\u0073\u0073\u0069\u0065\u007a\u0020\u0064\u006f\u0072\u00e9\u006e\u0061\u0076\u0061\u006e\u0074\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0069\u006e\u0073\u0074\u0069\u0074\u0075\u0074\u0069\u006f\u006e\u006e\u0065\u006c\u002e\u0020\u0053\u0075\u0069\u0074\u0065\u0020\u00e0\u002
0\u0063\u0065\u0074\u0074\u0065\u0020\u0063\u006f\u006e\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u002c\u0020\u0076\u006f\u0075\u0073\u0020\u006e\u0027\u0061\u0075\u0072\u0065\u007a\u0020\u0070\u006c\u0075\u0073\u0020\u0071\u0075\u0027\u00e0\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0069\u006e\u0073\u0074\u0069\u0074\u0075\u0074\u0069\u006f\u006e\u006e\u0065\u006c\u0020\u0070\u006f\u0075\u0072\u0020\u0070\u006f\u0075\u0076\u006f\u0069\u0072\u0020\u0076\u006f\u0075\u0073\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u0072\u002e\u0020 +# Bundle file editors, please note that "shib.welcomeExistingUserMessageDefaultInstitution" is used in a unit test= +shib.welcomeExistingUserMessageDefaultInstitution=\u0076\u006f\u0074\u0072\u0065\u0020\u00e9\u0074\u0061\u0062\u006c\u0069\u0073\u0073\u0065\u006d\u0065\u006e\u0074 +shib.dataverseUsername=\u004e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +shib.currentDataversePassword=\u004d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0061\u0063\u0074\u0075\u0065\u006c +shib.accountInformation=\u0052\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065 
+shib.offerToCreateNewAccount=\u0043\u0065\u0074\u0074\u0065\u0020\u0069\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0020\u0065\u0073\u0074\u0020\u0066\u006f\u0075\u0072\u006e\u0069\u0065\u0020\u0070\u0061\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u00e9\u0074\u0061\u0062\u006c\u0069\u0073\u0073\u0065\u006d\u0065\u006e\u0074\u0020\u0065\u0074\u0020\u0073\u0065\u0072\u0061\u0020\u0065\u006d\u0070\u006c\u006f\u0079\u00e9\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0072\u00e9\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +shib.passwordRejected=\u003c\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0045\u0072\u0072\u0065\u0075\u0072\u0020\u0064\u0065\u0020\u0076\u0061\u006c\u0069\u0064\u0061\u0074\u0069\u006f\u006e\u003c\u002f\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0020\u002d\u0020\u0056\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0075\u006e\u0069\u0071\u0075\u0065\u006d\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0063\u006f\u006e\u0076\u0065\u0072\u0074\u0069\u0020\u0073\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u0065\u007a\u0020\u006c\u0065\u0020\u0062\u006f\u006e\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0065\u0078\u0069\u0073\u0074\u0061\u006e\u0074\u002e +# oauth2/firstLogin.xhtml=\u0023\u0020\u006f\u0061\u0075\u0074\u0068\u0032\u002f\u0066\u0069\u0072\u0073\u0074\u004c\u006f\u0067\u0069\u006e\u002e\u0078\u0068\u0074\u006d\u006c +oauth2.btn.convertAccount=\u0043\u006f\u006e\u0076\u0065\u0072\u0074\u0069\u0072\u0020\u006c\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0065\u0078\u0069\u0073\u0074\u0061\u006e\u0074 
+oauth2.btn.createAccount=\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u0063\u006f\u006d\u0070\u0074\u0065 +oauth2.askToConvert=\u0044\u00e9\u0073\u0069\u0072\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0063\u006f\u006e\u0076\u0065\u0072\u0074\u0069\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0064\u0065\u0020\u0066\u0061\u00e7\u006f\u006e\u0020\u00e0\u0020\u0074\u006f\u0075\u006a\u006f\u0075\u0072\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0069\u006e\u0073\u0074\u0069\u0074\u0075\u0074\u0069\u006f\u006e\u006e\u0065\u006c\u003f\u0020 +oauth2.welcomeExistingUserMessage=\u0056\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0069\u006e\u0073\u0074\u0069\u0074\u0075\u0074\u0069\u006f\u006e\u006e\u0065\u006c\u0020\u007b\u0030\u007d\u0020\u0063\u006f\u0072\u0072\u0065\u0073\u0070\u006f\u006e\u0064\u0020\u00e0\u0020\u0075\u006e\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0064\u00e9\u006a\u00e0\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0020\u0045\u006e\u0020\u0065\u006e\u0074\u0072\u0061\u006e\u0074\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0061\u0063\u0074\u0075\u0065\u006c\u0020\u0064\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u006f\u0075\u0073\u002c\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0065\u
0078\u0069\u0073\u0074\u0061\u006e\u0074\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0063\u006f\u006e\u0076\u0065\u0072\u0074\u0069\u0020\u0070\u006f\u0075\u0072\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0069\u006e\u0073\u0074\u0069\u0074\u0075\u0074\u0069\u006f\u006e\u006e\u0065\u006c\u0020\u00e0\u0020\u006c\u0061\u0020\u0070\u006c\u0061\u0063\u0065\u002e\u0020\u0053\u0075\u0069\u0074\u0065\u0020\u00e0\u0020\u006c\u0061\u0020\u0063\u006f\u006e\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0076\u006f\u0075\u0073\u0020\u006e\u0027\u0061\u0075\u0072\u0065\u007a\u0020\u0070\u006c\u0075\u0073\u0020\u0071\u0075\u0027\u00e0\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0069\u006e\u0073\u0074\u0069\u0074\u0075\u0074\u0069\u006f\u006e\u006e\u0065\u006c\u002e +oauth2.welcomeExistingUserMessageDefaultInstitution=\u0076\u006f\u0074\u0072\u0065\u0020\u00e9\u0074\u0061\u0062\u006c\u0069\u0073\u0073\u0065\u006d\u0065\u006e\u0074 +oauth2.dataverseUsername=\u004e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +oauth2.currentDataversePassword=\u004d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0061\u0063\u0074\u0075\u0065\u006c +oauth2.chooseUsername=\u004e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020 
+oauth2.passwordRejected=\u003c\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0045\u0072\u0072\u0065\u0075\u0072\u0020\u0064\u0065\u0020\u0076\u0061\u006c\u0069\u0064\u0061\u0074\u0069\u006f\u006e\u003c\u002f\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0020\u002d\u0020\u004e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u006f\u0075\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0069\u006e\u0063\u006f\u0072\u0072\u0065\u0063\u0074\u002e +# oauth2.newAccount.title=\u0043\u0072\u00e9\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065 +oauth2.newAccount.welcomeWithName=\u0042\u0069\u0065\u006e\u0076\u0065\u006e\u0075\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u007b\u0030\u007d +oauth2.newAccount.welcomeNoName=\u0042\u0069\u0065\u006e\u0076\u0065\u006e\u0075\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +# oauth2.newAccount.email=\u0043\u006f\u0075\u0072\u0072\u0069\u0065\u006c +# oauth2.newAccount.email.tip=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0020\u0063\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0070\u006f\u0075\u0072\u0020\u0076\u006f\u0075\u0073\u0020\u0069\u006e\u0066\u006f\u0072\u006d\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0070\u0072\u006f\u0062\u006c\u00e8\u006d\u0065\u0073\u0020\u006c\u0069\u00e9\u0073\u0020\u00e0\u0020\u0076\u006f\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +oauth2.newAccount.suggestedEmails=\u0041\u0064\u0072\u0065\u0073\u0073\u0065\u0073\u0020\u0064\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0073\u0075\u0067\u0067\u00e9\u0072\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a 
+oauth2.newAccount.username=\u004e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072 +oauth2.newAccount.username.tip=\u0043\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0073\u0065\u0072\u0061\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0075\u006e\u0069\u0071\u0075\u0065\u0020\u0065\u006e\u0020\u0074\u0061\u006e\u0074\u0020\u0071\u0075\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +oauth2.newAccount.explanation=\u0043\u0065\u0074\u0074\u0065\u0020\u0069\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0020\u0065\u0073\u0074\u0020\u0066\u006f\u0075\u0072\u006e\u0069\u0065\u0020\u0070\u0061\u0072\u0020\u007b\u0030\u007d\u0020\u0065\u0074\u0020\u0073\u0065\u0072\u0061\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0072\u00e9\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u007b\u0031\u007d\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0076\u006f\u0075\u0073\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u0072\u0020\u00e0\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u002c\u0020\u0076\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0072\u0065\u007a\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0027\u006f\u0070\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u006e\u0065\u0078\u0069\u006f\u006e\u0020\u007b\u0030\u007d\u002e 
+oauth2.newAccount.suggestConvertInsteadOfCreate=\u0053\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u0064\u00e9\u006a\u00e0\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u007b\u0030\u007d\u002c\u0020\u0076\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0072\u0065\u007a\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u006f\u0061\u0075\u0074\u0068\u0032\u002f\u0063\u006f\u006e\u0076\u0065\u0072\u0074\u002e\u0078\u0068\u0074\u006d\u006c\u0022\u003e\u0020\u0063\u006f\u006e\u0076\u0065\u0072\u0074\u0069\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u003c\u002f\u0061\u003e\u002e +# oauth2.newAccount.tabs.convertAccount=\u0043\u006f\u006e\u0076\u0065\u0072\u0074\u0069\u0072\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0065\u0078\u0069\u0073\u0074\u0061\u006e\u0074 +oauth2.newAccount.buttons.convertNewAccount=\u0043\u006f\u006e\u0076\u0065\u0072\u0074\u0069\u0072\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065 +oauth2.newAccount.emailTaken=\u0041\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0064\u00e9\u006a\u00e0\u0020\u0070\u0072\u0069\u0073\u0065\u002e\u0020\u0045\u006e\u0076\u0069\u0073\u0061\u0067\u0065\u007a\u0020\u0070\u006c\u0075\u0074\u00f4\u0074\u0020\u0064\u0065\u0020\u0064\u0065\u0020\u0066\u0075\u0073\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0063\u006f\u0072\u0072\u0065\u0073\u0070\u006f\u006e\u0064\u0061\u006e\u0074\u002e +oauth2.newAccount.emailOk=\u0041\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0076\u0061\u006c\u0069\u0064\u00e9\u0065\u002e +oauth2.newAccount.emailInvalid=\u0041\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u006e\u006f\u006e\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u002e +# 
oauth2.newAccount.usernameTaken=\u004e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0064\u00e9\u006a\u00e0\u0020\u0070\u0072\u0069\u0073\u002e +# oauth2.newAccount.usernameOk=\u004e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0076\u0061\u006c\u0069\u0064\u00e9\u002e +# oauth2/convert.xhtml=\u0023\u0020\u006f\u0061\u0075\u0074\u0068\u0032\u002f\u0063\u006f\u006e\u0076\u0065\u0072\u0074\u002e\u0078\u0068\u0074\u006d\u006c +# oauth2.convertAccount.title=\u0043\u006f\u006e\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065 +oauth2.convertAccount.explanation=\u0045\u006e\u0074\u0072\u0065\u007a\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u007b\u0030\u007d\u0020\u006f\u0075\u0020\u0065\u006e\u0063\u006f\u0072\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0065\u0074\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u006f\u006e\u0076\u0065\u0072\u0074\u0069\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u00e0\u0020\u006c\u0027\u006f\u0070\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u006e\u0065\u0078\u0069\u006f\u006e\u0020\u007b\u0031\u007d\u002e\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0032\u007d\u002f\u007b\u0033\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0061\u0063\u0063\u006f\u0075\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0045\u006e\u0020\u0061\u0070\u0070\u0072\u0065\u006e\u0064\u0072\u0065
\u0020\u0064\u0061\u0076\u0061\u006e\u0074\u0061\u0067\u0065\u003c\u002f\u0061\u003e\u0020\u00e0\u0020\u0070\u0072\u006f\u0070\u006f\u0073\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0063\u006f\u006e\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u002e +oauth2.convertAccount.username=\u004e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0065\u0078\u0069\u0073\u0074\u0061\u006e\u0074 +oauth2.convertAccount.password=\u004d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065 +oauth2.convertAccount.authenticationFailed=\u0041\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0020\u00e9\u0063\u0068\u006f\u0075\u00e9\u0065\u0020\u002d\u0020\u004e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u006f\u0075\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0069\u006e\u0063\u006f\u0072\u0072\u0065\u0063\u0074\u002e +oauth2.convertAccount.buttonTitle=\u0043\u006f\u006e\u0076\u0065\u0072\u0074\u0069\u0072\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065 +oauth2.convertAccount.success=\u0056\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0065\u0073\u0074\u0020\u006d\u0061\u0069\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u0061\u0073\u0073\u006f\u0063\u0069\u00e9\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u007b\u0030\u007d\u002e +# oauth2/callback.xhtml=\u0023\u0020\u006f\u0061\u0075\u0074\u0068\u0032\u002f\u0063\u0061\u006c\u006c\u0062\u0061\u0063\u006b\u002e\u0078\u0068\u0074\u006d\u006c +oauth2.callback.page.title=\u0052\u0061\u0070\u0070\u0065\u006c\u0020\u004f\u0041\u0075\u0074\u0068 
+oauth2.callback.message=\u003c\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0045\u0072\u0072\u0065\u0075\u0072\u0020\u004f\u0041\u0075\u0074\u0068\u0032\u003c\u002f\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0020\u002d\u0020\u0044\u00e9\u0073\u006f\u006c\u00e9\u002c\u0020\u006c\u0065\u0020\u0070\u0072\u006f\u0063\u0065\u0073\u0073\u0075\u0073\u0020\u0064\u0027\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u0072\u00e9\u0075\u0073\u0073\u0069\u002e +# tab on dataverseuser.xhtml= +apitoken.title=\u004a\u0065\u0074\u006f\u006e\u0020\u0041\u0050\u0049 +apitoken.message=\u0056\u006f\u0074\u0072\u0065\u0020\u006a\u0065\u0074\u006f\u006e\u0020\u0041\u0050\u0049\u0020\u0073\u0065\u0072\u0061\u0020\u0061\u0066\u0066\u0069\u0063\u0068\u00e9\u0020\u0063\u0069\u002d\u0061\u0070\u0072\u00e8\u0073\u0020\u0075\u006e\u0065\u0020\u0066\u006f\u0069\u0073\u0020\u0071\u0075\u0027\u0069\u006c\u0020\u0061\u0075\u0072\u0061\u0020\u00e9\u0074\u00e9\u0020\u0063\u0072\u00e9\u00e9\u002e\u0020\u0043\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006e\u006f\u0074\u0072\u0065\u0020\u007b\u0030\u007d\u0067\u0075\u0069\u0064\u0065\u0020\u0041\u0050\u0049\u007b\u0031\u007d\u0020\u0070\u006f\u0075\u0072\u0020\u006f\u0062\u0074\u0065\u006e\u0069\u0072\u0020\u0070\u006c\u0075\u0073\u0020\u0064\u0065\u0020\u0064\u00e9\u0074\u0061\u0069\u006c\u0073\u0020\u0073\u0075\u0072\u0020\u0063\u006f\u006d\u006d\u0065\u006e\u0074\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u006a\u0065\u0074\u006f\u006e\u0020\u0041\u0050\u0049\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0065\u0073\u0020\u0041\u0050\u0049\u0020\u0064\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020 
+apitoken.notFound=\u004c\u0065\u0020\u006a\u0065\u0074\u006f\u006e\u0020\u0041\u0050\u0049\u0020\u0070\u006f\u0075\u0072\u0020\u007b\u0030\u007d\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u00e9\u0074\u00e9\u0020\u0063\u0072\u00e9\u00e9\u002e +apitoken.generateBtn=\u0043\u0072\u00e9\u0065\u0072\u0020\u006c\u0065\u0020\u006a\u0065\u0074\u006f\u006e +apitoken.regenerateBtn=\u0043\u0072\u00e9\u0065\u0072\u0020\u0064\u0065\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u006c\u0065\u0020\u006a\u0065\u0074\u006f\u006e +#dashboard.xhtml= +dashboard.title=\u0054\u0061\u0062\u006c\u0065\u0061\u0075\u0020\u0064\u0065\u0020\u0062\u006f\u0072\u0064 +dashboard.card.harvestingclients.header=\u0043\u006c\u0069\u0065\u006e\u0074\u0073\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065 +dashboard.card.harvestingclients.btn.manage=\u0047\u0065\u0073\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0073\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0073 +dashboard.card.harvestingclients.clients=\u007b\u0030\u002c\u0020\u0063\u0068\u006f\u0069\u0063\u0065\u002c\u0020\u0030\u0023\u0043\u006c\u0069\u0065\u006e\u0074\u0073\u007c\u0031\u0023\u0043\u006c\u0069\u0065\u006e\u0074\u007c\u0032\u0023\u0043\u006c\u0069\u0065\u006e\u0074\u0073\u007d +dashboard.card.harvestingclients.datasets=\u007b\u0030\u002c\u0020\u0063\u0068\u006f\u0069\u0063\u0065\u002c\u0020\u0030\u0023\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u007c\u0031\u0023\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0074\u007c\u0032\u0023\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u007d +dashboard.card.harvestingserver.header=\u0053\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020 
+dashboard.card.harvestingserver.enabled=\u0053\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u004f\u0041\u0049\u0020\u0061\u0063\u0074\u0069\u0076\u00e9 +dashboard.card.harvestingserver.disabled=\u0053\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u004f\u0041\u0049\u0020\u0064\u00e9\u0073\u0061\u0063\u0074\u0069\u0076\u00e9 +dashboard.card.harvestingserver.status=\u0053\u0074\u0061\u0074\u0075\u0074 +dashboard.card.harvestingserver.sets=\u007b\u0030\u002c\u0020\u0063\u0068\u006f\u0069\u0063\u0065\u002c\u0020\u0030\u0023\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u007c\u0031\u0023\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u007c\u0032\u0023\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u007d +dashboard.card.harvestingserver.btn.manage=\u0047\u0065\u0073\u0074\u0069\u006f\u006e\u0020\u0064\u0075\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072 +dashboard.card.metadataexport.header=\u0045\u0078\u0070\u006f\u0072\u0074\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 
+dashboard.card.metadataexport.message=\u004c\u0027\u0065\u0078\u0070\u006f\u0072\u0074\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0020\u0071\u0075\u0065\u0020\u0076\u0069\u0061\u0020\u006c\u0027\u0041\u0050\u0049\u0020\u0064\u0065\u0020\u007b\u0030\u007d\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0065\u006e\u0020\u0073\u0061\u0076\u006f\u0069\u0072\u0020\u0064\u0061\u0076\u0061\u006e\u0074\u0061\u0067\u0065\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0065\u0020\u007b\u0031\u007d\u0047\u0075\u0069\u0064\u0065\u0020\u0041\u0050\u0049\u007b\u0032\u007d\u0020\u0064\u0075\u0020\u007b\u0030\u007d\u002e +#harvestclients.xhtml= +harvestclients.title=\u0041\u0064\u006d\u0069\u006e\u0069\u0073\u0074\u0072\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0075\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0064\u0065\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0073 
+harvestclients.toptip=\u002d\u0020\u004c\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0070\u006c\u0061\u006e\u0069\u0066\u0069\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u0073\u0027\u0065\u0078\u00e9\u0063\u0075\u0074\u0065\u0072\u0020\u0073\u0065\u006c\u006f\u006e\u0020\u0075\u006e\u0020\u0068\u006f\u0072\u0061\u0069\u0072\u0065\u0020\u0073\u0070\u00e9\u0063\u0069\u0066\u0069\u0071\u0075\u0065\u0020\u006f\u0075\u0020\u00e0\u0020\u006c\u0061\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u002e\u0020\u004c\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u006c\u0061\u006e\u0063\u00e9\u0020\u0069\u0063\u0069\u0020\u006f\u0075\u0020\u00e0\u0020\u0070\u0061\u0072\u0074\u0069\u0072\u0020\u0064\u0065\u0020\u006c\u0027\u0041\u0050\u0049\u0020\u0052\u0045\u0053\u0054\u002e +harvestclients.noClients.label=\u0041\u0075\u0063\u0075\u006e\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0063\u006f\u006e\u0066\u0069\u0067\u0075\u0072\u00e9\u002e +harvestclients.noClients.why.header=\u0051\u0075\u0027\u0065\u0073\u0074\u002d\u0063\u0065\u0020\u0071\u0075\u0065\u0020\u006c\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u003f 
+harvestclients.noClients.why.reason1=\u004c\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0063\u006f\u006e\u0073\u0069\u0073\u0074\u0065\u0020\u00e0\u0020\u00e9\u0063\u0068\u0061\u006e\u0067\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0076\u0065\u0063\u0020\u0064\u0027\u0061\u0075\u0074\u0072\u0065\u0073\u0020\u0064\u00e9\u0070\u00f4\u0074\u0073\u002e\u0020\u0045\u006e\u0020\u0074\u0061\u006e\u0074\u0020\u0071\u0075\u0065\u0020\u0020\u003c\u0062\u003e\u003c\u0069\u003e\u0063\u006c\u0069\u0065\u006e\u0074\u003c\u002f\u0069\u003e\u003c\u002f\u0062\u003e\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u002c\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0072\u0065\u0063\u0075\u0065\u0069\u006c\u006c\u0069\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0065\u0020\u006e\u006f\u0074\u0069\u0063\u0065\u0073\u0020\u0070\u0072\u006f\u0076\u0065\u006e\u0061\u006e\u0074\u0020\u0064\u0065\u0020\u0073\u006f\u0075\u0072\u0063\u0065\u0073\u0020\u0064\u0069\u0073\u0074\u0061\u006e\u0074\u0065\u0073\u002e\u0020\u0049\u006c\u0020\u0070\u0065\u0075\u0074\u0020\u0073\u0027\u0061\u0067\u0069\u0072\u0020\u0064\u0027\u0061\u0075\u0074\u0072\u0065\u0073\u0020\u0069\u006e\u0073\u0074\u0061\u006e\u0063\u0065\u0073\u0020\u0064\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u006f\u0075\u0020\u0065\u006e\u0063\u006f\u0072\u0065\u0020\u0064\u0065\u0020\u0064\u00e9\u0070\u00f4\u0074\u0073\u0020\u0063\u006f\u006d\u0070\u0061\u0074\u0069\u0062\u006c\u0065\u0073\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0065\u0020\u0070\u0072\u006f\u0074\u006f\u0063\u006f\u006c\u0065\u0020\u004f\u0041\u0049\u002d\u0050\u004d\u0048\u002c\u0020\u0073\u006f\u0069\u0074\u0020\u006c
\u0065\u0020\u0070\u0072\u006f\u0074\u006f\u0063\u006f\u006c\u0065\u0020\u0073\u0074\u0061\u006e\u0064\u0061\u0072\u0064\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u002e\u0020 +harvestclients.noClients.why.reason2=\u004c\u0065\u0073\u0020\u006e\u006f\u0074\u0069\u0063\u0065\u0073\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0073\u006f\u006e\u0074\u0020\u0069\u006e\u0074\u0065\u0072\u0072\u006f\u0067\u0065\u0061\u0062\u006c\u0065\u0073\u0020\u0070\u0061\u0072\u0020\u006c\u0065\u0073\u0020\u0075\u0073\u0061\u0067\u0065\u0072\u0073\u002e\u0020\u0045\u006e\u0020\u0063\u006c\u0069\u0071\u0075\u0061\u006e\u0074\u0020\u0073\u0075\u0072\u0020\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0061\u0020\u006c\u0069\u0073\u0074\u0065\u0020\u0064\u0065\u0073\u0020\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074\u0073\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u002c\u0020\u006c\u0027\u0075\u0073\u0061\u0067\u0065\u0072\u0020\u0070\u0065\u0075\u0074\u0020\u0061\u0063\u0063\u00e9\u0064\u0065\u0072\u0020\u0061\u0075\u0020\u0064\u00e9\u0070\u00f4\u0074\u0020\u0064\u0027\u006f\u0072\u0069\u0067\u0069\u006e\u0065\u002e\u0020\u004c\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u00e9\u0073\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u0063\u0065\u0070\u0065\u006e\u0064\u0061\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u00ea\u0074\u0072\u0065\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u00e9\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u007
6\u006f\u0074\u0072\u0065\u0020\u0069\u006e\u0073\u0074\u0061\u006e\u0063\u0065\u0020\u0064\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0020 +harvestclients.noClients.how.header=\u0043\u006f\u006d\u006d\u0065\u006e\u0074\u0020\u0065\u0066\u0066\u0065\u0063\u0074\u0075\u0065\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065 +harvestclients.noClients.how.tip1=\u0041\u0066\u0069\u006e\u0020\u0064\u0065\u0020\u0070\u006f\u0075\u0076\u006f\u0069\u0072\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0075\u006e\u0020\u003c\u0069\u003e\u0063\u006c\u0069\u0065\u006e\u0074\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u003c\u002f\u0069\u003e\u0020\u0064\u006f\u0069\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0064\u00e9\u0066\u0069\u006e\u0069\u0020\u0065\u0074\u0020\u0070\u0061\u0072\u0061\u006d\u00e9\u0074\u0072\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0068\u0061\u0063\u0075\u006e\u0020\u0064\u0065\u0073\u0020\u0064\u00e9\u0070\u00f4\u0074\u0073\u0020\u0064\u0069\u0073\u0074\u0061\u006e\u0074\u0073\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u006e\u006f\u0074\u0065\u0072\u0020\u0071\u0075\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0064\u00e9\u0066\u0069\u006e\u0069\u0072\u0020\u0075\u006e\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u002c\u0020\u0076\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0072\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006c\u006f\u0063\u0061\u006c\u0020\u0064\u00e9\u006a\u00e0\u0020\u0065\u0078\u0069\u0073\u0074\u0061\u006e\u0074\u002c\u0020\u006c\u0065\u0071\u0075\u0065\u006c\u0020\u0068\u00e9\u0062\u0065\u0072\u0067\u0065\u0072\u0061\u0020\u006c\u0065\u007
3\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u00e9\u0073\u002e +harvestclients.noClients.how.tip2=\u004c\u0065\u0073\u0020\u006e\u006f\u0074\u0069\u0063\u0065\u0073\u0020\u0072\u00e9\u0063\u006f\u006c\u0074\u00e9\u0065\u0073\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0073\u0079\u006e\u0063\u0068\u0072\u006f\u006e\u0069\u0073\u00e9\u0065\u0073\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0065\u0020\u0064\u00e9\u0070\u00f4\u0074\u0020\u0064\u0027\u006f\u0072\u0069\u0067\u0069\u006e\u0065\u0020\u00e0\u0020\u006c\u0027\u0061\u0069\u0064\u0065\u0020\u0064\u0065\u0020\u006d\u0069\u0073\u0065\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u0020\u0069\u006e\u0063\u0072\u00e9\u006d\u0065\u006e\u0074\u0069\u0065\u006c\u006c\u0065\u0073\u0020\u0070\u0072\u006f\u0067\u0072\u0061\u006d\u006d\u00e9\u0065\u0073\u002c\u0020\u0070\u0061\u0072\u0020\u0065\u0078\u0065\u006d\u0070\u006c\u0065\u002c\u0020\u0071\u0075\u006f\u0074\u0069\u0064\u0069\u0065\u006e\u006e\u0065\u0073\u0020\u006f\u0075\u0020\u0068\u0065\u0062\u0064\u006f\u006d\u0061\u0064\u0061\u0069\u0072\u0065\u0073\u002e\u0020\u0041\u006c\u0074\u0065\u0072\u006e\u0061\u0074\u0069\u0076\u0065\u006d\u0065\u006e\u0074\u002c\u0020\u006c\u0065\u0073\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0073\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0065\u0078\u00e9\u0063\u0075\u0074\u00e9\u0073\u0020\u00e0\u0020\u006c\u0061\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u002c\u0020\u00e0\u0020\u0070\u0061\u0072\u0074\u0069\u0072\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0070\u0061\u0067\u0065\u0020\u006f\u0075\u0020\u0076\u0069\u0061\u0020\u006c\u0027\u0041\u0050\u0049\u0020\u0052\u0045\u0053\u0054\u002e 
+harvestclients.noClients.getStarted=\u0050\u006f\u0075\u0072\u0020\u0063\u006f\u006d\u006d\u0065\u006e\u0063\u0065\u0072\u002c\u0020\u0063\u006c\u0069\u0071\u0075\u0065\u007a\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u0020\u0041\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0075\u006e\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0020\u00bb\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u0075\u0073\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0065\u006e\u0020\u0073\u0061\u0076\u006f\u0069\u0072\u0020\u0064\u0061\u0076\u0061\u006e\u0074\u0061\u0067\u0065\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0061\u0020\u0073\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0069\u006e\u0064\u0065\u0078\u002e\u0068\u0074\u006d\u006c\u0023\u0069\u006e\u0064\u0065\u0078\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0048\u0061\u0072\u0076\u0065\u0073\u0074\u0069\u006e\u0067\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u003c\u002f\u0061\u003e\u0020\u0064\u0075\u0020\u0067\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e +harvestclients.btn.add=\u0041\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0075\u006e\u0020\u0063\u006c\u0069\u0065\u006e\u0074 +harvestclients.tab.header.name=\u0041\u006c\u0069\u0061\u0073 +harvestclients.tab.header.url=\u0041\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0055\u0052\u004c 
+harvestclients.tab.header.lastrun=\u0044\u0065\u0072\u006e\u0069\u00e8\u0072\u0065\u0020\u0065\u0078\u00e9\u0063\u0075\u0074\u0069\u006f\u006e +harvestclients.tab.header.lastresults=\u0044\u0065\u0072\u006e\u0069\u0065\u0072\u0073\u0020\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074\u0073 +harvestclients.tab.header.action=\u004f\u0070\u00e9\u0072\u0061\u0074\u0069\u006f\u006e\u0073 +harvestclients.tab.header.action.btn.run=\u004c\u0061\u006e\u0063\u0065\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065 +harvestclients.tab.header.action.btn.edit=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072 +harvestclients.tab.header.action.btn.delete=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072 +harvestclients.tab.header.action.btn.delete.dialog.header=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0065\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065 
+harvestclients.tab.header.action.btn.delete.dialog.warning=\u0056\u006f\u0075\u006c\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0076\u0072\u0061\u0069\u006d\u0065\u006e\u0074\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0065\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0022\u007b\u0030\u007d\u0022\u003f\u0020\u004c\u0061\u0020\u0073\u0075\u0070\u0070\u0072\u0065\u0073\u0073\u0069\u006f\u006e\u0020\u0064\u0075\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0061\u0020\u0074\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u006a\u0065\u0075\u0078\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0072\u00e9\u0063\u006f\u006c\u0074\u00e9\u0073\u0020\u00e0\u0020\u0070\u0061\u0072\u0074\u0069\u0072\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u0064\u0069\u0073\u0074\u0061\u006e\u0074\u002e +harvestclients.tab.header.action.btn.delete.dialog.tip=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u006e\u006f\u0074\u0065\u0072\u0020\u0071\u0075\u0065\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u006f\u0070\u00e9\u0072\u0061\u0074\u0069\u006f\u006e\u0020\u0070\u0065\u0075\u0074\u0020\u0070\u0072\u0065\u006e\u0064\u0072\u0065\u0020\u0075\u006e\u0020\u0063\u0065\u0072\u0074\u0061\u0069\u006e\u0020\u0074\u0065\u006d\u0070\u0073\u0020\u00e0\u0020\u0065\u0066\u0066\u0065\u0063\u0074\u0075\u0065\u0072\u0020\u0065\u006e\u0020\u0066\u006f\u006e\u0063\u0074\u0069\u006f\u006e\u0020\u0064\u0075\u0020\u006e\u006f\u006d\u0062\u0072\u0065\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0072\u00e9\u0063\u006f\u006c\u0074\u00e9\u0073\u002e 
+harvestclients.tab.header.action.delete.infomessage=\u004c\u0061\u0020\u0073\u0075\u0070\u0070\u0072\u0065\u0073\u0073\u0069\u006f\u006e\u0020\u0064\u0075\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0065\u0073\u0074\u0020\u006c\u0061\u006e\u0063\u00e9\u0065\u002e\u0020\u004e\u006f\u0074\u0065\u007a\u0020\u0071\u0075\u0065\u0020\u0063\u0065\u006c\u0061\u0020\u0070\u0065\u0075\u0074\u0020\u0070\u0072\u0065\u006e\u0064\u0072\u0065\u0020\u0075\u006e\u0020\u0063\u0065\u0072\u0074\u0061\u0069\u006e\u0020\u0074\u0065\u006d\u0070\u0073\u0020\u0065\u006e\u0020\u0066\u006f\u006e\u0063\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0071\u0075\u0061\u006e\u0074\u0069\u0074\u00e9\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0075\u0020\u0072\u00e9\u0063\u006f\u006c\u0074\u00e9\u002e +harvestclients.actions.runharvest.success=\u004c\u0061\u006e\u0063\u0065\u006d\u0065\u006e\u0074\u0020\u0072\u00e9\u0075\u0073\u0073\u0069\u0020\u0064\u0027\u0075\u006e\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0061\u0073\u0079\u006e\u0063\u0068\u0072\u006f\u006e\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0020\u0022\u007b\u0030\u007d\u0022\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0072\u0065\u0063\u0068\u0061\u0072\u0067\u0065\u0072\u0020\u006c\u0061\u0020\u0070\u0061\u0067\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0076\u00e9\u0072\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074\u0073\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0072\u00e9\u0063\u006f\u006c\u0074\u0065\u002e 
+harvestclients.newClientDialog.step1=\u00c9\u0074\u0061\u0070\u0065\u0020\u0031\u0020\u0064\u0065\u0020\u0034\u0020\u002d\u0020\u0052\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0061\u0075\u0020\u0073\u0075\u006a\u0065\u0074\u0020\u0064\u0075\u0020\u0063\u006c\u0069\u0065\u006e\u0074 +harvestclients.newClientDialog.title.new=\u0044\u00e9\u0066\u0069\u006e\u0069\u0072\u0020\u0075\u006e\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065 +harvestclients.newClientDialog.help=\u0043\u006f\u006e\u0066\u0069\u0067\u0075\u0072\u0065\u0072\u0020\u0075\u006e\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0020\u0070\u006f\u0075\u0072\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0065\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0075\u0020\u0064\u0027\u0075\u006e\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u0064\u0069\u0073\u0074\u0061\u006e\u0074 +harvestclients.newClientDialog.nickname=\u0041\u006c\u0069\u0061\u0073 +harvestclients.newClientDialog.nickname.helptext=\u0044\u006f\u0069\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u006f\u0073\u00e9\u0020\u0064\u0065\u0020\u006c\u0065\u0074\u0074\u0072\u0065\u0073\u002c\u0020\u0064\u0065\u0020\u0063\u0068\u0069\u0066\u0066\u0072\u0065\u0073\u002c\u0020\u0064\u0065\u0020\u0074\u0072\u0061\u0069\u0074\u0073\u0020\u0064\u0065\u0020\u0073\u006f\u0075\u006c\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0020\u0028\u005f\u0029\u0020\u0065\u0074\u0020\u0064\u0065\u0020\u0074\u0069\u0072\u0065\u0074\u0073\u0020\u0028\u002d\u0029\u002e +harvestclients.newClientDialog.nickname.required=\u004c\u0027\u0061\u006c\u0069\u0061\u0073\u0020\u0064\u0075\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0070\u0061\u0073\u0020\u00ea\u0074\u0072\u0065\u0020\u0076\u0069\u0064\u0065\u0021 
+harvestclients.newClientDialog.nickname.invalid=\u004c\u0027\u0061\u006c\u0069\u0061\u0073\u0020\u0064\u0075\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0069\u0072\u0020\u0071\u0075\u0065\u0020\u0064\u0065\u0073\u0020\u006c\u0065\u0074\u0074\u0072\u0065\u0073\u002c\u0020\u0064\u0065\u0073\u0020\u0063\u0068\u0069\u0066\u0066\u0072\u0065\u0073\u002c\u0020\u0064\u0065\u0073\u0020\u0074\u0072\u0061\u0069\u0074\u0073\u0020\u0064\u0065\u0020\u0073\u006f\u0075\u006c\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0020\u0028\u005f\u0029\u002c\u0020\u0064\u0065\u0073\u0020\u0074\u0069\u0072\u0065\u0074\u0073\u0020\u0028\u002d\u0029\u0020\u002c\u0020\u0065\u0074\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0065\u0078\u0063\u00e9\u0064\u0065\u0072\u0020\u0033\u0030\u0020\u0063\u0061\u0072\u0061\u0063\u0074\u00e8\u0072\u0065\u0073\u002e +harvestclients.newClientDialog.nickname.alreadyused=\u0043\u0065\u0074\u0020\u0061\u006c\u0069\u0061\u0073\u0020\u0065\u0073\u0074\u0020\u0064\u00e9\u006a\u00e0\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u002e +harvestclients.newClientDialog.type=\u0050\u0072\u006f\u0074\u006f\u0063\u006f\u006c\u0065\u0020\u0064\u0075\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072 +harvestclients.newClientDialog.type.helptext=\u0053\u0065\u0075\u006c\u0020\u006c\u0065\u0020\u0070\u0072\u006f\u0074\u006f\u0063\u006f\u006c\u0065\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u004f\u0041\u0049\u0020\u0065\u0073\u0074\u0020\u0061\u0063\u0074\u0075\u0065\u006c\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u0070\u0072\u0069\u0073\u0020\u0065\u006e\u0020\u0063\u0068\u0061\u0072\u0067\u0065\u002e +harvestclients.newClientDialog.type.OAI=\u004f\u0041\u0049 +harvestclients.newClientDialog.type.Nesstar=\u004e\u0065\u0073\u0073\u0074\u0061\u0072 +harvestclients.newClientDialog.url=\u0055\u0052\u004c\u0020\u0064\u0075\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072 
+harvestclients.newClientDialog.url.tip=\u0055\u0052\u004c\u0020\u0064\u0027\u0075\u006e\u0065\u0020\u0073\u006f\u0075\u0072\u0063\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u00e9\u0065\u002e
+harvestclients.newClientDialog.url.watermark=\u0055\u0052\u004c\u0020\u0064\u0075\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u00e9\u0020\u0064\u0069\u0073\u0074\u0061\u006e\u0074\u002c\u0020\u0068\u0074\u0074\u0070\u003a\u002f\u002f\u2026
+harvestclients.newClientDialog.url.helptext.notvalidated=\u0055\u0052\u004c\u0020\u0064\u0027\u0075\u006e\u0065\u0020\u0073\u006f\u0075\u0072\u0063\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u00e9\u0065\u002e\u0020\u0055\u006e\u0065\u0020\u0066\u006f\u0069\u0073\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u0020\u0053\u0075\u0069\u0076\u0061\u006e\u0074\u0020\u00bb\u0020\u0063\u006c\u0069\u0071\u0075\u00e9\u002c\u0020\u006e\u006f\u0075\u0073\u0020\u0074\u0065\u006e\u0074\u0065\u0072\u006f\u006e\u0073\u0020\u0064\u0027\u00e9\u0074\u0061\u0062\u006c\u0069\u0072\u0020\u0075\u006e\u0065\u0020\u0063\u006f\u006e\u006e\u0065\u0078\u0069\u006f\u006e\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0065\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u0061\u0066\u0069\u006e\u0020\u0064\u0065\u0020\u0076\u00e9\u0072\u0069\u0066\u0069\u0065\u0072\u0020\u0071\u0075\u0027\u0069\u006c\u0020\u0066\u006f\u006e\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0020\u0062\u0069\u0065\u006e\u0020\u0065\u0074\u0020\u006f\u0062\u0074\u0065\u006e\u0069\u0072\u0020\u0064\u0065\u0073\u0020\u0069\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0073\u0020\u0073\u0075\u0070\u0070\u006c\u00e9\u006d\u0065\u006e\u0074\u0061\u0069\u0072\u0065\u0073\u0020\u0073\u0075\u0072\u0020\u0073\u0065\u0073\u0020\u0063\u0061\u0072\u0061\u0063\u0074\u00e9\u0072\u0069\u0073\u0074\u0069\u0071\u0075\u0065\u0073\u002e 
+harvestclients.newClientDialog.url.required=\u0055\u006e\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u0020\u0064\u0065\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u00e0\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0065\u0072\u0020\u0065\u0073\u0074\u0020\u0072\u0065\u0071\u0075\u0069\u0073\u0065\u002e +harvestclients.newClientDialog.url.invalid=\u0055\u0052\u004c\u0020\u006e\u006f\u006e\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u002e\u0020\u0049\u006d\u0070\u006f\u0073\u0073\u0069\u0062\u006c\u0065\u0020\u0064\u0027\u00e9\u0074\u0061\u0062\u006c\u0069\u0072\u0020\u0075\u006e\u0065\u0020\u0063\u006f\u006e\u006e\u0065\u0078\u0069\u006f\u006e\u0020\u0065\u0074\u0020\u0072\u0065\u0063\u0065\u0076\u006f\u0069\u0072\u0020\u0075\u006e\u0065\u0020\u0072\u00e9\u0070\u006f\u006e\u0073\u0065\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u0020\u0064\u0075\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u002e +harvestclients.newClientDialog.url.noresponse=\u0049\u006d\u0070\u006f\u0073\u0073\u0069\u0062\u006c\u0065\u0020\u0064\u0027\u00e9\u0074\u0061\u0062\u006c\u0069\u0072\u0020\u006c\u0061\u0020\u0063\u006f\u006e\u006e\u0065\u0078\u0069\u006f\u006e\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0065\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u002e +harvestclients.newClientDialog.url.badresponse=\u0052\u00e9\u0070\u006f\u006e\u0073\u0065\u0020\u006e\u006f\u006e\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0070\u0061\u0072\u0074\u0020\u0064\u0075\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u002e +harvestclients.newClientDialog.dataverse=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006c\u006f\u0063\u0061\u006c 
+harvestclients.newClientDialog.dataverse.tip=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0071\u0075\u0069\u0020\u0068\u00e9\u0062\u0065\u0072\u0067\u0065\u0072\u0061\u0020\u006c\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0072\u00e9\u0063\u006f\u006c\u0074\u00e9\u0073\u0020\u00e0\u0020\u0070\u0061\u0072\u0074\u0069\u0072\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0072\u0065\u0073\u0073\u006f\u0075\u0072\u0063\u0065\u0020\u0064\u0069\u0073\u0074\u0061\u006e\u0074\u0065\u002e +harvestclients.newClientDialog.dataverse.menu.enterName=\u0053\u0061\u0069\u0073\u0069\u0072\u0020\u006c\u0027\u0061\u006c\u0069\u0061\u0073\u0020\u0064\u0075\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +harvestclients.newClientDialog.dataverse.menu.header=\u004e\u006f\u006d\u0020\u0064\u0075\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0028\u0061\u0066\u0066\u0069\u006c\u0069\u0061\u0074\u0069\u006f\u006e\u0029\u002c\u0020\u0061\u006c\u0069\u0061\u0073 +harvestclients.newClientDialog.dataverse.menu.invalidMsg=\u0041\u0075\u0063\u0075\u006e\u0020\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074 +harvestclients.newClientDialog.dataverse.required=\u0056\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0065\u0078\u0069\u0073\u0074\u0061\u006e\u0074\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u002e +harvestclients.newClientDialog.step2=\u00c9\u0074\u0061\u0070\u0065\u0020\u0032\u0020\u0064\u0065\u0020\u0034\u0020\u002d\u0020\u0046\u006f\u0072\u006d\u0061\u0074 
+harvestclients.newClientDialog.oaiSets=\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u004f\u0041\u0049 +harvestclients.newClientDialog.oaiSets.tip=\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0062\u006c\u0065\u0073\u0020\u006f\u0066\u0066\u0065\u0072\u0074\u0073\u0020\u0070\u0061\u0072\u0020\u0063\u0065\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u004f\u0041\u0049\u002e +harvestclients.newClientDialog.oaiSets.noset=\u0041\u0075\u0063\u0075\u006e +harvestclients.newClientDialog.oaiSets.helptext=\u0045\u006e\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0061\u006e\u0074\u0020\u00ab\u0020\u0041\u0075\u0063\u0075\u006e\u0020\u00bb\u0020\u006c\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0073\u0065\u0020\u0066\u0065\u0072\u0061\u0020\u0073\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0070\u0061\u0072\u0020\u0064\u00e9\u0066\u0061\u0075\u0074\u0020\u0064\u00e9\u0066\u0069\u006e\u0069\u0020\u0070\u0061\u0072\u0020\u006c\u0065\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u002e\u0020\u0046\u0072\u00e9\u0071\u0075\u0065\u006d\u006d\u0065\u006e\u0074\u0020\u0069\u006c\u0020\u0073\u0027\u0061\u0067\u0069\u0074\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0074\u0069\u00e8\u0072\u0065\u0074\u00e9\u0020\u0064\u0075\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0075\u0020\u0064\u0065\u0020\u0074\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0073\u006f\u0075\u0073\u002d\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u002e 
+harvestclients.newClientDialog.oaiSets.helptext.noset=\u0043\u0065\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u004f\u0041\u0049\u0020\u006e\u0065\u0020\u0070\u0072\u0065\u006e\u0064\u0020\u0070\u0061\u0073\u0020\u0065\u006e\u0020\u0063\u0068\u0061\u0072\u0067\u0065\u0020\u006c\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0073\u002e\u0020\u004c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0075\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0075\u0020\u0070\u0072\u006f\u0070\u006f\u0073\u00e9\u0020\u0070\u0061\u0072\u0020\u006c\u0065\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u0073\u0065\u0072\u0061\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u00e9\u002e +harvestclients.newClientDialog.oaiMetadataFormat=\u0046\u006f\u0072\u006d\u0061\u0074\u0020\u0064\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +harvestclients.newClientDialog.oaiMetadataFormat.tip=\u0046\u006f\u0072\u006d\u0061\u0074\u0073\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006f\u0066\u0066\u0065\u0072\u0074\u0073\u0020\u0070\u0061\u0072\u0020\u006c\u0065\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u0064\u0069\u0073\u0074\u0061\u006e\u0074\u002e +harvestclients.newClientDialog.oaiMetadataFormat.required=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0065\u0020\u0066\u006f\u0072\u006d\u0061\u0074\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u00e0\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0064\u00e9\u0070\u00f4\u0074\u002e 
+harvestclients.newClientDialog.step3=\u00c9\u0074\u0061\u0070\u0065\u0020\u0033\u0020\u0064\u0065\u0020\u0034\u0020\u002d\u0020\u0050\u006c\u0061\u006e\u0069\u0066\u0069\u0065\u0072 +harvestclients.newClientDialog.schedule=\u0050\u00e9\u0072\u0069\u006f\u0064\u0069\u0063\u0069\u0074\u00e9 +harvestclients.newClientDialog.schedule.tip=\u0050\u0072\u006f\u0067\u0072\u0061\u006d\u006d\u0065\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0071\u0075\u0027\u0069\u006c\u0020\u0073\u0027\u0065\u0078\u00e9\u0063\u0075\u0074\u0065\u0020\u0061\u0075\u0074\u006f\u006d\u0061\u0074\u0069\u0071\u0075\u0065\u006d\u0065\u006e\u0074\u0020\u0064\u0065\u0020\u0066\u0061\u00e7\u006f\u006e\u0020\u0071\u0075\u006f\u0074\u0069\u0064\u0069\u0065\u006e\u006e\u0065\u0020\u006f\u0075\u0020\u0068\u0065\u0062\u0064\u006f\u006d\u0061\u0064\u0061\u0069\u0072\u0065\u002e +harvestclients.newClientDialog.schedule.time.none.helptext=\u004e\u0065\u0020\u0070\u0061\u0073\u0020\u0073\u0070\u00e9\u0063\u0069\u0066\u0069\u0065\u0072\u0020\u0064\u0065\u0020\u0070\u00e9\u0072\u0069\u006f\u0064\u0069\u0063\u0069\u0074\u00e9\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0064\u0065\u0020\u0073\u006f\u0072\u0074\u0065\u0020\u0071\u0075\u0065\u0020\u006c\u0027\u0065\u0078\u00e9\u0063\u0075\u0074\u0069\u006f\u006e\u0020\u0073\u0065\u0020\u0066\u0065\u0072\u0061\u0020\u0073\u0075\u0072\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0020\u0073\u0065\u0075\u006c\u0065\u006d\u0065\u006e\u0074\u002e +harvestclients.newClientDialog.schedule.none=\u0041\u0075\u0063\u0075\u006e +harvestclients.newClientDialog.schedule.daily=\u0051\u0075\u006f\u0074\u0069\u0064\u0069\u0065\u006e\u006e\u0065\u006d\u0065\u006e\u0074 +harvestclients.newClientDialog.schedule.weekly=\u0048\u0065\u0062\u0064\u006f\u006d\u0061\u0064\u0061\u0069\u0072\u0065\u006d\u0065\u006e\u0074 
+harvestclients.newClientDialog.schedule.time=\u0048\u006f\u0072\u0061\u0069\u0072\u0065 +harvestclients.newClientDialog.schedule.day=\u004a\u006f\u0075\u0072 +harvestclients.newClientDialog.schedule.time.am=\u0061\u002e\u006d\u002e +harvestclients.newClientDialog.schedule.time.pm=\u0070\u002e\u006d\u002e +harvestclients.newClientDialog.schedule.time.helptext=\u004c\u0027\u0068\u006f\u0072\u0061\u0069\u0072\u0065\u0020\u0070\u0072\u006f\u0067\u0072\u0061\u006d\u006d\u00e9\u0020\u0073\u0065\u0020\u0072\u00e9\u0066\u00e8\u0072\u0065\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0068\u0065\u0075\u0072\u0065\u0020\u006c\u006f\u0063\u0061\u006c\u0065\u002e +harvestclients.newClientDialog.btn.create=\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0063\u006c\u0069\u0065\u006e\u0074 +harvestclients.newClientDialog.success=\u004c\u0065\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0022\u007b\u0030\u007d\u0022\u0020\u0061\u0020\u0062\u0069\u0065\u006e\u0020\u00e9\u0074\u00e9\u0020\u0063\u0072\u00e9\u00e9\u002e +harvestclients.newClientDialog.step4=\u00c9\u0074\u0061\u0070\u0065\u0020\u0034\u0020\u0064\u0065\u0020\u0034\u0020\u002d\u0020\u0041\u0066\u0066\u0069\u0063\u0068\u0061\u0067\u0065 +harvestclients.newClientDialog.harvestingStyle=\u0054\u0079\u0070\u0065\u0020\u0064\u0065\u0020\u0064\u00e9\u0070\u00f4\u0074 +harvestclients.newClientDialog.harvestingStyle.tip=\u0054\u0079\u0070\u0065\u0020\u0064\u0075\u0020\u0064\u00e9\u0070\u00f4\u0074\u0020\u0064\u0069\u0073\u0074\u0061\u006e\u0074\u002e 
+harvestclients.newClientDialog.harvestingStyle.helptext=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u007a\u0020\u006c\u0065\u0020\u0074\u0079\u0070\u0065\u0020\u0064\u0065\u0020\u0064\u00e9\u0070\u00f4\u0074\u0020\u0071\u0075\u0069\u0020\u0064\u00e9\u0063\u0072\u0069\u0074\u0020\u006c\u0065\u0020\u006d\u0069\u0065\u0075\u0078\u0020\u0063\u0065\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u0064\u0069\u0073\u0074\u0061\u006e\u0074\u0020\u0061\u0066\u0069\u006e\u0020\u0064\u0027\u0061\u0070\u0070\u006c\u0069\u0071\u0075\u0065\u0072\u0020\u0063\u006f\u0072\u0072\u0065\u0063\u0074\u0065\u006d\u0065\u006e\u0074\u0020\u006c\u0065\u0073\u0020\u0072\u00e8\u0067\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0066\u006f\u0072\u006d\u0061\u0074\u0061\u0067\u0065\u0020\u0065\u0074\u0020\u0064\u0065\u0020\u0073\u0074\u0079\u006c\u0065\u0020\u0061\u0075\u0078\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0072\u00e9\u0063\u006f\u006c\u0074\u00e9\u0065\u0073\u0020\u006c\u006f\u0072\u0073\u0020\u0064\u0065\u0020\u006c\u0065\u0075\u0072\u0020\u0061\u0066\u0066\u0069\u0063\u0068\u0061\u0067\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0073\u0020\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074\u0073\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u002e\u0020\u004e\u006f\u0074\u0065\u007a\u0020\u0071\u0075\u0027\u0075\u006e\u0065\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u0069\u006e\u0063\u006f\u0072\u0072\u0065\u0063\u0074\u0065\u0020\u0064\u0075\u0020\u0074\u0079\u0070\u0065\u0020\u0064\u0065\u0020\u0064\u00e9\u0070\u00f4\u0074\u0020\u0064\u0069\u0073\u0074\u0061\u006e\u0074\u0020\u0070\u0065\u0075\u0074\u0020\u0065\u006e\u0074\u0072\u0061\u00ee\u006e\u0065\u0072\u0020\u006c\u0027\u0061\u0066\u0066\u0069\u0063\u0068\u0061\u0067\u0065\u0020\u0069\u006e\u0063\u006f\u006d\u0070\u006c\u0065\u0074\u0020\u0064\u0065\u0073\u0020\u0065\u006e\u0074\u0072\u00e9\u0065\u007
3\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0073\u0020\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074\u0073\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0063\u0061\u0075\u0073\u0061\u006e\u0074\u0020\u0061\u0069\u006e\u0073\u0069\u0020\u0075\u006e\u0065\u0020\u0069\u006d\u0070\u006f\u0073\u0073\u0069\u0062\u0069\u006c\u0069\u0074\u00e9\u0020\u0064\u0065\u0020\u0072\u0065\u0064\u0069\u0072\u0069\u0067\u0065\u0072\u0020\u006c\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0076\u0065\u0072\u0073\u0020\u006c\u0065\u0020\u0064\u00e9\u0070\u00f4\u0074\u0020\u0073\u006f\u0075\u0072\u0063\u0065\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +harvestclients.viewEditDialog.title=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065 +harvestclients.viewEditDialog.archiveUrl=\u0055\u0052\u004c\u0020\u0064\u0075\u0020\u0064\u00e9\u0070\u00f4\u0074 
+harvestclients.viewEditDialog.archiveUrl.tip=\u004c\u0027\u0055\u0052\u004c\u0020\u0064\u0075\u0020\u0064\u00e9\u0070\u00f4\u0074\u0020\u0071\u0075\u0069\u0020\u0066\u006f\u0075\u0072\u006e\u0069\u0074\u0020\u006c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u0061\u0072\u0020\u0063\u0065\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u002c\u0020\u006c\u0061\u0071\u0075\u0065\u006c\u006c\u0065\u0020\u0065\u0073\u0074\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0073\u0020\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074\u0073\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u006c\u0069\u0065\u006e\u0073\u0020\u0076\u0065\u0072\u0073\u0020\u006c\u0065\u0073\u0020\u0073\u006f\u0075\u0072\u0063\u0065\u0073\u0020\u006f\u0072\u0069\u0067\u0069\u006e\u0061\u006c\u0065\u0073\u0020\u0064\u0075\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0075\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u00e9\u002e\u0020 +harvestclients.viewEditDialog.archiveUrl.helptext=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u0073\u0069\u0020\u0063\u0065\u0074\u0020\u0055\u0052\u004c\u0020\u0065\u0073\u0074\u0020\u0064\u0069\u0066\u0066\u00e9\u0072\u0065\u006e\u0074\u0020\u0064\u0065\u0020\u006c\u0027\u0055\u0052\u004c\u0020\u0064\u0075\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u002e +harvestclients.viewEditDialog.archiveDescription=\u0044\u0065\u0073\u0063\u0072\u0069\u0070\u0074\u0069\u006f\u006e\u0020\u0064\u0075\u0020\u0064\u00e9\u0070\u00f4\u0074 
+harvestclients.viewEditDialog.archiveDescription.tip=\u0044\u0065\u0073\u0063\u0072\u0069\u0070\u0074\u0069\u006f\u006e\u0020\u0064\u0075\u0020\u0064\u00e9\u0070\u00f4\u0074\u0020\u0073\u006f\u0075\u0072\u0063\u0065\u0020\u0064\u0075\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0075\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u00e9\u0020\u0065\u0074\u0020\u0061\u0066\u0066\u0069\u0063\u0068\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0073\u0020\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074\u0073\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u002e +harvestclients.viewEditDialog.archiveDescription.default.generic=\u0043\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0065\u0073\u0074\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u00e9\u0020\u0061\u0075\u0070\u0072\u00e8\u0073\u0020\u0064\u0065\u0020\u006e\u006f\u0073\u0020\u0070\u0061\u0072\u0074\u0065\u006e\u0061\u0069\u0072\u0065\u0073\u002e\u0020\u0045\u006e\u0020\u0063\u006c\u0069\u0071\u0075\u0061\u006e\u0074\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u006c\u0069\u0065\u006e\u002c\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0063\u0063\u00e9\u0064\u0065\u007a\u0020\u0064\u0069\u0072\u0065\u0063\u0074\u0065\u006d\u0065\u006e\u0074\u0020\u0061\u0075\u0020\u0064\u00e9\u0070\u00f4\u0074\u0020\u0073\u006f\u0075\u0072\u0063\u0065\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +harvestclients.viewEditDialog.btn.save=\u0053\u0061\u0075\u0076\u0065\u0067\u0061\u0072\u0064\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0073 +harvestclients.newClientDialog.title.edit=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0020\u007b\u0030\u007d +#harvestset.xhtml= 
+harvestserver.title=\u0041\u0064\u006d\u0069\u006e\u0069\u0073\u0074\u0072\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0075\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065 +harvestserver.toptip=\u002d\u0020\u0044\u00e9\u0066\u0069\u006e\u0069\u0072\u0020\u006c\u0065\u0073\u0020\u0063\u006f\u006c\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006c\u006f\u0063\u0061\u0075\u0078\u0020\u0071\u0075\u0069\u0020\u0073\u0065\u0072\u006f\u006e\u0074\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0070\u0061\u0072\u0020\u006c\u0065\u0073\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0073\u0020\u0064\u0069\u0073\u0074\u0061\u006e\u0074\u0073\u002e +harvestserver.service.label=\u0053\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u004f\u0041\u0049 +harvestserver.service.enabled=\u0041\u0063\u0074\u0069\u0076\u00e9 +harvestserver.service.disabled=\u0044\u00e9\u0073\u0061\u0063\u0074\u0069\u0076\u00e9 +harvestserver.service.disabled.msg=\u004c\u0065\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0065\u0073\u0074\u0020\u0061\u0063\u0074\u0075\u0065\u006c\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u0064\u00e9\u0073\u0061\u0063\u0074\u0069\u0076\u00e9\u002e +harvestserver.service.empty=\u0041\u0075\u0063\u0075\u006e\u0020\u006c\u006f\u0074\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0063\u006f\u006e\u0066\u0069\u0067\u0075\u0072\u00e9\u002e 
+harvestserver.service.enable.success=\u004c\u0065\u0020\u0073\u0065\u0072\u0076\u0069\u0063\u0065\u0020\u004f\u0041\u0049\u0020\u0061\u0020\u0062\u0069\u0065\u006e\u0020\u00e9\u0074\u00e9\u0020\u0061\u0063\u0074\u0069\u0076\u00e9\u002e +harvestserver.noSets.why.header=\u0051\u0075\u0027\u0065\u0073\u0074\u0020\u0063\u0065\u0020\u0071\u0075\u0027\u0075\u006e\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u003f +harvestserver.noSets.why.reason1=\u004c\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0063\u006f\u006e\u0073\u0069\u0073\u0074\u0065\u0020\u00e0\u0020\u00e9\u0063\u0068\u0061\u006e\u0067\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0076\u0065\u0063\u0020\u0064\u0027\u0061\u0075\u0074\u0072\u0065\u0073\u0020\u0064\u00e9\u0070\u00f4\u0074\u0073\u002e\u0020\u0045\u006e\u0020\u0074\u0061\u006e\u0074\u0020\u0071\u0075\u0065\u0020\u0020\u003c\u0062\u003e\u003c\u0069\u003e\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u003c\u002f\u0069\u003e\u003c\u002f\u0062\u003e\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u002c\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0072\u0065\u006e\u0064\u0072\u0065\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0020\u0063\u0065\u0072\u0074\u0061\u0069\u006e\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006c\u006f\u0063\u0061\u0075\u0078\u0020\u00e0\u0020\u0064\u0065\u0073\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0073\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0064\u0069\u0073\u0074\u0061\u006e\u0074\u0073\u002e\u0020\u0049\u006c\u0020\u0070\u0
065\u0075\u0074\u0020\u0073\u0027\u0061\u0067\u0069\u0072\u0020\u0064\u0027\u0061\u0075\u0074\u0072\u0065\u0073\u0020\u0069\u006e\u0073\u0074\u0061\u006e\u0063\u0065\u0073\u0020\u0064\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u006f\u0075\u0020\u0065\u006e\u0063\u006f\u0072\u0065\u0020\u0064\u0065\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0073\u0020\u0063\u006f\u006d\u0070\u0061\u0074\u0069\u0062\u006c\u0065\u0073\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0065\u0020\u0070\u0072\u006f\u0074\u006f\u0063\u006f\u006c\u0065\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u004f\u0041\u0049\u002d\u0050\u004d\u0048\u002e +harvestserver.noSets.why.reason2=\u0053\u0065\u0075\u006c\u0073\u0020\u006c\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0073\u0020\u0065\u0074\u0020\u006e\u006f\u006e\u0020\u0072\u0065\u0073\u0074\u0072\u0065\u0069\u006e\u0074\u0073\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u00e9\u0073\u002e\u0020\u004c\u0065\u0073\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0073\u0020\u0064\u0069\u0073\u0074\u0061\u006e\u0074\u0073\u0020\u006d\u0061\u0069\u006e\u0074\u0069\u0065\u006e\u006e\u0065\u006e\u0074\u0020\u006e\u006f\u0072\u006d\u0061\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u006c\u0065\u0075\u0072\u0073\u0020\u0065\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0079\u006e\u0063\u0068\u0072\u006f\u006e\u0069\u0073\u00e9\u0073\u0020\u0067\u0072\u00e2\u0063\u0065\u0020\u00e0\u0020\u0064\u0065\u0073\u0020\u006d\u0069\u0073\u0065\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u0020\u0069\u006e\u0063
\u0072\u00e9\u006d\u0065\u006e\u0074\u0069\u0065\u006c\u006c\u0065\u0073\u0020\u0070\u0072\u006f\u0067\u0072\u0061\u006d\u006d\u00e9\u0065\u0073\u002c\u0020\u0071\u0075\u006f\u0074\u0069\u0064\u0069\u0065\u006e\u006e\u0065\u0073\u0020\u006f\u0075\u0020\u0068\u0065\u0062\u0064\u006f\u006d\u0061\u0064\u0061\u0069\u0072\u0065\u0073\u002c\u0020\u0072\u00e9\u0064\u0075\u0069\u0073\u0061\u006e\u0074\u0020\u0061\u0069\u006e\u0073\u0069\u0020\u006c\u0061\u0020\u0063\u0068\u0061\u0072\u0067\u0065\u0020\u0073\u0075\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u002e\u0020\u004e\u006f\u0074\u0065\u007a\u0020\u0071\u0075\u0065\u0020\u0073\u0065\u0075\u006c\u0065\u0073\u0020\u006c\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0073\u006f\u006e\u0074\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u004c\u0065\u0073\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0065\u0075\u0072\u0073\u0020\u0064\u0069\u0073\u0074\u0061\u006e\u0074\u0073\u0020\u006e\u0065\u0020\u0074\u0065\u006e\u0074\u0065\u006e\u0074\u0020\u0067\u00e9\u006e\u00e9\u0072\u0061\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u0064\u0065\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u0072\u0020\u0065\u0075\u0078\u002d\u006d\u00ea\u006d\u0065\u0073\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +harvestserver.noSets.how.header=\u0043\u006f\u006d\u006d\u0065\u006e\u0074\u0020\u0061\u0063\u0074\u0069\u0076\u0065\u0072\u0020\u0075\u006e\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u003f 
+harvestserver.noSets.how.tip1=\u004c\u0065\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u0064\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0061\u0063\u0074\u0069\u0076\u00e9\u0020\u006f\u0075\u0020\u0064\u00e9\u0073\u0061\u0063\u0074\u0069\u0076\u00e9\u0020\u00e0\u0020\u0070\u0061\u0072\u0074\u0069\u0072\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0070\u0061\u0067\u0065\u002e\u0020 +harvestserver.noSets.how.tip2=\u0055\u006e\u0065\u0020\u0066\u006f\u0069\u0073\u0020\u006c\u0065\u0020\u0073\u0065\u0072\u0076\u0069\u0063\u0065\u0020\u0061\u0063\u0074\u0069\u0076\u00e9\u002c\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0064\u00e9\u0066\u0069\u006e\u0069\u0072\u0020\u0064\u0065\u0073\u0020\u0063\u006f\u006c\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006c\u006f\u0063\u0061\u0075\u0078\u0020\u0071\u0075\u0069\u0020\u0073\u0065\u0072\u006f\u006e\u0074\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0065\u0075\u0072\u0073\u0020\u0064\u0069\u0073\u0074\u0061\u006e\u0074\u0073\u0020\u0073\u006f\u0075\u0073\u0020\u003c\u0069\u003e\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u004f\u0041\u0049\u003c\u002f\u0069\u003e\u002e\u0020\u004c\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0073\u006f\u006e\u0074\u0020\u0064\u00e9\u0066\u0069\u006e\u0069\u0073\u0020\u0070\u0061\u0072\u0020\u0064\u0065\u0073\u0020\u0072\u0065\u0071\u0075\u00ea\u0074\u0065\u0073\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0028\u0070\u0061\u0072\u0020\u0065\u0078\u0065\u006d\u0070\u006
c\u0065\u002c\u0020\u0061\u0075\u0074\u0068\u006f\u0072\u004e\u0061\u006d\u0065\u003a\u006b\u0069\u006e\u0067\u003b\u0020\u006f\u0075\u0020\u0070\u0061\u0072\u0065\u006e\u0074\u0049\u0064\u003a\u0031\u0032\u0033\u0034\u0020\u002d\u0020\u0070\u006f\u0075\u0072\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u0074\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0070\u0070\u0061\u0072\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u0061\u0075\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0073\u0070\u00e9\u0063\u0069\u0066\u0069\u00e9\u003b\u0020\u006f\u0075\u0020\u0064\u0073\u0050\u0065\u0072\u0073\u0069\u0073\u0074\u0065\u006e\u0074\u0049\u0064\u003a\u0020\u0022\u0064\u006f\u0069\u003a\u0031\u0032\u0033\u0034\u002f\u0022\u0020\u0070\u006f\u0075\u0072\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u0074\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0027\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0070\u0065\u0072\u0065\u006e\u006e\u0065\u0020\u0073\u0070\u00e9\u0063\u0069\u0066\u0069\u00e9\u0029\u002e\u0020\u0043\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0061\u0020\u0073\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u0073\u0075\u0072\u0020\u006c\u0027\u0041\u0050\u0049\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0064\u0075\u0020\u0067\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0070\u006c\u0075\u0073\u0020\u0064\u0027\u0069\u006e\
u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0072\u0065\u0071\u0075\u00ea\u0074\u0065\u0073\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u002e\u0020\u0020 +harvestserver.noSets.getStarted=\u0050\u006f\u0075\u0072\u0020\u0063\u006f\u006d\u006d\u0065\u006e\u0063\u0065\u0072\u002c\u0020\u0061\u0063\u0074\u0069\u0076\u0065\u007a\u0020\u006c\u0065\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u0020\u004f\u0041\u0049\u0020\u0065\u0074\u0020\u0063\u006c\u0069\u0071\u0075\u0065\u007a\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u0020\u0041\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0028\u0073\u0065\u0074\u0029\u0020\u00bb\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0065\u006e\u0020\u0073\u0061\u0076\u006f\u0069\u0072\u0020\u0070\u006c\u0075\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0061\u0020\u0073\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0069\u006e\u0064\u0065\u0078\u002e\u0068\u0074\u006d\u006c\u0023\u0069\u006e\u0064\u0065\u0078\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0048\u0061\u0072\u0076\u0065\u0073\u0074\u0069\u006e\u0067\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u003c\u002f\u0061\u003e\u0020\u0064\u0075\u0020\u0067\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u006
9\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u002e +harvestserver.btn.add=\u0041\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0028\u0073\u0065\u0074\u0029 +harvestserver.tab.header.spec=\u0073\u0065\u0074\u0053\u0070\u0065\u0063\u0020\u004f\u0041\u0049\u0020\u0028\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u004f\u0041\u0049\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0029 +harvestserver.tab.header.description=\u0044\u0065\u0073\u0063\u0072\u0069\u0070\u0074\u0069\u006f\u006e +harvestserver.tab.header.definition=\u0044\u00e9\u0066\u0069\u006e\u0069\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0072\u0065\u0071\u0075\u00ea\u0074\u0065 +harvestserver.tab.header.stats=\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +harvestserver.tab.col.stats.empty=\u0041\u0075\u0063\u0075\u006e\u0020\u0065\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u006d\u0065\u006e\u0074\u0020\u0028\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0076\u0069\u0064\u0065\u0029 
+harvestserver.tab.col.stats.results=\u007b\u0030\u007d\u0020\u007b\u0030\u002c\u0020\u0063\u0068\u006f\u0069\u0063\u0065\u002c\u0020\u0030\u0023\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u007c\u0031\u0023\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u007c\u0032\u0023\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u007d\u0020\u0028\u007b\u0031\u007d\u0020\u007b\u0031\u002c\u0020\u0063\u0068\u006f\u0069\u0063\u0065\u002c\u0020\u0030\u0023\u0065\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u006d\u0065\u006e\u0074\u0073\u007c\u0031\u0023\u0065\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u006d\u0065\u006e\u0074\u007c\u0032\u0023\u0065\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u006d\u0065\u006e\u0074\u0073\u007d\u0020\u0065\u0078\u0070\u006f\u0072\u0074\u00e9\u0028\u0073\u0029\u002c\u0020\u007b\u0032\u007d\u0020\u006d\u0061\u0072\u0071\u0075\u00e9\u0028\u0073\u0029\u0020\u0063\u006f\u006d\u006d\u0065\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u0028\u0073\u0029\u0029 +harvestserver.tab.header.action=\u004f\u0070\u00e9\u0072\u0061\u0074\u0069\u006f\u006e\u0073 +harvestserver.tab.header.action.btn.export=\u004c\u0061\u006e\u0063\u0065\u0072\u0020\u006c\u0027\u0065\u0078\u0070\u006f\u0072\u0074\u0061\u0074\u0069\u006f\u006e 
+harvestserver.actions.runreexport.success=\u004c\u0061\u0020\u0074\u00e2\u0063\u0068\u0065\u0020\u0061\u0073\u0079\u006e\u0063\u0068\u0072\u006f\u006e\u0065\u0020\u0064\u0065\u0020\u0072\u00e9\u0065\u0078\u0070\u006f\u0072\u0074\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u004f\u0041\u0049\u0020\u0022\u007b\u0030\u007d\u0022\u0020\u0061\u0020\u0062\u0069\u0065\u006e\u0020\u00e9\u0074\u00e9\u0020\u006c\u0061\u006e\u0063\u00e9\u0065\u0020\u0028\u0076\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0072\u0065\u0063\u0068\u0061\u0072\u0067\u0065\u0072\u0020\u006c\u0061\u0020\u0070\u0061\u0067\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0073\u0075\u0069\u0076\u0072\u0065\u0020\u006c\u0061\u0020\u0070\u0072\u006f\u0067\u0072\u0065\u0073\u0073\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u0078\u0070\u006f\u0072\u0074\u0061\u0074\u0069\u006f\u006e\u0029\u002e +harvestserver.tab.header.action.btn.edit=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072 +harvestserver.tab.header.action.btn.delete=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072 +harvestserver.tab.header.action.btn.delete.dialog.header=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u00e0\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0065\u0072 
+harvestserver.tab.header.action.btn.delete.dialog.tip=\u0056\u006f\u0075\u006c\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0076\u0072\u0061\u0069\u006d\u0065\u006e\u0074\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u004f\u0041\u0049\u0020\u0022\u007b\u0030\u007d\u0022\u003f\u0020\u0056\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0061\u006e\u006e\u0075\u006c\u0065\u0072\u0020\u0075\u006e\u0065\u0020\u0073\u0075\u0070\u0070\u0072\u0065\u0073\u0073\u0069\u006f\u006e\u0021 +harvestserver.tab.header.action.delete.infomessage=\u004c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u00e0\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0065\u0072\u0020\u0065\u0073\u0074\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u002e\u0020\u0028\u0043\u0065\u0063\u0069\u0020\u0070\u0065\u0075\u0074\u0020\u0070\u0072\u0065\u006e\u0064\u0072\u0065\u0020\u0075\u006e\u0020\u0063\u0065\u0072\u0074\u0061\u0069\u006e\u0020\u0074\u0065\u006d\u0070\u0073\u0029 +harvestserver.newSetDialog.title.new=\u0044\u00e9\u0066\u0069\u006e\u0069\u0072\u0020\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u00e0\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0065\u0072 
+harvestserver.newSetDialog.help=\u0044\u00e9\u0066\u0069\u006e\u0069\u0072\u0020\u0075\u006e\u0065\u0020\u0063\u006f\u006c\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006c\u006f\u0063\u0061\u0075\u0078\u0020\u0071\u0075\u0069\u0020\u0073\u0065\u0072\u006f\u006e\u0074\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u0061\u0067\u0065\u0020\u0070\u0061\u0072\u0020\u006c\u0065\u0073\u0020\u0063\u006c\u0069\u0065\u006e\u0074\u0073\u0020\u0064\u0069\u0073\u0074\u0061\u006e\u0074\u0073\u002e +harvestserver.newSetDialog.setspec=\u004e\u006f\u006d\u002f\u0073\u0065\u0074\u0053\u0070\u0065\u0063\u0020\u004f\u0041\u0049 +harvestserver.newSetDialog.setspec.tip=\u0055\u006e\u0020\u006e\u006f\u006d\u0020\u0075\u006e\u0069\u0071\u0075\u0065\u0020\u0028\u004f\u0041\u0049\u0020\u0073\u0065\u0074\u0053\u0070\u0065\u0063\u0029\u0020\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u002e +harvestserver.newSetDialog.setspec.helptext=\u0053\u0065\u0020\u0063\u006f\u006d\u0070\u006f\u0073\u0065\u0020\u0064\u0065\u0020\u006c\u0065\u0074\u0074\u0072\u0065\u0073\u002c\u0020\u0064\u0065\u0020\u0063\u0068\u0069\u0066\u0066\u0072\u0065\u0073\u002c\u0020\u0064\u0065\u0020\u0074\u0072\u0061\u0069\u0074\u0073\u0020\u0064\u0065\u0020\u0073\u006f\u0075\u006c\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0020\u0028\u005f\u0029\u0020\u0065\u0074\u0020\u0064\u0065\u0020\u0074\u0069\u0072\u0065\u0074\u0073\u0020\u0028\u002d\u0029\u002e 
+harvestserver.editSetDialog.setspec.helptext=\u004c\u0065\u0020\u006e\u006f\u006d\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0070\u0061\u0073\u0020\u00ea\u0074\u0072\u0065\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u00e9\u0020\u0075\u006e\u0065\u0020\u0066\u006f\u0069\u0073\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0063\u0072\u00e9\u00e9\u002e +harvestserver.newSetDialog.setspec.required=\u004c\u0065\u0020\u006e\u006f\u006d\u0020\u0028\u0073\u0065\u0074\u0053\u0070\u0065\u0063\u0020\u004f\u0041\u0049\u0029\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0076\u0069\u0064\u0065\u0021 +harvestserver.newSetDialog.setspec.invalid=\u004c\u0065\u0020\u006e\u006f\u006d\u0020\u0028\u0073\u0065\u0074\u0053\u0070\u0065\u0063\u0020\u004f\u0041\u0049\u0029\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0069\u0072\u0020\u0071\u0075\u0065\u0020\u0064\u0065\u0073\u0020\u006c\u0065\u0074\u0074\u0072\u0065\u0073\u002c\u0020\u0064\u0065\u0073\u0020\u0063\u0068\u0069\u0066\u0066\u0072\u0065\u0073\u002c\u0020\u0064\u0065\u0073\u0020\u0074\u0072\u0061\u0069\u0074\u0073\u0020\u0064\u0065\u0020\u0073\u006f\u0075\u006c\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0020\u0028\u005f\u0029\u0020\u0065\u0074\u0020\u0064\u0065\u0073\u0020\u0074\u0069\u0072\u0065\u0074\u0073\u0020\u0028\u002d\u0029\u002e +harvestserver.newSetDialog.setspec.alreadyused=\u0043\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0028\u0073\u0065\u0074\u0053\u0070\u0065\u0063\u0020\u004f\u0041\u0049\u0029\u0020\u0065\u0073\u0074\u0020\u0064\u00e9\u006a\u00e0\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u002e +harvestserver.newSetDialog.setdescription=\u0044\u0065\u0073\u0063\u0072\u0069\u0070\u0074\u0069\u006f\u006e 
+harvestserver.newSetDialog.setdescription.tip=\u0046\u006f\u0075\u0072\u006e\u0069\u0072\u0020\u0075\u006e\u0065\u0020\u0062\u0072\u00e8\u0076\u0065\u0020\u0064\u0065\u0073\u0063\u0072\u0069\u0070\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u004f\u0041\u0049\u002e +harvestserver.newSetDialog.setdescription.required=\u004c\u0061\u0020\u0064\u0065\u0073\u0063\u0072\u0069\u0070\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0076\u0069\u0064\u0065\u0021 +harvestserver.newSetDialog.setquery=\u0052\u0065\u0071\u0075\u00ea\u0074\u0065\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065 +harvestserver.newSetDialog.setquery.tip=\u0052\u0065\u0071\u0075\u00ea\u0074\u0065\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0071\u0075\u0069\u0020\u0064\u00e9\u0066\u0069\u006e\u0069\u0074\u0020\u006c\u0065\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0075\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +harvestserver.newSetDialog.setquery.helptext=\u0045\u0078\u0065\u006d\u0070\u006c\u0065\u0020\u0064\u0065\u0020\u0072\u0065\u0071\u0075\u00ea\u0074\u0065\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0061\u0075\u0074\u0068\u006f\u0072\u004e\u0061\u006d\u0065\u003a\u006b\u0069\u006e\u0067 +harvestserver.newSetDialog.setquery.required=\u004c\u0061\u0020\u0072\u0065\u0071\u0075\u00ea\u0074\u0065\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0076\u0069\u0064\u0065\u0021 
+harvestserver.newSetDialog.setquery.results=\u004c\u0061\u0020\u0072\u0065\u0071\u0075\u00ea\u0074\u0065\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0061\u0020\u0072\u0065\u0074\u006f\u0075\u0072\u006e\u00e9\u0020\u007b\u0030\u007d\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0028\u0073\u0029\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0021 +harvestserver.newSetDialog.setquery.empty=\u0041\u0056\u0045\u0052\u0054\u0049\u0053\u0053\u0045\u004d\u0045\u004e\u0054\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u006c\u0061\u0020\u0072\u0065\u0071\u0075\u00ea\u0074\u0065\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u006e\u0027\u0061\u0020\u0072\u0065\u0074\u006f\u0075\u0072\u006e\u00e9\u0020\u0061\u0075\u0063\u0075\u006e\u0020\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074\u0021 +harvestserver.newSetDialog.btn.create=\u0043\u0072\u00e9\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065 +harvestserver.newSetDialog.success=\u004c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0022\u007b\u0030\u007d\u0022\u0020\u0061\u0020\u0062\u0069\u0065\u006e\u0020\u00e9\u0074\u00e9\u0020\u0063\u0072\u00e9\u00e9\u002e +harvestserver.viewEditDialog.title=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u006d\u006f\u0069\u0073\u0073\u006f\u006e\u006e\u00e9\u002e +harvestserver.viewEditDialog.btn.save=\u0053\u0061\u0075\u0076\u0065\u0067\u0061\u0072\u0064\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0073 +#dashboard-users.xhtml=\u0023\u0064\u0061\u0073\u0068\u0062\u006f\u0061\u0072\u0064\u002d\u0075\u0073\u0065\u0072\u0073\u002e\u0078\u0068\u0074\u006d\u006c 
+dashboard.card.users=\u0055\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073 +dashboard.card.users.header=\u0054\u0061\u0062\u006c\u0065\u0061\u0075\u0020\u0064\u0065\u0020\u0062\u006f\u0072\u0064\u0020\u002d\u0020\u004c\u0069\u0073\u0074\u0065\u0020\u0064\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073 +dashboard.card.users.super=\u0053\u0075\u0070\u0065\u0072\u002d\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073 +dashboard.card.users.manage=\u0047\u00e9\u0072\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073 +dashboard.card.users.message=\u004c\u0069\u0073\u0074\u0065\u0072\u0020\u0065\u0074\u0020\u0067\u00e9\u0072\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u002e +dashboard.list_users.searchTerm.watermark=\u0052\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0072\u0020\u0063\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u005c\u0075\u0032\u0030\u0032\u0036 +dashboard.list_users.tbl_header.userId=\u0049\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074 +dashboard.list_users.tbl_header.userIdentifier=\u004e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072 +dashboard.list_users.tbl_header.name=\u004e\u006f\u006d\u0020 +dashboard.list_users.tbl_header.lastName=\u004e\u006f\u006d\u0020\u0064\u0065\u0020\u0066\u0061\u006d\u0069\u006c\u006c\u0065\u0020 +dashboard.list_users.tbl_header.firstName=\u0050\u0072\u00e9\u006e\u006f\u006d\u0020 +dashboard.list_users.tbl_header.email=\u0043\u006f\u0075\u0072\u0072\u0069\u0065\u006c +dashboard.list_users.tbl_header.affiliation=\u0041\u0066\u0066\u0069\u006c\u0069\u0061\u0074\u0069\u006f\u006e +dashboard.list_users.tbl_header.roles=\u0052\u006f\u006c\u0065\u0073 
+dashboard.list_users.tbl_header.position=\u0050\u006f\u0073\u0074\u0065 +dashboard.list_users.tbl_header.isSuperuser=\u0053\u0075\u0070\u0065\u0072\u002d\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072 +dashboard.list_users.tbl_header.authProviderFactoryAlias=\u0041\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e +dashboard.list_users.tbl_header.createdTime=\u0044\u0061\u0074\u0065\u0020\u0064\u0065\u0020\u0063\u0072\u00e9\u0061\u0074\u0069\u006f\u006e +dashboard.list_users.tbl_header.lastLoginTime=\u0044\u0065\u0072\u006e\u0069\u00e8\u0072\u0065\u0020\u0063\u006f\u006e\u006e\u0065\u0078\u0069\u006f\u006e +dashboard.list_users.tbl_header.lastApiUseTime=\u0044\u0065\u0072\u006e\u0069\u00e8\u0072\u0065\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0027\u0041\u0050\u0049 +dashboard.list_users.tbl_header.roles.removeAll=Remove All +dashboard.list_users.tbl_header.roles.removeAll.header=Remove All Roles +dashboard.list_users.tbl_header.roles.removeAll.confirmationText=Are you sure you want to remove all roles for user {0}? +dashboard.list_users.removeAll.message.success=All roles have been removed for user {0}. +dashboard.list_users.removeAll.message.failure=Failed to remove roles for user {0}. 
+dashboard.list_users.toggleSuperuser=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0020\u0073\u0074\u0061\u0074\u0075\u0074\u0020\u0064\u0065\u0020\u0073\u0075\u0070\u0065\u0072\u002d\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072 +dashboard.list_users.toggleSuperuser.confirmationText.add=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0063\u0065\u0072\u0074\u0061\u0069\u006e\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0061\u0063\u0074\u0069\u0076\u0065\u0072\u0020\u006c\u0065\u0020\u0073\u0074\u0061\u0074\u0075\u0074\u0020\u0064\u0065\u0020\u0073\u0075\u0070\u0065\u0072\u002d\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u007b\u0030\u007d\u003f +dashboard.list_users.toggleSuperuser.confirmationText.remove=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0063\u0065\u0072\u0074\u0061\u0069\u006e\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0064\u00e9\u0073\u0061\u0063\u0074\u0069\u0076\u0065\u0072\u0020\u006c\u0065\u0020\u0073\u0074\u0061\u0074\u0075\u0074\u0020\u0064\u0065\u0020\u0073\u0075\u0070\u0065\u0072\u002d\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u007b\u0030\u007d\u003f +dashboard.list_users.toggleSuperuser.confirm=\u0050\u006f\u0075\u0072\u0073\u0075\u0069\u0076\u0072\u0065 +dashboard.list_users.api.auth.invalid_apikey=\u004c\u0061\u0020\u0063\u006c\u00e9\u0020\u0041\u0050\u0049\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0070\u0061\u0073\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u002e 
+dashboard.list_users.api.auth.not_superuser=\u0041\u0063\u0074\u0069\u006f\u006e\u0020\u0049\u006e\u0074\u0065\u0072\u0064\u0069\u0074\u0065\u002e\u0020\u0056\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0065\u007a\u0020\u00ea\u0074\u0072\u0065\u0020\u0075\u006e\u0020\u0073\u0075\u0070\u0065\u0072\u002d\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u002e +#MailServiceBean.java= +notification.email.create.dataverse.subject=\u007b\u0030\u007d\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0056\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0063\u0072\u00e9\u00e9\u002e +notification.email.create.dataset.subject=\u007b\u0030\u007d\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0056\u006f\u0074\u0072\u0065\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0063\u0072\u00e9\u00e9\u002e +notification.email.request.file.access.subject=\u007b\u0030\u007d\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0056\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u0070\u0072\u00e9\u0073\u0065\u006e\u0074\u00e9\u0020\u0075\u006e\u0065\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0065\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u002e +notification.email.grant.file.access.subject=\u007b\u0030\u007d\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u004c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0061\u0063\u0063\u006f\u0072\u0064\u00e9\u002e 
+notification.email.rejected.file.access.subject=\u007b\u0030\u007d\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0056\u006f\u0074\u0072\u0065\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0065\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u0066\u0075\u0073\u00e9\u0065\u002e +notification.email.update.maplayer=\u007b\u0030\u007d\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0055\u006e\u0065\u0020\u0063\u006f\u0075\u0063\u0068\u0065\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0061\u006a\u006f\u0075\u0074\u00e9\u0065\u0020\u00e0\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +notification.email.maplayer.deletefailed.subject=\u007b\u0030\u007d\u003a\u0020\u0049\u006d\u0070\u006f\u0073\u0073\u0069\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0061\u0020\u0063\u006f\u0075\u0063\u0068\u0065\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u002e 
+notification.email.maplayer.deletefailed.text=\u0049\u006d\u0070\u006f\u0073\u0073\u0069\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0061\u0020\u0063\u006f\u0075\u0063\u0068\u0065\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u0020\u0061\u0073\u0073\u006f\u0063\u0069\u00e9\u0065\u0020\u0061\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u00e0\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u0065\u0073\u0074\u0072\u0065\u0069\u006e\u0074\u0020\u007b\u0030\u007d\u002c\u0020\u0061\u0069\u006e\u0073\u0069\u0020\u0071\u0075\u0065\u0020\u0074\u006f\u0075\u0074\u0065\u0073\u0020\u006c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0063\u006f\u006e\u006e\u0065\u0078\u0065\u0073\u0020\u0071\u0075\u0069\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u0065\u006e\u0063\u006f\u0072\u0065\u0020\u00ea\u0074\u0072\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0071\u0075\u0065\u006d\u0065\u006e\u0074\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0064\u0065\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u002e\u0020\u0045\u0073\u0073\u0061\u0079\u0065\u007a\u0020\u0064\u0065\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u002c\u0020\u006f\u0075\u0020\u0063\u006f\u006e\u0074\u0061\u0063\u0074\u0065\u007a\u0020\u006c\u0065\u0020\u0073\u006f\u0075\u0074\u0069\u0065\u006e\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u0020\u0065\u0074\u002f\u006f\u0075\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0020\u0028\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u003a\u0020\u007b\u0031\u007d\u0029 
+notification.email.submit.dataset.subject=\u007b\u0030\u007d\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0056\u006f\u0074\u0072\u0065\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0073\u006f\u0075\u006d\u0069\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u006e\u0073\u0020\u0064\u0027\u0065\u0078\u0061\u006d\u0065\u006e\u002e +notification.email.publish.dataset.subject=\u007b\u0030\u007d\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0056\u006f\u0074\u0072\u0065\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002e +notification.email.returned.dataset.subject=\u007b\u0030\u007d\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0056\u006f\u0074\u0072\u0065\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u0074\u006f\u0075\u0072\u006e\u00e9\u002e +notification.email.create.account.subject=\u007b\u0030\u007d\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0056\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0063\u0072\u00e9\u00e9\u002e +notification.email.assign.role.subject=\u007b\u0030\u007d\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0020\u0055\u006e\u0020\u0072\u00f4\u006c\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0061\u0074\u0074\u0072\u0069\u0062\u0075\u00e9 +notification.email.revoke.role.subject=\u007b\u0030\u007d\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0056\u006f\u0074\u0072\u0065\u0020\u0072\u00f4\u006c\u0065\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u00e9\u0076\u006f\u0071\u0075\u00e9 
+notification.email.verifyEmail.subject=\u007b\u0030\u007d\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0056\u0061\u006c\u0069\u0064\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c +notification.email.greeting=\u0042\u006f\u006e\u006a\u006f\u0075\u0072\u002c\u0020\u005c\u006e +# Bundle file editors, please note that "notification.email.welcome" is used in a unit test= +notification.email.welcome=\u0042\u0069\u0065\u006e\u0076\u0065\u006e\u0075\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0021\u0020\u0043\u006f\u006d\u006d\u0065\u006e\u0063\u0065\u007a\u0020\u0064\u00e8\u0073\u0020\u006d\u0061\u0069\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u0065\u006e\u0020\u0061\u006a\u006f\u0075\u0074\u0061\u006e\u0074\u0020\u006f\u0075\u0020\u0065\u006e\u0063\u006f\u0072\u0065\u0020\u0065\u006e\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0061\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u0044\u0065\u0073\u0020\u0071\u0075\u0065\u0073\u0074\u0069\u006f\u006e\u0073\u003f\u0020\u0043\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0065\u0020\u0047\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0069\u0063\u0069\u0020\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0020\u006f\u0075\u0020\u0063\u006f\u006e\u0074\u0061\u0063\u0074\u0065\u007a\u0020\u006c\u0065\u0020\u0073\u0065\u0072\u0076\u0069\u0063\u0065\u0020\u0064\u0065\u0020\u0073\u006f\u0075\u0074\u0069\u0065\u006e\u0020\u0064\u0065\u0020\u0053\u0063\u0068\u006f\u006c\u0061\u0072\u0073\u0020\u0050\u006f\u0072\u0074\u0061\u006c\u0020\u0070\u006f\u0075\u0072\u0020\u0064\u0065\u0020\u006c\u0027\u0061\u0069\u0064\u0065\u002e\u0020\u0056\u006f\u0075\u0073\u0020\u0076\u006f\u0075\u006c\u0065\u007a\u0020\
u0066\u0061\u0069\u0072\u0065\u0020\u006c\u0027\u0065\u0073\u0073\u0061\u0069\u0020\u0064\u0065\u0073\u0020\u0063\u006f\u006d\u0070\u006f\u0073\u0061\u006e\u0074\u0065\u0073\u0020\u0064\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u003f\u0020\u0045\u0073\u0073\u0061\u0079\u0065\u007a\u0020\u006e\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0064\u0065\u0020\u0064\u00e9\u006d\u006f\u006e\u0073\u0074\u0072\u0061\u0074\u0069\u006f\u006e\u0020\u00e0\u0020\u0068\u0074\u0074\u0070\u0073\u003a\u002f\u002f\u0064\u0065\u006d\u006f\u0064\u0076\u002e\u0073\u0063\u0068\u006f\u006c\u0061\u0072\u0073\u0070\u006f\u0072\u0074\u0061\u006c\u002e\u0069\u006e\u0066\u006f +notification.email.welcomeConfirmEmailAddOn=\u005c\u006e\u005c\u006e\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0076\u00e9\u0072\u0069\u0066\u0069\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u00e0\u0020\u007b\u0030\u007d\u002e\u0020\u004e\u006f\u0074\u0065\u007a\u0020\u0071\u0075\u0065\u0020\u006c\u0065\u0020\u006c\u0069\u0065\u006e\u0020\u0064\u0065\u0020\u0076\u0061\u006c\u0069\u0064\u0061\u0074\u0069\u006f\u006e\u0020\u0065\u0078\u0070\u0069\u0072\u0065\u0072\u0061\u0020\u0061\u0070\u0072\u00e8\u0073\u0020\u007b\u0031\u007d\u002e\u0020\u0045\u006e\u0076\u006f\u0079\u0065\u007a\u0020\u0064\u0065\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u0075\u006e\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0064\u0065\u0020\u0076\u0061\u006c\u0069\u0064\u0061\u0074\u0069\u006f\u006e\u0020\u0065\u006e\u0020\u0076\u006f\u0075\u0073\u0020\u0072\u0065\u006e\u0064\u0061\u006e\u0074\u0020\u00e0\u0020\u006c\u0061\u0020\u0070\u0061\u0067\u0065\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u002e 
+notification.email.requestFileAccess=\u0041\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u007b\u0030\u007d\u002e\u0020\u0047\u00e9\u0072\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0061\u0075\u0074\u006f\u0072\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0073\u0020\u00e0\u0020\u007b\u0031\u007d\u002e +notification.email.grantFileAccess=\u0041\u0063\u0063\u00e8\u0073\u0020\u0061\u0063\u0063\u006f\u0072\u0064\u00e9\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u007b\u0030\u007d\u0020\u0028\u0076\u006f\u0069\u0072\u0020\u007b\u0031\u007d\u0029\u002e +notification.email.rejectFileAccess=\u0041\u0063\u0063\u00e8\u0073\u0020\u0072\u0065\u0066\u0075\u0073\u00e9\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u00e9\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u007b\u0030\u007d\u0020\u0028\u0076\u006f\u0069\u0072\u0020\u007b\u0031\u007d\u0029\u002e +# Bundle file editors, please note that "notification.email.createDataverse" is used in a unit test= 
+notification.email.createDataverse=\u0056\u006f\u0074\u0072\u0065\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0069\u006e\u0074\u0069\u0074\u0075\u006c\u00e9\u0020\u007b\u0030\u007d\u0020\u0028\u0076\u006f\u0069\u0072\u0020\u007b\u0031\u007d\u0029\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0063\u0072\u00e9\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u007b\u0032\u007d\u0020\u0028\u0076\u006f\u0069\u0072\u0020\u007b\u0033\u007d\u0029\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0065\u006e\u0020\u0073\u0061\u0076\u006f\u0069\u0072\u0020\u0070\u006c\u0075\u0073\u0020\u0073\u0075\u0072\u0020\u0063\u0065\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0066\u0061\u0069\u0072\u0065\u0020\u0061\u0076\u0065\u0063\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0065\u0020\u0047\u0075\u0069\u0064\u0065\u0020\u0064\u0065\u0020\u006c\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u00e0\u0020\u006c\u0027\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0073\u0075\u0069\u0076\u0061\u006e\u0074\u0065\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u007b\u0034\u007d\u002f\u007b\u0035\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0020\u002e +# Bundle file editors, please note that "notification.email.createDataset" is used in a unit test= 
+notification.email.createDataset=\u0056\u006f\u0074\u0072\u0065\u0020\u006e\u006f\u0075\u0076\u0065\u006c\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0069\u006e\u0074\u0069\u0074\u0075\u006c\u00e9\u0020\u007b\u0030\u007d\u0020\u0028\u0076\u006f\u0069\u0072\u0020\u007b\u0031\u007d\u0029\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0063\u0072\u00e9\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u007b\u0032\u007d\u0020\u0028\u0076\u006f\u0069\u0072\u0020\u007b\u0033\u007d\u0029\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0065\u006e\u0020\u0073\u0061\u0076\u006f\u0069\u0072\u0020\u0070\u006c\u0075\u0073\u0020\u0073\u0075\u0072\u0020\u0063\u0065\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0066\u0061\u0069\u0072\u0065\u0020\u0061\u0076\u0065\u0063\u0020\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0065\u0020\u0047\u0075\u0069\u0064\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0073\u0075\u0072\u0020\u006c\u0061\u0020\u0067\u0065\u0073\u0074\u0069\u006f\u006e\u0020\u0064\u0027\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u00e0\u0020\u006c\u0027\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0073\u0075\u0069\u0076\u0061\u006e\u0074\u0065\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u007b\u0034\u007d\u002f\u007b\u0035\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0073\u0065\u0074\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0020\u002e 
+notification.email.wasSubmittedForReview=\u007b\u0030\u007d\u0020\u0028\u0076\u006f\u0069\u0072\u0020\u007b\u0031\u007d\u0029\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0073\u006f\u0075\u006d\u0069\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u006e\u0073\u0020\u0064\u0027\u0065\u0078\u0061\u006d\u0065\u006e\u0020\u0065\u006e\u0020\u0076\u0075\u0065\u0020\u0064\u0065\u0020\u0073\u0061\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0061\u006e\u0073\u0020\u007b\u0032\u007d\u0020\u0020\u0028\u0076\u006f\u0069\u0072\u0020\u007b\u0033\u007d\u0029\u002e\u0020\u004e\u0027\u006f\u0075\u0062\u006c\u0069\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0064\u0065\u0020\u006c\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u0072\u0020\u006f\u0075\u0020\u0064\u0065\u0020\u006c\u0065\u0020\u0072\u0065\u006e\u0076\u006f\u0079\u0065\u0072\u0020\u0061\u0075\u0020\u0063\u006f\u006c\u006c\u0061\u0062\u006f\u0072\u0061\u0074\u0065\u0075\u0072\u005c\u0021 +notification.email.wasReturnedByReviewer=\u007b\u0030\u007d\u0020\u0028\u0076\u006f\u0069\u0072\u0020\u007b\u0031\u007d\u0029\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u0074\u006f\u0075\u0072\u006e\u00e9\u0020\u0070\u0061\u0072\u0020\u006c\u0027\u0069\u006e\u0074\u0065\u006e\u0064\u0061\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0065\u0020\u007b\u0032\u007d\u0020\u0028\u0076\u006f\u0069\u0072\u0020\u007b\u0033\u007d\u0029\u002e +notification.email.wasPublished=\u007b\u0030\u007d\u0020\u0028\u0076\u006f\u0069\u0072\u0020\u007b\u0031\u007d\u0029\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u007b\u0032\u007d\u0020\u0028\u0076\u006f\u0069\u0072\u0020\u007b\u0033\u007d\u0029\u002e 
+notification.email.worldMap.added=\u004c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0027\u0075\u006e\u0065\u0020\u0063\u006f\u0075\u0063\u0068\u0065\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u0020\u006f\u006e\u0074\u0020\u00e9\u0074\u00e9\u0020\u0061\u006a\u006f\u0075\u0074\u00e9\u0065\u0073\u0020\u00e0\u0020\u007b\u0030\u007d\u0020\u0028\u0076\u006f\u0069\u0072\u0020\u007b\u0031\u007d\u0029\u002e +notification.email.closing=\u005c\u006e\u005c\u006e\u004d\u0065\u0072\u0063\u0069\u002c\u005c\u006e\u0053\u0063\u0068\u006f\u006c\u0061\u0072\u0073\u0020\u0050\u006f\u0072\u0074\u0061\u006c\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +notification.email.assignRole=\u0056\u006f\u0075\u0073\u0020\u00ea\u0074\u0065\u0073\u0020\u006d\u0061\u0069\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u007b\u0030\u007d\u0020\u0070\u006f\u0075\u0072\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u007b\u0031\u007d\u0020\u0022\u007b\u0032\u007d\u0022\u0020\u0028\u0076\u006f\u0069\u0072\u0020\u007b\u0033\u007d\u0029\u002e +notification.email.revokeRole=\u0055\u006e\u0020\u0064\u0065\u0020\u0076\u006f\u0073\u0020\u0072\u00f4\u006c\u0065\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u007b\u0030\u007d\u0020\u0022\u007b\u0031\u007d\u0022\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u00e9\u0076\u006f\u0071\u0075\u00e9\u0020\u0028\u0076\u006f\u0069\u0072\u0020\u007b\u0032\u007d\u0029\u002e 
+notification.email.changeEmail=\u0042\u006f\u006e\u006a\u006f\u0075\u0072\u002c\u0020\u007b\u0030\u007d\u002e\u007b\u0031\u007d\u005c\u006e\u005c\u006e\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u006e\u006f\u0075\u0073\u0020\u0063\u006f\u006e\u0074\u0061\u0063\u0074\u0065\u0072\u0020\u0073\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u006e\u0027\u0061\u0076\u0069\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u006c\u0027\u0069\u006e\u0074\u0065\u006e\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0066\u0061\u0069\u0072\u0065\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0020\u006f\u0075\u0020\u0073\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u0062\u0065\u0073\u006f\u0069\u006e\u0020\u0064\u0027\u0061\u0069\u0064\u0065\u002e +hours=\u0068\u0065\u0075\u0072\u0065\u0073 +hour=\u0068\u0065\u0075\u0072\u0065 +minutes=\u006d\u0069\u006e\u0075\u0074\u0065\u0073 +minute=\u006d\u0069\u006e\u0075\u0074\u0065 +notification.email.checksumfail.subject=\u007b\u0030\u007d\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0056\u006f\u0074\u0072\u0065\u0020\u0076\u0061\u006c\u0069\u0064\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0073\u006f\u006d\u006d\u0065\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u0074\u0072\u00f4\u006c\u0065\u0020\u0061\u0020\u00e9\u0063\u0068\u006f\u0075\u00e9\u002e +notification.email.import.filesystem.subject=\u007b\u0030\u007d\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0056\u006f\u0074\u0072\u0065\u0020\u0074\u00e2\u0063\u0068\u0065\u0020\u0064\u0027\u0069\u006d\u0070\u006f\u0072\u0074\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0065\u0073\u0074\u0020\u0063\u006f\u006d\u0070\u006c\u00e9\u0074\u00e9\u0065\u002e 
+notification.email.import.checksum.subject=\u007b\u0030\u007d\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0056\u006f\u0074\u0072\u0065\u0020\u0074\u00e2\u0063\u0068\u0065\u0020\u0064\u0065\u0020\u0073\u006f\u006d\u006d\u0065\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u0074\u0072\u00f4\u006c\u0065\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0065\u0073\u0074\u0020\u0063\u006f\u006d\u0070\u006c\u00e9\u0074\u00e9\u0065\u002e +# passwordreset.xhtml= +pageTitle.passwdReset.pre=\u0052\u00e9\u0069\u006e\u0069\u0074\u0069\u0061\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0075\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0064\u0075\u0020\u0063\u006f\u006d\u0070\u0074\u0065 +passwdReset.token=\u004a\u0065\u0074\u006f\u006e\u005c\u0075\u0030\u0030\u0041\u0030\u003a +passwdReset.userLookedUp=\u0055\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u00e9\u005c\u0075\u0030\u0030\u0041\u0030\u003a +passwdReset.emailSubmitted=\u0043\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0073\u006f\u0075\u006d\u0069\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a 
+passwdReset.details=\u007b\u0030\u007d\u0020\u0052\u00e9\u0069\u006e\u0069\u0074\u0069\u0061\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0075\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u007b\u0031\u007d\u0020\u002d\u0020\u0050\u006f\u0075\u0072\u0020\u0064\u00e9\u0062\u0075\u0074\u0065\u0072\u0020\u006c\u0065\u0020\u0070\u0072\u006f\u0063\u0065\u0073\u0073\u0075\u0073\u0020\u0064\u0065\u0020\u0072\u00e9\u0069\u006e\u0069\u0074\u0069\u0061\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0075\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u002c\u0020\u0076\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u002e\u0020 +passwdReset.submitRequest=\u0053\u006f\u0075\u006d\u0065\u0074\u0074\u0072\u0065\u0020\u006c\u0061\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0020\u0064\u0065\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065 +passwdReset.successSubmit.tip=\u0053\u0069\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0065\u0073\u0074\u0020\u0061\u0073\u0073\u006f\u0063\u0069\u00e9\u0065\u0020\u00e0\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u002c\u0020\u0075\u006e\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0073\u0065\u0072\u0061\u0020\u0065\u006e\u0076\u006f\u0079\u00e9\u0020\u0061\u0076\u0065\u0063\u0020\u0064\u0065\u0073\u0020\u0069\u006e\u0073\u0074\u0072\u0075\u0063\u0074\u0069\u006f\u006e\u0073\u0020\u0073\u0075\u0070\u0070\u006c\u00e9\u006d\u0065\u006e\u0074\u0061\u0069\u0072\u0065\u0073\u0020\u00e0\u0020\u007b\u0030\u007d\u002e +passwdReset.debug=\u0044\u00c9\u0042\u004f\u0047\u0055\u0045\u0052 
+passwdReset.resetUrl=\u004c\u0027\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0055\u0052\u004c\u0020\u0072\u00e9\u0069\u006e\u0069\u0074\u0069\u0061\u006c\u0069\u0073\u00e9\u0065\u0020\u0065\u0073\u0074 +passwdReset.noEmail.tip=\u0041\u0075\u0063\u0075\u006e\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u006e\u0027\u0061\u0020\u00e9\u0074\u00e9\u0020\u0065\u006e\u0076\u006f\u0079\u00e9\u0020\u00e9\u0074\u0061\u006e\u0074\u0020\u0064\u006f\u006e\u006e\u00e9\u0020\u0071\u0075\u0027\u0061\u0075\u0063\u0075\u006e\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u006e\u0027\u0061\u0020\u00e9\u0074\u00e9\u0020\u0074\u0072\u006f\u0075\u0076\u00e9\u0020\u0061\u0075\u0020\u006d\u006f\u0079\u0065\u006e\u0020\u0064\u0065\u0020\u006c\u0027\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0066\u006f\u0075\u0072\u006e\u0069\u0065\u0020\u007b\u0030\u007d\u002e\u0020\u004e\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u006c\u0065\u0020\u006d\u0065\u006e\u0074\u0069\u006f\u006e\u006e\u006f\u006e\u0073\u0020\u0070\u0061\u0073\u002c\u0020\u0063\u0061\u0072\u0020\u006e\u006f\u0075\u0073\u0020\u0076\u006f\u0075\u006c\u006f\u006e\u0073\u0020\u00e9\u0076\u0069\u0074\u0065\u0072\u0020\u0071\u0075\u0065\u0020\u0064\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u006d\u0061\u006c\u0076\u0065\u0069\u006c\u006c\u0061\u006e\u0074\u0073\u0020\u0073\u0065\u0020\u0073\u0065\u0072\u0076\u0065\u006e\u0074\u0020\u0064\u0075\u0020\u0066\u006f\u0072\u006d\u0075\u006c\u0061\u0069\u0072\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0064\u00e9\u0074\u0065\u0072\u006d\u0069\u006e\u0065\u0072\u0020\u0073\u0069\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0065\u0073\u0074\u0020\u0061\u0073\u0073\u006f\u0063\u0069\u00e9\u0020\u00e0\u0020\u0075\u006e\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u002e\u0020 
+passwdReset.illegalLink.tip=\u004c\u0065\u0020\u006c\u0069\u0065\u006e\u0020\u0070\u006f\u0075\u0072\u0020\u0072\u00e9\u0069\u006e\u0069\u0074\u0069\u0061\u006c\u0069\u0073\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0070\u0061\u0073\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u002e\u0020\u0053\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0065\u007a\u0020\u0072\u00e9\u0069\u006e\u0069\u0074\u0069\u0061\u006c\u0069\u0073\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u002c\u0020\u007b\u0030\u007d\u0063\u006c\u0069\u0071\u0075\u0065\u007a\u0020\u0069\u0063\u0069\u007b\u0031\u007d\u0020\u0070\u006f\u0075\u0072\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0072\u0020\u00e0\u0020\u0063\u0065\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0073\u006f\u0069\u0074\u0020\u0072\u00e9\u0069\u006e\u0069\u0074\u0069\u0061\u006c\u0069\u0073\u00e9\u002e 
+passwdReset.newPasswd.details=\u007b\u0030\u007d\u0020\u004e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u007b\u0031\u007d\u0020\u005c\u0075\u0032\u0030\u0031\u0033\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0063\u0068\u006f\u0069\u0073\u0069\u0072\u0020\u0075\u006e\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0073\u006f\u006c\u0069\u0064\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0061\u006e\u0074\u0020\u0061\u0075\u0020\u006d\u006f\u0069\u006e\u0073\u0020\u0073\u0069\u0078\u0020\u0063\u0061\u0072\u0061\u0063\u0074\u00e8\u0072\u0065\u0073\u0020\u0065\u0074\u0020\u0061\u0075\u0020\u006d\u006f\u0069\u006e\u0073\u0020\u0075\u006e\u0065\u0020\u006c\u0065\u0074\u0074\u0072\u0065\u0020\u0065\u0074\u0020\u0075\u006e\u0020\u0063\u0068\u0069\u0066\u0066\u0072\u0065\u002e\u0020 +passwdReset.newPasswd=\u004e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065 +passwdReset.rePasswd=\u0043\u006f\u006e\u0066\u0069\u0072\u006d\u0065\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065 +passwdReset.resetBtn=\u0052\u00e9\u0069\u006e\u0069\u0074\u0069\u0061\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0074\u0020\u0064\u0065\u0020\u0070\u0061\u0073\u0073\u0065\u0020\u0020 +# dataverse.xhtml= 
+dataverse.title=\u004c\u0065\u0020\u0070\u0072\u006f\u006a\u0065\u0074\u002c\u0020\u006c\u0065\u0020\u0064\u00e9\u0070\u0061\u0072\u0074\u0065\u006d\u0065\u006e\u0074\u002c\u0020\u006c\u0027\u0075\u006e\u0069\u0076\u0065\u0072\u0073\u0069\u0074\u00e9\u0020\u006f\u0075\u0020\u006c\u0065\u0020\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0075\u0072\u0020\u0076\u0069\u0073\u00e9\u0020\u0070\u0061\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0075\u0065\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.enterName=\u0045\u006e\u0074\u0072\u0065\u0072\u0020\u006c\u0065\u0020\u006e\u006f\u006d\u005c\u0075\u0032\u0030\u0032\u0036 +dataverse.host.title=\u004c\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0071\u0075\u0069\u0020\u0063\u006f\u006e\u0074\u0069\u0065\u006e\u0074\u0020\u0063\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataverse.identifier.title=\u004e\u006f\u006d\u0020\u0061\u0062\u0072\u00e9\u0067\u00e9\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0055\u0052\u004c\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.affiliation.title=\u004c\u0027\u006f\u0072\u0067\u0061\u006e\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0061\u0071\u0075\u0065\u006c\u006c\u0065\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0065\u0073\u0074\u0020\u0061\u0066\u0066\u0069\u006c\u0069\u00e9\u002e +dataverse.category=\u0043\u0061\u0074\u00e9\u0067\u006f\u0072\u0069\u0065 
+dataverse.category.title=\u004c\u0065\u0020\u0074\u0079\u0070\u0065\u0020\u0063\u006f\u0072\u0072\u0065\u0073\u0070\u006f\u006e\u0064\u0061\u006e\u0074\u0020\u006c\u0065\u0020\u006d\u0069\u0065\u0075\u0078\u0020\u00e0\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.type.selectTab.top=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u005c\u0075\u0032\u0030\u0032\u0036 +dataverse.type.selectTab.researchers=\u0043\u0068\u0065\u0072\u0063\u0068\u0065\u0075\u0072 +dataverse.type.selectTab.researchProjects=\u0050\u0072\u006f\u006a\u0065\u0074\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065 +dataverse.type.selectTab.journals=\u0052\u0065\u0076\u0075\u0065 +dataverse.type.selectTab.organizationsAndInsitutions=\u004f\u0072\u0067\u0061\u006e\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u006f\u0075\u0020\u00e9\u0074\u0061\u0062\u006c\u0069\u0073\u0073\u0065\u006d\u0065\u006e\u0074 +dataverse.type.selectTab.teachingCourses=\u0043\u006f\u0075\u0072\u0073 +dataverse.type.selectTab.uncategorized=\u0053\u0061\u006e\u0073\u0020\u0063\u0061\u0074\u00e9\u0067\u006f\u0072\u0069\u0065 +dataverse.type.selectTab.researchGroup=\u0047\u0072\u006f\u0075\u0070\u0065\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065 +dataverse.type.selectTab.laboratory=\u004c\u0061\u0062\u006f\u0072\u0061\u0074\u006f\u0069\u0072\u0065 +dataverse.type.selectTab.department=\u0044\u00e9\u0070\u0061\u0072\u0074\u0065\u006d\u0065\u006e\u0074 +dataverse.description.title=\u0055\u006e\u0020\u0072\u00e9\u0073\u0075\u006d\u00e9\u0020\u0064\u00e9\u0063\u0072\u0069\u0076\u0061\u006e\u0074\u0020\u006c\u0027\u006f\u0062\u006a\u0065\u0074\u002c\u0020\u006c\u0061\u0020\u006e\u0061\u0074\u0075\u0072\u0065\u0020\u006f\u0075\u0020\u006c\u0061\u0020\u0070\u006f\u0072\u0074\u00e9\u0065\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e 
+dataverse.email=\u0043\u006f\u0075\u0072\u0072\u0069\u0065\u006c +dataverse.email.title=\u004c\u0027\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0064\u0075\u0020\u006f\u0075\u0020\u0064\u0065\u0073\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u0073\u002d\u0072\u0065\u0073\u0073\u006f\u0075\u0072\u0063\u0065\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.share.dataverseShare=\u0050\u0061\u0072\u0074\u0061\u0067\u0065\u0072\u0020\u006c\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.share.dataverseShare.tip=\u0050\u0061\u0072\u0074\u0061\u0067\u0065\u0072\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0073\u0075\u0072\u0020\u0076\u006f\u0073\u0020\u006d\u00e9\u0064\u0069\u0061\u0073\u0020\u0073\u006f\u0063\u0069\u0061\u0075\u0078\u0020\u0070\u0072\u00e9\u0066\u00e9\u0072\u00e9\u0073\u002e +dataverse.share.dataverseShare.shareText=\u0043\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u0072\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.subject.title=\u0053\u0075\u006a\u0065\u0074\u0028\u0073\u0029\u0020\u0074\u0072\u0061\u0069\u0074\u00e9\u0028\u0073\u0029\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.metadataElements=\u0043\u0068\u0061\u006d\u0070\u0073\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 
+dataverse.metadataElements.tip=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u00e0\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0073\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0073\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0069\u006e\u0073\u0069\u0020\u0071\u0075\u0027\u0061\u0075\u0020\u006d\u006f\u006d\u0065\u006e\u0074\u0020\u0064\u0027\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u00e0\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.metadataElements.from.tip=\u0055\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0065\u0020\u007b\u0030\u007d +dataverse.resetModifications=\u0052\u00e9\u0069\u006e\u0069\u0074\u0069\u0061\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0073 
+dataverse.resetModifications.text=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0073\u00fb\u0072\u0028\u0065\u0029\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0072\u00e9\u0069\u006e\u0069\u0074\u0069\u0061\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0073\u003f\u0020\u0053\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0072\u00e9\u0069\u006e\u0069\u0074\u0069\u0061\u006c\u0069\u0073\u0065\u007a\u002c\u0020\u0074\u006f\u0075\u0074\u0065\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0061\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0065\u0066\u0066\u0065\u0063\u0074\u0075\u00e9\u0065\u0020\u0028\u0069\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0020\u0063\u0061\u0063\u0068\u00e9\u0065\u002c\u0020\u006f\u0062\u006c\u0069\u0067\u0061\u0074\u006f\u0069\u0072\u0065\u002c\u0020\u0066\u0061\u0063\u0075\u006c\u0074\u0061\u0074\u0069\u0076\u0065\u0029\u0020\u0073\u0065\u0072\u0061\u0020\u0061\u006e\u006e\u0075\u006c\u00e9\u0065\u002e +dataverse.field.required=\u0028\u004f\u0062\u006c\u0069\u0067\u0061\u0074\u006f\u0069\u0072\u0065\u0029 +dataverse.field.example1=\u0028\u0045\u0078\u0065\u006d\u0070\u006c\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a +dataverse.field.example2=\u0029 
+dataverse.field.set.tip=\u005b\u002b\u005d\u0020\u0056\u006f\u0069\u0072\u0020\u006c\u0065\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u002b\u0020\u006c\u0065\u0073\u0020\u0064\u00e9\u0066\u0069\u006e\u0069\u0072\u0020\u0063\u006f\u006d\u006d\u0065\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u0063\u0061\u0063\u0068\u00e9\u0073\u002c\u0020\u006f\u0062\u006c\u0069\u0067\u0061\u0074\u006f\u0069\u0072\u0065\u0073\u0020\u006f\u0075\u0020\u0066\u0061\u0063\u0075\u006c\u0074\u0061\u0074\u0069\u0066\u0073 +dataverse.field.set.view=\u005b\u002b\u005d\u0020\u0056\u006f\u0069\u0072\u0020\u006c\u0065\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073 +dataverse.field.requiredByDataverse=\u0052\u0065\u0071\u0075\u0069\u0073\u0020\u0070\u0061\u0072\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.facetPickList.text=\u0050\u0061\u0072\u0063\u006f\u0075\u0072\u0069\u0072\u002f\u0052\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0070\u0061\u0072\u0020\u0066\u0061\u0063\u0065\u0074\u0074\u0065\u0073 +dataverse.facetPickList.tip=\u0043\u0068\u006f\u0069\u0073\u0069\u0072\u0020\u006c\u0065\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u00e0\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0063\u006f\u006d\u006d\u0065\u0020\u0066\u0061\u0063\u0065\u0074\u0074\u0065\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0069\u006e\u0073\u0069\u0020\u0071\u0075\u0065\u0020\u006c\u0065\u0073\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0075\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e 
+dataverse.facetPickList.facetsFromHost.text=\u0055\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0061\u0020\u0066\u006f\u006e\u0063\u0074\u0069\u006f\u006e\u0020\u0050\u0061\u0072\u0063\u006f\u0075\u0072\u0069\u0072\u002f\u0052\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0070\u0061\u0072\u0020\u0066\u0061\u0063\u0065\u0074\u0074\u0065\u0073\u0020\u0064\u0065\u0020\u007b\u0030\u007d +dataverse.facetPickList.metadataBlockList.all=\u0054\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataverse.edit=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072 +dataverse.option.generalInfo=\u0052\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0067\u00e9\u006e\u00e9\u0072\u0061\u0075\u0078 +dataverse.option.themeAndWidgets=\u0054\u0068\u00e8\u006d\u0065\u0020\u002b\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073 +dataverse.option.featuredDataverse=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u0065\u006e\u0020\u0076\u0065\u0064\u0065\u0074\u0074\u0065 +dataverse.option.permissions=\u0050\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0073 +dataverse.option.dataverseGroups=\u0047\u0072\u006f\u0075\u0070\u0065\u0073 +dataverse.option.datasetTemplates=\u004d\u006f\u0064\u00e8\u006c\u0065\u0073\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataverse.option.datasetGuestbooks=\u0052\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 
+dataverse.option.deleteDataverse=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.publish.btn=\u0050\u0075\u0062\u006c\u0069\u0065\u0072 +dataverse.publish.header=\u0050\u0075\u0062\u006c\u0069\u0065\u0072\u0020\u006c\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.nopublished=\u0041\u0075\u0063\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u00e9 +dataverse.nopublished.tip=\u0056\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0065\u007a\u0020\u0061\u0076\u006f\u0069\u0072\u0020\u0061\u0075\u0020\u006d\u006f\u0069\u006e\u0073\u0020\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0066\u006f\u006e\u0063\u0074\u0069\u006f\u006e\u002e +dataverse.contact=\u0043\u006f\u006d\u006d\u0075\u006e\u0069\u0071\u0075\u0065\u0072\u0020\u0070\u0061\u0072\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0061\u0076\u0065\u0063\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataset.link=\u004c\u0069\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataverse.link=\u004c\u0069\u0065\u0072\u0020\u006c\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.link.btn.tip=\u004c\u0069\u0065\u0072\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 
+dataverse.link.yourDataverses=\u0056\u006f\u0074\u0072\u0065\u0020\u007b\u0030\u002c\u0020\u0063\u0068\u006f\u0069\u0063\u0065\u002c\u0020\u0031\u0023\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u007c\u0032\u0023\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u007d +dataverse.link.save=\u0045\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0072\u0020\u006c\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006c\u0069\u00e9 +dataset.link.save=\u0045\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006c\u0069\u00e9 +dataverse.link.dataverse.choose=\u0044\u00e9\u0074\u0065\u0072\u006d\u0069\u006e\u0065\u007a\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0065\u0071\u0075\u0065\u006c\u0020\u0064\u0065\u0020\u0076\u006f\u0073\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u0076\u006f\u0075\u0073\u0020\u0073\u006f\u0075\u0068\u0061\u0069\u0074\u0065\u007a\u0020\u006c\u0069\u0065\u0072\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.link.dataset.choose=\u0044\u00e9\u0074\u0065\u0072\u006d\u0069\u006e\u0065\u007a\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0065\u0071\u0075\u0065\u006c\u0020\u0064\u0065\u0020\u0076\u006f\u0073\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u0076\u006f\u0075\u0073\u0020\u0073\u006f\u0075\u0068\u0061\u0069\u0074\u0065\u007a\u0020\u006c\u0069\u0065\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e 
+dataverse.link.no.choice=\u0056\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0071\u0075\u0065\u006c\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0065\u0074\u0020\u0064\u0065\u0073\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u006c\u0069\u00e9\u0073\u002e\u0020 +dataverse.link.no.linkable=\u0056\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0065\u007a\u0020\u0070\u006f\u0073\u0073\u00e9\u0064\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0070\u0072\u006f\u0070\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0070\u006f\u0075\u0076\u006f\u0069\u0072\u0020\u006c\u0069\u0065\u0072\u0020\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006f\u0075\u0020\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u0043\u006c\u0069\u0071\u0075\u0065\u0072\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u0041\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u00bb\u0020\u00e0\u0020\u006c\u0061\u0020\u0070\u0061\u0067\u0065\u0020\u0064\u0027\u0061\u0063\u0063\u0075\u0065\u0069\u006c\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u006f\u006d\u006d\u0065\u006e\u0063\u0065\u0072\u002e 
+dataverse.link.no.linkable.remaining=\u0056\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u0064\u00e9\u006a\u00e0\u0020\u006c\u0069\u00e9\u0020\u0074\u006f\u0075\u0073\u0020\u0076\u006f\u0073\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u0061\u0064\u006d\u0069\u0073\u0073\u0069\u0062\u006c\u0065\u0073\u002e +dataverse.savedsearch.link=\u0052\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0064\u0065\u0020\u006c\u0069\u0065\u006e\u0073 +dataverse.savedsearch.searchquery=\u0052\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065 +dataverse.savedsearch.filterQueries=\u0046\u0061\u0063\u0065\u0074\u0074\u0065\u0073 +dataverse.savedsearch.save=\u0045\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0072\u0020\u006c\u0061\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0064\u0065\u0020\u006c\u0069\u0065\u006e\u0073 +dataverse.savedsearch.dataverse.choose=\u0044\u00e9\u0074\u0065\u0072\u006d\u0069\u006e\u0065\u007a\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0065\u0071\u0075\u0065\u006c\u0020\u0064\u0065\u0020\u0076\u006f\u0073\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u0076\u006f\u0075\u0073\u0020\u0073\u006f\u0075\u0068\u0061\u0069\u0074\u0065\u007a\u0020\u006c\u0069\u0065\u0072\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u002e +dataverse.savedsearch.no.choice=\u0056\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0071\u0075\u0065\u006c\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0075\u006e\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0073\u0061\u0075\u0076\u0065\u0067\u0061\u0072\u0064\u00e9\u0065\u002e +# Bundle file editors, please note that "dataverse.savedsearch.save.success" is used in a 
unit test= +dataverse.saved.search.success=\u004c\u0061\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0073\u0061\u0075\u0076\u0065\u0067\u0061\u0072\u0064\u00e9\u0065\u0020\u0065\u0073\u0074\u0020\u006d\u0061\u0069\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u006c\u0069\u00e9\u0065\u0020\u00e0\u0020\u007b\u0030\u007d\u002e +dataverse.saved.search.failure=\u004c\u0061\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0073\u0061\u0075\u0076\u0065\u0067\u0061\u0072\u0064\u00e9\u0065\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0020\u00ea\u0074\u0072\u0065\u0020\u006c\u0069\u00e9\u0065\u002e +dataverse.linked.success=\u007b\u0030\u007d\u0020\u0065\u0073\u0074\u0020\u006d\u0061\u0069\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u006c\u0069\u00e9\u0020\u00e0\u0020\u007b\u0031\u007d\u002e +dataverse.linked.success.wait=\u007b\u0030\u007d\u0020\u0061\u0020\u0062\u0069\u0065\u006e\u0020\u00e9\u0074\u00e9\u0020\u006c\u0069\u00e9\u0020\u00e0\u0020\u007b\u0031\u007d\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0061\u0074\u0074\u0065\u006e\u0064\u0072\u0065\u0020\u0071\u0075\u0065\u0020\u006c\u0065\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0075\u0020\u0073\u0027\u0061\u0066\u0066\u0069\u0063\u0068\u0065\u002e +dataverse.linked.internalerror=\u007b\u0030\u007d\u0020\u0061\u0020\u0062\u0069\u0065\u006e\u0020\u00e9\u0074\u00e9\u0020\u006c\u0069\u00e9\u0020\u00e0\u0020\u007b\u0031\u007d\u0020\u006d\u0061\u0069\u0073\u0020\u006c\u0065\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0075\u0020\u006e\u0065\u0020\u0073\u0027\u0061\u0066\u0066\u0069\u0063\u0068\u0065\u0072\u0061\u0020\u0070\u0061\u0073\u0020\u0061\u0076\u0061\u006e\u0074\u0020\u0071\u0075\u0027\u0075\u006e\u0065\u0020\u0065\u0072\u0072\u0065\u0075\u0072\u0020\u0069\u006e\u0074\u0065\u0072\u006e\u0065\u0020\u006e\u0065\u0020\u0073\u006f\u0069\u0074\u0020\u0072\u00e9\u0073\u006f\u006c\u0075\u0065\u002e 
+dataverse.page.pre=\u0050\u0072\u00e9\u0063\u00e9\u0064\u0065\u006e\u0074 +dataverse.page.next=\u0053\u0075\u0069\u0076\u0061\u006e\u0074 +dataverse.byCategory=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u0070\u0061\u0072\u0020\u0063\u0061\u0074\u00e9\u0067\u006f\u0072\u0069\u0065 +dataverse.displayFeatured=\u0041\u0066\u0066\u0069\u0063\u0068\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0073\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u006f\u0075\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0061\u0020\u0070\u0061\u0067\u0065\u0020\u0064\u0027\u0061\u0063\u0063\u0075\u0065\u0069\u006c\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.selectToFeature=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u00e0\u0020\u0061\u0066\u0066\u0069\u0063\u0068\u0065\u0072\u0020\u0073\u0075\u0072\u0020\u006c\u0061\u0020\u0070\u0061\u0067\u0065\u0020\u0064\u0027\u0061\u0063\u0063\u0075\u0065\u0069\u006c\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.publish.tip=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0073\u00fb\u0072\u0028\u0065\u0029\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u003f\u0020\u0055\u006e\u0065\u0020\u0066\u006f\u0069\u0073\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002c\u0020\u0069\u006c\u0020\u0064\u006f\u0069\u0074\u0020\u006c\u0065\u0020\u0064\u0065\u006d\u0065\u0075\u0072\u0065\u0072\u002e 
+dataverse.publish.failed.tip=\u0043\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0070\u0061\u0073\u0020\u00ea\u0074\u0072\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002c\u0020\u0063\u0061\u0072\u0020\u006c\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0071\u0075\u0065\u006c\u0020\u0069\u006c\u0020\u0073\u0065\u0020\u0074\u0072\u006f\u0075\u0076\u0065\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u00e9\u0074\u00e9\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002e +dataverse.publish.failed=\u0049\u006d\u0070\u006f\u0073\u0073\u0069\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u0072\u0020\u006c\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.publish.success=\u0056\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0065\u0073\u0074\u0020\u006d\u0061\u0069\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u002e +dataverse.publish.failure=\u0043\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0020\u00ea\u0074\u0072\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002e 
+dataverse.delete.tip=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0073\u00fb\u0072\u0028\u0065\u0029\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u003f\u0020\u0056\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0061\u006e\u006e\u0075\u006c\u0065\u0072\u0020\u006c\u0061\u0020\u0073\u0075\u0070\u0070\u0072\u0065\u0073\u0073\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.delete=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.delete.success=\u0056\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u002e +dataverse.delete.failure=\u0043\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0020\u00ea\u0074\u0072\u0065\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u002e\u0020 +# Bundle file editors, please note that "dataverse.create.success" is used in a unit test because it's so fancy with two parameters= 
+dataverse.create.success=\u0056\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u0062\u0069\u0065\u006e\u0020\u0072\u00e9\u0075\u0073\u0073\u0069\u0020\u00e0\u0020\u0063\u0072\u00e9\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0021\u0020\u0050\u006f\u0075\u0072\u0020\u0065\u006e\u0020\u0073\u0061\u0076\u006f\u0069\u0072\u0020\u0064\u0061\u0076\u0061\u006e\u0074\u0061\u0067\u0065\u0020\u0073\u0075\u0072\u0020\u0063\u0065\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0066\u0061\u0069\u0072\u0065\u0020\u0061\u0076\u0065\u0063\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0065\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u004d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0067\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u003c\u002f\u0061\u003e\u002e +dataverse.create.failure=\u0043\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0020\u00ea\u0074\u0072\u0065\u0020\u0063\u0072\u00e9\u00e9\u002e\u0020 
+dataverse.create.authenticatedUsersOnly=\u0053\u0065\u0075\u006c\u0073\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0061\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0066\u0069\u00e9\u0073\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u0063\u0072\u00e9\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u002e +dataverse.update.success=\u0056\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u0062\u0069\u0065\u006e\u0020\u006d\u0069\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0021 +dataverse.update.failure=\u0043\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0020\u00ea\u0074\u0072\u0065\u0020\u006d\u0069\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u002e +# rolesAndPermissionsFragment.xhtml= +# advanced.xhtml= +advanced.search.header.dataverses=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073 +advanced.search.dataverses.name.tip=\u004c\u0065\u0020\u0070\u0072\u006f\u006a\u0065\u0074\u002c\u0020\u006c\u0065\u0020\u0064\u00e9\u0070\u0061\u0072\u0074\u0065\u006d\u0065\u006e\u0074\u002c\u0020\u006c\u0027\u0075\u006e\u0069\u0076\u0065\u0072\u0073\u0069\u0074\u00e9\u0020\u006f\u0075\u0020\u006c\u0065\u0020\u0070\u0072\u006f\u0066\u0065\u0073\u0073\u0065\u0075\u0072\u0020\u0076\u0069\u0073\u00e9\u0020\u0070\u0061\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0075\u0065\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e 
+advanced.search.dataverses.affiliation.tip=\u004c\u0027\u006f\u0072\u0067\u0061\u006e\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0061\u0071\u0075\u0065\u006c\u006c\u0065\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0065\u0073\u0074\u0020\u0061\u0066\u0066\u0069\u006c\u0069\u00e9\u002e +advanced.search.dataverses.description.tip=\u0055\u006e\u0020\u0072\u00e9\u0073\u0075\u006d\u00e9\u0020\u0064\u00e9\u0063\u0072\u0069\u0076\u0061\u006e\u0074\u0020\u006c\u0027\u006f\u0062\u006a\u0065\u0074\u002c\u0020\u006c\u0061\u0020\u006e\u0061\u0074\u0075\u0072\u0065\u0020\u006f\u0075\u0020\u006c\u0061\u0020\u0070\u006f\u0072\u0074\u00e9\u0065\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +advanced.search.dataverses.subject.tip=\u0043\u0061\u0074\u00e9\u0067\u006f\u0072\u0069\u0065\u0073\u0020\u0064\u0065\u0020\u0073\u0075\u006a\u0065\u0074\u0073\u0020\u0070\u0072\u006f\u0070\u0072\u0065\u0073\u0020\u0061\u0075\u0078\u0020\u0064\u006f\u006d\u0061\u0069\u006e\u0065\u0073\u0020\u0071\u0075\u0069\u0020\u0073\u006f\u006e\u0074\u0020\u0070\u0065\u0072\u0074\u0069\u006e\u0065\u006e\u0074\u0073\u0020\u0064\u0075\u0020\u0070\u006f\u0069\u006e\u0074\u0020\u0064\u0065\u0020\u0076\u0075\u0065\u0020\u0064\u0075\u0020\u0073\u0075\u006a\u0065\u0074\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +advanced.search.header.datasets=\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +advanced.search.header.files=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073 +advanced.search.files.name.tip=\u004c\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u006f\u006e\u006e\u00e9\u0020\u0061\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e 
+advanced.search.files.description.tip=\u0055\u006e\u0020\u0072\u00e9\u0073\u0075\u006d\u00e9\u0020\u0064\u00e9\u0063\u0072\u0069\u0076\u0061\u006e\u0074\u0020\u006c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0065\u0074\u0020\u0073\u0065\u0073\u0020\u0076\u0061\u0072\u0069\u0061\u0062\u006c\u0065\u0073\u002e +advanced.search.files.fileType=\u0054\u0079\u0070\u0065\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +advanced.search.files.fileType.tip=\u004c\u0027\u0065\u0078\u0074\u0065\u006e\u0073\u0069\u006f\u006e\u0020\u0064\u0027\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002c\u0020\u0070\u002e\u005c\u0075\u0030\u0030\u0041\u0030\u0065\u0078\u002e\u0020\u0043\u0053\u0056\u002c\u0020\u007a\u0069\u0070\u002c\u0020\u0053\u0074\u0061\u0074\u0061\u002c\u0020\u0052\u002c\u0020\u0050\u0044\u0046\u002c\u0020\u004a\u0050\u0045\u0047\u002c\u0020\u0065\u0074\u0063\u002e +advanced.search.files.variableName=\u004e\u006f\u006d\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0076\u0061\u0072\u0069\u0061\u0062\u006c\u0065 +advanced.search.files.variableName.tip=\u004c\u0065\u0020\u0074\u0069\u0074\u0072\u0065\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0063\u006f\u006c\u006f\u006e\u006e\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0076\u0061\u0072\u0069\u0061\u0062\u006c\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0061\u0020\u0074\u0061\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +advanced.search.files.variableLabel=\u004c\u0069\u0062\u0065\u006c\u006c\u00e9\u0020\u0064\u0065\u0020\u0076\u0061\u0072\u0069\u0061\u0062\u006c\u0065 +advanced.search.files.variableLabel.tip=\u0055\u006e\u0065\u0020\u0063\u006f\u0075\u0072\u0074\u0065\u0020\u0064\u0065\u0073\u0063\u0072\u0069\u0070\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0076\u0061\u0072\u0069\u0061\u0062\u006c\u0065\u002e +# search-include-fragment.xhtml= 
+dataverse.search.advancedSearch=\u0052\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0061\u0076\u0061\u006e\u0063\u00e9\u0065 +dataverse.search.input.watermark=\u0043\u0068\u0065\u0072\u0063\u0068\u0065\u0072\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u2026 +account.search.input.watermark=\u0043\u0068\u0065\u0072\u0063\u0068\u0065\u0072\u0020\u0063\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u2026 +dataverse.search.btn.find=\u0054\u0072\u006f\u0075\u0076\u0065\u0072 +dataverse.results.btn.addData=\u0041\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataverse.results.btn.addData.newDataverse=\u004e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.results.btn.addData.newDataset=\u004e\u006f\u0075\u0076\u0065\u006c\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataverse.results.dialog.addDataGuest.header=\u0041\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 
+dataverse.results.dialog.addDataGuest.msg=\u0056\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0065\u007a\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u006c\u006f\u0067\u0069\u006e\u0070\u0061\u0067\u0065\u002e\u0078\u0068\u0074\u006d\u006c\u007b\u0030\u007d\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0076\u006f\u0075\u0073\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u0072\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0022\u003e\u0076\u006f\u0075\u0073\u0020\u0061\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0066\u0069\u0065\u0072\u003c\u002f\u0061\u003e\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006f\u0075\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e 
+dataverse.results.dialog.addDataGuest.msg.signup=\u0056\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0065\u007a\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0075\u0073\u0065\u0072\u002e\u0078\u0068\u0074\u006d\u006c\u007b\u0030\u007d\u0026\u0061\u006d\u0070\u003b\u0065\u0064\u0069\u0074\u004d\u006f\u0064\u0065\u003d\u0043\u0052\u0045\u0041\u0054\u0045\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0063\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0022\u003e\u0063\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u003c\u002f\u0061\u003e\u0020\u006f\u0075\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u006c\u006f\u0067\u0069\u006e\u0070\u0061\u0067\u0065\u002e\u0078\u0068\u0074\u006d\u006c\u007b\u0030\u007d\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0076\u006f\u0075\u0073\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u0072\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0022\u003e\u0076\u006f\u0075\u0073\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u0072\u003c\u002f\u0061\u003e\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006f\u0075\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataverse.results.types.dataverses=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073 +dataverse.results.types.datasets=\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 
+dataverse.results.types.files=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073 +# Bundle file editors, please note that "dataverse.results.empty.zero" is used in a unit test +dataverse.results.empty.zero=\u0041\u0075\u0063\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006f\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u006e\u0065\u0020\u0063\u006f\u0072\u0072\u0065\u0073\u0070\u006f\u006e\u0064\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0065\u0066\u0066\u0065\u0063\u0074\u0075\u0065\u0072\u0020\u0075\u006e\u0065\u0020\u006e\u006f\u0075\u0076\u0065\u006c\u006c\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0065\u006e\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u006e\u0074\u0020\u0064\u0027\u0061\u0075\u0074\u0072\u0065\u0073\u0020\u0074\u0065\u0072\u006d\u0065\u0073\u0020\u006f\u0075\u0020\u0064\u0065\u0073\u0020\u0074\u0065\u0072\u006d\u0065\u0073\u0020\u0070\u006c\u0075\u0073\u0020\u0067\u00e9\u006e\u00e9\u0072\u0061\u0075\u0078\u002e\u0020\u0056\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u00e9\u0067\u0061\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u0072\u0020\u006c\u0065\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0066\u0069\u006e\u0064\u002d\u0075\u0073\u0065\u002d\u0064\u0061\u0074\u0061\u002e\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0046\u0069\u006e\u0064\u0069\u006e\u0067\u0020\u0026\u0061\u006d\u0070\u003b\u0020\u0055\u0073\u0069\u006e\u0067\u0020\u0044\u0061\u0074\u0061\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\
u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0067\u0075\u0069\u0064\u0065\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u003c\u002f\u0061\u003e\u0020\u0070\u006f\u0075\u0072\u0020\u0064\u0065\u0073\u0020\u0061\u0073\u0074\u0075\u0063\u0065\u0073\u002e +# Bundle file editors, please note that "dataverse.results.empty.hidden" is used in a unit test +dataverse.results.empty.hidden=\u0049\u006c\u0020\u006e\u0027\u0079\u0020\u0061\u0020\u0070\u0061\u0073\u0020\u0064\u0065\u0020\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074\u0073\u0020\u0072\u00e9\u0070\u006f\u006e\u0064\u0061\u006e\u0074\u0073\u0020\u00e0\u0020\u0076\u006f\u0073\u0020\u0063\u0072\u0069\u0074\u00e8\u0072\u0065\u0073\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u002e\u0020\u0056\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u0072\u0020\u006c\u0065\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0066\u0069\u006e\u0064\u002d\u0075\u0073\u0065\u002d\u0064\u0061\u0074\u0061\u002e\u0068\u0074\u006d\u006c\u0022\u0020\u0046\u0069\u006e\u0064\u0069\u006e\u0067\u0020\u0026\u0061\u006d\u0070\u003b\u0020\u0055\u0073\u0069\u006e\u0067\u0020\u0044\u0061\u0074\u0061\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0020\u0067\u0075\u0069\u0064\u0065\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u003c\u002f\u0061\u003e\u0020\u0070\u006f\u0075\u0072\u0020\u0064\u0065\u0073\u0020\u0063\u006f\u006e\u0073\u0065\u0069\u006c\u0073\u002e 
+dataverse.results.empty.browse.guest.zero=\u0043\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006e\u0065\u0020\u0063\u006f\u006e\u0074\u0069\u0065\u006e\u0074\u0020\u0061\u0063\u0074\u0075\u0065\u006c\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u0061\u0075\u0063\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006f\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u006c\u006f\u0067\u0069\u006e\u0070\u0061\u0067\u0065\u002e\u0078\u0068\u0074\u006d\u006c\u007b\u0030\u007d\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0076\u006f\u0075\u0073\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u0072\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0022\u003e\u0076\u006f\u0075\u0073\u0020\u0061\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0066\u0069\u0065\u0072\u003c\u002f\u0061\u003e\u0020\u0070\u006f\u0075\u0072\u0020\u0076\u006f\u0069\u0072\u0020\u0073\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0079\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0075\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0075\u002e\u0020 
+dataverse.results.empty.browse.guest.hidden=\u0043\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006e\u0065\u0020\u0063\u006f\u006e\u0074\u0069\u0065\u006e\u0074\u0020\u0061\u0075\u0063\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u006c\u006f\u0067\u0069\u006e\u0070\u0061\u0067\u0065\u002e\u0078\u0068\u0074\u006d\u006c\u007b\u0030\u007d\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0076\u006f\u0075\u0073\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u0072\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0022\u003e\u0076\u006f\u0075\u0073\u0020\u0061\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0066\u0069\u0065\u0072\u003c\u002f\u0061\u003e\u0020\u0070\u006f\u0075\u0072\u0020\u0076\u006f\u0069\u0072\u0020\u0073\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0079\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0075\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0075\u002e\u0020 
+dataverse.results.empty.browse.loggedin.noperms.zero=\u0043\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006e\u0065\u0020\u0063\u006f\u006e\u0074\u0069\u0065\u006e\u0074\u0020\u0061\u0063\u0074\u0075\u0065\u006c\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u0061\u0075\u0063\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006f\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e\u0020\u0056\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u0045\u006e\u0076\u006f\u0079\u0065\u0072\u0020\u0075\u006e\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0061\u0075\u0020\u0063\u006f\u006e\u0074\u0061\u0063\u0074\u0020\u0064\u0075\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u005c\u0075\u0030\u0030\u0041\u0030\u00bb\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u0075\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0074\u006f\u0075\u0074\u0065\u0020\u0071\u0075\u0065\u0073\u0074\u0069\u006f\u006e\u0020\u0073\u0075\u0072\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006f\u0075\u0020\u0070\u006f\u0075\u0072\u0020\u0065\u0066\u0066\u0065\u0063\u0074\u0075\u0065\u0072\u0020\u0075\u006e\u0065\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e 
+dataverse.results.empty.browse.loggedin.noperms.hidden=\u0049\u006c\u0020\u006e\u0027\u0079\u0020\u0061\u0020\u0061\u0075\u0063\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.results.empty.browse.loggedin.perms.zero=\u0043\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006e\u0065\u0020\u0063\u006f\u006e\u0074\u0069\u0065\u006e\u0074\u0020\u0061\u0063\u0074\u0075\u0065\u006c\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u0061\u0075\u0063\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006f\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e\u0020\u0056\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0065\u006e\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u00e0\u0020\u006c\u0027\u0061\u0069\u0064\u0065\u0020\u0064\u0075\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u0041\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u00bb\u0020\u0071\u0075\u0069\u0020\u0073\u0065\u0020\u0074\u0072\u006f\u0075\u0076\u0065\u0020\u0073\u0075\u0072\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0070\u0061\u0067\u0065\u002e 
+account.results.empty.browse.loggedin.perms.zero=\u0049\u006c\u0020\u006e\u0027\u0079\u0020\u0061\u0020\u0061\u0075\u0063\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006f\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0061\u0073\u0073\u006f\u0063\u0069\u00e9\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u002e\u0020\u0056\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006f\u0075\u0020\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0065\u006e\u0020\u0063\u006c\u0069\u0071\u0075\u0061\u006e\u0074\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u0041\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u00bb\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u0075\u0073\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0065\u006e\u0020\u0073\u0061\u0076\u006f\u0069\u0072\u0020\u0070\u006c\u0075\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0027\u0061\u006a\u006f\u0075\u0074\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0065\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0044
\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u004d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0067\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u003c\u002f\u0061\u003e +dataverse.results.empty.browse.loggedin.perms.hidden=\u0049\u006c\u0020\u006e\u0027\u0079\u0020\u0061\u0020\u0061\u0075\u0063\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0020\u0056\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0065\u006e\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u00e0\u0020\u006c\u0027\u0061\u0069\u0064\u0065\u0020\u0064\u0075\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u0041\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u00bb\u0020\u0071\u0075\u0069\u0020\u0073\u0065\u0020\u0074\u0072\u006f\u0075\u0076\u0065\u0020\u0073\u0075\u0072\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0070\u0061\u0067\u0065\u002e +dataverse.results.empty.link.technicalDetails=\u0050\u006c\u0075\u0073\u0020\u0064\u0065\u0020\u0064\u00e9\u0074\u0061\u0069\u006c\u0073\u0020\u0074\u0065\u0063\u0068\u006e\u0069\u0071\u0075\u0065\u0073 
+dataverse.search.facet.error=\u0055\u006e\u0065\u0020\u0065\u0072\u0072\u0065\u0075\u0072\u0020\u0073\u0027\u0065\u0073\u0074\u0020\u0070\u0072\u006f\u0064\u0075\u0069\u0074\u0065\u0020\u0061\u0076\u0065\u0063\u0020\u0076\u006f\u0073\u0020\u0070\u0061\u0072\u0061\u006d\u00e8\u0074\u0072\u0065\u0073\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002f\u007b\u0030\u007d\u0022\u003e\u0020\u0065\u0066\u0066\u0061\u0063\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u003c\u002f\u0061\u003e\u0020\u0065\u0074\u0020\u0065\u0073\u0073\u0061\u0079\u0065\u0072\u0020\u0064\u0065\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u002e +dataverse.results.count.toofresults=\u007b\u0030\u007d\u0020\u00e0\u0020\u007b\u0031\u007d\u0020\u0064\u0065\u0020\u007b\u0032\u007d\u0020\u007b\u0032\u002c\u0020\u0063\u0068\u006f\u0069\u0063\u0065\u002c\u0020\u0030\u0023\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074\u0073\u007c\u0031\u0023\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074\u007c\u0032\u0023\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074\u0073\u007d +dataverse.results.paginator.current=\u0028\u0041\u0063\u0074\u0075\u0065\u006c\u0029 +dataverse.results.btn.sort=\u0054\u0072\u0069 +dataverse.results.btn.sort.option.nameAZ=\u004e\u006f\u006d\u0020\u0028\u0041\u002d\u005a\u0029 +dataverse.results.btn.sort.option.nameZA=\u004e\u006f\u006d\u0020\u0028\u005a\u002d\u0041\u0029 +dataverse.results.btn.sort.option.newest=\u0050\u006c\u0075\u0073\u0020\u0072\u00e9\u0063\u0065\u006e\u0074 +dataverse.results.btn.sort.option.oldest=\u0050\u006c\u0075\u0073\u0020\u0061\u006e\u0063\u0069\u0065\u006e +dataverse.results.btn.sort.option.relevance=\u0050\u0065\u0072\u0074\u0069\u006e\u0065\u006e\u0063\u0065 
+dataverse.results.cards.foundInMetadata=\u0054\u0072\u006f\u0075\u0076\u00e9\u0028\u0073\u0029\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a +dataverse.results.cards.files.tabularData=\u0044\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0074\u0061\u0062\u0075\u006c\u0061\u0069\u0072\u0065\u0073 +dataverse.results.solrIsDown=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u006e\u006f\u0074\u0065\u0072\u0020\u0071\u0075\u0027\u0065\u006e\u0020\u0072\u0061\u0069\u0073\u006f\u006e\u0020\u0064\u0027\u0075\u006e\u0065\u0020\u0065\u0072\u0072\u0065\u0075\u0072\u0020\u0069\u006e\u0074\u0065\u0072\u006e\u0065\u002c\u0020\u006c\u0065\u0073\u0020\u0066\u006f\u006e\u0063\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0061\u0074\u0069\u006f\u006e\u0020\u0065\u0074\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u006e\u0065\u0020\u0073\u006f\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0073\u002e +dataverse.theme.title=\u0054\u0068\u00e8\u006d\u0065 +dataverse.theme.inheritCustomization.title=\u0043\u006f\u0063\u0068\u0065\u007a\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0063\u0061\u0073\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0065\u0020\u0074\u0068\u00e8\u006d\u0065\u0020\u0065\u0078\u0069\u0073\u0074\u0061\u006e\u0074\u002e +dataverse.theme.inheritCustomization.label=\u0050\u0065\u0072\u0073\u006f\u006e\u006e\u0061\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0068\u00e9\u0072\u0069\u0074\u00e9\u0065\u0020 
+dataverse.theme.inheritCustomization.checkbox=\u0050\u0065\u0072\u0073\u006f\u006e\u006e\u0061\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0068\u00e9\u0072\u0069\u0074\u00e9\u0065\u0020\u0064\u0065\u0020\u007b\u0030\u007d +dataverse.theme.logo=\u004c\u006f\u0067\u006f +dataverse.theme.logo.tip=\u004c\u0065\u0073\u0020\u0066\u006f\u0072\u006d\u0061\u0074\u0073\u0020\u0064\u0027\u0069\u006d\u0061\u0067\u0065\u0020\u0070\u0072\u0069\u0073\u0020\u0065\u006e\u0020\u0063\u0068\u0061\u0072\u0067\u0065\u0020\u0073\u006f\u006e\u0074\u0020\u004a\u0050\u0047\u002c\u0020\u0054\u0049\u0046\u0020\u006f\u0075\u0020\u0050\u004e\u0047\u0020\u0065\u0074\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u006e\u0065\u0020\u0064\u006f\u0069\u0076\u0065\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u0064\u00e9\u0070\u0061\u0073\u0073\u0065\u0072\u0020\u0035\u0030\u0030\u0020\u004b\u006f\u002e\u0020\u004c\u0061\u0020\u0074\u0061\u0069\u006c\u006c\u0065\u0020\u0064\u0027\u0061\u0066\u0066\u0069\u0063\u0068\u0061\u0067\u0065\u0020\u006d\u0061\u0078\u0069\u006d\u0061\u006c\u0065\u0020\u0064\u0027\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0069\u006d\u0061\u0067\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u0074\u0068\u00e8\u006d\u0065\u0020\u0064\u0027\u0075\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0065\u0073\u0074\u0020\u0064\u0065\u0020\u0039\u0034\u0030\u0020\u0070\u0069\u0078\u0065\u006c\u0073\u0020\u0064\u0065\u0020\u006c\u0061\u0072\u0067\u0065\u0020\u0070\u0061\u0072\u0020\u0031\u0032\u0030\u0020\u0070\u0069\u0078\u0065\u006c\u0073\u0020\u0064\u0065\u0020\u0068\u0061\u0075\u0074\u0065\u0075\u0072\u002e +dataverse.theme.logo.format=\u0046\u006f\u0072\u006d\u0061\u0074\u0020\u0064\u0075\u0020\u006c\u006f\u0067\u006f +dataverse.theme.logo.format.selectTab.square=\u0043\u0061\u0072\u0072\u00e9 
+dataverse.theme.logo.format.selectTab.rectangle=\u0052\u0065\u0063\u0074\u0061\u006e\u0067\u006c\u0065 +dataverse.theme.logo.alignment=\u0041\u006c\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0020\u0064\u0075\u0020\u006c\u006f\u0067\u006f +dataverse.theme.logo.alignment.selectTab.left=\u0047\u0061\u0075\u0063\u0068\u0065 +dataverse.theme.logo.alignment.selectTab.center=\u0043\u0065\u006e\u0074\u0072\u0065 +dataverse.theme.logo.alignment.selectTab.right=\u0044\u0072\u006f\u0069\u0074\u0065 +dataverse.theme.logo.backColor=\u0043\u006f\u0075\u006c\u0065\u0075\u0072\u0020\u0064\u0075\u0020\u0066\u006f\u006e\u0064\u0020\u0064\u0075\u0020\u006c\u006f\u0067\u006f +dataverse.theme.logo.image.upload=\u0054\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u0072\u0020\u006c\u0027\u0069\u006d\u0061\u0067\u0065 +dataverse.theme.tagline=\u0054\u0069\u0074\u0072\u0065\u0020\u0064\u0027\u0061\u0070\u0070\u0065\u006c +dataverse.theme.website=\u0053\u0069\u0074\u0065\u0020\u0057\u0065\u0062 +dataverse.theme.linkColor=\u0043\u006f\u0075\u006c\u0065\u0075\u0072\u0020\u0064\u0075\u0020\u006c\u0069\u0065\u006e +dataverse.theme.txtColor=\u0043\u006f\u0075\u006c\u0065\u0075\u0072\u0020\u0064\u0075\u0020\u0074\u0065\u0078\u0074\u0065 +dataverse.theme.backColor=\u0043\u006f\u0075\u006c\u0065\u0075\u0072\u0020\u0064\u0075\u0020\u0066\u006f\u006e\u0064 +dataverse.theme.success=\u0056\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u0062\u0069\u0065\u006e\u0020\u006d\u0069\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u0020\u006c\u0065\u0020\u0074\u0068\u00e8\u006d\u0065\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0021 +dataverse.theme.failure=\u004c\u0065\u0020\u0074\u0068\u00e8\u006d\u0065\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u00e9\u0074\u00e9\u0020\u006d\u0069\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u002e 
+dataverse.theme.logo.image=\u0049\u006d\u0061\u0067\u0065\u0020\u0064\u0075\u0020\u006c\u006f\u0067\u006f +dataverse.theme.logo.image.title=\u004c\u0065\u0020\u006c\u006f\u0067\u006f\u0020\u006f\u0075\u0020\u006c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0069\u006d\u0061\u0067\u0065\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0064\u00e9\u0073\u0069\u0072\u0065\u007a\u0020\u0061\u0066\u0066\u0069\u0063\u0068\u0065\u0072\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0027\u0065\u006e\u002d\u0074\u00ea\u0074\u0065\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.theme.logo.image.uploadNewFile=\u0054\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u0072\u0020\u0075\u006e\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +dataverse.theme.logo.image.invalidMsg=\u004c\u0027\u0069\u006d\u0061\u0067\u0065\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0070\u0061\u0073\u0020\u00ea\u0074\u0072\u0065\u0020\u0074\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u00e9\u0065\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0065\u0073\u0073\u0061\u0079\u0065\u0072\u0020\u00e0\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u0065\u006e\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u006e\u0074\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u004a\u0050\u0047\u002c\u0020\u0054\u0049\u0046\u0020\u006f\u0075\u0020\u0050\u004e\u0047\u002e +dataverse.theme.logo.image.uploadImgFile=\u0054\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u0072\u0020\u006c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0069\u006d\u0061\u0067\u0065 
+dataverse.theme.logo.format.title=\u004c\u0061\u0020\u0066\u006f\u0072\u006d\u0065\u0020\u0064\u0075\u0020\u006c\u006f\u0067\u006f\u0020\u006f\u0075\u0020\u006c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0069\u006d\u0061\u0067\u0065\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0074\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u007a\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.theme.logo.format.selectTab.square2=\u0043\u0061\u0072\u0072\u00e9 +dataverse.theme.logo.format.selectTab.rectangle2=\u0052\u0065\u0063\u0074\u0061\u006e\u0067\u006c\u0065 +dataverse.theme.logo.alignment.title=\u004c\u0027\u0065\u006e\u0064\u0072\u006f\u0069\u0074\u0020\u006f\u00f9\u0020\u006c\u0065\u0020\u006c\u006f\u0067\u006f\u0020\u006f\u0075\u0020\u006c\u0027\u0069\u006d\u0061\u0067\u0065\u0020\u0064\u0065\u0076\u0072\u0061\u0069\u0074\u0020\u0061\u0070\u0070\u0061\u0072\u0061\u00ee\u0074\u0072\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0027\u0065\u006e\u002d\u0074\u00ea\u0074\u0065 +dataverse.theme.logo.alignment.selectTab.left2=\u0047\u0061\u0075\u0063\u0068\u0065 +dataverse.theme.logo.alignment.selectTab.center2=\u0043\u0065\u006e\u0074\u0072\u0065 +dataverse.theme.logo.alignment.selectTab.right2=\u0044\u0072\u006f\u0069\u0074\u0065 +dataverse.theme.logo.backColor.title=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u0075\u006e\u0065\u0020\u0063\u006f\u0075\u006c\u0065\u0075\u0072\u0020\u00e0\u0020\u0061\u0066\u0066\u0069\u0063\u0068\u0065\u0072\u0020\u0064\u0065\u0072\u0072\u0069\u00e8\u0072\u0065\u0020\u006c\u0065\u0020\u006c\u006f\u0067\u006f\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.theme.headerColor=\u0043\u006f\u0075\u006c\u0065\u0075\u0072\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u002d\u0074\u00ea\u0074\u0065 
+dataverse.theme.headerColor.tip=\u0043\u006f\u0075\u006c\u0065\u0075\u0072\u0073\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u007a\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0072\u00e9\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u002d\u0074\u00ea\u0074\u0065\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.theme.backColor.title=\u0043\u006f\u0075\u006c\u0065\u0075\u0072\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u007a\u006f\u006e\u0065\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u002d\u0074\u00ea\u0074\u0065\u0020\u0071\u0075\u0069\u0020\u0063\u006f\u006e\u0074\u0069\u0065\u006e\u0074\u0020\u006c\u0027\u0069\u006d\u0061\u0067\u0065\u002c\u0020\u006c\u0065\u0020\u0074\u0069\u0074\u0072\u0065\u0020\u0064\u0027\u0061\u0070\u0070\u0065\u006c\u002c\u0020\u006c\u0027\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0055\u0052\u004c\u0020\u0065\u0074\u0020\u006c\u0065\u0020\u0074\u0065\u0078\u0074\u0065\u002e +dataverse.theme.linkColor.title=\u0043\u006f\u0075\u006c\u0065\u0075\u0072\u0020\u0064\u0075\u0020\u006c\u0069\u0065\u006e\u0020\u00e0\u0020\u0061\u0066\u0066\u0069\u0063\u0068\u0065\u0072 +dataverse.theme.txtColor.title=\u0043\u006f\u0075\u006c\u0065\u0075\u0072\u0020\u0064\u0075\u0020\u0074\u0065\u0078\u0074\u0065\u0020\u0064\u0075\u0020\u0074\u0069\u0074\u0072\u0065\u0020\u0064\u0027\u0061\u0070\u0070\u0065\u006c\u0020\u0065\u0074\u0020\u006c\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.theme.tagline.title=\u0055\u006e\u0065\u0020\u0070\u0068\u0072\u0061\u0073\u0065\u0020\u0071\u0075\u0069\u0020\u0064\u00e9\u0063\u0072\u0069\u0074\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e 
+dataverse.theme.tagline.tip=\u0046\u006f\u0075\u0072\u006e\u0069\u0072\u0020\u0075\u006e\u0020\u0074\u0069\u0074\u0072\u0065\u0020\u0064\u0027\u0061\u0070\u0070\u0065\u006c\u0020\u0064\u0065\u0020\u0031\u0034\u0030\u0020\u0063\u0061\u0072\u0061\u0063\u0074\u00e8\u0072\u0065\u0073\u0020\u006f\u0075\u0020\u006d\u006f\u0069\u006e\u0073\u002e +dataverse.theme.website.title=\u004c\u0027\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0055\u0052\u004c\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0057\u0065\u0062\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c\u0020\u006f\u0075\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u00e9\u0074\u0061\u0062\u006c\u0069\u0073\u0073\u0065\u006d\u0065\u006e\u0074\u0020\u006f\u0075\u0020\u0064\u0065\u0020\u0074\u006f\u0075\u0074\u0020\u0073\u0069\u0074\u0065\u0020\u0057\u0065\u0062\u0020\u0061\u0073\u0073\u006f\u0063\u0069\u00e9\u0020\u00e0\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e 
+dataverse.theme.website.tip=\u004c\u0065\u0020\u006c\u0069\u0065\u006e\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0057\u0065\u0062\u0020\u0073\u0065\u0020\u0074\u0072\u006f\u0075\u0076\u0065\u0072\u0061\u0020\u0064\u0065\u0072\u0072\u0069\u00e8\u0072\u0065\u0020\u006c\u0065\u0020\u0074\u0069\u0074\u0072\u0065\u0020\u0064\u0027\u0061\u0070\u0070\u0065\u006c\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0071\u0075\u0027\u0075\u006e\u0020\u0073\u0069\u0074\u0065\u0020\u0057\u0065\u0062\u0020\u0061\u0070\u0070\u0061\u0072\u0061\u0069\u0073\u0073\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0061\u0020\u006c\u0069\u0073\u0074\u0065\u002c\u0020\u0076\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0065\u007a\u0020\u00e9\u0067\u0061\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u0066\u006f\u0075\u0072\u006e\u0069\u0072\u0020\u0075\u006e\u0020\u0074\u0069\u0074\u0072\u0065\u0020\u0064\u0027\u0061\u0070\u0070\u0065\u006c\u0020 +dataverse.theme.website.watermark=\u0056\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c\u002c\u0020\u0068\u0074\u0074\u0070\u003a\u002f\u002f\u005c\u0075\u0032\u0030\u0032\u0036 +dataverse.theme.website.invalidMsg=\u0041\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0055\u0052\u004c\u0020\u006e\u006f\u006e\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u002e +dataverse.theme.disabled=The theme for the root dataverse has been administratively disabled with the :DisableRootDataverseTheme database setting. 
+dataverse.widgets.title=\u0057\u0069\u0064\u0067\u0065\u0074\u0073 +dataverse.widgets.notPublished.why.header=\u0050\u006f\u0075\u0072\u0071\u0075\u006f\u0069\u0020\u0066\u0061\u0069\u0072\u0065\u0020\u0061\u0070\u0070\u0065\u006c\u0020\u0061\u0075\u0078\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u003f +dataverse.widgets.notPublished.why.reason1=\u0041\u0075\u0067\u006d\u0065\u006e\u0074\u0065\u0020\u006c\u0061\u0020\u0076\u0069\u0073\u0069\u0062\u0069\u006c\u0069\u0074\u00e9\u0020\u0064\u0065\u0020\u0076\u006f\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0057\u0065\u0062\u0020\u0065\u006e\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u0065\u0072\u006d\u0065\u0074\u0074\u0061\u006e\u0074\u0020\u0064\u0027\u0069\u006e\u0074\u00e9\u0067\u0072\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0065\u0074\u0020\u006c\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c\u0020\u006f\u0075\u0020\u0064\u0065\u0020\u0070\u0072\u006f\u006a\u0065\u0074\u002e 
+dataverse.widgets.notPublished.why.reason2=\u0050\u0065\u0072\u006d\u0065\u0074\u0020\u0061\u0075\u0078\u0020\u0061\u0075\u0074\u0072\u0065\u0073\u0020\u0064\u0065\u0020\u0070\u0061\u0072\u0063\u006f\u0075\u0072\u0069\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0061\u0069\u006e\u0073\u0069\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0073\u0061\u006e\u0073\u0020\u0071\u0075\u0069\u0074\u0074\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c\u0020\u006f\u0075\u0020\u0064\u0065\u0020\u0070\u0072\u006f\u006a\u0065\u0074\u002e +dataverse.widgets.notPublished.how.header=\u0043\u006f\u006d\u006d\u0065\u006e\u0074\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073 +dataverse.widgets.notPublished.how.tip1=\u0050\u006f\u0075\u0072\u0020\u0070\u006f\u0075\u0076\u006f\u0069\u0072\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u002c\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0065\u0074\u0020\u0076\u006f\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u006f\u0069\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0073\u002e 
+dataverse.widgets.notPublished.how.tip2=\u0053\u0075\u0069\u0074\u0065\u0020\u00e0\u0020\u006c\u0061\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u002c\u0020\u006c\u0065\u0020\u0063\u006f\u0064\u0065\u0020\u0073\u0065\u0072\u0061\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0020\u0073\u0075\u0072\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0070\u0061\u0067\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u0075\u0069\u0073\u0073\u0069\u0065\u007a\u0020\u006c\u0065\u0020\u0063\u006f\u0070\u0069\u0065\u0072\u0020\u0065\u0074\u0020\u006c\u0027\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c\u0020\u006f\u0075\u0020\u0064\u0065\u0020\u0070\u0072\u006f\u006a\u0065\u0074\u002e +dataverse.widgets.notPublished.how.tip3=\u0041\u0076\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0075\u006e\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u004f\u0070\u0065\u006e\u0053\u0063\u0068\u006f\u006c\u0061\u0072\u003f\u0020\u0053\u0069\u0020\u006f\u0075\u0069\u002c\u0020\u0061\u0070\u0070\u0072\u0065\u006e\u0065\u007a\u002d\u0065\u006e\u0020\u0064\u0061\u0076\u0061\u006e\u0074\u0061\u0067\u0065\u0020\u0073\u0075\u0072\u0020\u006c\u0027\u0061\u006a\u006f\u0075\u0074\u0020\u0064\u0065\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u0020\u003d\u0020\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0023\u0061\u0064\u0
064\u0069\u006e\u0067\u002d\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u002d\u0074\u006f\u002d\u0061\u006e\u002d\u006f\u0070\u0065\u006e\u0073\u0063\u0068\u006f\u006c\u0061\u0072\u002d\u0077\u0065\u0062\u0073\u0069\u0074\u0065\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u0020\u003d\u0020\u0022\u0041\u0064\u0064\u0069\u006e\u0067\u0020\u0057\u0069\u0064\u0067\u0065\u0074\u0073\u0020\u0074\u006f\u0020\u0061\u006e\u0020\u004f\u0070\u0065\u006e\u0053\u0063\u0068\u006f\u006c\u0061\u0072\u0020\u0057\u0065\u0062\u0073\u0069\u0074\u0065\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u0020\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0069\u0063\u0069\u003c\u002f\u0061\u003e\u002e +dataverse.widgets.notPublished.getStarted=\u0050\u006f\u0075\u0072\u0020\u0064\u00e9\u0062\u0075\u0074\u0065\u0072\u002c\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u007a\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0065\u006e\u0020\u0073\u0061\u0076\u006f\u0069\u0072\u0020\u0064\u0061\u0076\u0061\u006e\u0074\u0061\u0067\u0065\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0061\u0020\u0073\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0023\u0074\u0068\u0065\u006d\u0065\u002d\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0054\u0068\u0065\u006d\u0065\u0020\u002b\u0020\u0057\u0069\u0064\u0067\u0065\u0074\u0073\u0
020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0074\u0068\u00e8\u006d\u0065\u0020\u0065\u0074\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u003c\u002f\u0061\u003e\u0020\u0064\u0075\u0020\u0067\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u002e +dataverse.widgets.tip=\u0043\u006f\u0070\u0069\u0065\u007a\u0020\u0065\u0074\u0020\u0063\u006f\u006c\u006c\u0065\u007a\u0020\u0063\u0065\u0020\u0063\u006f\u0064\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u0063\u006f\u0064\u0065\u0020\u0048\u0054\u004d\u004c\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0065\u006e\u0020\u0073\u0061\u0076\u006f\u0069\u0072\u0020\u0064\u0061\u0076\u0061\u006e\u0074\u0061\u0067\u0065\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0061\u0020\u0073\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0023\u0074\u0068\u0065\u006d\u0065\u002d\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0054\u0068\u0065\u006d\u0065\u0020\u002b\u0020\u0057\u0069\u0064\u0067\u0065\u0074\u0073\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u006
2\u006c\u0061\u006e\u006b\u0022\u003e\u0054\u0068\u00e8\u006d\u0065\u0020\u0065\u0074\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u003c\u002f\u0061\u003e\u0020\u0064\u0075\u0020\u0067\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u002e +dataverse.widgets.searchBox.txt=\u0042\u006f\u00ee\u0074\u0065\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.widgets.searchBox.tip=\u0050\u0065\u0072\u006d\u0065\u0074\u0020\u0061\u0075\u0078\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0057\u0065\u0062\u0020\u0064\u0027\u0065\u0066\u0066\u0065\u0063\u0074\u0075\u0065\u0072\u0020\u0075\u006e\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.widgets.dataverseListing.txt=\u004c\u0069\u0073\u0074\u0065\u0020\u0064\u0065\u0073\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073 
+dataverse.widgets.dataverseListing.tip=\u0050\u0065\u0072\u006d\u0065\u0074\u0020\u0061\u0075\u0078\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0057\u0065\u0062\u0020\u0064\u0065\u0020\u0076\u006f\u0069\u0072\u0020\u0076\u006f\u0073\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u0065\u0074\u0020\u0076\u006f\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0064\u0065\u0020\u006c\u0065\u0073\u0020\u0074\u0072\u0069\u0065\u0072\u0020\u006f\u0075\u0020\u0064\u0065\u0020\u006c\u0065\u0073\u0020\u0070\u0061\u0072\u0063\u006f\u0075\u0072\u0069\u0072\u0020\u0065\u006e\u0020\u0072\u0065\u0076\u0075\u0065\u002e +dataverse.widgets.advanced.popup.header=\u0057\u0069\u0064\u0067\u0065\u0074\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u004f\u0070\u0074\u0069\u006f\u006e\u0073\u0020\u0061\u0076\u0061\u006e\u0063\u00e9\u0065\u0073 
+dataverse.widgets.advanced.prompt=\u0045\u0078\u0070\u00e9\u0064\u0069\u0065\u0072\u0020\u0076\u0065\u0072\u0073\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c\u0020\u006c\u0027\u0055\u0052\u004c\u0020\u0070\u00e9\u0072\u0065\u006e\u006e\u0065\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0072\u00e9\u0066\u00e9\u0072\u0065\u006e\u0063\u0065\u0020\u0062\u0069\u0062\u006c\u0069\u006f\u0067\u0072\u0061\u0070\u0068\u0069\u0071\u0075\u0065\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u004c\u0061\u0020\u0070\u0061\u0067\u0065\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0072\u00e9\u0066\u00e9\u0072\u0065\u007a\u0020\u0063\u006f\u006d\u006d\u0065\u0020\u00e9\u0074\u0061\u006e\u0074\u0020\u006c\u0027\u0055\u0052\u004c\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c\u0020\u0064\u006f\u0069\u0074\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0069\u0072\u0020\u006c\u0027\u0065\u0078\u0074\u0072\u0061\u0069\u0074\u0020\u0064\u0065\u0020\u0063\u006f\u0064\u0065\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0020\u0070\u0061\u0072\u0020\u006c\u0065\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0020\u004c\u0069\u0073\u0074\u0069\u006e\u0067\u0020\u0064\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.widgets.advanced.url.label=\u0055\u0052\u004c\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c 
+dataverse.widgets.advanced.url.watermark=\u0068\u0074\u0074\u0070\u003a\u002f\u002f\u0077\u0077\u0077\u002e\u0065\u0078\u0065\u006d\u0070\u006c\u0065\u002e\u0063\u006f\u006d\u002f\u006e\u006f\u006d\u002d\u0064\u0065\u002d\u006c\u0061\u002d\u0070\u0061\u0067\u0065 +dataverse.widgets.advanced.invalid.message=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u0061\u0069\u0073\u0069\u0072\u0020\u0075\u006e\u0020\u0055\u0052\u004c\u0020\u0076\u0061\u006c\u0069\u0064\u0065 +dataverse.widgets.advanced.success.message=\u004d\u0069\u0073\u0065\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u0020\u0072\u00e9\u0075\u0073\u0073\u0069\u0065\u0020\u0064\u0065\u0020\u006c\u0027\u0055\u0052\u004c\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c +dataverse.widgets.advanced.failure.message=\u004c\u0027\u0055\u0052\u004c\u0020\u0064\u0075\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c\u0020\u0061\u0073\u0073\u006f\u0063\u0069\u00e9\u0020\u00e0\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u00e9\u0074\u00e9\u0020\u006d\u0069\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u002e +# permissions-manage.xhtml= +dataverse.permissions.title=\u0050\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0073 +dataverse.permissions.dataset.title=\u0050\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataverse.permissions.access.accessBtn=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073 
+dataverse.permissions.usersOrGroups=\u0055\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u002f\u0047\u0072\u006f\u0075\u0070\u0065\u0073 +dataverse.permissions.usersOrGroups.assignBtn=\u0041\u0073\u0073\u0069\u0067\u006e\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0072\u00f4\u006c\u0065\u0073\u0020\u0061\u0075\u0078\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u002f\u0067\u0072\u006f\u0075\u0070\u0065\u0073 +dataverse.permissions.usersOrGroups.createGroupBtn=\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0067\u0072\u006f\u0075\u0070\u0065 +dataverse.permissions.usersOrGroups.description=\u0054\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0065\u0074\u0020\u006c\u0065\u0073\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0073\u0020\u0071\u0075\u0069\u0020\u006f\u006e\u0074\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.permissions.usersOrGroups.tabHeader.userOrGroup=\u004e\u006f\u006d\u0020\u0064\u0065\u0020\u006c\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u006f\u0075\u0020\u0064\u0075\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0020\u0028\u0061\u0066\u0066\u0069\u006c\u0069\u0061\u0074\u0069\u006f\u006e\u0029 +dataverse.permissions.usersOrGroups.tabHeader.id=\u0049\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074 +dataverse.permissions.usersOrGroups.tabHeader.role=\u0052\u00f4\u006c\u0065 +dataverse.permissions.usersOrGroups.tabHeader.action=\u0041\u0063\u0074\u0069\u006f\u006e +dataverse.permissions.usersOrGroups.assignedAt=\u0052\u00f4\u006c\u0065\u0020\u0061\u0073\u0073\u0069\u0067\u006e\u00e9\u0020\u00e0\u0020\u007b\u0030\u007d 
+dataverse.permissions.usersOrGroups.removeBtn=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0065\u0020\u0072\u00f4\u006c\u0065\u0020\u0061\u0073\u0073\u0069\u0067\u006e\u00e9 +dataverse.permissions.usersOrGroups.removeBtn.confirmation=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0073\u00fb\u0072\u0028\u0065\u0029\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0061\u0074\u0074\u0072\u0069\u0062\u0075\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0073\u0020\u0072\u00f4\u006c\u0065\u0073\u003f +dataverse.permissions.roles=\u0052\u00f4\u006c\u0065\u0073 +dataverse.permissions.roles.add=\u0041\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0075\u006e\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u0072\u00f4\u006c\u0065 +dataverse.permissions.roles.description=\u0054\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0072\u00f4\u006c\u0065\u0073\u0020\u00e9\u0074\u0061\u0062\u006c\u0069\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0061\u0074\u0074\u0072\u0069\u0062\u0075\u0065\u0072\u0020\u0061\u0075\u0078\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0065\u0074\u0020\u0061\u0075\u0078\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0073\u002e +dataverse.permissions.roles.edit=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0020\u0072\u00f4\u006c\u0065 +dataverse.permissions.roles.copy=\u0043\u006f\u0070\u0069\u0065\u0072\u0020\u006c\u0065\u0020\u0072\u00f4\u006c\u0065 +# permissions-manage-files.xhtml= 
+dataverse.permissionsFiles.title=\u0050\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0073\u0020\u0064\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u00e0\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9 +dataverse.permissionsFiles.usersOrGroups=\u0055\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u002f\u0047\u0072\u006f\u0075\u0070\u0065\u0073 +dataverse.permissionsFiles.usersOrGroups.assignBtn=\u0041\u0063\u0063\u006f\u0072\u0064\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0078\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u002f\u0067\u0072\u006f\u0075\u0070\u0065\u0073 +dataverse.permissionsFiles.usersOrGroups.description=\u0054\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0065\u0074\u0020\u006c\u0065\u0073\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0073\u0020\u0071\u0075\u0069\u0020\u006f\u006e\u0074\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u00e0\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataverse.permissionsFiles.usersOrGroups.tabHeader.userOrGroup=\u004e\u006f\u006d\u0020\u0064\u0065\u0020\u006c\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u006f\u0075\u0020\u0064\u0075\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0020\u0028\u0061\u0066\u0066\u0069\u006c\u0069\u0061\u0074\u0069\u006f\u006e\u0029 +dataverse.permissionsFiles.usersOrGroups.tabHeader.id=\u0049\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e 
+dataverse.permissionsFiles.usersOrGroups.tabHeader.email=\u0043\u006f\u0075\u0072\u0072\u0069\u0065\u006c +dataverse.permissionsFiles.usersOrGroups.tabHeader.files=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073 +dataverse.permissionsFiles.usersOrGroups.tabHeader.access=\u0041\u0063\u0063\u00e8\u0073 +dataverse.permissionsFiles.usersOrGroups.file=\u0046\u0069\u0063\u0068\u0069\u0065\u0072 +dataverse.permissionsFiles.usersOrGroups.files=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073 +dataverse.permissionsFiles.usersOrGroups.invalidMsg=\u0041\u0075\u0063\u0075\u006e\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u006f\u0075\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0020\u006e\u0027\u0061\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0073\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataverse.permissionsFiles.files=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u00e0\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9 +dataverse.permissionsFiles.files.label=\u007b\u0030\u002c\u0020\u0063\u0068\u006f\u0069\u0063\u0065\u002c\u0020\u0030\u0023\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u00e0\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u007c\u0031\u0023\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u00e0\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u007c\u0032\u0023\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u00e0\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u007d 
+dataverse.permissionsFiles.files.description=\u0054\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0065\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataverse.permissionsFiles.files.tabHeader.fileName=\u004e\u006f\u006d\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +dataverse.permissionsFiles.files.tabHeader.roleAssignees=\u0055\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u002f\u0047\u0072\u006f\u0075\u0070\u0065\u0073 +dataverse.permissionsFiles.files.tabHeader.access=\u0041\u0063\u0063\u00e8\u0073 +dataverse.permissionsFiles.files.tabHeader.publishedRestrictedState=\u0050\u0075\u0062\u006c\u0069\u00e9 +dataverse.permissionsFiles.files.tabHeader.draftRestrictedState=\u0056\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0072\u006f\u0076\u0069\u0073\u006f\u0069\u0072\u0065 +dataverse.permissionsFiles.files.deleted=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u00e9 +dataverse.permissionsFiles.files.public=\u0050\u0075\u0062\u006c\u0069\u0063 +dataverse.permissionsFiles.files.restricted=\u0041\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9 +dataverse.permissionsFiles.files.roleAssignee=\u0055\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u002f\u0047\u0072\u006f\u0075\u0070\u0065 +dataverse.permissionsFiles.files.roleAssignees=\u0055\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u002f\u0047\u0072\u006f\u0075\u0070\u0065\u0073 
+dataverse.permissionsFiles.files.roleAssignees.label=\u007b\u0030\u002c\u0020\u0063\u0068\u006f\u0069\u0063\u0065\u002c\u0020\u0030\u0023\u0055\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u002f\u0047\u0072\u006f\u0075\u0070\u0065\u0073\u007c\u0031\u0023\u0055\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u002f\u0047\u0072\u006f\u0075\u0070\u0065\u007c\u0032\u0023\u0055\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u002f\u0047\u0072\u006f\u0075\u0070\u0065\u0073\u007d +dataverse.permissionsFiles.files.assignBtn=\u0041\u0063\u0063\u006f\u0072\u0064\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073 +dataverse.permissionsFiles.files.invalidMsg=\u0043\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0065\u0020\u0063\u006f\u006e\u0074\u0069\u0065\u006e\u0074\u0020\u0061\u0075\u0063\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0065\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u002e +dataverse.permissionsFiles.files.requested=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u00e9\u0073 +dataverse.permissionsFiles.files.selected=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u007b\u0030\u007d\u0020\u0064\u0065\u0020\u007b\u0031\u007d\u0020\u007b\u0032\u007d +dataverse.permissionsFiles.viewRemoveDialog.header=\u0041\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +dataverse.permissionsFiles.viewRemoveDialog.removeBtn=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073 
+dataverse.permissionsFiles.viewRemoveDialog.removeBtn.confirmation=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0073\u00fb\u0072\u0028\u0065\u0029\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0063\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u003f\u0020\u0055\u006e\u0065\u0020\u0066\u006f\u0069\u0073\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u002c\u0020\u006c\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u006f\u0075\u0020\u006c\u0065\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0020\u006e\u0065\u0020\u0073\u0065\u0072\u0061\u0020\u0070\u006c\u0075\u0073\u0020\u0065\u006e\u0020\u006d\u0065\u0073\u0075\u0072\u0065\u0020\u0064\u0065\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u0072\u0020\u0063\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e +dataverse.permissionsFiles.assignDialog.header=\u0041\u0063\u0063\u006f\u0072\u0064\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +dataverse.permissionsFiles.assignDialog.description=\u0041\u0063\u0063\u006f\u0072\u0064\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0061\u0075\u0078\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0065\u0074\u0020\u0061\u0075\u0078\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0073 +dataverse.permissionsFiles.assignDialog.userOrGroup=\u0055\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u002f\u0047\u0072\u006f\u0075\u0070\u0065\u0073 
+dataverse.permissionsFiles.assignDialog.userOrGroup.enterName=\u0049\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u006c\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0065\u0020\u006c\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u006f\u0075\u0020\u0064\u0075\u0020\u0067\u0072\u006f\u0075\u0070\u0065 +dataverse.permissionsFiles.assignDialog.userOrGroup.invalidMsg=\u0041\u0075\u0063\u0075\u006e\u0020\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074 +dataverse.permissionsFiles.assignDialog.userOrGroup.requiredMsg=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u0061\u0075\u0020\u006d\u006f\u0069\u006e\u0073\u0020\u0075\u006e\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u006f\u0075\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u002e +dataverse.permissionsFiles.assignDialog.fileName=\u004e\u006f\u006d\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +dataverse.permissionsFiles.assignDialog.grantBtn=\u0041\u0063\u0063\u006f\u0072\u0064\u0065\u0072 +dataverse.permissionsFiles.assignDialog.rejectBtn=\u0052\u0065\u006a\u0065\u0074\u0065\u0072 +# permissions-configure.xhtml= +dataverse.permissions.accessDialog.header=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073 +dataverse.permissions.description=\u0043\u006f\u006e\u0066\u0069\u0067\u0075\u0072\u0061\u0074\u0069\u006f\u006e\u0020\u0061\u0063\u0074\u0075\u0065\u006c\u006c\u0065\u0020\u0064\u0065\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e 
+dataverse.permissions.tip=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u007a\u002c\u0020\u0065\u006e\u0020\u0063\u006c\u0069\u0071\u0075\u0061\u006e\u0074\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u0020\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u00bb\u002c\u0020\u0073\u0069\u0020\u0074\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u006f\u0075\u0020\u0073\u0065\u0075\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u0063\u0065\u0072\u0074\u0061\u0069\u006e\u0073\u0020\u0073\u006f\u006e\u0074\u0020\u0065\u006e\u0020\u006d\u0065\u0073\u0075\u0072\u0065\u0020\u0064\u0027\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u00e0\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0020 +dataverse.permissions.Q1=\u0051\u0075\u0069\u0020\u0070\u0065\u0075\u0074\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u00e0\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u003f +dataverse.permissions.Q1.answer1=\u0054\u006f\u0075\u0074\u0065\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u0020\u0071\u0075\u0069\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u00e0\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0064\u006f\u0069\u0074\u0020\u0079\u0020\u0061\u0076\u006f\u0069\u0072\u0020\u0061\u0063\u0063\u00e8\u0073\u002e 
+dataverse.permissions.Q1.answer2=\u0054\u006f\u0075\u0074\u0065\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u0020\u0070\u006f\u0073\u0073\u00e9\u0064\u0061\u006e\u0074\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0073\u006f\u0075\u0073\u002d\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u002e +dataverse.permissions.Q1.answer3=\u0054\u006f\u0075\u0074\u0065\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u0020\u0070\u006f\u0073\u0073\u00e9\u0064\u0061\u006e\u0074\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataverse.permissions.Q1.answer4=\u0054\u006f\u0075\u0074\u0065\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u0020\u0070\u006f\u0073\u0073\u00e9\u0064\u0061\u006e\u0074\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0073\u006f\u0075\u0073\u002d\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u0065\u0074\u0020\u0064\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e 
+dataverse.permissions.Q2=\u0051\u0075\u0065\u006c\u0020\u0064\u0065\u0076\u0072\u0061\u0069\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u006c\u0065\u0020\u0072\u00f4\u006c\u0065\u0020\u0070\u0061\u0072\u0020\u0064\u00e9\u0066\u0061\u0075\u0074\u0020\u0064\u0027\u0075\u006e\u0065\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u0020\u0071\u0075\u0069\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0020\u0064\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u00e0\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u003f +dataverse.permissions.Q2.answer.editor.description=\u2219\u0020\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0074\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0065\u0074\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u002c\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u002c\u0020\u006c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u002c\u0020\u0073\u006f\u0075\u006d\u0065\u0074\u0074\u0072\u0065\u0020\u006c\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u006e\u0073\u0020\u0064\u0027\u0065\u0078\u0061\u006d\u0065\u006e\u002e
+dataverse.permissions.Q2.answer.manager.description=\u2219\u0020\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0074\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0065\u0074\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u002c\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u002c\u0020\u006c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u002c\u0020\u006c\u0065\u0073\u0020\u0072\u0065\u0073\u0074\u0072\u0069\u0063\u0074\u0069\u006f\u006e\u0073\u0020\u0072\u0065\u006c\u0061\u0074\u0069\u0076\u0065\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0028\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u002b\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0029
+dataverse.permissions.Q2.answer.curator.description=\u2219\u0020\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0074\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0065\u0074\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u002c\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u002c\u0020\u006c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u002c\u0020\u006c\u0065\u0073\u0020\u0072\u0065\u0073\u0074\u0072\u0069\u0063\u0074\u0069\u006f\u006e\u0073\u0020\u0072\u0065\u006c\u0061\u0074\u0069\u0076\u0065\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0028\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u002b\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0029\u002c\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0070\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0073\u002f\u0061\u0073\u0073\u0069\u0067\u006e\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0072\u00f4\u006c\u0065\u0073\u0020\u002b\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u0072 +# roles-assign.xhtml= +dataverse.permissions.usersOrGroups.assignDialog.header=\u0041\u0073\u0073\u0069\u0067\u006e\u0065\u0072\u0020\u006c\u0065\u0020\u0072\u00f4\u006c\u0065
+dataverse.permissions.usersOrGroups.assignDialog.description=\u0041\u0063\u0063\u006f\u0072\u0064\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0070\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0073\u0020\u0061\u0075\u0078\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0065\u0074\u0020\u0061\u0075\u0078\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0073\u0020\u0065\u006e\u0020\u006c\u0065\u0075\u0072\u0020\u0061\u0074\u0074\u0072\u0069\u0062\u0075\u0061\u006e\u0074\u0020\u0075\u006e\u0020\u0072\u00f4\u006c\u0065\u002e +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup=\u0055\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u002f\u0067\u0072\u006f\u0075\u0070\u0065\u0073 +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.enterName=\u0049\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u006c\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0065\u0020\u006c\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u006f\u0075\u0020\u0064\u0075\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u002e +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.invalidMsg=\u0041\u0075\u0063\u0075\u006e\u0020\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074 +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.requiredMsg=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u0061\u0075\u0020\u006d\u006f\u0069\u006e\u0073\u0020\u0075\u006e\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u006f\u0075\u0020\u0075\u006e\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u002e 
+dataverse.permissions.usersOrGroups.assignDialog.role.description=\u0056\u006f\u0069\u0063\u0069\u0020\u006c\u0065\u0073\u0020\u0070\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0073\u0020\u0061\u0073\u0073\u006f\u0063\u0069\u00e9\u0065\u0073\u0020\u0061\u0075\u0020\u0072\u00f4\u006c\u0065\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u002e +dataverse.permissions.usersOrGroups.assignDialog.role.warning=\u004c\u0027\u0061\u0074\u0074\u0072\u0069\u0062\u0075\u0074\u0069\u006f\u006e\u0020\u0064\u0075\u0020\u0072\u00f4\u006c\u0065\u0020\u007b\u0030\u007d\u0020\u0073\u0069\u0067\u006e\u0069\u0066\u0069\u0065\u0020\u0071\u0075\u0065\u0020\u006c\u0065\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0061\u0075\u0072\u006f\u006e\u0074\u0020\u00e9\u0067\u0061\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u006c\u0065\u0020\u0072\u00f4\u006c\u0065\u0020\u007b\u0030\u007d\u0020\u0071\u0075\u0069\u0020\u0073\u0027\u0061\u0070\u0070\u006c\u0069\u0071\u0075\u0065\u0020\u00e0\u0020\u0074\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u007b\u0031\u007d\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0020\u007b\u0032\u007d\u002e +dataverse.permissions.usersOrGroups.assignDialog.role.requiredMsg=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u0075\u006e\u0020\u0072\u00f4\u006c\u0065\u0020\u00e0\u0020\u0061\u0074\u0074\u0072\u0069\u0062\u0075\u0065\u0072\u002e +# roles-edit.xhtml= +dataverse.permissions.roles.header=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0020\u0072\u00f4\u006c\u0065 +dataverse.permissions.roles.name=\u004e\u006f\u006d\u0020\u0064\u0075\u0020\u0072\u00f4\u006c\u0065 
+dataverse.permissions.roles.name.title=\u0049\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u0075\u006e\u0020\u006e\u006f\u006d\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0020\u0072\u00f4\u006c\u0065\u002e +dataverse.permissions.roles.id=\u0049\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074 +dataverse.permissions.roles.id.title=\u0049\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u0075\u006e\u0020\u006e\u006f\u006d\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0061\u006c\u0069\u0061\u0073\u002e +dataverse.permissions.roles.description.title=\u0044\u00e9\u0063\u0072\u0069\u0072\u0065\u0020\u006c\u0065\u0020\u0072\u00f4\u006c\u0065\u0020\u0028\u0031\u0030\u0030\u0030\u0020\u0063\u0061\u0072\u0061\u0063\u0074\u00e8\u0072\u0065\u0073\u0020\u006d\u0061\u0078\u0069\u006d\u0075\u006d\u0029\u002e +dataverse.permissions.roles.description.counter=\u007b\u0030\u007d\u0020\u0063\u0061\u0072\u0061\u0063\u0074\u00e8\u0072\u0065\u0028\u0073\u0029\u0020\u0072\u0065\u0073\u0074\u0061\u006e\u0074\u0028\u0073\u0029 +dataverse.permissions.roles.roleList.header=\u0050\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0073\u0020\u0064\u0075\u0020\u0072\u00f4\u006c\u0065 
+dataverse.permissions.roles.roleList.authorizedUserOnly=\u004c\u0065\u0073\u0020\u0070\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0073\u0020\u0063\u006f\u006d\u0070\u006f\u0072\u0074\u0061\u006e\u0074\u0020\u006c\u0027\u0069\u0063\u00f4\u006e\u0065\u0020\u0049\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u0065\u006e\u0074\u0020\u0071\u0075\u0065\u0020\u006c\u0065\u0073\u0020\u0061\u0063\u0074\u0069\u006f\u006e\u0073\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0066\u0061\u0069\u0074\u0065\u0073\u0020\u0070\u0061\u0072\u0020\u0064\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u006e\u006f\u006e\u0020\u0061\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0066\u0069\u00e9\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +# explicitGroup-new-dialog.xhtml= +dataverse.permissions.explicitGroupEditDialog.title.new=\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0067\u0072\u006f\u0075\u0070\u0065 +dataverse.permissions.explicitGroupEditDialog.title.edit=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0020\u007b\u0030\u007d +dataverse.permissions.explicitGroupEditDialog.help=\u0041\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u006f\u0075\u0020\u0064\u0027\u0061\u0075\u0074\u0072\u0065\u0073\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0073\u0020\u00e0\u0020\u0063\u0065\u0020\u0067\u0072\u006f\u0075\u0070\u0065 +dataverse.permissions.explicitGroupEditDialog.groupIdentifier=\u0049\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0064\u0075\u0020\u0067\u0072\u006f\u0075\u0070\u0065 
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.tip=\u004e\u006f\u006d\u0020\u0061\u0062\u0072\u00e9\u0067\u00e9\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u002e +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.required=\u004c\u0027\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0064\u0075\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0020\u006e\u0065\u0020\u0064\u006f\u0069\u0074\u0020\u0070\u0061\u0073\u0020\u00ea\u0074\u0072\u0065\u0020\u0076\u0069\u0064\u0065\u002e +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.invalid=\u004c\u0027\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0064\u0075\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0073\u0065\u0075\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0069\u0072\u0020\u0064\u0065\u0073\u0020\u006c\u0065\u0074\u0074\u0072\u0065\u0073\u002c\u0020\u0064\u0065\u0073\u0020\u0063\u0068\u0069\u0066\u0066\u0072\u0065\u0073\u002c\u0020\u0064\u0065\u0073\u0020\u0074\u0072\u0061\u0069\u0074\u0073\u0020\u0064\u0065\u0020\u0073\u006f\u0075\u006c\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0020\u0028\u005f\u0029\u0020\u0065\u0074\u0020\u0064\u0065\u0073\u0020\u0074\u0069\u0072\u0065\u0074\u0073\u0020\u0028\u002d\u0029\u002e 
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.helpText=\u0043\u006f\u006d\u0070\u0072\u0065\u006e\u0064\u0020\u0064\u0065\u0073\u0020\u006c\u0065\u0074\u0074\u0072\u0065\u0073\u002c\u0020\u0064\u0065\u0073\u0020\u0063\u0068\u0069\u0066\u0066\u0072\u0065\u0073\u002c\u0020\u0064\u0065\u0073\u0020\u0074\u0072\u0061\u0069\u0074\u0073\u0020\u0064\u0065\u0020\u0073\u006f\u0075\u006c\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0020\u0028\u005f\u0029\u0020\u0065\u0074\u0020\u0064\u0065\u0073\u0020\u0074\u0069\u0072\u0065\u0074\u0073\u002e +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.taken=\u0049\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0064\u0075\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0020\u0064\u00e9\u006a\u00e0\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.permissions.explicitGroupEditDialog.groupName=\u004e\u006f\u006d\u0020\u0064\u0075\u0020\u0067\u0072\u006f\u0075\u0070\u0065 +dataverse.permissions.explicitGroupEditDialog.groupName.required=\u004c\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0075\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0070\u0061\u0073\u0020\u00ea\u0074\u0072\u0065\u0020\u0076\u0069\u0064\u0065\u002e +dataverse.permissions.explicitGroupEditDialog.groupDescription=\u0044\u0065\u0073\u0063\u0072\u0069\u0070\u0074\u0069\u006f\u006e +dataverse.permissions.explicitGroupEditDialog.roleAssigneeName=\u0055\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u002f\u0047\u0072\u006f\u0075\u0070\u0065 +dataverse.permissions.explicitGroupEditDialog.roleAssigneeNames=\u0055\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u002f\u0047\u0072\u006f\u0075\u0070\u0065\u0073 
+dataverse.permissions.explicitGroupEditDialog.createGroup=\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0067\u0072\u006f\u0075\u0070\u0065 +# manage-templates.xhtml= +dataset.manageTemplates.pageTitle=\u0047\u00e9\u0072\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0073\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.manageTemplates.select.txt=\u0049\u006e\u0074\u00e9\u0067\u0072\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0073\u0020\u0070\u0072\u006f\u0076\u0065\u006e\u0061\u006e\u0074\u0020\u0064\u0065\u0020\u007b\u0030\u007d +dataset.manageTemplates.createBtn=\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.manageTemplates.saveNewTerms=\u0053\u0061\u0075\u0076\u0065\u0067\u0061\u0072\u0064\u0065\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.manageTemplates.noTemplates.why.header=\u0050\u006f\u0075\u0072\u0071\u0075\u006f\u0069\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0073\u003f 
+dataset.manageTemplates.noTemplates.why.reason1=\u004c\u0065\u0073\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0073\u0020\u0073\u006f\u006e\u0074\u0020\u0075\u0074\u0069\u006c\u0065\u0073\u0020\u006c\u006f\u0072\u0073\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u006f\u0073\u0073\u00e9\u0064\u0065\u007a\u0020\u0070\u006c\u0075\u0073\u0069\u0065\u0075\u0072\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0073\u0071\u0075\u0065\u006c\u0073\u0020\u006c\u0065\u0073\u0020\u006d\u00ea\u006d\u0065\u0073\u0020\u0072\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0027\u0061\u0070\u0070\u006c\u0069\u0071\u0075\u0065\u006e\u0074\u0020\u0065\u0074\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0076\u006f\u0075\u006c\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0061\u0076\u006f\u0069\u0072\u0020\u00e0\u0020\u006c\u0065\u0073\u0020\u0073\u0061\u0069\u0073\u0069\u0072\u0020\u006d\u0061\u006e\u0075\u0065\u006c\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u00e0\u0020\u0063\u0068\u0061\u0071\u0075\u0065\u0020\u0066\u006f\u0069\u0073\u002e 
+dataset.manageTemplates.noTemplates.why.reason2=\u004c\u0065\u0073\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0073\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0065\u006e\u0074\u0072\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0064\u0069\u0072\u0065\u0063\u0074\u0069\u0076\u0065\u0073\u0020\u00e0\u0020\u006c\u0027\u0069\u006e\u0074\u0065\u006e\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0073\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u0073\u0020\u0071\u0075\u0069\u0020\u0074\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0073\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u0064\u00e9\u0073\u0069\u0072\u0065\u007a\u0020\u0071\u0075\u0027\u0075\u006e\u0020\u0063\u0068\u0061\u006d\u0070\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0073\u006f\u0069\u0074\u0020\u0072\u0065\u006d\u0070\u006c\u0069\u0020\u0064\u0027\u0075\u006e\u0065\u0020\u0066\u0061\u00e7\u006f\u006e\u0020\u0070\u0061\u0072\u0074\u0069\u0063\u0075\u006c\u0069\u00e8\u0072\u0065\u002e\u0020 +dataset.manageTemplates.noTemplates.how.header=\u0043\u006f\u006d\u006d\u0065\u006e\u0074\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0073 
+dataset.manageTemplates.noTemplates.how.tip1=\u004c\u0065\u0073\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0073\u0020\u0073\u006f\u006e\u0074\u0020\u0063\u0072\u00e9\u00e9\u0073\u0020\u0061\u0075\u0020\u006e\u0069\u0076\u0065\u0061\u0075\u0020\u0064\u0075\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u0073\u0020\u0028\u0073\u0069\u0020\u006f\u006e\u0020\u006e\u0065\u0020\u0076\u0065\u0075\u0074\u0020\u0070\u0061\u0073\u0020\u0071\u0075\u0027\u0069\u006c\u0073\u0020\u0070\u0061\u0072\u0061\u0069\u0073\u0073\u0065\u006e\u0074\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0073\u0020\u0066\u0075\u0074\u0075\u0072\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0029\u002c\u0020\u0073\u006f\u006e\u0074\u0020\u0061\u0063\u0074\u0069\u0076\u00e9\u0073\u0020\u0070\u0061\u0072\u0020\u0064\u00e9\u0066\u0061\u0075\u0074\u0020\u0028\u006e\u006f\u006e\u0020\u0072\u0065\u0071\u0075\u0069\u0073\u0029\u0020\u0065\u0074\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0063\u006f\u0070\u0069\u00e9\u0073\u0020\u0064\u0065\u0020\u0066\u0061\u00e7\u006f\u006e\u0020\u00e0\u0020\u0063\u0065\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u006e\u0027\u0061\u0079\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u00e0\u0020\u0072\u0065\u0063\u006f\u006d\u006d\u0065\u006e\u0063\u0065\u0072\u0020\u0064\u0075\u0020\u0064\u00e9\u0062\u0075\u0074\u0020\u006c\u006f\u0072\u0073\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0063\u0072\u00e9\u0065\u007a\u0020\u0075\u006e\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u00
20\u0073\u0069\u006d\u0069\u006c\u0061\u0069\u0072\u0065\u0073\u0020\u00e0\u0020\u0075\u006e\u0020\u0061\u0075\u0074\u0072\u0065\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u002e\u0020\u004c\u006f\u0072\u0073\u0071\u0075\u0027\u0075\u006e\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0020\u0065\u0073\u0074\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u002c\u0020\u0069\u006c\u0020\u006e\u0027\u0079\u0020\u0061\u0020\u0061\u0075\u0063\u0075\u006e\u0065\u0020\u0069\u006e\u0063\u0069\u0064\u0065\u006e\u0063\u0065\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0071\u0075\u0069\u0020\u006f\u006e\u0074\u0020\u0064\u00e9\u006a\u00e0\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0020\u006c\u0065\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u002e +dataset.manageTemplates.noTemplates.how.tip2=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u006e\u006f\u0074\u0065\u0072\u0020\u0071\u0075\u0065\u0020\u006c\u0061\u0020\u0070\u006f\u0073\u0073\u0069\u0062\u0069\u006c\u0069\u0074\u00e9\u0020\u0064\u0065\u0020\u0063\u0068\u006f\u0069\u0073\u0069\u0072\u0020\u006c\u0065\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0071\u0075\u0069\u0020\u0073\u0065\u0072\u006f\u006e\u0074\u0020\u0063\u0061\u0063\u0068\u00e9\u0073\u002c\u0020\u006f\u0062\u006c\u0069\u0067\u0061\u0074\u006f\u0069\u0072\u0065\u0073\u0020\u006f\u0075\u0020\u0066\u0061\u0063\u0075\u006c\u0074\u0061\u0074\u0069\u0066\u0073\u0020\u0065\u0073\u0074\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0020\u0073\u0075\u0072\u0020\u006c\u0061\u0020\u0070\u0061\u0067\u0065\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002f\u007b\u0030\u007d\u003f\u0065\u0064\u0069\u0074\u004d\u006f\u0064\u0065\u003d\
u0049\u004e\u0046\u004f\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0047\u0065\u006e\u0065\u0072\u0061\u006c\u0020\u0049\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0022\u003e\u0052\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0067\u00e9\u006e\u00e9\u0072\u0061\u0075\u0078\u003c\u002f\u0061\u003e\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataset.manageTemplates.noTemplates.getStarted=\u0050\u006f\u0075\u0072\u0020\u0063\u006f\u006d\u006d\u0065\u006e\u0063\u0065\u0072\u002c\u0020\u0063\u006c\u0069\u0071\u0075\u0065\u007a\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u00bb\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u0075\u0073\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0065\u006e\u0020\u0073\u0061\u0076\u006f\u0069\u0072\u0020\u0070\u006c\u0075\u0073\u0020\u0061\u0075\u0020\u0073\u0075\u006a\u0065\u0074\u0020\u0064\u0065\u0073\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0073\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0061\u0020\u0073\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u005c\u0023\u0064\u0061\u0074\u0061\u0073\u0065\u0074\u002d\u0074\u0065\u006d\u0070\u006c\u0061\u0074\u0065\u0073\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u
003d\u0022\u0044\u0061\u0074\u0061\u0073\u0065\u0074\u0020\u0054\u0065\u006d\u0070\u006c\u0061\u0074\u0065\u0073\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u004d\u006f\u0064\u00e8\u006c\u0065\u0073\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u003c\u002f\u0061\u003e\u0020\u0064\u0075\u0020\u0067\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u002e +dataset.manageTemplates.tab.header.templte=\u004e\u006f\u006d\u0020\u0064\u0075\u0020\u006d\u006f\u0064\u00e8\u006c\u0065 +dataset.manageTemplates.tab.header.date=\u0044\u0061\u0074\u0065\u0020\u0064\u0065\u0020\u0063\u0072\u00e9\u0061\u0074\u0069\u006f\u006e +dataset.manageTemplates.tab.header.usage=\u0055\u0073\u0061\u0067\u0065 +dataset.manageTemplates.tab.header.action=\u0041\u0063\u0074\u0069\u006f\u006e +dataset.manageTemplates.tab.action.btn.makeDefault=\u0044\u00e9\u0066\u0069\u006e\u0069\u0072\u0020\u0070\u0061\u0072\u0020\u0064\u00e9\u0066\u0061\u0075\u0074 +dataset.manageTemplates.tab.action.btn.default=\u0050\u0061\u0072\u0020\u0064\u00e9\u0066\u0061\u0075\u0074 +dataset.manageTemplates.tab.action.btn.view=\u0043\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u0072 +dataset.manageTemplates.tab.action.btn.copy=\u0043\u006f\u0070\u0069\u0065\u0072 +dataset.manageTemplates.tab.action.btn.edit=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072 +dataset.manageTemplates.tab.action.btn.edit.metadata=\u004d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.manageTemplates.tab.action.btn.edit.terms=\u0043\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073 +dataset.manageTemplates.tab.action.btn.delete=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072
+dataset.manageTemplates.tab.action.btn.delete.dialog.tip=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0073\u00fb\u0072\u0028\u0065\u0029\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u0063\u0065\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u003f\u0020\u0055\u006e\u0020\u006e\u006f\u0075\u0076\u0065\u006c\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0072\u0072\u0061\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0063\u0065\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u002e\u0020 +dataset.manageTemplates.tab.action.btn.delete.dialog.header=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0064\u00e8\u006c\u0065 +dataset.manageTemplates.tab.action.btn.view.dialog.header=\u0041\u0070\u0065\u0072\u00e7\u0075\u0020\u0064\u0075\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate=\u004d\u006f\u0064\u00e8\u006c\u0065\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 
+dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate.title=\u004c\u0065\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0071\u0075\u0069\u0020\u0070\u0065\u0072\u006d\u0065\u0074\u0020\u0064\u0027\u0069\u006e\u0074\u00e9\u0067\u0072\u0065\u0072\u0020\u0020\u0061\u0075\u0020\u0070\u0072\u00e9\u0061\u006c\u0061\u0062\u006c\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0061\u006c\u0065\u0075\u0072\u0073\u0020\u0073\u0074\u0061\u006e\u0064\u0061\u0072\u0064\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u0064\u0065\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataset.manageTemplates.tab.action.noedit.createdin=\u004d\u006f\u0064\u00e8\u006c\u0065\u0020\u0063\u0072\u00e9\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u007b\u0030\u007d +dataset.manageTemplates.delete.usedAsDefault=\u0043\u0065\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0020\u0065\u0073\u0074\u0020\u006c\u0065\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0020\u0070\u0061\u0072\u0020\u0064\u00e9\u0066\u0061\u0075\u0074\u0020\u0064\u0075\u0020\u006f\u0075\u0020\u0064\u0065\u0073\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u0073\u0075\u0069\u0076\u0061\u006e\u0074\u0073\u002e\u0020\u0049\u006c\u0020\u0073\u0065\u0072\u0061\u0020\u00e9\u0067\u0061\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u0020\u0063\u006f\u006d\u006d\u0065\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0020\u0070\u0061\u0072\u0020\u0064\u00e9\u0066\u0061\u0075\u0074\u002e 
+dataset.manageTemplates.info.message.notEmptyTable=\u0043\u0072\u00e9\u0065\u0072\u002c\u0020\u0063\u006c\u006f\u006e\u0065\u0072\u002c\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u002c\u0020\u0076\u006f\u0069\u0072\u0020\u006f\u0075\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0073\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006f\u00f9\u0020\u0073\u006f\u006e\u0074\u0020\u0072\u0065\u006d\u0070\u006c\u0069\u0073\u0020\u0061\u0075\u0020\u0070\u0072\u00e9\u0061\u006c\u0061\u0062\u006c\u0065\u0020\u0063\u0065\u0072\u0074\u0061\u0069\u006e\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0075\u0020\u006d\u006f\u0079\u0065\u006e\u0020\u0064\u0065\u0020\u0076\u0061\u006c\u0065\u0075\u0072\u0073\u0020\u0073\u0074\u0061\u006e\u0064\u0061\u0072\u0064\u0073\u002c\u0020\u0063\u006f\u006d\u006d\u0065\u0020\u006c\u0027\u0061\u0066\u0066\u0069\u006c\u0069\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0027\u0061\u0075\u0074\u0065\u0075\u0072\u002c\u0020\u0061\u0066\u0069\u006e\u0020\u0064\u0065\u0020\u0066\u0061\u0063\u0069\u006c\u0069\u0074\u0065\u0072\u0020\u006c\u0027\u0061\u006a\u006f\u0075\u0074\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0020\u0056\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u00e9\u0067\u0061\u00
6c\u0065\u006d\u0065\u006e\u0074\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0075\u0020\u0074\u0065\u0078\u0074\u0065\u0020\u0064\u0027\u0061\u0069\u0064\u0065\u0020\u0061\u0075\u0078\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0066\u0069\u006e\u0020\u0064\u0027\u006f\u0072\u0069\u0065\u006e\u0074\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u00e9\u006c\u00e9\u006d\u0065\u006e\u0074\u0073\u0020\u00e0\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u00e0\u0020\u0063\u0065\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u002e +# metadataFragment.xhtml= +# template.xhtml= +dataset.template.name.tip=\u004c\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0075\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataset.template.returnBtn=\u0052\u0065\u0076\u0065\u006e\u0069\u0072\u0020\u00e0\u0020\u0047\u00e9\u0072\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0073 +dataset.template.name.title=\u0049\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u0075\u006e\u0020\u006e\u006f\u006d\u0020\u0075\u006e\u0069\u0071\u0075\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 
+template.asterisk.tip=\u004c\u0065\u0073\u0020\u0061\u0073\u0074\u00e9\u0072\u0069\u0073\u0071\u0075\u0065\u0073\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u0065\u006e\u0074\u0020\u006c\u0065\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0071\u0075\u0065\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0064\u006f\u0069\u0076\u0065\u006e\u0074\u0020\u006f\u0062\u006c\u0069\u0067\u0061\u0074\u006f\u0069\u0072\u0065\u006d\u0065\u006e\u0074\u0020\u0072\u0065\u006d\u0070\u006c\u0069\u0072\u0020\u006c\u006f\u0072\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u0061\u006a\u006f\u0075\u0074\u0020\u0064\u0027\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u00e0\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataset.template.popup.create.title=\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u006d\u006f\u0064\u00e8\u006c\u0065 +dataset.template.popup.create.text=\u0044\u00e9\u0073\u0069\u0072\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0065\u0074\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0070\u0061\u0072\u0020\u0064\u00e9\u0066\u0061\u0075\u0074\u003f +dataset.create.add.terms=\u0053\u0061\u0075\u0076\u0065\u0067\u0061\u0072\u0064\u0065\u0072\u0020\u0065\u0074\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e +# manage-groups.xhtml= 
+dataverse.manageGroups.pageTitle=\u0047\u00e9\u0072\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0073\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.manageGroups.createBtn=\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0067\u0072\u006f\u0075\u0070\u0065 +dataverse.manageGroups.noGroups.why.header=\u0050\u006f\u0075\u0072\u0071\u0075\u006f\u0069\u0020\u0066\u0061\u0069\u0072\u0065\u0020\u0061\u0070\u0070\u0065\u006c\u0020\u0061\u0075\u0078\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0073\u003f +dataverse.manageGroups.noGroups.why.reason1=\u004c\u0065\u0073\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0073\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u0065\u0072\u006d\u0065\u0074\u0074\u0065\u006e\u0074\u0020\u0064\u0027\u0061\u0074\u0074\u0072\u0069\u0062\u0075\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0072\u00f4\u006c\u0065\u0073\u0020\u0065\u0074\u0020\u0070\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0073\u0020\u00e0\u0020\u0070\u006c\u0075\u0073\u0069\u0065\u0075\u0072\u0073\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u0073\u0020\u00e0\u0020\u006c\u0061\u0020\u0066\u006f\u0069\u0073\u002e\u0020 +dataverse.manageGroups.noGroups.why.reason2=\u0056\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0066\u0061\u0069\u0072\u0065\u0020\u0061\u0070\u0070\u0065\u006c\u0020\u0061\u0075\u0078\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0061\u0020\u0067\u0065\u0073\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0064\u0069\u0066\u0066\u00e9\u0072\u0065\u006e\u0074\u0073\u0020\u0074\u0079\u0070\u0065\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0028\u00e9\u0074\u0075\u0064\u0069\u0061\u006e\u0074\u0073\u002c\u0020\u0063\u006f\u006c\u006c\u0061\u0062\u006f\u0072\u0061\u0074\u0065\u0075\u0072\u0073\u002c\u0020\u0065\u0074\u0063\u002e\u0029\u002e 
+dataverse.manageGroups.noGroups.how.header=\u0043\u006f\u006d\u006d\u0065\u006e\u0074\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0073 +dataverse.manageGroups.noGroups.how.tip1=\u0055\u006e\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0063\u006f\u006d\u0070\u0072\u0065\u006e\u0064\u0072\u0065\u0020\u00e0\u0020\u006c\u0061\u0020\u0066\u006f\u0069\u0073\u0020\u0064\u0065\u0073\u0020\u0069\u006e\u0064\u0069\u0076\u0069\u0064\u0075\u0073\u0020\u0065\u0074\u0020\u0064\u0027\u0061\u0075\u0074\u0072\u0065\u0073\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0073\u002e\u0020 +dataverse.manageGroups.noGroups.how.tip2=\u0056\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0061\u0074\u0074\u0072\u0069\u0062\u0075\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0070\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0073\u0020\u00e0\u0020\u0075\u006e\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u0076\u006f\u006c\u0065\u0074\u0020\u00ab\u0020\u0050\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0073\u0020\u00bb\u002e\u0020 +dataverse.manageGroups.noGroups.getStarted=\u0050\u006f\u0075\u0072\u0020\u0064\u00e9\u0062\u0075\u0074\u0065\u0072\u002c\u0020\u0063\u006c\u0069\u0071\u0075\u0065\u007a\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u0020\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u0020\u00bb\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u0075\u0073\u002e\u0020 +dataverse.manageGroups.tab.header.name=\u004e\u006f\u006d\u0020\u0064\u0075\u0020\u0067\u0072\u006f\u0075\u0070\u0065 +dataverse.manageGroups.tab.header.id=\u0049\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0064\u0075\u0020\u0067\u0072\u006f\u0075\u0070\u0065 
+dataverse.manageGroups.tab.header.membership=\u0041\u0064\u0068\u00e9\u0073\u0069\u006f\u006e +dataverse.manageGroups.tab.header.action=\u0041\u0063\u0074\u0069\u006f\u006e +dataverse.manageGroups.tab.action.btn.view=\u0056\u006f\u0069\u0072 +dataverse.manageGroups.tab.action.btn.copy=\u0043\u006f\u0070\u0069\u0065\u0072 +dataverse.manageGroups.tab.action.btn.enable=\u0041\u0063\u0074\u0069\u0076\u0065\u0072 +dataverse.manageGroups.tab.action.btn.disable=\u0044\u00e9\u0073\u0061\u0063\u0074\u0069\u0076\u0065\u0072 +dataverse.manageGroups.tab.action.btn.edit=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072 +dataverse.manageGroups.tab.action.btn.viewCollectedData=\u0056\u006f\u0069\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0063\u006f\u006c\u006c\u0069\u0067\u00e9\u0065\u0073 +dataverse.manageGroups.tab.action.btn.delete=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072 +dataverse.manageGroups.tab.action.btn.delete.dialog.header=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u0075\u006e\u0020\u0067\u0072\u006f\u0075\u0070\u0065 +dataverse.manageGroups.tab.action.btn.delete.dialog.tip=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0073\u00fb\u0072\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u0063\u0065\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u003f\u0020\u0056\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0061\u006e\u006e\u0075\u006c\u0065\u0072\u0020\u006c\u0061\u0020\u0073\u0075\u0070\u0070\u0072\u0065\u0073\u0073\u0069\u006f\u006e\u0020\u0064\u0027\u0075\u006e\u0020\u0067\u0072\u006f\u0075\u0070\u0065\u002e +dataverse.manageGroups.tab.action.btn.view.dialog.header=\u0047\u0072\u006f\u0075\u0070\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 
+dataverse.manageGroups.tab.action.btn.view.dialog.group=\u004e\u006f\u006d\u0020\u0064\u0075\u0020\u0067\u0072\u006f\u0075\u0070\u0065 +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.name=\u004e\u006f\u006d\u0020\u0064\u0075\u0020\u006d\u0065\u006d\u0062\u0072\u0065 +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.type=\u0043\u0061\u0074\u00e9\u0067\u006f\u0072\u0069\u0065\u0020\u0064\u0065\u0020\u006d\u0065\u006d\u0062\u0072\u0065 +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.action=\u0041\u0063\u0074\u0069\u006f\u006e +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.delete=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072 +dataverse.manageGroups.tab.action.btn.view.dialog.groupMembers=\u004d\u0065\u006d\u0062\u0072\u0065\u0073\u0020\u0064\u0075\u0020\u0067\u0072\u006f\u0075\u0070\u0065 +dataverse.manageGroups.tab.action.btn.view.dialog.enterName=\u0045\u006e\u0074\u0072\u0065\u0072\u0020\u006c\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u006f\u0075\u0020\u0064\u0065\u0020\u0067\u0072\u006f\u0075\u0070\u0065 +dataverse.manageGroups.tab.action.btn.view.dialog.invalidMsg=\u0041\u0075\u0063\u0075\u006e\u0020\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074\u002e +# manage-guestbooks.xhtml= +dataset.manageGuestbooks.pageTitle=\u0047\u00e9\u0072\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0073\u0020\u0064\u0065\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 
+dataset.manageGuestbooks.include=\u0043\u006f\u006d\u0070\u0072\u0065\u006e\u0064\u0020\u006c\u0065\u0073\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0073\u0020\u0064\u0065\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0064\u0065\u0020\u007b\u0030\u007d\u002e +dataset.manageGuestbooks.createBtn=\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.manageGuestbooks.download.all.responses=\u0054\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u0072\u0020\u0074\u006f\u0075\u0074\u0065\u0073\u0020\u006c\u0065\u0073\u0020\u0065\u006e\u0074\u0072\u00e9\u0065\u0073 +dataset.manageGuestbooks.download.responses=\u0054\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0065\u006e\u0074\u0072\u00e9\u0065\u0073 +dataset.manageGuestbooks.noGuestbooks.why.header=\u0050\u006f\u0075\u0072\u0071\u0075\u006f\u0069\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0073\u0020\u0064\u0065\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u003f 
+dataset.manageGuestbooks.noGuestbooks.why.reason1=\u004c\u0065\u0073\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0073\u0020\u0064\u0065\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u0065\u0072\u006d\u0065\u0074\u0074\u0065\u006e\u0074\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0075\u0065\u0069\u006c\u006c\u0069\u0072\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0075\u0020\u0073\u0075\u006a\u0065\u0074\u0020\u0064\u0065\u0073\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u0073\u0020\u0071\u0075\u0069\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u006e\u0074\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0065\u0020\u0076\u006f\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u0056\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0072\u0065\u0063\u0075\u0065\u0069\u006c\u006c\u0069\u0072\u0020\u0064\u0065\u0073\u0020\u0072\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0069\u0073\u0073\u0075\u0073\u0020\u0064\u0075\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0028\u006e\u006f\u006d\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u002c\u0020\u0070\u0072\u00e9\u006e\u006f\u006d\u0020\u0065\u0074\u0020\u006e\u006f\u006d\u002c\u0020\u0061\u0066\u0066\u0069\u006c\u0069\u0061\u0074\u0069\u006f\u006e\u002c\u0020\u0065\u0074\u0063\u002e\u0029\u0020\u0065\u0074\u0020\u0063\u0072\u00e9\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0071\u0075\u0065\u0073\u0074\u0069\u006f\u006e\u0073\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0061\u006c\u0069\u0073\u00e9\u0065\u0073\u0020\u0028\u0070\u002e\u005c\u0075\u0030\u0030\u0041\u0030\u0065\u0078\u002e\u0020\u00c0\u0020\u0071\u0075\u006f\u0069\u0020\u0073\u0065\u0072\u007
6\u0069\u0072\u006f\u006e\u0074\u0020\u006c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u003f\u0029 +dataset.manageGuestbooks.noGuestbooks.why.reason2=\u0056\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0072\u0065\u0063\u0075\u0065\u0069\u006c\u006c\u0069\u0065\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0073\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0073\u0020\u0064\u0065\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0061\u0063\u0074\u0069\u0076\u00e9\u0073\u0020\u0061\u0066\u0069\u006e\u0020\u0064\u0065\u0020\u0070\u006f\u0075\u0076\u006f\u0069\u0072\u0020\u006c\u0065\u0073\u0020\u0065\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0072\u0020\u0065\u006e\u0020\u0064\u0065\u0068\u006f\u0072\u0073\u0020\u0064\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataset.manageGuestbooks.noGuestbooks.how.header=\u0043\u006f\u006d\u006d\u0065\u006e\u0074\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0073\u0020\u0064\u0065\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073 
+dataset.manageGuestbooks.noGuestbooks.how.tip1=\u0055\u006e\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u0070\u006c\u0075\u0073\u0069\u0065\u0075\u0072\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u006d\u0061\u0069\u0073\u0020\u0075\u006e\u0020\u0073\u0065\u0075\u006c\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataset.manageGuestbooks.noGuestbooks.how.tip2=\u004c\u0065\u0073\u0020\u0071\u0075\u0065\u0073\u0074\u0069\u006f\u006e\u0073\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0061\u006c\u0069\u0073\u00e9\u0065\u0073\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u0063\u006f\u006d\u0070\u0072\u0065\u006e\u0064\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0072\u00e9\u0070\u006f\u006e\u0073\u0065\u0073\u0020\u0065\u006e\u0020\u0074\u0065\u0078\u0074\u0065\u0020\u006c\u0069\u0062\u0072\u0065\u0020\u006f\u0075\u0020\u0064\u0065\u0073\u0020\u0071\u0075\u0065\u0073\u0074\u0069\u006f\u006e\u0073\u0020\u00e0\u0020\u0063\u0068\u006f\u0069\u0078\u0020\u0064\u0065\u0020\u0072\u00e9\u0070\u006f\u006e\u0073\u0065\u0073\u002e 
+dataset.manageGuestbooks.noGuestbooks.getStarted=\u0050\u006f\u0075\u0072\u0020\u0063\u006f\u006d\u006d\u0065\u006e\u0063\u0065\u0072\u002c\u0020\u0063\u006c\u0069\u0071\u0075\u0065\u0072\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u0075\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u00bb\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0065\u006e\u0020\u0073\u0061\u0076\u006f\u0069\u0072\u0020\u0070\u006c\u0075\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0073\u0020\u0064\u0065\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u002c\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u007a\u0020\u006c\u0061\u0020\u0073\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u005c\u0023\u0064\u0061\u0074\u0061\u0073\u0065\u0074\u002d\u0067\u0075\u0065\u0073\u0074\u0062\u006f\u006f\u006b\u0073\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0044\u0061\u0074\u0061\u0073\u0065\u0074\u0020\u0047\u0075\u0065\u0073\u0074\u0062\u006f\u006f\u006b\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b
\u0022\u003e\u0052\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u003c\u002f\u0061\u003e\u0020\u0064\u0075\u0020\u0067\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u002e +dataset.manageGuestbooks.tab.header.name=\u004e\u006f\u006d\u0020\u0064\u0075\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073 +dataset.manageGuestbooks.tab.header.date=\u0044\u0061\u0074\u0065\u0020\u0064\u0065\u0020\u0063\u0072\u00e9\u0061\u0074\u0069\u006f\u006e +dataset.manageGuestbooks.tab.header.usage=\u0055\u0073\u0061\u0067\u0065 +dataset.manageGuestbooks.tab.header.responses=\u0052\u00e9\u0070\u006f\u006e\u0073\u0065\u0073 +dataset.manageGuestbooks.tab.header.action=\u0041\u0063\u0074\u0069\u006f\u006e +dataset.manageGuestbooks.tab.action.btn.view=\u0043\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u0072\u0020\u0020 +dataset.manageGuestbooks.tab.action.btn.copy=\u0043\u006f\u0070\u0069\u0065\u0072 +dataset.manageGuestbooks.tab.action.btn.enable=\u0041\u0063\u0074\u0069\u0076\u0065\u0072 +dataset.manageGuestbooks.tab.action.btn.disable=\u0044\u00e9\u0073\u0061\u0063\u0074\u0069\u0076\u0065\u0072 +dataset.manageGuestbooks.tab.action.btn.edit=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072 +dataset.manageGuestbooks.tab.action.btn.preview=Preview +dataset.manageGuestbooks.tab.action.btn.viewCollectedData=\u0043\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0072\u0065\u0063\u0075\u0065\u0069\u006c\u006c\u0069\u0065\u0073 +dataset.manageGuestbooks.tab.action.btn.delete=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072 
+dataset.manageGuestbooks.tab.action.btn.delete.dialog.header=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073 +dataset.manageGuestbooks.tab.action.btn.delete.dialog.tip=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0073\u00fb\u0072\u0028\u0065\u0029\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u0063\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u003f\u0020\u0056\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0061\u006e\u006e\u0075\u006c\u0065\u0072\u0020\u006c\u0061\u0020\u0073\u0075\u0070\u0070\u0072\u0065\u0073\u0073\u0069\u006f\u006e\u0020\u0064\u0027\u0075\u006e\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u002e +dataset.manageGuestbooks.tab.action.btn.view.dialog.header=\u0052\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 
+dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook.title=\u0041\u0075\u0020\u006d\u006f\u006d\u0065\u006e\u0074\u0020\u0064\u0065\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u002c\u0020\u006c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0020\u006c\u0065\u0073\u0020\u0072\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0075\u0069\u0076\u0061\u006e\u0074\u0073\u002e +dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook=\u004e\u006f\u006d\u0020\u0064\u0075\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073 +dataset.manageGuestbooks.tab.action.btn.viewCollectedData.dialog.header=\u0044\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0072\u0065\u0063\u0075\u0065\u0069\u006c\u006c\u0069\u0065\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData.title=\u0044\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0072\u0065\u0063\u0075\u0065\u0069\u006c\u006c\u0069\u0065\u0073\u0020\u0070\u0061\u0072\u0020\u006c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073 
+dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData=\u0044\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0072\u0065\u0063\u0075\u0065\u0069\u006c\u006c\u0069\u0065\u0073 +dataset.manageGuestbooks.tab.action.noedit.createdin=\u0052\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0063\u0072\u00e9\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u007b\u0030\u007d +dataset.manageGuestbooks.message.deleteSuccess=\u004c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u002e +dataset.manageGuestbooks.message.deleteFailure=\u004c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0070\u0061\u0073\u0020\u00ea\u0074\u0072\u0065\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u002e +dataset.manageGuestbooks.message.editSuccess=\u004c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u006d\u0069\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u002e +dataset.manageGuestbooks.message.editFailure=\u004c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0070\u0061\u0073\u0020\u00ea\u0074\u0072\u0065\u0020\u006d\u0069\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u002e 
+dataset.manageGuestbooks.message.enableSuccess=\u004c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0061\u0063\u0074\u0069\u0076\u00e9\u002e +dataset.manageGuestbooks.message.enableFailure=\u004c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0070\u0061\u0073\u0020\u00ea\u0074\u0072\u0065\u0020\u0061\u0063\u0074\u0069\u0076\u00e9\u002e +dataset.manageGuestbooks.message.disableSuccess=\u004c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0064\u00e9\u0073\u0061\u0063\u0074\u0069\u0076\u00e9\u002e +dataset.manageGuestbooks.message.disableFailure=\u004c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0070\u0061\u0073\u0020\u00ea\u0074\u0072\u0065\u0020\u0064\u00e9\u0073\u0061\u0063\u0074\u0069\u0076\u00e9\u002e +dataset.manageGuestbooks.tip.title=Manage Dataset Guestbooks +dataset.manageGuestbooks.tip.downloadascsv=Click \"Download All Responses\" to download all collected guestbook responses for this dataverse, as a CSV file. To navigate and analyze your collected responses, we recommend importing this CSV file into Excel, Google Sheets or similar software. 
+dataset.guestbooksResponses.dataset=\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.guestbooksResponses.date=\u0044\u0061\u0074\u0065 +dataset.guestbooksResponses.type=\u0054\u0079\u0070\u0065 +dataset.guestbooksResponses.file=\u0046\u0069\u0063\u0068\u0069\u0065\u0072 +dataset.guestbooksResponses.tip.title=Guestbook Responses +dataset.guestbooksResponses.count.responses={0} {0, choice, 0#Responses|1#Response|2#Responses} +dataset.guestbooksResponses.count.toofresults={0} to {1} of {2} {2, choice, 0#Responses|1#Response|2#Responses} +dataset.guestbooksResponses.tip.downloadascsv=Click \"Download Responses\" to download all collected responses for this guestbook, as a CSV file. To navigate and analyze your collected responses, we recommend importing this CSV file into Excel, Google Sheets or similar software. +dataset.guestbooksResponses.tooManyResponses.message=Note: this guestbook has too many responses to display on this page. Only the most recent {0} responses are shown below. Click \"Download Responses\" to download all collected responses ({1} total) as a CSV file. 
+# guestbook-responses.xhtml= +dataset.guestbookResponses.pageTitle=\u0043\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0072\u00e9\u0070\u006f\u006e\u0073\u0065\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073 +# guestbook.xhtml= +dataset.manageGuestbooks.guestbook.name=\u004e\u006f\u006d\u0020\u0064\u0075\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073 +dataset.manageGuestbooks.guestbook.name.tip=\u0049\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u0075\u006e\u0020\u006e\u006f\u006d\u0020\u0075\u006e\u0069\u0071\u0075\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u002e +dataset.manageGuestbooks.guestbook.dataCollected=\u0044\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0072\u0065\u0063\u0075\u0065\u0069\u006c\u006c\u0069\u0065\u0073 
+dataset.manageGuestbooks.guestbook.dataCollected.description=\u0052\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0071\u0075\u0069\u0020\u0073\u0065\u0072\u006f\u006e\u0074\u0020\u0072\u0065\u0063\u0075\u0065\u0069\u006c\u006c\u0069\u0073\u0020\u006c\u006f\u0072\u0073\u0071\u0075\u0027\u0075\u006e\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0063\u006f\u0063\u0068\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u00e9\u006c\u00e9\u006d\u0065\u006e\u0074\u0073\u0020\u0071\u0075\u0069\u0020\u0073\u0065\u0072\u006f\u006e\u0074\u0020\u0072\u0065\u0071\u0075\u0069\u0073\u002e +dataset.manageGuestbooks.guestbook.customQuestions=\u0051\u0075\u0065\u0073\u0074\u0069\u006f\u006e\u0073\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0061\u006c\u0069\u0073\u00e9\u0065\u0073 +dataset.manageGuestbooks.guestbook.accountInformation=Account Information +dataset.manageGuestbooks.guestbook.required=(Required) +dataset.manageGuestbooks.guestbook.optional=(Optional) 
+dataset.manageGuestbooks.guestbook.customQuestions.description=\u0043\u0072\u00e9\u0065\u007a\u0020\u0076\u006f\u0073\u0020\u0070\u0072\u006f\u0070\u0072\u0065\u0073\u0020\u0071\u0075\u0065\u0073\u0074\u0069\u006f\u006e\u0073\u0020\u0061\u0066\u0069\u006e\u0020\u0071\u0075\u0065\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0066\u006f\u0075\u0072\u006e\u0069\u0073\u0073\u0065\u006e\u0074\u0020\u0070\u006c\u0075\u0073\u0020\u0064\u0027\u0069\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0020\u0071\u0075\u0065\u0020\u0063\u0065\u0020\u0071\u0075\u0069\u0020\u0073\u0065\u0020\u0074\u0072\u006f\u0075\u0076\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0075\u0072\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u006c\u006f\u0072\u0073\u0071\u0075\u0027\u0069\u006c\u0073\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u006e\u0074\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e\u0020\u004c\u0065\u0073\u0020\u0071\u0075\u0065\u0073\u0074\u0069\u006f\u006e\u0073\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u006f\u0062\u006c\u0069\u0067\u0061\u0074\u006f\u0069\u0072\u0065\u0073\u0020\u006f\u0075\u0020\u0066\u0061\u0063\u0075\u006c\u0074\u0061\u0074\u0069\u0076\u0065\u0073\u0020\u0065\u0074\u0020\u006c\u0065\u0073\u0020\u0072\u00e9\u0070\u006f\u006e\u0073\u0065\u0073\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0065\u006e\u0020\u0066\u006f\u0072\u006d\u0061\u0074\u0020\u0074\u0065\u0078\u0074\u0065\u0020\u006f\u0075\u0020\u00e0\u0020\u0063\u0068\u006f\u0069\u0078\u0020\u006d\u0075\u006c\u0074\u0069\u0070\u006c\u0065\u0073\u002e\u0020 +dataset.manageGuestbooks.guestbook.customQuestions.questionType=\u0054\u0079\u0070\u0065\u0020\u0064\u0065\u0020\u0071\u0075\u0065\u0073\u0074\u0069\u006f\u006e 
+dataset.manageGuestbooks.guestbook.customQuestions.questionText=\u0054\u0065\u0078\u0074\u0065\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0071\u0075\u0065\u0073\u0074\u0069\u006f\u006e +dataset.manageGuestbooks.guestbook.customQuestions.responseOptions=\u0043\u0068\u006f\u0069\u0078\u0020\u0064\u0065\u0020\u0072\u00e9\u0070\u006f\u006e\u0073\u0065 +dataset.manageGuestbooks.guestbook.customQuestions.questionType.text=\u0054\u0065\u0078\u0074\u0065 +dataset.manageGuestbooks.guestbook.customQuestions.questionType.multiple=\u0043\u0068\u006f\u0069\u0078\u0020\u006d\u0075\u006c\u0074\u0069\u0070\u006c\u0065\u0073 +# guestbookResponseFragment.xhtml= +dataset.guestbookResponse.guestbook.additionalQuestions=\u0041\u0075\u0074\u0072\u0065\u0073\u0020\u0071\u0075\u0065\u0073\u0074\u0069\u006f\u006e\u0073 +dataset.guestbookResponse.guestbook.responseTooLong=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u006c\u0069\u006d\u0069\u0074\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0072\u00e9\u0070\u006f\u006e\u0073\u0065\u0020\u00e0\u0020\u0032\u0035\u0035\u0020\u0063\u0061\u0072\u0061\u0063\u0074\u00e8\u0072\u0065\u0073 +# dataset.xhtml= +dataset.configureBtn=Configure +dataset.pageTitle=\u0041\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0075\u006e\u0020\u006e\u006f\u0075\u0076\u0065\u006c\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.editBtn=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072 +dataset.editBtn.itemLabel.upload=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0028\u0074\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u0072\u0029 +dataset.editBtn.itemLabel.metadata=\u004d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.editBtn.itemLabel.terms=\u0043\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e 
+dataset.editBtn.itemLabel.permissions=\u0050\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0073 +dataset.editBtn.itemLabel.thumbnailsAndWidgets=\u0056\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0073\u0020\u002b\u0020\u0057\u0069\u0064\u0067\u0065\u0074\u0073 +dataset.editBtn.itemLabel.privateUrl=\u0055\u0052\u004c\u0020\u0070\u0072\u0069\u0076\u00e9 +dataset.editBtn.itemLabel.permissionsDataset=\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.editBtn.itemLabel.permissionsFile=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u00e0\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9 +dataset.editBtn.itemLabel.deleteDataset=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.editBtn.itemLabel.deleteDraft=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0061\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0072\u006f\u0076\u0069\u0073\u006f\u0069\u0072\u0065 +dataset.editBtn.itemLabel.deaccession=\u0052\u0065\u0074\u0069\u0072\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0064\u0069\u0066\u0066\u0075\u0073\u0069\u006f\u006e +dataset.exportBtn=\u0045\u0078\u0070\u006f\u0072\u0074\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.exportBtn.itemLabel.ddi=\u0044\u0044\u0049 +dataset.exportBtn.itemLabel.dublinCore=\u0044\u0075\u0062\u006c\u0069\u006e\u0020\u0043\u006f\u0072\u0065 +dataset.exportBtn.itemLabel.schemaDotOrg=Schema.org JSON-LD +dataset.exportBtn.itemLabel.json=\u004a\u0053\u004f\u004e 
+metrics.title=\u0053\u0074\u0061\u0074\u0069\u0073\u0074\u0069\u0071\u0075\u0065\u0073 +metrics.title.tip=\u0041\u0066\u0066\u0069\u0063\u0068\u0065\u0072\u0020\u0070\u006c\u0075\u0073\u0020\u0064\u0027\u0069\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0073\u0074\u0061\u0074\u0069\u0073\u0074\u0069\u0071\u0075\u0065\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e +metrics.comingsoon=\u0042\u0069\u0065\u006e\u0074\u00f4\u0074\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u005c\u0075\u0032\u0030\u0032\u0036 +metrics.views=\u0050\u0061\u0067\u0065\u0073\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u00e9\u0065\u0073 +metrics.downloads=\u007b\u0030\u002c\u0020\u0063\u0068\u006f\u0069\u0063\u0065\u002c\u0020\u0030\u0023\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u006d\u0065\u006e\u0074\u0073\u007c\u0031\u0023\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u006d\u0065\u006e\u0074\u007c\u0032\u0023\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u006d\u0065\u006e\u0074\u0073\u007d +metrics.citations=\u0043\u0069\u0074\u0061\u0074\u0069\u006f\u006e\u0073 +metrics.shares=\u0050\u0061\u0072\u0074\u0061\u0067\u0065\u0073 +dataset.publish.btn=\u0050\u0075\u0062\u006c\u0069\u0065\u0072 +dataset.publish.header=\u0050\u0075\u0062\u006c\u0069\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.rejectBtn=\u0052\u0065\u0074\u006f\u0075\u0072\u006e\u0065\u0072\u0020\u00e0\u0020\u006c\u0027\u0061\u0075\u0074\u0065\u0075\u0072 +dataset.submitBtn=\u0053\u006f\u0075\u006d\u0065\u0074\u0074\u0072\u0065\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u006e\u0073\u0020\u0064\u0027\u0065\u0078\u0061\u006d\u0065\u006e 
+dataset.disabledSubmittedBtn=\u0053\u006f\u0075\u006d\u0069\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u006e\u0073\u0020\u0064\u0027\u0065\u0078\u0061\u006d\u0065\u006e +dataset.submitMessage=\u0053\u006f\u0075\u006d\u0065\u0074\u0074\u0072\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0065\u0078\u0061\u006d\u0065\u006e\u0020\u0070\u0061\u0072\u0020\u006c\u0027\u0069\u006e\u0074\u0065\u006e\u0064\u0061\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0065\u006e\u0020\u0076\u0075\u0065\u0020\u0064\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u0072\u002e +dataset.submit.success=Your dataset has been submitted for review. +dataset.inreview.infoMessage=\u2013 This dataset is currently under review prior to publication. +dataset.submit.failure=Dataset Submission Failed - {0} +dataset.submit.failure.null=Can't submit for review. Dataset is null. +dataset.submit.failure.isReleased=Latest version of dataset is already released. Only draft versions can be submitted for review. +dataset.submit.failure.inReview=You cannot submit this dataset for review because it is already in review. +dataset.rejectMessage=\u0052\u0065\u0074\u006f\u0075\u0072\u006e\u0065\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0075\u0020\u0063\u006f\u006c\u006c\u0061\u0062\u006f\u0072\u0061\u0074\u0065\u0075\u0072\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u006e\u0073\u0020\u0064\u0065\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u002e +dataset.rejectWatermark=Please enter a reason for returning this dataset to its author(s). 
+dataset.reject.enterReason=Reason for return to author is required +dataset.reject.enterReason.header=Required entry +dataset.reject.success=This dataset has been sent back to the contributor. +dataset.reject.failure=Dataset Submission Return Failed - {0} +dataset.reject.datasetNull=Cannot return the dataset to the author(s) because it is null. +dataset.reject.datasetNotInReview=This dataset cannot be returned to the author(s) because the latest version is not In Review. The author(s) needs to click Submit for Review first. +dataset.publish.tip=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0073\u00fb\u0072\u0028\u0065\u0029\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u003f\u0020\u0055\u006e\u0065\u0020\u0066\u006f\u0069\u0073\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002c\u0020\u0069\u006c\u0020\u0064\u006f\u0069\u0074\u0020\u0064\u0065\u006d\u0065\u0075\u0072\u0065\u0072\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002e +dataset.publishBoth.tip=\u0055\u006e\u0065\u0020\u0066\u006f\u0069\u0073\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u007a\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0069\u006c\u0020\u0064\u006f\u0069\u0074\u0020\u0064\u0065\u006d\u0065\u0075\u0072\u0065\u0072\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002e 
+dataset.unregistered.tip=\u0043\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0070\u0061\u0073\u0020\u0065\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u00e9\u002e\u0020\u004e\u006f\u0075\u0073\u0020\u0074\u0065\u006e\u0074\u0065\u0072\u006f\u006e\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0072\u0020\u0061\u0076\u0061\u006e\u0074\u0020\u0064\u0065\u0020\u006c\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u0072\u002e +dataset.republish.tip=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0073\u00fb\u0072\u0028\u0065\u0029\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u0072\u0020\u00e0\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u003f\u0020 +dataset.selectVersionNumber=\u0049\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u0073\u0027\u0069\u006c\u0020\u0073\u0027\u0061\u0067\u0069\u0074\u0020\u0064\u0027\u0075\u006e\u0065\u0020\u006d\u0069\u0073\u0065\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u0020\u006d\u0069\u006e\u0065\u0075\u0072\u0065\u0020\u006f\u0075\u0020\u006d\u0061\u006a\u0065\u0075\u0072\u0065\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u002e +dataset.majorRelease=\u0056\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u006d\u0061\u006a\u0065\u0075\u0072\u0065 +dataset.minorRelease=\u0056\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u006d\u0069\u006e\u0065\u0075\u0072\u0065 
+dataset.majorRelease.tip=\u0045\u006e\u0020\u0072\u0061\u0069\u0073\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u006e\u0061\u0074\u0075\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0073\u0020\u0061\u0070\u0070\u006f\u0072\u0074\u00e9\u0065\u0073\u0020\u00e0\u0020\u006c\u0061\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0072\u006f\u0076\u0069\u0073\u006f\u0069\u0072\u0065\u0020\u0061\u0063\u0074\u0075\u0065\u006c\u006c\u0065\u002c\u0020\u0069\u006c\u0020\u0073\u0027\u0061\u0067\u0069\u0072\u0061\u0020\u0064\u0027\u0075\u006e\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u006d\u0061\u006a\u0065\u0075\u0072\u0065\u0028\u007b\u0030\u007d\u0029\u002e +dataset.mayNotBePublished=\u0049\u006d\u0070\u006f\u0073\u0073\u0069\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.mayNotPublish.administrator=\u0043\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0020\u0074\u0061\u006e\u0074\u0020\u0071\u0075\u0065\u0020\u007b\u0030\u007d\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0020\u0070\u0061\u0072\u0020\u0073\u006f\u006e\u0020\u0061\u0064\u006d\u0069\u006e\u0069\u0073\u0074\u0072\u0061\u0074\u0065\u0075\u0072\u002e 
+dataset.mayNotPublish.both=\u0043\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0020\u0074\u0061\u006e\u0074\u0020\u0071\u0075\u0065\u0020\u007b\u0030\u007d\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002e\u0020\u0056\u006f\u0075\u006c\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u0065\u0075\u0078\u0020\u0069\u006d\u006d\u00e9\u0064\u0069\u0061\u0074\u0065\u006d\u0065\u006e\u0074\u003f +dataset.mayNotPublish.twoGenerations=\u0043\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0020\u0074\u0061\u006e\u0074\u0020\u0071\u0075\u0065\u0020\u007b\u0030\u007d\u0020\u0065\u0074\u0020\u007b\u0031\u007d\u0020\u006e\u0065\u0020\u0073\u006f\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0073\u002e +dataset.mayNotBePublished.both.button=\u004f\u0075\u0069\u002c\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u0065\u0075\u0078\u002e +dataset.viewVersion.unpublished=\u0056\u006f\u0069\u0072\u0020\u006c\u0061\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u006e\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0065 +dataset.viewVersion.published=\u0056\u006f\u0069\u0072\u0020\u006c\u0061\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0065 
+dataset.email.datasetContactBtn=\u0045\u006e\u0076\u006f\u0079\u0065\u0072\u0020\u0075\u006e\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u00e0\u0020\u006c\u0061\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u002d\u0072\u0065\u0073\u0073\u006f\u0075\u0072\u0063\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.email.hiddenMessage= +dataset.email.messageSubject=\u004f\u0062\u006a\u0065\u0074\u0020\u0064\u0075\u0020\u006d\u0065\u0073\u0073\u0061\u0067\u0065 +dataset.email.datasetLinkBtn.tip=\u004c\u0069\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataset.share.datasetShare=\u0050\u0061\u0072\u0074\u0061\u0067\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.share.datasetShare.tip=\u0050\u0061\u0072\u0074\u0061\u0067\u0065\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0073\u0075\u0072\u0020\u0076\u006f\u0073\u0020\u006d\u00e9\u0064\u0069\u0061\u0073\u0020\u0073\u006f\u0063\u0069\u0061\u0075\u0078\u0020\u0070\u0072\u00e9\u0066\u00e9\u0072\u00e9\u0073\u002e +dataset.share.datasetShare.shareText=\u0043\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.locked.message=Dataset Locked +dataset.locked.inReview.message=Submitted for Review 
+dataset.publish.error=\u004c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002c\u0020\u0063\u0061\u0072\u0020\u006c\u0065\u0020\u0073\u0065\u0072\u0076\u0069\u0063\u0065\u0020\u0064\u0065\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u005c\u007b\u0031\u007d\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u005c\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u005c\u0022\u002f\u003e\u0020\u007b\u0030\u007d\u0020\u003c\u002f\u0061\u003e\u0020\u0065\u0073\u0074\u0020\u0061\u0063\u0074\u0075\u0065\u006c\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u0069\u006e\u0061\u0063\u0063\u0065\u0073\u0073\u0069\u0062\u006c\u0065\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0065\u0073\u0073\u0061\u0079\u0065\u0072\u0020\u00e0\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u002e\u0020\u004c\u0065\u0020\u0070\u0072\u006f\u0062\u006c\u00e8\u006d\u0065\u0020\u0070\u0065\u0072\u0073\u0069\u0073\u0074\u0065\u002d\u0069\u006c\u003f\u0020 +dataset.publish.error.doi=\u004c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0072\u0065\u0074\u0069\u0072\u00e9\u002c\u0020\u0063\u0061\u0072\u0020\u006c\u0061\u0020\u006d\u0069\u0073\u0065\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u0020\u0044\u004f\u0049\u0020\u0061\u0020\u00e9\u0063\u0068\u006f\u0075\u00e9\u002e\u0020 
+dataset.delete.error=\u004c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002c\u0020\u0063\u0061\u0072\u0020\u006c\u0061\u0020\u006d\u0069\u0073\u0065\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u0020\u007b\u0030\u007d\u0020\u0061\u0020\u00e9\u0063\u0068\u006f\u0075\u00e9\u002e +dataset.publish.worldMap.deleteConfirm=\u0050\u0072\u0065\u006e\u0065\u007a\u0020\u006e\u006f\u0074\u0065\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0065\u0074\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u0061\u0072\u0074\u0065\u0020\u0073\u0075\u0072\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u0020\u0073\u0065\u0072\u006f\u006e\u0074\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u0065\u0073\u0020\u0065\u006e\u0020\u0072\u0061\u0069\u0073\u006f\u006e\u0020\u0064\u0065\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0065\u0073\u0020\u0072\u0065\u0073\u0074\u0072\u0069\u0063\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u007a\u002e\u0020\u0056\u006f\u0075\u006c\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0063\u006f\u006e\u0074\u0069\u006e\u0075\u0065\u0072\u003f +dataset.publish.workflow.inprogress=Publish workflow in progress 
+dataset.versionUI.draft=\u0056\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0072\u006f\u0076\u0069\u0073\u006f\u0069\u0072\u0065 +dataset.versionUI.inReview=\u0045\u006e\u0020\u0072\u00e9\u0076\u0069\u0073\u0069\u006f\u006e +dataset.versionUI.unpublished=\u004e\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9 +dataset.versionUI.deaccessioned=\u0052\u0065\u0074\u0069\u0072\u00e9 +dataset.cite.title.released=\u004c\u0061\u0020\u0056\u0045\u0052\u0053\u0049\u004f\u004e\u0020\u0050\u0052\u004f\u0056\u0049\u0053\u004f\u0049\u0052\u0045\u0020\u0073\u0065\u0072\u0061\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u00e9\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0061\u0020\u0072\u00e9\u0066\u00e9\u0072\u0065\u006e\u0063\u0065\u0020\u0062\u0069\u0062\u006c\u0069\u006f\u0067\u0072\u0061\u0070\u0068\u0069\u0071\u0075\u0065\u0020\u0070\u0061\u0072\u0020\u006c\u0061\u0020\u0056\u0031\u0020\u0075\u006e\u0065\u0020\u0066\u006f\u0069\u0073\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002e +dataset.cite.title.draft=\u004c\u0061\u0020\u0056\u0045\u0052\u0053\u0049\u004f\u004e\u0020\u0050\u0052\u004f\u0056\u0049\u0053\u004f\u0049\u0052\u0045\u0020\u0073\u0065\u0072\u0061\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u00e9\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0061\u0020\u0072\u00e9\u0066\u00e9\u0072\u0065\u006e\u0063\u0065\u0020\u0062\u0069\u0062\u006c\u0069\u006f\u0067\u0072\u0061\u0070\u0068\u0069\u0071\u0075\u0065\u0020\u0070\u0061\u0072\u0020\u006c\u0061\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0065\u0020\u0075\u006e\u0065\u0020\u0066\u006f\u0069\u0073\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002e 
+dataset.cite.title.deassessioned=\u004c\u0061\u0020\u006d\u0065\u006e\u0074\u0069\u006f\u006e\u0020\u0056\u0045\u0052\u0053\u0049\u004f\u004e\u0020\u0052\u0045\u0054\u0049\u0052\u00c9\u0045\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0061\u006a\u006f\u0075\u0074\u00e9\u0065\u0020\u00e0\u0020\u006c\u0061\u0020\u0072\u00e9\u0066\u00e9\u0072\u0065\u006e\u0063\u0065\u0020\u0062\u0069\u0062\u006c\u0069\u006f\u0067\u0072\u0061\u0070\u0068\u0069\u0071\u0075\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u00e9\u0074\u0061\u006e\u0074\u0020\u0064\u006f\u006e\u006e\u00e9\u0020\u0071\u0075\u0027\u0065\u006c\u006c\u0065\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0070\u006c\u0075\u0073\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u002e +dataset.cite.standards.tip=\u0050\u006f\u0075\u0072\u0020\u0065\u006e\u0020\u0061\u0070\u0070\u0072\u0065\u006e\u0064\u0072\u0065\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0073\u0075\u006a\u0065\u0074\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0065\u0020\u0064\u006f\u0063\u0075\u006d\u0065\u006e\u0074\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u0068\u0074\u0074\u0070\u005c\u003a\u002f\u002f\u0062\u0065\u0073\u0074\u002d\u0070\u0072\u0061\u0063\u0074\u0069\u0063\u0065\u0073\u002e\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u006f\u0072\u0067\u002f\u0064\u0061\u0074\u0061\u002d\u0063\u0069\u0074\u0061\u0074\u0069\u006f\u006e\u002f\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0047\u0065\u0074\u0020\u0052\u0065\u0063\u006f\u0067\u006e\u0069\u0074\u0069\u006f\u006e\u005c\u003a\u0020\u0044\u0061\u0074\u0061\u0020\u0043\u0069\u0074\u0061\u0074\u0069\u006f\u006e\u0020\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0042\u0065\u0073\u0074\u0020\u0050\u0072\u0061\u0063\u0074\u0069\u0063\u0065\u0073\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u007
4\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0044\u0061\u0074\u0061\u0020\u0043\u0069\u0074\u0061\u0074\u0069\u006f\u006e\u0020\u0053\u0074\u0061\u006e\u0064\u0061\u0072\u0064\u0073\u003c\u002f\u0061\u003e\u002e +dataset.cite.downloadBtn=\u0043\u0069\u0074\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.cite.downloadBtn.xml=\u0045\u006e\u0064\u004e\u006f\u0074\u0065\u0020\u0058\u004d\u004c +dataset.cite.downloadBtn.ris=\u0052\u0049\u0053 +dataset.cite.downloadBtn.bib=\u0042\u0069\u0062\u0054\u0065\u0058 +dataset.create.authenticatedUsersOnly=\u0053\u0065\u0075\u006c\u0073\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0061\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0066\u0069\u00e9\u0073\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u0063\u0072\u00e9\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataset.deaccession.reason=\u0052\u0061\u0069\u0073\u006f\u006e\u0020\u0064\u0075\u0020\u0072\u0065\u0074\u0072\u0061\u0069\u0074 +dataset.beAccessedAt=\u004c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u0065\u0075\u0074\u0020\u006d\u0061\u0069\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u00e9\u0020\u00e0\u005c\u0075\u0030\u0030\u0041\u0030\u003a +dataset.descriptionDisplay.title=\u0044\u0065\u0073\u0063\u0072\u0069\u0070\u0074\u0069\u006f\u006e +dataset.keywordDisplay.title=\u004d\u006f\u0074\u002d\u0063\u006c\u00e9 +dataset.subjectDisplay.title=\u0053\u0075\u006a\u0065\u0074 
+dataset.contact.tip=\u0055\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u0064\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u0075\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u006f\u006d\u006d\u0075\u006e\u0069\u0071\u0075\u0065\u0072\u0020\u0061\u0076\u0065\u0063\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u002e\u0020 +dataset.asterisk.tip=\u004c\u0065\u0073\u0020\u0061\u0073\u0074\u00e9\u0072\u0069\u0073\u0071\u0075\u0065\u0073\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u0065\u006e\u0074\u0020\u006c\u0065\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u006f\u0062\u006c\u0069\u0067\u0061\u0074\u006f\u0069\u0072\u0065\u0073\u002e +dataset.message.uploadFiles=\u0054\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u002d\u0020\u0056\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0067\u006c\u0069\u0073\u0073\u0065\u0072\u002d\u0064\u00e9\u0070\u006c\u0061\u0063\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u00e0\u0020\u0070\u0061\u0072\u0074\u0069\u0072\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u006f\u0072\u0064\u0069\u006e\u0061\u0074\u0065\u0075\u0072\u0020\u0076\u0065\u0072\u0073\u0020\u006c\u0065\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0020\u0064\u0065\u0020\u0074\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u006d\u0065\u006e\u0074\u002e\u0020 
+dataset.message.editMetadata=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0070\u006c\u0075\u0073\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0066\u0069\u006e\u0020\u0064\u0065\u0020\u0066\u0061\u0063\u0069\u006c\u0069\u0074\u0065\u0072\u0020\u006c\u0065\u0020\u0072\u0065\u0070\u00e9\u0072\u0061\u0067\u0065\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u002e +dataset.message.editTerms=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u006d\u0065\u0074\u0074\u0072\u0065\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataset.message.locked.editNotAllowedInReview=Dataset cannot be edited due to In Review dataset lock. +dataset.message.locked.downloadNotAllowedInReview=Dataset file(s) may not be downloaded due to In Review dataset lock. +dataset.message.locked.downloadNotAllowed=Dataset file(s) may not be downloaded due to dataset lock. 
+dataset.message.locked.editNotAllowed=Dataset cannot be edited due to dataset lock. +dataset.message.createSuccess=\u0043\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0063\u0072\u00e9\u00e9\u002e +dataset.message.linkSuccess=\u0043\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0065\u0073\u0074\u0020\u006d\u0061\u0069\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u006c\u0069\u00e9\u0020\u00e0\u0020\u007b\u0031\u007d\u002e +dataset.message.metadataSuccess=\u004c\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006f\u006e\u0074\u0020\u00e9\u0074\u00e9\u0020\u006d\u0069\u0073\u0065\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u002e +dataset.message.termsSuccess=\u004c\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006f\u006e\u0074\u0020\u00e9\u0074\u00e9\u0020\u006d\u0069\u0073\u0065\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u002e +dataset.message.filesSuccess=\u004c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006f\u006e\u0074\u0020\u00e9\u0074\u00e9\u0020\u006d\u0069\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u002e 
+dataset.message.publishSuccess=\u0043\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002e\u0020 +dataset.message.only.authenticatedUsers=\u0053\u0065\u0075\u006c\u0073\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0061\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0066\u0069\u00e9\u0073\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020 +dataset.message.deleteSuccess=\u0043\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u002e\u0020 +dataset.message.bulkFileUpdateSuccess=\u004c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0073\u0020\u006f\u006e\u0074\u0020\u00e9\u0074\u00e9\u0020\u006d\u0069\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u002e +dataset.message.bulkFileDeleteSuccess=The selected files have been deleted. 
+datasetVersion.message.deleteSuccess=\u004c\u0061\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0072\u006f\u0076\u0069\u0073\u006f\u0069\u0072\u0065\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u0065\u002e +datasetVersion.message.deaccessionSuccess=\u004c\u0061\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0073\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006f\u006e\u0074\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u0074\u0069\u0072\u00e9\u0065\u0073\u002e +dataset.message.deaccessionSuccess=\u0043\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u0074\u0069\u0072\u00e9\u002e +dataset.message.files.ingestSuccess=\u004c\u0065\u0028\u0073\u0029\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0028\u0073\u0029\u0020\u0061\u0028\u006f\u006e\u0074\u0029\u0020\u0062\u0069\u0065\u006e\u0020\u00e9\u0074\u00e9\u0020\u0063\u0068\u0061\u0072\u0067\u00e9\u0028\u0073\u0029\u002e\u0020\u0056\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u006d\u0061\u0069\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u006c\u0065\u0073\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u0072\u0020\u00e0\u0020\u006c\u0027\u0061\u0069\u0064\u0065\u0020\u0064\u0065\u0020\u0054\u0077\u006f\u0052\u0061\u0076\u0065\u006e\u0073\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u0072\u0020\u0065\u006e\u0020\u0064\u0027\u0061\u0075\u0074\u0072\u0065\u0073\u0020\u0066\u006f\u0072\u006d\u0061\u0074\u0073\u002e 
+dataset.message.validationError=\u0045\u0072\u0072\u0065\u0075\u0072\u0020\u0064\u0065\u0020\u0076\u0061\u006c\u0069\u0064\u0061\u0074\u0069\u006f\u006e\u0020\u002d\u0020\u004c\u0065\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u006f\u0062\u006c\u0069\u0067\u0061\u0074\u006f\u0069\u0072\u0065\u0073\u0020\u006f\u006e\u0074\u0020\u00e9\u0074\u00e9\u0020\u006f\u006d\u0069\u0073\u0020\u006f\u0075\u0020\u0069\u006c\u0020\u0079\u0020\u0061\u0020\u0065\u0075\u0020\u0075\u006e\u0065\u0020\u0065\u0072\u0072\u0065\u0075\u0072\u0020\u0064\u0065\u0020\u0076\u0061\u006c\u0069\u0064\u0061\u0074\u0069\u006f\u006e\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0064\u00e9\u0066\u0069\u006c\u0065\u0072\u0020\u006c\u0065\u0020\u006d\u0065\u006e\u0075\u0020\u0076\u0065\u0072\u0073\u0020\u006c\u0065\u0020\u0062\u0061\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0076\u006f\u0069\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u00e9\u0074\u0061\u0069\u006c\u0073\u002e\u0020 +dataset.message.publishFailure=\u004c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0020\u00ea\u0074\u0072\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002e +dataset.message.metadataFailure=\u004c\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0027\u006f\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0020\u00ea\u0074\u0072\u0065\u0020\u006d\u0069\u0073\u0065\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u002e +dataset.message.filesFailure=\u004c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u006e\u0027\u006f\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0020\u00ea\u0074\u0072\u0065\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u00e9\u0073\u002e +dataset.message.bulkFileDeleteFailure=The selected files could not be deleted. 
+dataset.message.files.ingestFailure=\u004c\u0065\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u006e\u0027\u006f\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0020\u00ea\u0074\u0072\u0065\u0020\u0063\u0068\u0061\u0072\u0067\u00e9\u0073\u002e +dataset.message.deleteFailure=\u004c\u0061\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0072\u006f\u0076\u0069\u0073\u006f\u0069\u0072\u0065\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0020\u00ea\u0074\u0072\u0065\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u0065\u002e +dataset.message.deaccessionFailure=\u0043\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0020\u00ea\u0074\u0072\u0065\u0020\u0072\u0065\u0074\u0069\u0072\u00e9\u002e\u0020 +dataset.message.createFailure=\u004c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0020\u00ea\u0074\u0072\u0065\u0020\u0063\u0072\u00e9\u00e9\u002e +dataset.message.termsFailure=\u004c\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0027\u006f\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0020\u00ea\u0074\u0072\u0065\u0020\u006d\u0069\u0073\u0065\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u002e +dataset.message.publicInstall=File Access - Files are stored on a publicly accessible storage server. 
+dataset.metadata.publicationDate=\u0044\u0061\u0074\u0065\u0020\u0064\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u0061\u0074\u0069\u006f\u006e +dataset.metadata.publicationDate.tip=\u004c\u0061\u0020\u0064\u0061\u0074\u0065\u0020\u0064\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0027\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataset.metadata.persistentId=\u0049\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0070\u00e9\u0072\u0065\u006e\u006e\u0065\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.metadata.persistentId.tip=\u004c\u0027\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0070\u00e9\u0072\u0065\u006e\u006e\u0065\u0020\u0075\u006e\u0069\u0071\u0075\u0065\u0020\u0064\u0027\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0071\u0075\u0069\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0075\u006e\u0020\u0048\u0061\u006e\u0064\u006c\u0065\u0020\u006f\u0075\u0020\u0075\u006e\u0020\u0044\u004f\u0049\u0020\u0064\u0061\u006e\u0073\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataset.versionDifferences.termsOfUseAccess=\u0043\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0065\u0074\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073 
+dataset.versionDifferences.termsOfUseAccessChanged=\u0043\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0065\u0074\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u00e9\u0065\u0073 +file.viewDiffDialog.restricted=\u0041\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9 +dataset.template.tip=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0064\u00e8\u006c\u0065\u0020\u0065\u0066\u0066\u0061\u0063\u0065\u0072\u0061\u0020\u0074\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0063\u0068\u0061\u006d\u0070\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0073\u0071\u0075\u0065\u006c\u0073\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0075\u0072\u0069\u0065\u007a\u0020\u0065\u006e\u0074\u0072\u00e9\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataset.noTemplate.label=\u0041\u0075\u0063\u0075\u006e +dataset.noSelectedFiles.header=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u0075\u006e\u0020\u006f\u0075\u0020\u0064\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073 +dataset.noSelectedFilesForDownload=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0065\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u00e0\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u0072\u002e 
+dataset.noSelectedFilesForRequestAccess=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0065\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0061\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073\u002e +dataset.noSelectedFilesForDelete=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0065\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u00e0\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u002e +dataset.noSelectedFilesForMetadataEdit=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0065\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u00e0\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u002e +dataset.noSelectedFilesForRestrict=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0065\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u006e\u006f\u006e\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0073\u0020\u00e0\u0020\u006d\u0065\u0074\u0074\u0072\u0065\u0020\u0065\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u002e 
+dataset.noSelectedFilesForUnRestrict=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0065\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0073\u0020\u00e0\u0020\u006d\u0065\u0074\u0074\u0072\u0065\u0020\u0065\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u006e\u006f\u006e\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u002e +dataset.inValidSelectedFilesForDownload=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0073\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0073 +dataset.noValidSelectedFilesForDownload=\u004c\u0065\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0073\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0073\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u00e9\u0073\u002c\u0020\u0063\u0061\u0072\u0020\u006c\u0065\u0073\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u006e\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u006f\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u00e9\u0074\u00e9\u0020\u0061\u0063\u0063\u006f\u0072\u0064\u00e9\u0073\u002e 
+dataset.mixedSelectedFilesForDownload=\u004c\u0065\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0073\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0073\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u00e9\u0073\u002c\u0020\u0063\u0061\u0072\u0020\u006c\u0065\u0073\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u006e\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u006f\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u00e9\u0074\u00e9\u0020\u0061\u0063\u0063\u006f\u0072\u0064\u00e9\u0073\u002e +dataset.downloadUnrestricted=\u0043\u006c\u0069\u0071\u0075\u0065\u007a\u0020\u0073\u0075\u0072\u0020\u0043\u006f\u006e\u0074\u0069\u006e\u0075\u0065\u0072\u0020\u0070\u006f\u0075\u0072\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0073\u0071\u0075\u0065\u006c\u0073\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u0075\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u002e +dataset.requestAccessToRestrictedFiles=\u0056\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0075\u006e\u0020\u006f\u0075\u0020\u0064\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0073\u0020\u0065\u006e\u0020\u0063\u006c\u0069\u0071\u0075\u0061\u006e\u0074\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u0020\u0044\u0065\u006d\u0061\u006e\u0064\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u00bb\u002e 
+dataset.privateurl.infoMessageAuthor=\u0055\u0052\u004c\u0020\u0070\u0072\u0069\u0076\u00e9\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0020\u002d\u0020\u0050\u0061\u0072\u0074\u0061\u0067\u0065\u0072\u0020\u0065\u006e\u0020\u0070\u0072\u0069\u0076\u00e9\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0076\u0061\u006e\u0074\u0020\u0073\u0061\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u007b\u0030\u007d +dataset.privateurl.infoMessageReviewer=\u0055\u0052\u004c\u0020\u0070\u0072\u0069\u0076\u00e9\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0020\u002d\u0020\u0020\u0043\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0020\u0065\u0073\u0074\u0020\u0070\u0061\u0072\u0074\u0061\u0067\u00e9\u0020\u0065\u006e\u0020\u0070\u0072\u0069\u0076\u00e9\u002e\u0020\u0056\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0072\u0072\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0079\u0020\u0061\u0063\u0063\u00e9\u0064\u0065\u0072\u0020\u006c\u006f\u0072\u0073\u0071\u0075\u0065\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u00e9\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e 
+dataset.privateurl.header=\u0055\u0052\u004c\u0020\u0070\u0072\u0069\u0076\u00e9\u0065\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9 +dataset.privateurl.tip=\u0055\u0074\u0069\u006c\u0069\u0073\u0065\u007a\u0020\u0075\u006e\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0055\u0052\u004c\u0020\u0070\u0072\u0069\u0076\u00e9\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0070\u0065\u0072\u006d\u0065\u0074\u0074\u0072\u0065\u0020\u00e0\u0020\u0063\u0065\u0075\u0078\u0020\u0071\u0075\u0069\u0020\u006e\u0027\u006f\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u0064\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0064\u0027\u0061\u0063\u0063\u00e9\u0064\u0065\u0072\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0070\u006c\u0075\u0073\u0020\u0064\u0027\u0069\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0061\u0020\u0066\u006f\u006e\u0063\u0074\u0069\u006f\u006e\u006e\u0061\u006c\u0069\u0074\u00e9\u0020\u0064\u0027\u0055\u0052\u004c\u0020\u0070\u0072\u0069\u0076\u00e9\u002c\u0020\u0072\u0065\u0070\u006f\u0072\u0074\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0061\u0075\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0073\u0065\u0074\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0023\u0070\u0072\u0069\u0076\u0061\u0074\u0065\u002d\u0075\u0072\u006c\u002d\u0066\u006f\u0072\u002d\u0072\u00
65\u0076\u0069\u0065\u0077\u0069\u006e\u0067\u002d\u0061\u006e\u002d\u0075\u006e\u0070\u0075\u0062\u006c\u0069\u0073\u0068\u0065\u0064\u002d\u0064\u0061\u0074\u0061\u0073\u0065\u0074\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0050\u0072\u0069\u0076\u0061\u0074\u0065\u0020\u0055\u0052\u004c\u0020\u0066\u006f\u0072\u0020\u0052\u0065\u0076\u0069\u0065\u0077\u0069\u006e\u0067\u0020\u0061\u006e\u0020\u0055\u006e\u0070\u0075\u0062\u006c\u0069\u0073\u0068\u0065\u0064\u0020\u0044\u0061\u0074\u0061\u0073\u0065\u0074\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0067\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u003c\u002f\u0061\u003e\u002e +dataset.privateurl.absent=\u004c\u0027\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0055\u0052\u004c\u0020\u0070\u0072\u0069\u0076\u00e9\u0065\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u00e9\u0074\u00e9\u0020\u0063\u0072\u00e9\u00e9\u0065\u002e +dataset.privateurl.createPrivateUrl=\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0055\u0052\u004c\u0020\u0070\u0072\u0069\u0076\u00e9\u0065 +dataset.privateurl.disablePrivateUrl=\u0044\u00e9\u0073\u0061\u0063\u0074\u0069\u0076\u0065\u0072\u0020\u006c\u0027\u0055\u0052\u004c\u0020\u0070\u0072\u0069\u0076\u00e9 +dataset.privateurl.disablePrivateUrlConfirm=\u0043\u006f\u006e\u0066\u0069\u0072\u006d\u0065\u0072\u0020\u006c\u0061\u0020\u0064\u00e9\u0073\u0061\u0063\u0074\u0069\u0076\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0027\u0055\u0052\u004c\u0020\u0070\u0072\u0069\u0076\u00e9 
+dataset.privateurl.disableConfirmationText=\u0056\u006f\u0075\u006c\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0076\u0072\u0061\u0069\u006d\u0065\u006e\u0074\u0020\u0064\u00e9\u0073\u0061\u0063\u0074\u0069\u0076\u0065\u0072\u0020\u006c\u0027\u0055\u0052\u004c\u0020\u0070\u0072\u0069\u0076\u00e9\u003f\u0020\u0053\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u0070\u0061\u0072\u0074\u0061\u0067\u00e9\u0020\u006c\u0027\u0055\u0052\u004c\u0020\u0070\u0072\u0069\u0076\u00e9\u0020\u0061\u0076\u0065\u0063\u0020\u0064\u0027\u0061\u0075\u0074\u0072\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u002c\u0020\u0063\u0065\u0075\u0078\u002d\u0063\u0069\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0072\u0072\u006f\u006e\u0074\u0020\u0070\u006c\u0075\u0073\u0020\u006c\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0070\u006f\u0075\u0072\u0020\u0061\u0063\u0063\u00e9\u0064\u0065\u0072\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002e +dataset.privateurl.cannotCreate=\u004c\u0027\u0055\u0052\u004c\u0020\u0070\u0072\u0069\u0076\u00e9\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0020\u0071\u0075\u0027\u0061\u0076\u0065\u0063\u0020\u0064\u0065\u0073\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0073\u0020\u006e\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0065\u0073\u0020\u0064\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataset.privateurl.roleassigeeTitle=\u0055\u0052\u004c\u0020\u0070\u0072\u0069\u0076\u00e9\u0020\u0061\u0063\u0074\u0069\u0076\u00e9 
+dataset.privateurl.createdSuccess=\u004f\u0070\u00e9\u0072\u0061\u0074\u0069\u006f\u006e\u0020\u0072\u00e9\u0075\u0073\u0073\u0069\u0065\u0021 +dataset.privateurl.disabledSuccess=\u0056\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u0062\u0069\u0065\u006e\u0020\u0064\u00e9\u0073\u0061\u0063\u0074\u0069\u0076\u00e9\u0020\u006c\u0027\u0055\u0052\u004c\u0020\u0070\u0072\u0069\u0076\u00e9\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002e +dataset.privateurl.noPermToCreate=\u0050\u006f\u0075\u0072\u0020\u0063\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0055\u0052\u004c\u0020\u0070\u0072\u0069\u0076\u00e9\u002c\u0020\u0076\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0065\u007a\u0020\u0064\u0069\u0073\u0070\u006f\u0073\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0061\u0075\u0074\u006f\u0072\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0073\u0020\u0073\u0075\u0069\u0076\u0061\u006e\u0074\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u007b\u0030\u007d\u002e +file.count=\u007b\u0030\u007d\u0020\u007b\u0030\u002c\u0020\u0063\u0068\u006f\u0069\u0063\u0065\u002c\u0020\u0030\u0023\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u007c\u0031\u0023\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u007c\u0032\u0023\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u007d 
+file.count.selected=\u007b\u0030\u007d\u0020\u007b\u0030\u002c\u0020\u0063\u0068\u006f\u0069\u0063\u0065\u002c\u0020\u0030\u0023\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0073\u007c\u0031\u0023\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u007c\u0032\u0023\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0073\u007d +file.selectToAddBtn=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u00e0\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072 +file.selectToAdd.tipLimit=\u004c\u0061\u0020\u006c\u0069\u006d\u0069\u0074\u0065\u0020\u0064\u0065\u0020\u0074\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u006d\u0065\u006e\u0074\u0020\u0065\u0073\u0074\u0020\u0064\u0065\u0020\u007b\u0030\u007d\u0020\u006f\u0063\u0074\u0065\u0074\u0073\u0020\u0070\u0061\u0072\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e 
+file.selectToAdd.tipMoreInformation=\u0050\u006f\u0075\u0072\u0020\u0070\u006c\u0075\u0073\u0020\u0064\u0027\u0069\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u006f\u0072\u006d\u0061\u0074\u0073\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0070\u0072\u0069\u0073\u0020\u0065\u006e\u0020\u0063\u0068\u0061\u0072\u0067\u0065\u002c\u0020\u0072\u0065\u0070\u006f\u0072\u0074\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0061\u0075\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0073\u0065\u0074\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0023\u0066\u0069\u006c\u0065\u002d\u0068\u0061\u006e\u0064\u006c\u0069\u006e\u0067\u002d\u0061\u006e\u0064\u002d\u0075\u0070\u006c\u006f\u0061\u0064\u0069\u006e\u0067\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0046\u0069\u006c\u0065\u0020\u0048\u0061\u006e\u0064\u006c\u0069\u006e\u0067\u0020\u0061\u006e\u0064\u0020\u0055\u0070\u006c\u006f\u0061\u0064\u0069\u006e\u0067\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0067\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u003c\u002f\u0061\u003e\u002e +file.selectToAdd.dragdropMsg=Drag and drop files here. +file.createUploadDisabled=Once you have saved your dataset, you can upload your data using the "Upload Files" button on the dataset page. For more information about supported file formats, please refer to the User Guide. 
+file.fromDropbox=\u0054\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u0072\u0020\u00e0\u0020\u0070\u0061\u0072\u0074\u0069\u0072\u0020\u0064\u0065\u0020\u0044\u0072\u006f\u0070\u0062\u006f\u0078 +file.fromDropbox.tip=\u004c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u0061\u0075\u0073\u0073\u0069\u0020\u00ea\u0074\u0072\u0065\u0020\u0074\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u00e9\u0073\u0020\u0064\u0069\u0072\u0065\u0063\u0074\u0065\u006d\u0065\u006e\u0074\u0020\u0064\u0065\u0020\u0044\u0072\u006f\u0070\u0062\u006f\u0078\u002e +file.replace.original=\u004f\u0072\u0069\u0067\u0069\u006e\u0061\u006c\u0020\u0046\u0069\u006c\u0065 +file.editFiles=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073 +file.bulkUpdate=\u0043\u0068\u0061\u0072\u0067\u0065\u006d\u0065\u006e\u0074\u0020\u0065\u006e\u0020\u006c\u006f\u0074 +file.uploadFiles=\u0054\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073 +file.replaceFile=\u0052\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u0072\u0020\u006c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +file.notFound.tip=\u0043\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0065\u0020\u0063\u006f\u006e\u0074\u0069\u0065\u006e\u0074\u0020\u0061\u0075\u0063\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e +file.noSelectedFiles.tip=\u0041\u0075\u0063\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u006e\u0027\u0061\u0020\u00e9\u0074\u00e9\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u0061\u0066\u0066\u0069\u0063\u0068\u0061\u0067\u0065\u002e\u0020 
+file.noUploadedFiles.tip=\u004c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0074\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u00e9\u0073\u0020\u0070\u0061\u0072\u0061\u00ee\u0074\u0072\u006f\u006e\u0074\u0020\u0069\u0063\u0069\u002e\u0020 +file.replace=\u0052\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u0072 +file.replaced.warning.header=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +file.replaced.warning.draft.warningMessage=\u0056\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u0072\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0071\u0075\u0069\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u0072\u006f\u0076\u0069\u0073\u006f\u0069\u0072\u0065\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u006c\u0065\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u0072\u0020\u0070\u0061\u0072\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0069\u0066\u0066\u00e9\u0072\u0065\u006e\u0074\u002c\u0020\u0076\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0065\u007a\u0020\u0064\u0027\u0061\u0062\u006f\u0072\u0064\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u0072\u006f\u0076\u0069\u0073\u006f\u0069\u0072\u0065\u002e\u0020\u004e\u006f\u0074\u0065\u007a\u0020\u0071\u0075\u0065\u0020\u0063\u0065\u0020\u0066\u0061\u0069\u0073\u0061\u006e\u0074\u002c\u0020\u0074\u006f\u0075\u0074\u0065\u0020\u0061\u0075\u0074\u0072\u0065\u0020\u006d\u006f\u0064\u0069\u006
6\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0020\u0061\u0070\u0070\u006f\u0072\u0074\u00e9\u0065\u0020\u00e0\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0070\u0072\u006f\u0076\u0069\u0073\u006f\u0069\u0072\u0065\u0020\u0073\u0065\u0072\u0061\u0020\u0061\u006e\u006e\u0075\u006c\u00e9\u0065\u002e +file.replaced.warning.previous.warningMessage=\u0056\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u00e9\u0064\u0069\u0074\u0065\u0072\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0071\u0075\u0069\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u0075\u006e\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0072\u00e9\u0063\u00e9\u0064\u0065\u006e\u0074\u0065\u0020\u0064\u0027\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0070\u006f\u0075\u0076\u006f\u0069\u0072\u0020\u006c\u0027\u00e9\u0064\u0069\u0074\u0065\u0072\u002c\u0020\u0076\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0065\u007a\u0020\u0061\u0063\u0063\u00e9\u0064\u0065\u0072\u0020\u00e0\u0020\u006c\u0061\u0020\u0064\u0065\u0072\u006e\u0069\u00e8\u0072\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0065\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e\u0020 
+file.alreadyDeleted.previous.warningMessage=\u0043\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0061\u0020\u0064\u00e9\u006a\u00e0\u0020\u00e9\u0074\u00e9\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0061\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0061\u0063\u0074\u0075\u0065\u006c\u006c\u0065\u002e\u0020\u0049\u006c\u0020\u0070\u0065\u0075\u0074\u0020\u006e\u0065\u0020\u0070\u0061\u0073\u0020\u00ea\u0074\u0072\u0065\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u00e9\u002e +file.delete=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072 +file.metadata=\u004d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +file.deleted.success=\u0045\u006e\u0020\u0063\u006c\u0069\u0071\u0075\u0061\u006e\u0074\u0020\u0073\u0075\u0072\u0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u0045\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u00bb\u002c\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u005c\u0075\u0032\u0030\u0031\u0043\u007b\u0030\u007d\u005c\u0075\u0032\u0030\u0031\u0044\u0020\u0073\u0065\u0072\u006f\u006e\u0074\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u0073\u0020\u0064\u0065\u0020\u0066\u0061\u00e7\u006f\u006e\u0020\u0070\u0065\u0072\u006d\u0061\u006e\u0065\u006e\u0074\u0065\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e 
+file.deleted.replacement.success=\u004c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0065\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u006d\u0065\u006e\u0074\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u002e +file.editAccess=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0061\u0063\u0063\u00e8\u0073 +file.restrict=\u0052\u0065\u0073\u0074\u0072\u0065\u0069\u006e\u0064\u0072\u0065 +file.unrestrict=\u0053\u0061\u006e\u0073\u0020\u0072\u0065\u0073\u0074\u0072\u0069\u0063\u0074\u0069\u006f\u006e +file.restricted.success=\u004c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u005c\u0075\u0032\u0030\u0031\u0043\u007b\u0030\u007d\u005c\u0075\u0032\u0030\u0031\u0044\u0020\u0073\u0065\u0072\u0061\u0020\u0072\u0065\u0073\u0074\u0072\u0065\u0069\u006e\u0074\u0020\u0064\u0075\u0020\u006d\u006f\u006d\u0065\u006e\u0074\u0020\u006f\u00f9\u0020\u0076\u006f\u0075\u0073\u0020\u0063\u006c\u0069\u0071\u0075\u0065\u0072\u0065\u007a\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u0045\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u00bb\u0020\u0061\u0075\u0020\u0062\u0061\u0073\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0070\u0061\u0067\u0065\u002e +file.download.header=\u0054\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u0072 +file.download.subset.header=Download Data Subset +file.preview=\u0041\u0070\u0065\u0072\u00e7\u0075\u005c\u0075\u0030\u0030\u0041\u0030\u003a +file.previewMap=\u0041\u0070\u0065\u0072\u00e7\u0075\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0063\u0061\u0072\u0074\u0065\u005c\u0075\u0030\u0030\u0041\u0030\u003a 
+file.fileName=\u004e\u006f\u006d\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +file.type.tabularData=\u0044\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0074\u0061\u0062\u0075\u006c\u0061\u0069\u0072\u0065\u0073 +file.originalChecksumType=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u006f\u0072\u0069\u0067\u0069\u006e\u0061\u006c\u0020\u007b\u0030\u007d +file.checksum.exists.tip=\u0055\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0061\u0076\u0065\u0063\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0073\u006f\u006d\u006d\u0065\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u0074\u0072\u00f4\u006c\u0065\u0020\u0065\u0078\u0069\u0073\u0074\u0065\u0020\u0064\u00e9\u006a\u00e0\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +file.selectedThumbnail=\u0056\u0069\u0067\u006e\u0065\u0074\u0074\u0065 +file.selectedThumbnail.tip=\u004c\u0061\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0061\u0073\u0073\u006f\u0063\u0069\u00e9\u0065\u0020\u0061\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0065\u0073\u0074\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0065\u0020\u0063\u006f\u006d\u006d\u0065\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0070\u0061\u0072\u0020\u0064\u00e9\u0066\u0061\u0075\u0074\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u0043\u006c\u0069\u0071\u0075\u0065\u007a\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u004f\u0070\u0074\u0069\u006f\u006e\u0073\u0020\u0061\u0076\u0061\u006e\u0063\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u00bb\u0020\u0064\u0027\u0075\u006e\u0020\u0061\u0075\u0074\u0072\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u00
72\u0020\u0070\u006f\u0075\u0072\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u0063\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e +file.cloudStorageAccess=\u0041\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0020\u0073\u0074\u006f\u0063\u006b\u0061\u0067\u0065\u0020\u0069\u006e\u0066\u006f\u006e\u0075\u0061\u0067\u0069\u0071\u0075\u0065 +file.cloudStorageAccess.tip=\u004c\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0075\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0065\u0075\u0072\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u006f\u0069\u0074\u0020\u0061\u0063\u0063\u00e9\u0064\u0065\u0072\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u0073\u0074\u006f\u0063\u006b\u0061\u0067\u0065\u0020\u0069\u006e\u0066\u006f\u006e\u0075\u0061\u0067\u0069\u0071\u0075\u0065\u002e 
+file.cloudStorageAccess.help=\u0050\u006f\u0075\u0072\u0020\u0061\u0063\u0063\u00e9\u0064\u0065\u0072\u0020\u0064\u0069\u0072\u0065\u0063\u0074\u0065\u006d\u0065\u006e\u0074\u0020\u00e0\u0020\u0063\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0027\u0065\u006e\u0076\u0069\u0072\u006f\u006e\u006e\u0065\u006d\u0065\u006e\u0074\u0020\u0069\u006e\u0066\u006f\u006e\u0075\u0061\u0067\u0069\u0071\u0075\u0065\u0020\u007b\u0032\u007d\u002c\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u007a\u0020\u006c\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0075\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0065\u0075\u0072\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0061\u0020\u0063\u0061\u0073\u0065\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0020\u0073\u0074\u006f\u0063\u006b\u0061\u0067\u0065\u0020\u0069\u006e\u0066\u006f\u006e\u0075\u0061\u0067\u0069\u0071\u0075\u0065\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u006f\u0075\u0073\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0065\u006e\u0020\u0073\u0061\u0076\u006f\u0069\u0072\u0020\u0070\u006c\u0075\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0076\u0069\u0072\u006f\u006e\u006e\u0065\u006d\u0065\u006e\u0074\u0020\u0069\u006e\u0066\u006f\u006e\u0075\u0061\u0067\u0069\u0071\u0075\u0065\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0061\u0020\u0073\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0073\u0065\u0074\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0023\u0063\u006c\u006f\u0075\u0064\u002d\u0073\u0074\u006f\u0072\u0061\u0067\u0065\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0043\u006c\u006f\u0075\u0064\u0020\u0053\u0074\u006f\u0072\u0061\u0067\u0065\u0020\u0041\u0063\u0063\u0065\u0073\u0073\u
0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0041\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0020\u0073\u0074\u006f\u0063\u006b\u0061\u0067\u0065\u0020\u0069\u006e\u0066\u006f\u006e\u0075\u0061\u0067\u0069\u0071\u0075\u0065\u003c\u002f\u0061\u003e\u0020\u0064\u0075\u0020\u0047\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u002e +file.copy=\u0043\u006f\u0070\u0069\u0065\u0072 +file.compute=\u0043\u0061\u006c\u0063\u0075\u006c\u0065\u0072 +file.rsyncUpload.info=Follow these steps to upload your data. To learn more about the upload process and how to prepare your data, please refer to the User Guide. +file.rsyncUpload.noScriptAvailable=Rsync script not available! +file.rsyncUpload.filesExist=You can not upload additional files to this dataset. +file.rsyncUpload.step1=Make sure your data is stored under a single directory. All files within this directory and its subdirectories will be uploaded to your dataset. +file.rsyncUpload.step2=Download this file upload script: +file.rsyncUpload.step2.downloadScriptButton=Download Script +file.rsyncUpload.step3=Open a terminal window in the same directory you saved the script and run this command: bash ./{0} +file.rsyncUpload.step4=Follow the instructions in the script. It will ask for a full path (beginning with "/") to the directory containing your data. Note: this script will expire after 7 days. +file.rsyncUpload.inProgressMessage.summary=DCM File Upload +file.rsyncUpload.inProgressMessage.details=This dataset is locked until the data files have been transferred and verified. 
+file.metaData.dataFile.dataTab.variables=\u0056\u0061\u0072\u0069\u0061\u0062\u006c\u0065\u0073\u002c +file.metaData.dataFile.dataTab.observations=\u004f\u0062\u0073\u0065\u0072\u0076\u0061\u0074\u0069\u006f\u006e\u0073 +file.metaData.viewOnWorldMap=\u0045\u0078\u0070\u006c\u006f\u0072\u0065\u0072\u0020\u0073\u0075\u0072\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070 +file.addDescription=\u0041\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0075\u006e\u0065\u0020\u0064\u0065\u0073\u0063\u0072\u0069\u0070\u0074\u0069\u006f\u006e\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u005c\u0075\u0032\u0030\u0032\u0036 +file.tags=\u004c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073 +file.editTags=\u004c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073 +file.editTagsDialog.tip=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0065\u0078\u0069\u0073\u0074\u0061\u006e\u0074\u0073\u0020\u006f\u0075\u0020\u0063\u0072\u00e9\u0065\u0072\u0020\u0064\u0065\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0078\u0020\u006c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0064\u00e9\u0063\u0072\u0069\u0072\u0065\u0020\u0076\u006f\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u002e\u0020\u0043\u0068\u0061\u0071\u0075\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0070\u0065\u0075\u0074\u0020\u0061\u0076\u006f\u0069\u0072\u0020\u0070\u006c\u0075\u0073\u0020\u0064\u0027\u0075\u006e\u0020\u006c\u0069\u0062\u0065\u006c\u006c\u00e9\u002e +file.editTagsDialog.select=\u004c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +file.editTagsDialog.selectedTags=\u004c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0073 
+file.editTagsDialog.selectedTags.none=\u0041\u0075\u0063\u0075\u006e\u0020\u006c\u0069\u0062\u0065\u006c\u006c\u00e9\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9 +file.editTagsDialog.add=\u0050\u0065\u0072\u0073\u006f\u006e\u006e\u0061\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0065\u0020\u006c\u0069\u0062\u0065\u006c\u006c\u00e9\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +file.editTagsDialog.add.tip=\u0043\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u006c\u0069\u0062\u0065\u006c\u006c\u00e9\u0020\u006c\u0027\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0061\u0020\u0063\u006f\u006d\u006d\u0065\u0020\u006f\u0070\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0069\u0062\u0065\u006c\u006c\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u0074\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +file.editTagsDialog.newName=\u0041\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0075\u006e\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u006c\u0069\u0062\u0065\u006c\u006c\u00e9\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u005c\u0075\u0032\u0030\u0032\u0036 +dataset.removeUnusedFileTags.label=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073 
+dataset.removeUnusedFileTags.tip=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u0070\u006f\u0075\u0072\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0061\u006c\u0069\u0073\u00e9\u0073\u0020\u006e\u006f\u006e\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0073\u0020\u0070\u0061\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataset.removeUnusedFileTags.check=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073\u0020\u006e\u006f\u006e\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0073 +file.setThumbnail=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0061\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0020 +file.setThumbnail.header=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0061\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +file.datasetThumbnail=\u0056\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 
+file.datasetThumbnail.tip=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u006f\u0070\u0074\u0069\u006f\u006e\u0020\u0070\u006f\u0075\u0072\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0069\u006d\u0061\u0067\u0065\u0020\u0065\u006e\u0020\u0074\u0061\u006e\u0074\u0020\u0071\u0075\u0065\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0061\u0066\u0066\u0069\u0063\u0068\u00e9\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0061\u0020\u0070\u0061\u0067\u0065\u0020\u0064\u0065\u0073\u0020\u0072\u00e9\u0073\u0075\u006c\u0074\u0061\u0074\u0073\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +file.setThumbnail.confirmation=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0063\u0065\u0072\u0074\u0061\u0069\u006e\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0063\u0068\u006f\u0069\u0073\u0069\u0072\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0069\u006d\u0061\u0067\u0065\u0020\u0063\u006f\u006d\u006d\u0065\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u003f\u0020\u0049\u006c\u0020\u0079\u0020\u0061\u0020\u0064\u00e9\u006a\u00e0\u0020\u0075\u006e\u0065\u0020\u0069\u006d\u0061\u0067\u0065\u0020\u0074\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u00e9\u0065\u0020\u0065\u006e\u0020\u0074\u0061\u006e\u0074\u0020\u0071\u0075\u0065\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0065\u0074\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0061\u0063\u0074\u0069\u006f\u006e\u0020\u006c\u0027\u
0065\u006e\u006c\u00e8\u0076\u0065\u0072\u0061\u002e +file.useThisIamge=\u0055\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0069\u006d\u0061\u0067\u0065\u0020\u0063\u006f\u006d\u006d\u0065\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +file.advancedOptions=\u004f\u0070\u0074\u0069\u006f\u006e\u0073\u0020\u0061\u0076\u0061\u006e\u0063\u00e9\u0065\u0073 +file.advancedIngestOptions=\u004f\u0070\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0065\u0020\u0063\u0068\u0061\u0072\u0067\u0065\u006d\u0065\u006e\u0074\u0020\u0061\u0076\u0061\u006e\u0063\u00e9\u0065\u0073 +file.assignedDataverseImage.success=\u007b\u0030\u007d\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0073\u0061\u0075\u0076\u0065\u0067\u0061\u0072\u0064\u00e9\u0065\u0020\u0063\u006f\u006d\u006d\u0065\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020 +file.assignedTabFileTags.success=\u004c\u0065\u0073\u0020\u006c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073\u0020\u006f\u006e\u0074\u0020\u0062\u0069\u0065\u006e\u0020\u00e9\u0074\u00e9\u0020\u0061\u006a\u006f\u0075\u0074\u00e9\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u007b\u0030\u007d\u002e +file.tabularDataTags=\u004c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0074\u0061\u0062\u0075\u006c\u0061\u0069\u0072\u0065\u0073 
+file.tabularDataTags.tip=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u0075\u006e\u0020\u006f\u0075\u0020\u0070\u006c\u0075\u0073\u0069\u0065\u0075\u0072\u0073\u0020\u006c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073\u0020\u0064\u00e9\u0063\u0072\u0069\u0076\u0061\u006e\u0074\u0020\u006c\u0065\u0020\u0074\u0079\u0070\u0065\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +file.spss-savEncoding=\u0045\u006e\u0063\u006f\u0064\u0061\u0067\u0065\u0020\u006c\u0069\u006e\u0067\u0075\u0069\u0073\u0074\u0069\u0071\u0075\u0065 +file.spss-savEncoding.title=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0061\u0020\u006c\u0061\u006e\u0067\u0075\u0065\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0065\u006e\u0063\u006f\u0064\u0065\u0072\u0020\u0063\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0053\u0050\u0053\u0053\u0020\u0028\u0073\u0061\u0076\u0029\u002e +file.spss-savEncoding.current=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u0061\u0063\u0074\u0075\u0065\u006c\u006c\u0065\u005c\u0075\u0030\u0030\u0041\u0030\u003a +file.spss-porExtraLabels=\u004c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073\u0020\u0064\u0065\u0020\u0076\u0061\u0072\u0069\u0061\u0062\u006c\u0065 
+file.spss-porExtraLabels.title=\u0054\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u0072\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0074\u0065\u0078\u0074\u0065\u0020\u0073\u0075\u0070\u0070\u006c\u00e9\u006d\u0065\u006e\u0074\u0061\u0069\u0072\u0065\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u006c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073\u0020\u0064\u0065\u0020\u0076\u0061\u0072\u0069\u0061\u0062\u006c\u0065\u0020\u0073\u0075\u0070\u0070\u006c\u00e9\u006d\u0065\u006e\u0074\u0061\u0069\u0072\u0065\u0073\u002e +file.spss-porExtraLabels.selectToAddBtn=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u00e0\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072 +file.ingestFailed.header=Upload Completed with Errors +file.ingestFailed.message=Tabular data ingest failed. +file.explore.twoRavens=\u0054\u0077\u006f\u0052\u0061\u0076\u0065\u006e\u0073 +file.map=\u0043\u0061\u0072\u0074\u0065 +file.mapData=\u0047\u00e9\u006f\u006c\u006f\u0063\u0061\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +file.mapData.worldMap=\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070 +file.mapData.unpublished.header=\u0044\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0065\u0073 
+file.mapData.unpublished.message=\u0050\u006f\u0075\u0072\u0020\u0067\u00e9\u006f\u006c\u006f\u0063\u0061\u006c\u0069\u0073\u0065\u0072\u0020\u0076\u006f\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0076\u0065\u0063\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u002c\u0020\u0076\u006f\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u006f\u0069\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0065\u0073\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0065\u0074\u0020\u0065\u0073\u0073\u0061\u0079\u0065\u0072\u0020\u00e0\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u002e +file.downloadBtn.format.all=\u0054\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0066\u006f\u0072\u006d\u0061\u0074\u0073\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u002b\u0020\u0072\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073 +file.downloadBtn.format.tab=\u0053\u00e9\u0070\u0061\u0072\u00e9\u0020\u0070\u0061\u0072\u0020\u0064\u0065\u0073\u0020\u0074\u0061\u0062\u0075\u006c\u0061\u0074\u0065\u0075\u0072\u0073 +file.downloadBtn.format.original=\u0046\u006f\u0072\u006d\u0061\u0074\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u006f\u0072\u0069\u0067\u0069\u006e\u0061\u006c\u0020\u0028\u007b\u0030\u007d\u0029 +file.downloadBtn.format.rdata=\u0046\u006f\u0072\u006d\u0061\u0074\u0020\u0052\u0044\u0061\u0074\u0061 +file.downloadBtn.format.var=\u004d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0065\u0073\u0020\u0076\u0061\u0072\u0069\u0061\u0062\u006c\u0065\u0073 
+file.downloadBtn.format.citation=\u0052\u00e9\u0066\u00e9\u0072\u0065\u006e\u0063\u0065\u0020\u0062\u0069\u0062\u006c\u0069\u006f\u0067\u0072\u0061\u0070\u0068\u0069\u0071\u0075\u0065\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +file.more.information.link=\u004d\u0065\u0074\u0074\u0072\u0065\u0020\u0075\u006e\u0020\u006c\u0069\u0065\u006e\u0020\u0076\u0065\u0072\u0073\u0020\u0070\u006c\u0075\u0073\u0020\u0064\u0027\u0069\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020 +file.requestAccess=\u0044\u0065\u006d\u0061\u006e\u0064\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073 +file.requestAccess.dialog.msg=\u0056\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0065\u007a\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u006c\u006f\u0067\u0069\u006e\u0070\u0061\u0067\u0065\u002e\u0078\u0068\u0074\u006d\u006c\u007b\u0030\u007d\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0076\u006f\u0075\u0073\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u0072\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0022\u003e\u0076\u006f\u0075\u0073\u0020\u0061\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0066\u0069\u0065\u0072\u003c\u002f\u0061\u003e\u0020\u0070\u006f\u0075\u0072\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0072\u0020\u0075\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0063\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e 
+file.requestAccess.dialog.msg.signup=\u0056\u006f\u0075\u0073\u0020\u0064\u0065\u0076\u0065\u007a\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0075\u0073\u0065\u0072\u002e\u0078\u0068\u0074\u006d\u006c\u007b\u0030\u007d\u0026\u0061\u006d\u0070\u003b\u0065\u0064\u0069\u0074\u004d\u006f\u0064\u0065\u003d\u0043\u0052\u0045\u0041\u0054\u0045\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0049\u006e\u0073\u0063\u0072\u0069\u0076\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006f\u0075\u0076\u0072\u0069\u0072\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0022\u003e\u0076\u006f\u0075\u0073\u0020\u0063\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u003c\u002f\u0061\u003e\u0020\u006f\u0075\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u006c\u006f\u0067\u0069\u006e\u0070\u0061\u0067\u0065\u002e\u0078\u0068\u0074\u006d\u006c\u007b\u0030\u007d\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0076\u006f\u0075\u0073\u0020\u0063\u006f\u006e\u006e\u0065\u0063\u0074\u0065\u0072\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u006f\u006d\u0070\u0074\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0022\u003e\u0076\u006f\u0075\u0073\u0020\u0061\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0066\u0069\u0065\u0072\u003c\u002f\u0061\u003e\u0020\u0070\u006f\u0075\u0072\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0072\u0020\u0075\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0063\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e +file.accessRequested=\u0041\u0063\u0063\u00e8\u0073\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u00e9 
+file.restrictions=\u0052\u0065\u0073\u0074\u0072\u0069\u0063\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073 +file.restrictions.description=\u004c\u0069\u006d\u0069\u0074\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0073\u0020\u0065\u006e\u0020\u006c\u0065\u0073\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u0061\u006e\u0074\u0020\u0063\u006f\u006d\u006d\u0065\u0020\u00e9\u0074\u0061\u006e\u0074\u0020\u0072\u0065\u0073\u0074\u0072\u0065\u0069\u006e\u0074\u0073\u002e\u0020\u0046\u006f\u0075\u0072\u006e\u0069\u0072\u0020\u0061\u0075\u0078\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u006c\u0065\u0073\u0020\u0043\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0065\u0074\u0020\u006c\u0065\u0075\u0072\u0020\u0070\u0065\u0072\u006d\u0065\u0074\u0074\u0072\u0065\u0020\u0064\u0065\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u002e 
+file.restrictions.worldmap.warning=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u006e\u006f\u0074\u0065\u0072\u0020\u0071\u0075\u0065\u002c\u0020\u0075\u006e\u0065\u0020\u0066\u006f\u0069\u0073\u0020\u0076\u006f\u0073\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0065\u0073\u002c\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0063\u0061\u0072\u0074\u0065\u0020\u0073\u0075\u0072\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u0020\u0073\u0065\u0072\u0061\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u0065\u0020\u0065\u0074\u0020\u006c\u0061\u0020\u0066\u006f\u006e\u0063\u0074\u0069\u006f\u006e\u0020\u0045\u0078\u0070\u006c\u006f\u0072\u0065\u0072\u0020\u0073\u0075\u0072\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u0020\u0073\u0065\u0072\u0061\u0020\u0072\u0065\u0074\u0069\u0072\u00e9\u0065\u002e +file.ingestInProgress=\u0043\u0068\u0061\u0072\u0067\u0065\u006d\u0065\u006e\u0074\u0020\u0065\u006e\u0020\u0063\u006f\u0075\u0072\u0073\u005c\u0075\u0032\u0030\u0032\u0036 +file.dataFilesTab.metadata.header=\u004d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +file.dataFilesTab.metadata.addBtn=\u0041\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u002b\u0020\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +file.dataFilesTab.terms.header=\u0043\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073 +file.dataFilesTab.terms.editTermsBtn=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073 
+file.dataFilesTab.terms.list.termsOfUse.header=\u0043\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e +file.dataFilesTab.terms.list.termsOfUse.waiver=\u004c\u0069\u0063\u0065\u006e\u0063\u0065\u0020\u0061\u0063\u0063\u006f\u0072\u0064\u00e9\u0065 +file.dataFilesTab.terms.list.termsOfUse.waiver.title=\u004c\u0061\u0020\u006c\u0069\u0063\u0065\u006e\u0063\u0065\u0020\u0070\u0065\u0072\u006d\u0065\u0074\u0020\u0064\u0027\u0069\u006e\u0066\u006f\u0072\u006d\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0071\u0075\u0069\u0020\u006c\u0065\u0075\u0072\u0020\u0065\u0073\u0074\u0020\u0070\u0065\u0072\u006d\u0069\u0073\u0020\u0064\u0065\u0020\u0066\u0061\u0069\u0072\u0065\u0020\u0061\u0076\u0065\u0063\u0020\u0063\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u00e9\u0065\u0073\u002e\u0020 +file.dataFilesTab.terms.list.termsOfUse.waiver.txt=\u006c\u0069\u0063\u0065\u006e\u0063\u0065\u0020\u0043\u0043\u0030\u0020\u002d\u0020\u005c\u0075\u0032\u0030\u0031\u0043\u0054\u0072\u0061\u006e\u0073\u0066\u0065\u0072\u0074\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u0064\u006f\u006d\u0061\u0069\u006e\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u005c\u0075\u0032\u0030\u0031\u0044 
+file.dataFilesTab.terms.list.termsOfUse.waiver.description=\u004c\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0073\u0065\u0020\u0076\u0065\u0072\u0072\u006f\u006e\u0074\u0020\u0061\u0074\u0074\u0072\u0069\u0062\u0075\u0065\u0072\u0020\u0070\u0061\u0072\u0020\u0064\u00e9\u0066\u0061\u0075\u0074\u0020\u0075\u006e\u0065\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u0068\u0074\u0074\u0070\u0073\u005c\u003a\u002f\u002f\u0063\u0072\u0065\u0061\u0074\u0069\u0076\u0065\u0063\u006f\u006d\u006d\u006f\u006e\u0073\u002e\u006f\u0072\u0067\u002f\u0070\u0075\u0062\u006c\u0069\u0063\u0064\u006f\u006d\u0061\u0069\u006e\u002f\u007a\u0065\u0072\u006f\u002f\u0031\u002e\u0030\u002f\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0054\u0072\u0061\u006e\u0073\u0066\u0065\u0072\u0074\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u0064\u006f\u006d\u0061\u0069\u006e\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u0020\u002d\u0020\u0043\u0072\u0065\u0061\u0074\u0069\u0076\u0065\u0020\u0043\u006f\u006d\u006d\u006f\u006e\u0073\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u006c\u0069\u0063\u0065\u006e\u0063\u0065\u0020\u0043\u0043\u0030\u0020\u002d\u0020\u0054\u0072\u0061\u006e\u0073\u0066\u0065\u0072\u0074\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u0064\u006f\u006d\u0061\u0069\u006e\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u003c\u002f\u0061\u003e\u002e\u0020\u0043\u0043\u0030\u0020\u0066\u0061\u0063\u0069\u006c\u0069\u0074\u0065\u0020\u006c\u0061\u0020\u0072\u00e9\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u002e\u0020\u004c\u0065\u0073\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u0068\u0074\u0074\u
0070\u005c\u003a\u002f\u002f\u0062\u0065\u0073\u0074\u002d\u0070\u0072\u0061\u0063\u0074\u0069\u0063\u0065\u0073\u002e\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u006f\u0072\u0067\u002f\u0068\u0061\u0072\u0076\u0061\u0072\u0064\u002d\u0070\u006f\u006c\u0069\u0063\u0069\u0065\u0073\u002f\u0063\u006f\u006d\u006d\u0075\u006e\u0069\u0074\u0079\u002d\u006e\u006f\u0072\u006d\u0073\u002e\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0043\u006f\u006d\u006d\u0075\u006e\u0069\u0074\u0079\u0020\u004e\u006f\u0072\u006d\u0073\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0042\u0065\u0073\u0074\u0020\u0050\u0072\u0061\u0063\u0074\u0069\u0063\u0065\u0073\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u006e\u006f\u0072\u006d\u0065\u0073\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0063\u006f\u006d\u006d\u0075\u006e\u0061\u0075\u0074\u00e9\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u003c\u002f\u0061\u003e\u0020\u0064\u0065\u0020\u006d\u00ea\u006d\u0065\u0020\u0071\u0075\u0065\u0020\u006c\u0065\u0073\u0020\u0062\u006f\u006e\u006e\u0065\u0073\u0020\u0070\u0072\u0061\u0074\u0069\u0071\u0075\u0065\u0073\u0020\u0073\u0063\u0069\u0065\u006e\u0074\u0069\u0066\u0069\u0071\u0075\u0065\u0073\u0020\u0065\u0078\u0069\u0067\u0065\u006e\u0074\u0020\u0071\u0075\u0065\u0020\u0074\u006f\u0075\u0074\u0065\u0020\u0073\u006f\u0075\u0072\u0063\u0065\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0065\u0020\u0073\u006f\u0069\u0074\u0020\u0063\u0069\u0074\u00e9\u0065\u0020\u0063\u006f\u0072\u0072\u0065\u0063\u0074\u0065\u006d\u0065\u006e\u0074\u002e\u0020\u0053\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0061\u0063\u0063\u006f\u0072\u0064\u0065\u0072\u0020\u0075\u006e\u0065\u0020\u006c\u0069\u0063\u0065\u006e\u0063\u0065\u0020\u00
43\u0043\u0030\u002c\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u00e9\u0074\u0061\u0062\u006c\u0069\u0072\u0020\u0064\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0061\u006c\u0069\u0073\u00e9\u0065\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0076\u006f\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020 +file.dataFilesTab.terms.list.termsOfUse.no.waiver.txt=\u0041\u0075\u0063\u0075\u006e\u0065\u0020\u006c\u0069\u0063\u0065\u006e\u0063\u0065\u0020\u006e\u0027\u0061\u0020\u00e9\u0074\u00e9\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020 
+file.dataFilesTab.terms.list.termsOfUse.waiver.txt.description=\u004c\u0065\u0073\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u0068\u0074\u0074\u0070\u005c\u003a\u002f\u002f\u0062\u0065\u0073\u0074\u002d\u0070\u0072\u0061\u0063\u0074\u0069\u0063\u0065\u0073\u002e\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u006f\u0072\u0067\u002f\u0068\u0061\u0072\u0076\u0061\u0072\u0064\u002d\u0070\u006f\u006c\u0069\u0063\u0069\u0065\u0073\u002f\u0063\u006f\u006d\u006d\u0075\u006e\u0069\u0074\u0079\u002d\u006e\u006f\u0072\u006d\u0073\u002e\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0043\u006f\u006d\u006d\u0075\u006e\u0069\u0074\u0079\u0020\u004e\u006f\u0072\u006d\u0073\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0042\u0065\u0073\u0074\u0020\u0050\u0072\u0061\u0063\u0074\u0069\u0063\u0065\u0073\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u006e\u006f\u0072\u006d\u0065\u0073\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0063\u006f\u006d\u006d\u0075\u006e\u0061\u0075\u0074\u00e9\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u003c\u002f\u0061\u003e\u0020\u0064\u0065\u0020\u006d\u00ea\u006d\u0065\u0020\u0071\u0075\u0065\u0020\u006c\u0065\u0073\u0020\u0062\u006f\u006e\u006e\u0065\u0073\u0020\u0070\u0072\u0061\u0074\u0069\u0071\u0075\u0065\u0073\u0020\u0073\u0063\u0069\u0065\u006e\u0074\u0069\u0066\u0069\u0071\u0075\u0065\u0073\u0020\u0065\u0078\u0069\u0067\u0065\u006e\u0074\u0020\u0071\u0075\u0065\u0020\u0074\u006f\u0075\u0074\u0065\u0020\u0073\u006f\u0075\u0072\u0063\u0065\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0065\u0020\u0073\u006f\u0069\u0074\u0020\u0063\u0069\u0074\u00e9\u0065\u0020\u0063\u006f\u0072\u0072\u0065\u0063\u0074\u0065\u006d\u0065\u006e\u0074\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0075\u0074\u0069\u00
6c\u0069\u0073\u0065\u0072\u0020\u006c\u0061\u0020\u0072\u00e9\u0066\u00e9\u0072\u0065\u006e\u0063\u0065\u0020\u0062\u0069\u0062\u006c\u0069\u006f\u0067\u0072\u0061\u0070\u0068\u0069\u0071\u0075\u0065\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u0075\u0073\u0020\u0067\u00e9\u006e\u00e9\u0072\u00e9\u0065\u0020\u0070\u0061\u0072\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +file.dataFilesTab.terms.list.termsOfUse.waiver.select.CCO=\u004f\u0075\u0069\u002c\u0020\u0061\u0070\u0070\u006c\u0069\u0071\u0075\u0065\u0072\u0020\u006c\u0061\u0020\u006c\u0069\u0063\u0065\u006e\u0063\u0065\u0020\u0043\u0043\u0030\u0020\u002d\u0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u0054\u0072\u0061\u006e\u0073\u0066\u0065\u0072\u0074\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u0064\u006f\u006d\u0061\u0069\u006e\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u005c\u0075\u0030\u0030\u0041\u0030\u00bb\u002e +file.dataFilesTab.terms.list.termsOfUse.waiver.select.notCCO=\u004e\u006f\u006e\u002c\u0020\u006e\u0065\u0020\u0070\u0061\u0073\u0020\u0061\u0070\u0070\u006c\u0069\u0071\u0075\u0065\u0072\u0020\u006c\u0061\u0020\u006c\u0069\u0063\u0065\u006e\u0063\u0065\u0020\u0043\u0043\u0030\u0020\u002d\u0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u0054\u0072\u0061\u006e\u0073\u0066\u0065\u0072\u0074\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u0064\u006f\u006d\u0061\u0069\u006e\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u005c\u0075\u0030\u0030\u0041\u0030\u00bb\u002e 
+file.dataFilesTab.terms.list.termsOfUse.waiver.select.tip=\u0056\u006f\u0069\u0063\u0069\u0020\u0063\u0065\u0020\u0071\u0075\u0065\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0066\u0069\u006e\u0061\u0075\u0078\u0020\u0076\u0065\u0072\u0072\u006f\u006e\u0074\u0020\u0061\u0066\u0066\u0069\u0063\u0068\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +file.dataFilesTab.terms.list.termsOfUse.termsOfUse=\u0043\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e +file.dataFilesTab.terms.list.termsOfUse.termsOfUse.title=\u0049\u006e\u0064\u0069\u0071\u0075\u0065\u0020\u006c\u0061\u0020\u0066\u0061\u00e7\u006f\u006e\u0020\u0064\u006f\u006e\u0074\u0020\u0063\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u00e9\u0065\u0073\u0020\u0075\u006e\u0065\u0020\u0066\u006f\u0069\u0073\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u00e9\u0065\u0073\u002e 
+file.dataFilesTab.terms.list.termsOfUse.termsOfUse.description=\u0053\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u006e\u0027\u00ea\u0074\u0065\u0073\u0020\u0070\u0061\u0073\u0020\u0065\u006e\u0020\u006d\u0065\u0073\u0075\u0072\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0061\u0020\u006c\u0069\u0063\u0065\u006e\u0063\u0065\u0020\u0043\u0043\u0030\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u00e9\u0074\u0061\u0062\u006c\u0069\u0072\u0020\u0064\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0061\u006c\u0069\u0073\u00e9\u0065\u0073\u002e\u0020\u0056\u006f\u0069\u0063\u0069\u0020\u0075\u006e\u0020\u0065\u0078\u0065\u006d\u0070\u006c\u0065\u0020\u0064\u0027\u0075\u006e\u0065\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u0068\u0074\u0074\u0070\u005c\u003a\u002f\u002f\u0062\u0065\u0073\u0074\u002d\u0070\u0072\u0061\u0063\u0074\u0069\u0063\u0065\u0073\u002e\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u006f\u0072\u0067\u002f\u0068\u0061\u0072\u0076\u0061\u0072\u0064\u002d\u0070\u006f\u006c\u0069\u0063\u0069\u0065\u0073\u002f\u0073\u0061\u006d\u0070\u006c\u0065\u002d\u0064\u0075\u0061\u002e\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0053\u0061\u006d\u0070\u006c\u0065\u0020\u0044\u0061\u0074\u0061\u0020\u0055\u0073\u0061\u0067\u0065\u0020\u0041\u0067\u0072\u0065\u0065\u006d\u0065\u006e\u0074\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0042\u0065\u0073\u0074\u0020\u0050\u0072\u0061\u0063\u0074\u0069\u0063\u0065\u0073\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u00
74\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u004c\u0069\u0063\u0065\u006e\u0063\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u003c\u002f\u0061\u003e\u0020\u0070\u006f\u0075\u0072\u0020\u0064\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0071\u0075\u0069\u0020\u0063\u006f\u006d\u0070\u006f\u0072\u0074\u0065\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u006e\u006f\u006e\u0079\u006d\u0069\u0073\u00e9\u0065\u0073\u0020\u0064\u0065\u0020\u0073\u0075\u006a\u0065\u0074\u0073\u0020\u0068\u0075\u006d\u0061\u0069\u006e\u0073\u002e +file.dataFilesTab.terms.list.termsOfUse.addInfo=\u0052\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0075\u0070\u0070\u006c\u00e9\u006d\u0065\u006e\u0074\u0061\u0069\u0072\u0065\u0073 +file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration=\u0044\u00e9\u0063\u006c\u0061\u0072\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u0066\u0069\u0064\u0065\u006e\u0074\u0069\u0061\u006c\u0069\u0074\u00e9 +file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration.title=\u0049\u006e\u0064\u0069\u0071\u0075\u0065\u0020\u0073\u0027\u0069\u006c\u0020\u0066\u0061\u0075\u0074\u0020\u0073\u0069\u0067\u006e\u0065\u0072\u0020\u0075\u006e\u0065\u0020\u0064\u00e9\u0063\u006c\u0061\u0072\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u0066\u0069\u0064\u0065\u006e\u0074\u0069\u0061\u006c\u0069\u0074\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u0061\u0076\u006f\u0069\u0072\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0075\u006e\u0065\u0020\u0072\u0065\u0073\u0073\u006f\u0075\u0072\u0063\u0065\u002e 
+file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions=\u0050\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0073\u0020\u0073\u0070\u00e9\u0063\u0069\u0061\u006c\u0065\u0073 +file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions.title=\u0044\u00e9\u0074\u0065\u0072\u006d\u0069\u006e\u0065\u0072\u0020\u0073\u0069\u0020\u0064\u0065\u0073\u0020\u0070\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0073\u0020\u0073\u0070\u00e9\u0063\u0069\u0061\u006c\u0065\u0073\u0020\u0073\u006f\u006e\u0074\u0020\u0072\u0065\u0071\u0075\u0069\u0073\u0065\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0061\u0076\u006f\u0069\u0072\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0075\u006e\u0065\u0020\u0072\u0065\u0073\u0073\u006f\u0075\u0072\u0063\u0065\u0020\u0028\u0070\u002e\u005c\u0075\u0030\u0030\u0041\u0030\u0065\u0078\u002e\u0020\u0073\u0069\u0020\u0075\u006e\u0020\u0066\u006f\u0072\u006d\u0075\u006c\u0061\u0069\u0072\u0065\u0020\u0065\u0073\u0074\u0020\u006e\u00e9\u0063\u0065\u0073\u0073\u0061\u0069\u0072\u0065\u0020\u0065\u0074\u0020\u006f\u00f9\u0020\u006f\u0062\u0074\u0065\u006e\u0069\u0072\u0020\u006c\u0065\u0020\u0066\u006f\u0072\u006d\u0075\u006c\u0061\u0069\u0072\u0065\u0029\u002e +file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions=\u0052\u0065\u0073\u0074\u0072\u0069\u0063\u0074\u0069\u006f\u006e\u0073 
+file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions.title=\u0054\u006f\u0075\u0074\u0065\u0020\u0072\u0065\u0073\u0074\u0072\u0069\u0063\u0074\u0069\u006f\u006e\u0020\u0073\u0027\u0061\u0070\u0070\u006c\u0069\u0071\u0075\u0061\u006e\u0074\u0020\u00e0\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0065\u0074\u0020\u00e0\u0020\u0073\u006f\u006e\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u002c\u0020\u0063\u006f\u006d\u006d\u0065\u0020\u006c\u0061\u0020\u0063\u0065\u0072\u0074\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0020\u0072\u0065\u006c\u0061\u0074\u0069\u0076\u0065\u0020\u00e0\u0020\u006c\u0061\u0020\u0076\u0069\u0065\u0020\u0070\u0072\u0069\u0076\u00e9\u0065\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0072\u0065\u0073\u0074\u0072\u0069\u0063\u0074\u0069\u006f\u006e\u0073\u0020\u0063\u006f\u006e\u0063\u0065\u0072\u006e\u0061\u006e\u0074\u0020\u006c\u0061\u0020\u0064\u0069\u0066\u0066\u0075\u0073\u0069\u006f\u006e\u002c\u0020\u0064\u006f\u0069\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u00e9\u0065\u0020\u00e0\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0064\u0072\u006f\u0069\u0074\u002e\u0020\u0049\u006c\u0020\u0070\u0065\u0075\u0074\u0020\u0073\u0027\u0061\u0067\u0069\u0072\u0020\u0064\u0065\u0020\u0072\u0065\u0073\u0074\u0072\u0069\u0063\u0074\u0069\u006f\u006e\u0073\u0020\u00e9\u0074\u0061\u0062\u006c\u0069\u0065\u0073\u0020\u0073\u0065\u006c\u006f\u006e\u0020\u006c\u0027\u0061\u0075\u0074\u0065\u0075\u0072\u002c\u0020\u006c\u0065\u0020\u0070\u0072\u006f\u0064\u0075\u0063\u0074\u0065\u0075\u0072\u0020\u006f\u0075\u0020\u006c\u0065\u0020\u0064\u0069\u0066\u0066\u0075\u0073\u0065\u0075\u0072\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u0053\u0069\u0020\u006c\u0027\u0061\u0063\u0063\u00e8
\u0073\u0020\u0061\u0075\u0078\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0065\u0073\u0074\u0020\u006c\u0069\u006d\u0069\u0074\u00e9\u0020\u00e0\u0020\u0075\u006e\u0065\u0020\u0063\u0065\u0072\u0074\u0061\u0069\u006e\u0065\u0020\u0063\u0061\u0074\u00e9\u0067\u006f\u0072\u0069\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u002c\u0020\u0076\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u006c\u0065\u0020\u0070\u0072\u00e9\u0063\u0069\u0073\u0065\u0072\u002e\u0020 +file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements=\u0045\u0078\u0069\u0067\u0065\u006e\u0063\u0065\u0073\u0020\u0064\u0065\u0020\u0063\u0069\u0074\u0061\u0074\u0069\u006f\u006e +file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements.title=\u00c9\u006c\u00e9\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0070\u00e9\u0063\u0069\u0061\u0075\u0078\u0020\u0072\u0065\u0071\u0075\u0069\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0069\u0074\u0065\u0072\u0020\u0061\u0064\u00e9\u0071\u0075\u0061\u0074\u0065\u006d\u0065\u006e\u0074\u0020\u0063\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0073\u0020\u0061\u0072\u0074\u0069\u0063\u006c\u0065\u0073\u0020\u006f\u0075\u0020\u0061\u0075\u0074\u0072\u0065\u0073\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0073\u0020\u0071\u0075\u0069\u0020\u0073\u0027\u0061\u0070\u0070\u0075\u0069\u0065\u006e\u0074\u0020\u0073\u0075\u0072\u0020\u0063\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0065\u0078\u0069\u0067\u0065\u006e\u0063\u0065\u0073\u0020\u0073\u0074\u0061\u006e\u0064\u0061\u0072\u0064\u0073\u0020\u0064\u0065\u0020\u0063\u0069\u0074\u0061\u0074\u0069\u006f\u006e\u002c\u0020\u0073\u0065\u0020\u0072\u0065\u0070\u006f\u0072\u0074\u0065\u0072\u0020\u0061\u0075\u0078\u0020\u004e\u006f\u0072\u006d\u0065\u0073\u0020\u00
64\u0065\u0020\u006c\u0061\u0020\u0063\u006f\u006d\u006d\u0075\u006e\u0061\u0075\u0074\u00e9\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements=\u0045\u0078\u0069\u0067\u0065\u006e\u0063\u0065\u0073\u0020\u0064\u0075\u0020\u0064\u00e9\u0070\u006f\u0073\u0061\u006e\u0074 +file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements.title=\u0052\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0063\u006f\u006e\u0063\u0065\u0072\u006e\u0061\u006e\u0074\u0020\u006c\u0061\u0020\u0072\u0065\u0073\u0070\u006f\u006e\u0073\u0061\u0062\u0069\u006c\u0069\u0074\u00e9\u0020\u0064\u0065\u0020\u006c\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0064\u0027\u0069\u006e\u0066\u006f\u0072\u006d\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u00e9\u0070\u006f\u0073\u0061\u006e\u0074\u0073\u002c\u0020\u006c\u0065\u0073\u0020\u0061\u0075\u0074\u0065\u0075\u0072\u0073\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0069\u006e\u0074\u0065\u006e\u0064\u0061\u006e\u0074\u0073\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0066\u0061\u0069\u0074\u0065\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0065\u006e\u0020\u006c\u0065\u0075\u0072\u0020\u0066\u006f\u0075\u0072\u006e\u0069\u0073\u0073\u0061\u006e\u0074\u0020\u006c\u0065\u0073\u0020\u0072\u00e9\u0066\u00e9\u0072\u0065\u006e\u0063\u0065\u0073\u0020\u0061\u0075\u0078\u0020\u0074\u0072\u0061\u0076\u0061\u0075\u0078\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0073\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0063\u006f\u0070\u0069\u0065\u0073\u0020\u0064\u0065\u0073\u0020\u006d\u0061\u006e\u0075\u0073\u0063\u0072\u0069\u0074\u0073\u002e 
+file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions=\u0043\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073 +file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions.title=\u0054\u006f\u0075\u0074\u0020\u0072\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0020\u0073\u0075\u0070\u0070\u006c\u00e9\u006d\u0065\u006e\u0074\u0061\u0069\u0072\u0065\u0020\u0071\u0075\u0069\u0020\u0061\u0069\u0064\u0065\u0072\u0061\u0020\u006c\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u00e0\u0020\u0063\u006f\u006d\u0070\u0072\u0065\u006e\u0064\u0072\u0065\u0020\u006c\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0065\u0074\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer=\u0041\u0076\u0069\u0073\u0020\u0064\u0065\u0020\u006e\u006f\u006e\u002d\u0072\u0065\u0073\u0070\u006f\u006e\u0073\u0061\u0062\u0069\u006c\u0069\u0074\u00e9 +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer.title=\u0052\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0063\u006f\u006e\u0063\u0065\u0072\u006e\u0061\u006e\u0074\u0020\u006c\u0065\u0073\u0020\u0072\u0065\u0073\u0070\u006f\u006e\u0073\u0061\u0062\u0069\u006c\u0069\u0074\u00e9\u0073\u0020\u006c\u0069\u00e9\u0065\u0073\u0020\u00e0\u0020\u006c\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e 
+file.dataFilesTab.terms.list.termsOfAccess.header=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0065\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0020\u002b\u0020\u0043\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073 +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0065\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9 +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.title=\u004e\u006f\u006d\u0062\u0072\u0065\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0065\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=\u0049\u006c\u0020\u0079\u0020\u0061\u0020\u007b\u0030\u007d\u0020\u007b\u0030\u002c\u0020\u0063\u0068\u006f\u0069\u0063\u0065\u002c\u0020\u0030\u0023\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u007c\u0031\u0023\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u007c\u0032\u0023\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u007d\u0020\u0065\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess=\u0043\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073 
+file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess.title=\u0052\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0061\u0020\u0066\u0061\u00e7\u006f\u006e\u0020\u0064\u006f\u006e\u0074\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u0061\u0076\u006f\u0069\u0072\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0065\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020 +file.dataFilesTab.terms.list.termsOfAccess.requestAccess=\u0044\u0065\u006d\u0061\u006e\u0064\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073 +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title=\u0053\u0069\u0020\u006c\u0061\u0020\u0063\u0061\u0073\u0065\u0020\u0065\u0073\u0074\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0065\u002c\u0020\u006c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0065\u006e\u0020\u0061\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e 
+file.dataFilesTab.terms.list.termsOfAccess.requestAccess.request=\u004c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u002e +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.notRequest=\u004c\u0065\u0073\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0073\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0076\u0065\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0072\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u002e +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.enableBtn=\u0041\u0075\u0074\u006f\u0072\u0069\u0073\u0065\u0072\u0020\u006c\u0061\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073 +file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace=\u0045\u006d\u0070\u006c\u0061\u0063\u0065\u006d\u0065\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace.title=\u0053\u0069\u0020\u006c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0065\u0020\u0073\u0065\u0020\u0074\u0072\u006f\u0075\u0076\u0065\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u0075\u006e\u0069\u0071\u0075\u0065\u006d\u0065\u006e\u0074\u0020\u0064\u0061\u006e\u0073\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u006c\u0065\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u006f\u00f9\u0020\u006c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0073\u006f\u006e\u0074\u0020\u0061\u0063\u0074\u0075\u0065\u006c\u006c\u0065\u006d\u0065\u006e\u0074\u0020\u0063\u006f\u006e\u0073\u0065\u0072\u0076\u00e9\u0065\u0073\u002e +file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive=\u0044\u00e9\u0070\u00f4\u0074\u0020\u006f\u0072\u0069\u0067\u0069\u006e\u0061\u006c +file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive.title=\u0044\u00e9\u0070\u00f4\u0074\u0020\u0064\u0075\u0071\u0075\u0065\u006c\u0020\u006c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006f\u006e\u0074\u0020\u00e9\u0074\u00e9\u0020\u006f\u0062\u0074\u0065\u006e\u0075\u0065\u0073\u002e +file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus=\u00c9\u0074\u0061\u0074\u0020\u0064\u0065\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u0069\u006c\u0069\u0074\u00e9 
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus.title=\u00c9\u006e\u006f\u006e\u0063\u00e9\u0020\u0063\u006f\u006e\u0063\u0065\u0072\u006e\u0061\u006e\u0074\u0020\u006c\u0061\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u0069\u006c\u0069\u0074\u00e9\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u0055\u006e\u0020\u0064\u00e9\u0070\u006f\u0073\u0061\u006e\u0074\u0020\u0070\u006f\u0075\u0072\u0072\u0061\u0069\u0074\u0020\u0064\u0065\u0076\u006f\u0069\u0072\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u0071\u0075\u0027\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0070\u0061\u0073\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0020\u0070\u0061\u0072\u0063\u0065\u0020\u0071\u0075\u0065\u0020\u0066\u0061\u0069\u0073\u0061\u006e\u0074\u0020\u006c\u0027\u006f\u0062\u006a\u0065\u0074\u0020\u0064\u0027\u0075\u006e\u0020\u0065\u006d\u0062\u0061\u0072\u0067\u006f\u0020\u0074\u0065\u006d\u0070\u006f\u0072\u0061\u0069\u0072\u0065\u002c\u0020\u0070\u0061\u0072\u0063\u0065\u0020\u0071\u0075\u0027\u0069\u006c\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u00e9\u002c\u0020\u0070\u0061\u0072\u0063\u0065\u0020\u0071\u0075\u0027\u0075\u006e\u0065\u0020\u006e\u006f\u0075\u0076\u0065\u006c\u006c\u0065\u0020\u00e9\u0064\u0069\u0074\u0069\u006f\u006e\u0020\u0065\u0073\u0074\u0020\u0069\u006d\u006d\u0069\u006e\u0065\u006e\u0074\u0065\u002c\u0020\u0065\u0074\u0063\u002e\u0020 
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess=\u0050\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u002d\u0072\u0065\u0073\u0073\u006f\u0075\u0072\u0063\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0073\u0020\u0064\u0027\u0061\u0063\u0063\u00e8\u0073 +file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess.title=\u0053\u0069\u0020\u006c\u0065\u0073\u0020\u0063\u006f\u006f\u0072\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0069\u0066\u0066\u00e8\u0072\u0065\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u0063\u006f\u006f\u0072\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0069\u006c\u0020\u0073\u0027\u0061\u0067\u0069\u0074\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u002d\u0072\u0065\u0073\u0073\u006f\u0075\u0072\u0063\u0065\u0020\u006f\u0075\u0020\u0064\u0065\u0020\u006c\u0027\u006f\u0072\u0067\u0061\u006e\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0028\u0069\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u006c\u0027\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u0020\u006f\u0075\u0020\u006c\u0027\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u006d\u0070\u006c\u00e8\u0074\u0065\u0020\u0065\u0074\u0020\u006c\u0065\u0020\u006e\u0075\u006d\u00e9\u0072\u006f\u0020\u0064\u0065\u0020\u0074\u00e9\u006c\u00e9\u0070\u0068\u006f\u006e\u0065\u002c\u0020\u0073\u0069\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0073\u0029\u0020\u0071\u0075\u0069\u0020\u0063\u006f\u006e\u0074\u0072\u00f4\u006c\u0065\u0020\u006c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0075\u006e\u0065\u0020\u0063\u006f\u006c\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0064\
u006f\u006e\u006e\u00e9\u0065\u0073\u002e +file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection=\u0054\u0061\u0069\u006c\u006c\u0065\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0063\u006f\u006c\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection.tip=\u0053\u006f\u006d\u006d\u0061\u0069\u0072\u0065\u0020\u0064\u0075\u0020\u006e\u006f\u006d\u0062\u0072\u0065\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0064\u0075\u0020\u006e\u006f\u006d\u0062\u0072\u0065\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u003b\u0020\u0069\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0061\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u0069\u006c\u0069\u0074\u00e9\u0020\u0064\u0065\u0020\u0064\u006f\u0063\u0075\u006d\u0065\u006e\u0074\u0061\u0074\u0069\u006f\u006e\u0020\u006c\u0069\u0073\u0069\u0062\u006c\u0065\u0020\u0070\u0061\u0072\u0020\u006d\u0061\u0063\u0068\u0069\u006e\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0061\u0020\u0063\u006f\u006c\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u0065\u0074\u0020\u0064\u0027\u0061\u0075\u0074\u0072\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u002c\u0020\u0063\u006f\u006d\u006d\u0065\u0020\u006d\u0061\u006e\u0075\u0065\u006c\u0073\u0020\u0064\u0065\u0020\u0063\u006f\u0064\u0065\u0073\u002c\u0020\u0064\u0065\u0073\u0020\u0064\u0069\u0063\u0074\u0069\u006f\u006e\u006e\u0061\u0069\u0072\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0
02c\u0020\u0064\u0065\u0073\u0020\u00e9\u006e\u006f\u006e\u0063\u00e9\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0061\u0020\u0064\u00e9\u0066\u0069\u006e\u0069\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006f\u0075\u0020\u0064\u0065\u0073\u0020\u0069\u006e\u0073\u0074\u0072\u0075\u006d\u0065\u006e\u0074\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0061\u0020\u0063\u006f\u006c\u006c\u0065\u0063\u0074\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion=\u00c9\u006c\u00e9\u006d\u0065\u006e\u0074\u0073\u0020\u0061\u0063\u0068\u0065\u0076\u00e9\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u00e9\u0074\u0075\u0064\u0065 +file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion.title=\u004c\u0069\u0065\u006e\u0020\u0065\u006e\u0074\u0072\u0065\u0020\u006c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0072\u0065\u0063\u0075\u0065\u0069\u006c\u006c\u0069\u0065\u0073\u0020\u0065\u0074\u0020\u006c\u0061\u0020\u0071\u0075\u0061\u006e\u0074\u0069\u0074\u00e9\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0063\u006f\u0064\u00e9\u0065\u0073\u0020\u0065\u0074\u0020\u0065\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u00e9\u0065\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u0044\u0065\u0020\u006c\u0027\u0069\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0076\u0072\u0061\u0069\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0066\u006f\u0075\u0072\u006e\u0069\u0065\u0020\u0069\u0063\u0069\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0072\u0061\u0069\u0073\u006f\u006e\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0073\u0071\u0075\u0065\u006c\u006c\u0065\u0073\u0020\u0063\u0065\u0072\u0074\u0061\u006
9\u006e\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0072\u0065\u0063\u0075\u0065\u0069\u006c\u006c\u0069\u0065\u0073\u002c\u0020\u006f\u0075\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u0072\u00e9\u0063\u0069\u0073\u002c\u0020\u006e\u0027\u006f\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u00e9\u0074\u00e9\u0020\u0069\u006e\u0063\u006c\u0075\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +file.dataFilesTab.terms.list.guestbook=\u0052\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073 +file.dataFilesTab.terms.list.guestbook.title=\u0044\u0065\u0073\u0020\u0072\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0028\u0063\u002e\u002d\u00e0\u002d\u0064\u002e\u0020\u006c\u0065\u0020\u006e\u006f\u006d\u002c\u0020\u006c\u0027\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0063\u006f\u0075\u0072\u0072\u0069\u0065\u006c\u002c\u0020\u006c\u0027\u00e9\u0074\u0061\u0062\u006c\u0069\u0073\u0073\u0065\u006d\u0065\u006e\u0074\u0020\u0065\u0074\u0020\u006c\u0065\u0020\u0070\u006f\u0073\u0074\u0065\u0029\u0020\u0073\u0065\u0072\u006f\u006e\u0074\u0020\u0072\u0065\u0063\u0075\u0065\u0069\u006c\u006c\u0069\u0073\u0020\u006c\u006f\u0072\u0073\u0020\u0064\u0075\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u006d\u0065\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u002e 
+file.dataFilesTab.terms.list.guestbook.noSelected.tip=\u0041\u0075\u0063\u0075\u006e\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0061\u0073\u0073\u006f\u0063\u0069\u00e9\u0020\u00e0\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u006f\u006e\u0063\u0020\u0061\u0075\u0063\u0075\u006e\u0020\u0072\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0020\u006e\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0073\u0065\u0072\u0061\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u00e9\u0020\u0063\u006f\u006e\u0063\u0065\u0072\u006e\u0061\u006e\u0074\u0020\u006c\u0065\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u006d\u0065\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u002e +file.dataFilesTab.terms.list.guestbook.noSelected.admin.tip=\u0041\u0075\u0063\u0075\u006e\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u007b\u0030\u007d\u0020\u0070\u006f\u0075\u0072\u0020\u00ea\u0074\u0072\u0065\u0020\u0061\u0073\u0073\u0069\u0067\u006e\u00e9\u0020\u00e0\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e 
+file.dataFilesTab.terms.list.guestbook.inUse.tip=\u004c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0073\u0075\u0069\u0076\u0061\u006e\u0074\u0020\u0064\u0065\u006d\u0061\u006e\u0064\u0065\u0072\u0061\u0020\u00e0\u0020\u0075\u006e\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0064\u0065\u0020\u0066\u006f\u0075\u0072\u006e\u0069\u0072\u0020\u0064\u0065\u0073\u0020\u0072\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0075\u0070\u0070\u006c\u00e9\u006d\u0065\u006e\u0074\u0061\u0069\u0072\u0065\u0073\u0020\u0061\u0075\u0020\u006d\u006f\u006d\u0065\u006e\u0074\u0020\u0064\u0075\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u006d\u0065\u006e\u0074\u0020\u0064\u0027\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e +file.dataFilesTab.terms.list.guestbook.viewBtn=\u0050\u0072\u00e9\u0076\u0069\u0073\u0075\u0061\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0065\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073 
+file.dataFilesTab.terms.list.guestbook.select.tip=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u0075\u006e\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0061\u0066\u0069\u006e\u0020\u0071\u0075\u0027\u0075\u006e\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0065\u0075\u0072\u0020\u0066\u006f\u0075\u0072\u006e\u0069\u0073\u0073\u0065\u0020\u0064\u0065\u0073\u0020\u0072\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0075\u0070\u0070\u006c\u00e9\u006d\u0065\u006e\u0074\u0061\u0069\u0072\u0065\u0073\u0020\u006c\u006f\u0072\u0073\u0071\u0075\u0027\u0069\u006c\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e +file.dataFilesTab.terms.list.guestbook.noAvailable.tip=\u0041\u0075\u0063\u0075\u006e\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0061\u0063\u0074\u0069\u0076\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u007b\u0030\u007d\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0063\u0072\u00e9\u0065\u0072\u0020\u0075\u006e\u0020\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0020\u0064\u0065\u0073\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u002c\u0020\u0072\u0065\u0074\u006f\u0075\u0072\u006e\u0065\u0072\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u007b\u0030\u007d\u002c\u0020\u0063\u006c\u0069\u0071\u0075\u0065\u0072\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u005c\u0075\u0030\u0030\u0041\u0030\u00bb\u0020\u0065\u0074\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u
0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u0052\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0073\u0020\u0064\u0065\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u00bb\u002e +file.dataFilesTab.terms.list.guestbook.clearBtn=\u0045\u0066\u0066\u0061\u0063\u0065\u0072\u0020\u006c\u0061\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e +file.dataFilesTab.dataAccess=Data Access +file.dataFilesTab.dataAccess.info=This data file can be accessed through a terminal window, using the commands below. For more information about downloading and verifying data, see our User Guide. +file.dataFilesTab.dataAccess.info.draft=Data files can not be accessed until the dataset draft has been published. For more information about downloading and verifying data, see our User Guide. +file.dataFilesTab.dataAccess.local.label=Local Access +file.dataFilesTab.dataAccess.download.label=Download Access +file.dataFilesTab.dataAccess.verify.label=Verify Data +file.dataFilesTab.dataAccess.local.tooltip=If this data is locally available to you, this is its file path. +file.dataFilesTab.dataAccess.download.tooltip=Download this data from your preferred mirror by running this command. +file.dataFilesTab.dataAccess.verify.tooltip=This command runs a checksum to verify the integrity of the data you have downloaded. 
+file.dataFilesTab.versions=\u0056\u0065\u0072\u0073\u0069\u006f\u006e\u0073 +file.dataFilesTab.versions.headers.dataset=\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +file.dataFilesTab.versions.headers.summary=\u0052\u00e9\u0073\u0075\u006d\u00e9 +file.dataFilesTab.versions.headers.contributors=\u0043\u006f\u006e\u0074\u0072\u0069\u0062\u0075\u0074\u0065\u0075\u0072\u0073 +file.dataFilesTab.versions.headers.published=\u0050\u0075\u0062\u006c\u0069\u00e9 +file.dataFilesTab.versions.viewDiffBtn=\u0056\u006f\u0069\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u0069\u0066\u0066\u00e9\u0072\u0065\u006e\u0063\u0065\u0073 +file.dataFilesTab.versions.citationMetadata=\u004d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0062\u0069\u0062\u006c\u0069\u006f\u0067\u0072\u0061\u0070\u0068\u0069\u0071\u0075\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a +file.dataFilesTab.versions.added=\u0041\u006a\u006f\u0075\u0074\u00e9 +file.dataFilesTab.versions.removed=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u00e9 +file.dataFilesTab.versions.changed=\u004d\u006f\u0064\u0069\u0066\u0069\u00e9 +file.dataFilesTab.versions.replaced=\u0052\u0065\u006d\u0070\u006c\u0061\u0063\u00e9 +file.dataFilesTab.versions.original=\u004f\u0072\u0069\u0067\u0069\u006e\u0061\u006c +file.dataFilesTab.versions.replacment=\u0052\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u006d\u0065\u006e\u0074 +file.dataFilesTab.versions.additionalCitationMetadata=\u004d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0062\u0069\u0062\u006c\u0069\u006f\u0067\u0072\u0061\u0070\u0068\u0069\u0071\u0075\u0065\u0073\u0020\u0061\u0064\u0064\u0069\u0074\u0069\u006f\u006e\u006e\u0065\u006c\u006c\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a 
+file.dataFilesTab.versions.description.draft=\u0049\u006c\u0020\u0073\u0027\u0061\u0067\u0069\u0074\u0020\u0064\u0027\u0075\u006e\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0072\u006f\u0076\u0069\u0073\u006f\u0069\u0072\u0065\u002e +file.dataFilesTab.versions.description.deaccessioned=\u00c9\u0074\u0061\u006e\u0074\u0020\u0064\u006f\u006e\u006e\u00e9\u0020\u0071\u0075\u0065\u0020\u006c\u0061\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0072\u00e9\u0063\u00e9\u0064\u0065\u006e\u0074\u0065\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u0074\u0069\u0072\u00e9\u0065\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0064\u0069\u0066\u0066\u0075\u0073\u0069\u006f\u006e\u002c\u0020\u0061\u0075\u0063\u0075\u006e\u0065\u0020\u006e\u006f\u0074\u0065\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u0069\u0066\u0066\u00e9\u0072\u0065\u006e\u0063\u0065\u0073\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0065\u002e +file.dataFilesTab.versions.description.firstPublished=\u0049\u006c\u0020\u0073\u0027\u0061\u0067\u0069\u0074\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0070\u0072\u0065\u006d\u0069\u00e8\u0072\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0065\u002e +file.dataFilesTab.versions.description.deaccessionedReason=\u0052\u0061\u0069\u0073\u006f\u006e\u0020\u0064\u0075\u0020\u0072\u0065\u0074\u0072\u0061\u0069\u0074\u005c\u0075\u0030\u0030\u0041\u0030\u003a 
+file.dataFilesTab.versions.description.beAccessedAt=\u004c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u0065\u0075\u0074\u0020\u006d\u0061\u0069\u006e\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u00e9\u0020\u00e0\u005c\u0075\u0030\u0030\u0041\u0030\u003a +file.dataFilesTab.versions.viewDetails.btn=\u0056\u006f\u0069\u0072\u0020\u006c\u0065\u0073\u0020\u0072\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073 +file.dataFilesTab.versions.widget.viewMoreInfo=\u0050\u006f\u0075\u0072\u0020\u0061\u0066\u0066\u0069\u0063\u0068\u0065\u0072\u0020\u0070\u006c\u0075\u0073\u0020\u0064\u0027\u0069\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0073\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0065\u0074\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u0073\u0027\u0069\u006c\u0020\u0073\u0027\u0061\u0067\u0069\u0074\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0061\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u0064\u0061\u0074\u0061\u0073\u0065\u0074\u002e\u0078\u0068\u0074\u006d\u006c\u003f\u0070\u0065\u0072\u0073\u0069\u0073\u0074\u0065\u006e\u0074\u0049\u0064\u003d\u007b\u0030\u007d\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u007b\u0031\u007d\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u002
0\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0063\u006f\u006d\u0070\u006c\u00e8\u0074\u0065\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u003c\u002f\u0061\u003e\u0020\u00e0\u0020\u0020\u007b\u0032\u007d\u002e +file.deleteDialog.tip=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0073\u00fb\u0072\u0028\u0065\u0029\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u003f\u0020\u0056\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0072\u0072\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0061\u006e\u006e\u0075\u006c\u0065\u0072\u0020\u006c\u0061\u0020\u0073\u0075\u0070\u0070\u0072\u0065\u0073\u0073\u0069\u006f\u006e\u002e +file.deleteDialog.header=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +file.deleteDraftDialog.tip=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0073\u00fb\u0072\u0028\u0065\u0029\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0072\u006f\u0076\u0069\u0073\u006f\u0069\u0072\u0065\u003f\u0020\u0056\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0072\u0072\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0061\u006e\u006e\u0075\u006c\u0065\u0072\u0020\u006c\u0061\u0020\u0073\u0075\u0070\u0070\u0072\u0065\u0073\u0073\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u002e\u0020 
+file.deleteDraftDialog.header=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0061\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0072\u006f\u0076\u0069\u0073\u006f\u0069\u0072\u0065 +file.deleteFileDialog.tip=\u004c\u0065\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0073\u0065\u0072\u006f\u006e\u0074\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u0073\u0020\u0064\u00e8\u0073\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0063\u006c\u0069\u0071\u0075\u0065\u0072\u0065\u007a\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u0045\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u00bb\u0020\u0061\u0075\u0020\u0062\u0061\u0073\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0070\u0061\u0067\u0065\u002e +file.deleteFileDialog.immediate=\u004c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0073\u0065\u0072\u0061\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u0020\u0075\u006e\u0065\u0020\u0066\u006f\u0069\u0073\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0075\u0072\u0065\u007a\u0020\u0063\u006c\u0069\u0071\u0075\u00e9\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u0020\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u00bb\u002e\u0020 
+file.deleteFileDialog.multiple.immediate=\u004c\u0065\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0073\u0065\u0072\u006f\u006e\u0074\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u0073\u0020\u006c\u006f\u0072\u0073\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0075\u0072\u0065\u007a\u0020\u0063\u006c\u0069\u0071\u0075\u00e9\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u0020\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u00bb\u002e +file.deleteFileDialog.header=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073 +file.deleteFileDialog.failed.tip=\u004c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u006e\u0065\u0020\u0073\u0065\u0072\u006f\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u0073\u0020\u0064\u0065\u0073\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0065\u0073\u0020\u0070\u0072\u00e9\u0063\u00e9\u0064\u0065\u006d\u006d\u0065\u006e\u0074\u002e +file.deaccessionDialog.tip=\u0053\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u0072\u0065\u0074\u0069\u0072\u0065\u007a\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0064\u0069\u0066\u0066\u0075\u0073\u0069\u006f\u006e\u0020\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u006c\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0072\u0072\u0061\u0020\u0070\u006c\u0075\u0073\u0020\u006c\u0065\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u0072\u002e 
+file.deaccessionDialog.version=\u0056\u0065\u0072\u0073\u0069\u006f\u006e +file.deaccessionDialog.reason.question1=\u0051\u0075\u0065\u006c\u006c\u0065\u0073\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0073\u0020\u0064\u00e9\u0073\u0069\u0072\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0072\u0065\u0074\u0069\u0072\u0065\u0072\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0064\u0069\u0066\u0066\u0075\u0073\u0069\u006f\u006e\u003f +file.deaccessionDialog.reason.question2=\u0051\u0075\u0065\u006c\u0020\u0065\u0073\u0074\u0020\u006c\u0061\u0020\u0072\u0061\u0069\u0073\u006f\u006e\u0020\u0064\u0075\u0020\u0072\u0065\u0074\u0072\u0061\u0069\u0074\u003f +file.deaccessionDialog.reason.selectItem.identifiable=\u0049\u006c\u0020\u0079\u0020\u0061\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u0065\u0072\u006d\u0065\u0074\u0074\u0061\u006e\u0074\u0020\u006c\u0027\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u0075\u006e\u0020\u006f\u0075\u0020\u0070\u006c\u0075\u0073\u0069\u0065\u0075\u0072\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u002e +file.deaccessionDialog.reason.selectItem.beRetracted=\u004c\u0027\u0061\u0072\u0074\u0069\u0063\u006c\u0065\u0020\u0064\u0065\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u0074\u0069\u0072\u00e9\u002e +file.deaccessionDialog.reason.selectItem.beTransferred=\u004c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0074\u0072\u0061\u006e\u0073\u0066\u00e9\u0072\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u0075\u006e\u0020\u0061\u0075\u0074\u0072\u0065\u0020\u0064\u00e9\u0070\u00f4\u0074\u002e 
+file.deaccessionDialog.reason.selectItem.IRB=\u0044\u0065\u006d\u0061\u006e\u0064\u0065\u0020\u0064\u0075\u0020\u0043\u006f\u006d\u0069\u0074\u00e9\u0020\u0064\u0027\u00e9\u0074\u0068\u0069\u0071\u0075\u0065\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0072\u0065\u0063\u0068\u0065\u0072\u0063\u0068\u0065 +file.deaccessionDialog.reason.selectItem.legalIssue=\u0043\u006f\u006e\u0076\u0065\u006e\u0074\u0069\u006f\u006e\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0065\u0074\u0020\u0071\u0075\u0065\u0073\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u0064\u0072\u006f\u0069\u0074 +file.deaccessionDialog.reason.selectItem.notValid=\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u006f\u006e\u0020\u0076\u0061\u006c\u0069\u0064\u0065 +file.deaccessionDialog.reason.selectItem.other=\u0041\u0075\u0074\u0072\u0065\u0020\u0028\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u006c\u0061\u0020\u0072\u0061\u0069\u0073\u006f\u006e\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0027\u0065\u0073\u0070\u0061\u0063\u0065\u0020\u0066\u006f\u0075\u0072\u006e\u0069\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u006f\u0075\u0073\u002e\u0029 +file.deaccessionDialog.enterInfo=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0065\u006e\u0074\u0072\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0072\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0075\u0070\u0070\u006c\u00e9\u006d\u0065\u006e\u0074\u0061\u0069\u0072\u0065\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0061\u0020\u0072\u0061\u0069\u0073\u006f\u006e\u0020\u0064\u0075\u0020\u0072\u0065\u0074\u0072\u0061\u0069\u0074\u002e 
+file.deaccessionDialog.leaveURL=\u0053\u0027\u0069\u006c\u0020\u0079\u0020\u0061\u0020\u006c\u0069\u0065\u0075\u002c\u0020\u0076\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u0075\u006e\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0055\u0052\u004c\u0020\u006f\u00f9\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u00e9\u0020\u0061\u0070\u0072\u00e8\u0073\u0020\u0073\u006f\u006e\u0020\u0072\u0065\u0074\u0072\u0061\u0069\u0074\u002e +file.deaccessionDialog.leaveURL.watermark=\u0053\u0069\u0074\u0065\u0020\u0066\u0061\u0063\u0075\u006c\u0074\u0061\u0074\u0069\u0066\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0068\u0074\u0074\u0070\u003a\u002f\u002f\u005c\u0075\u0032\u0030\u0032\u0036 +file.deaccessionDialog.deaccession.tip=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0073\u00fb\u0072\u0028\u0065\u0029\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0070\u0072\u006f\u0063\u00e9\u0064\u0065\u0072\u0020\u0061\u0075\u0020\u0072\u0065\u0074\u0072\u0061\u0069\u0074\u003f\u0020\u004c\u0061\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0073\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0072\u0072\u006f\u006e\u0074\u0020\u0070\u006c\u0075\u0073\u0020\u00ea\u0074\u0072\u0065\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u00e9\u0065\u0073\u0020\u0070\u0061\u0072\u0020\u006c\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u002e 
+file.deaccessionDialog.deaccessionDataset.tip=\u00ca\u0074\u0065\u0073\u002d\u0076\u006f\u0075\u0073\u0020\u0073\u00fb\u0072\u0028\u0065\u0029\u0020\u0064\u0065\u0020\u0076\u006f\u0075\u006c\u006f\u0069\u0072\u0020\u0072\u0065\u0074\u0069\u0072\u0065\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u003f\u0020\u004c\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0072\u0072\u0061\u0020\u0070\u006c\u0075\u0073\u0020\u006c\u0065\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u0072\u002e
+file.deaccessionDialog.dialog.selectVersion.tip=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0061\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0073\u0020\u00e0\u0020\u0072\u0065\u0074\u0069\u0072\u0065\u0072\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0064\u0069\u0066\u0066\u0075\u0073\u0069\u006f\u006e\u002e
+file.deaccessionDialog.dialog.selectVersion.header=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0061\u0020\u006f\u0075\u0020\u006c\u0065\u0073\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0073\u002e
+file.deaccessionDialog.dialog.reason.tip=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0061\u0020\u0072\u0061\u0069\u0073\u006f\u006e\u0020\u0064\u0075\u0020\u0072\u0065\u0074\u0072\u0061\u0069\u0074\u002e
+file.deaccessionDialog.dialog.reason.header=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u006c\u0061\u0020\u0072\u0061\u0069\u0073\u006f\u006e\u002e 
+file.deaccessionDialog.dialog.url.tip=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u0075\u006e\u0065\u0020\u0061\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0055\u0052\u004c\u0020\u0064\u0065\u0020\u0072\u0065\u0064\u0069\u0072\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u002e +file.deaccessionDialog.dialog.url.header=\u0041\u0064\u0072\u0065\u0073\u0073\u0065\u0020\u0055\u0052\u004c\u0020\u006e\u006f\u006e\u0020\u0076\u0061\u006c\u0069\u0064\u0065 +file.deaccessionDialog.dialog.textForReason.tip=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u006c\u0061\u0020\u0072\u0061\u0069\u0073\u006f\u006e\u0020\u0064\u0075\u0020\u0072\u0065\u0074\u0072\u0061\u0069\u0074\u002e +file.deaccessionDialog.dialog.textForReason.header=\u0045\u006e\u0074\u0072\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0072\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0075\u0070\u0070\u006c\u00e9\u006d\u0065\u006e\u0074\u0061\u0069\u0072\u0065\u0073 +file.deaccessionDialog.dialog.limitChar.tip=\u004c\u0065\u0020\u0074\u0065\u0078\u0074\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0069\u006e\u0064\u0069\u0071\u0075\u0065\u0072\u0020\u006c\u0061\u0020\u0072\u0061\u0069\u0073\u006f\u006e\u0020\u0064\u0075\u0020\u0072\u0065\u0074\u0072\u0061\u0069\u0074\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u0064\u00e9\u0070\u0061\u0073\u0073\u0065\u0072\u0020\u0031\u0030\u0030\u0030\u0020\u0063\u0061\u0072\u0061\u0063\u0074\u00e8\u0072\u0065\u0073\u002e +file.deaccessionDialog.dialog.limitChar.header=\u004c\u0069\u006d\u0069\u0074\u0065\u0020\u0064\u0065\u0020\u0031\u0030\u0030\u0030\u0020\u0063\u0061\u0072\u0061\u0063\u0074\u00e8\u0072\u0065\u0073 
+file.viewDiffDialog.header=\u0052\u0065\u006e\u0073\u0065\u0069\u0067\u006e\u0065\u006d\u0065\u006e\u0074\u0073\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u0069\u0066\u0066\u00e9\u0072\u0065\u006e\u0063\u0065\u0073\u0020\u0064\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e +file.viewDiffDialog.dialog.warning=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u0072\u0020\u0064\u0065\u0075\u0078\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0076\u006f\u0069\u0072\u0020\u006c\u0065\u0073\u0020\u0064\u0069\u0066\u0066\u00e9\u0072\u0065\u006e\u0063\u0065\u0073\u002e +file.viewDiffDialog.version=\u0056\u0065\u0072\u0073\u0069\u006f\u006e\u0020 +file.viewDiffDialog.lastUpdated=\u0044\u0065\u0072\u006e\u0069\u00e8\u0072\u0065\u0020\u006d\u0069\u0073\u0065\u0020\u00e0\u0020\u006a\u006f\u0075\u0072 +file.viewDiffDialog.fileID=\u0049\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +file.viewDiffDialog.fileName=\u004e\u006f\u006d +file.viewDiffDialog.fileType=\u0054\u0079\u0070\u0065 +file.viewDiffDialog.fileSize=\u0054\u0061\u0069\u006c\u006c\u0065 +file.viewDiffDialog.category=\u004c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073 +file.viewDiffDialog.description=\u0044\u0065\u0073\u0063\u0072\u0069\u0070\u0074\u0069\u006f\u006e +file.viewDiffDialog.fileReplaced=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u00e9 +file.viewDiffDialog.filesReplaced=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0028\u0073\u0029\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u00e9\u0028\u0073\u0029 +file.viewDiffDialog.files.header=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0073 
+file.viewDiffDialog.msg.draftFound=\u0026\u0023\u0031\u0036\u0030\u003b\u0043\u0065\u0063\u0069\u0020\u0065\u0073\u0074\u0020\u006c\u0061\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0072\u006f\u0076\u0069\u0073\u006f\u0069\u0072\u0065\u002e +file.viewDiffDialog.msg.draftNotFound=\u004c\u0061\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0072\u006f\u0076\u0069\u0073\u006f\u0069\u0072\u0065\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u00e9\u0074\u00e9\u0020\u0074\u0072\u006f\u0075\u0076\u00e9\u0065\u002e +file.viewDiffDialog.msg.versionFound=\u0026\u0023\u0031\u0036\u0030\u003b\u0043\u0065\u0063\u0069\u0020\u0065\u0073\u0074\u0020\u006c\u0061\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0022\u007b\u0030\u007d\u0022\u002e +file.viewDiffDialog.msg.versionNotFound=\u004c\u0061\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0022\u007b\u0030\u007d\u0022\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u00e9\u0074\u00e9\u0020\u0074\u0072\u006f\u0075\u0076\u00e9\u0065\u002e 
+file.metadataTip=\u0043\u006f\u006e\u0073\u0065\u0069\u006c\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u0061\u0070\u0072\u00e8\u0073\u0020\u0061\u0076\u006f\u0069\u0072\u0020\u0061\u006a\u006f\u0075\u0074\u00e9\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0063\u006c\u0069\u0071\u0075\u0065\u0072\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0020\u0062\u006f\u0075\u0074\u006f\u006e\u0020\u00ab\u005c\u0075\u0030\u0030\u0041\u0030\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u00bb\u0020\u0070\u006f\u0075\u0072\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0070\u006c\u0075\u0073\u0020\u0064\u0065\u0020\u006d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +file.addBtn=\u0053\u0061\u0075\u0076\u0065\u0067\u0061\u0072\u0064\u0065\u0072\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +file.dataset.allFiles=\u0054\u006f\u0075\u0073\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +file.downloadDialog.header=\u0054\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u0072\u0020\u006c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 
+file.downloadDialog.tip=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0063\u006f\u006e\u0066\u0069\u0072\u006d\u0065\u0072\u0020\u006f\u0075\u0020\u0072\u0065\u006d\u0070\u006c\u0069\u0072\u0020\u006c\u0027\u0069\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0020\u0072\u0065\u0071\u0075\u0069\u0073\u0065\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u006f\u0075\u0073\u0020\u0061\u0066\u0069\u006e\u0020\u0064\u0065\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0065\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +file.downloadDialog.termsTip=\u004a\u0027\u0061\u0063\u0063\u0065\u0070\u0074\u0065\u0020\u0063\u0065\u0073\u0020\u0063\u006f\u006e\u0064\u0069\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u002e +file.requestAccessTermsDialog.tip=Please confirm and/or complete the information needed below in order to request access to files in this dataset. 
+file.search.placeholder=\u0043\u0068\u0065\u0072\u0063\u0068\u0065\u0072\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u005c\u0075\u0032\u0030\u0032\u0036
+file.results.btn.sort=\u0054\u0072\u0069\u0065\u0072
+file.results.btn.sort.option.nameAZ=\u004e\u006f\u006d\u0020\u0028\u0041\u002d\u005a\u0029
+file.results.btn.sort.option.nameZA=\u004e\u006f\u006d\u0020\u0028\u005a\u002d\u0041\u0029
+file.results.btn.sort.option.newest=\u0050\u006c\u0075\u0073\u0020\u0072\u00e9\u0063\u0065\u006e\u0074
+file.results.btn.sort.option.oldest=\u0050\u006c\u0075\u0073\u0020\u0061\u006e\u0063\u0069\u0065\u006e
+file.results.btn.sort.option.size=\u0054\u0061\u0069\u006c\u006c\u0065
+file.results.btn.sort.option.type=\u0043\u0061\u0074\u00e9\u0067\u006f\u0072\u0069\u0065
+file.compute.fileRestricted=File Restricted
+file.compute.fileAccessDenied=You cannot compute on this restricted file because you do not have permission to access it.
+file.configure.Button=Configure
+file.configure.launchMessage.details=Please refresh this page once you have finished configuring your
+dataset.compute.datasetCompute=Dataset Compute Not Supported
+dataset.compute.datasetAccessDenied=You cannot compute on this dataset because you do not have permission to access all of the restricted files.
+dataset.compute.datasetComputeDisabled=You cannot compute on this dataset because this functionality is not enabled yet. Please click on a file to access computing features. 
+# dataset-widgets.xhtml= +dataset.widgets.title=\u0056\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u002b\u0020\u0057\u0069\u0064\u0067\u0065\u0074\u0073 +dataset.widgets.notPublished.why.header=\u0050\u006f\u0075\u0072\u0071\u0075\u006f\u0069\u0020\u0066\u0061\u0069\u0072\u0065\u0020\u0061\u0070\u0070\u0065\u006c\u0020\u0061\u0075\u0078\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u003f +dataset.widgets.notPublished.why.reason1=\u0041\u0075\u0067\u006d\u0065\u006e\u0074\u0065\u0020\u006c\u0061\u0020\u0076\u0069\u0073\u0069\u0062\u0069\u006c\u0069\u0074\u00e9\u0020\u0064\u0065\u0020\u0076\u006f\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0065\u006e\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u0065\u0072\u006d\u0065\u0074\u0074\u0061\u006e\u0074\u0020\u0064\u0027\u0069\u006e\u0074\u00e9\u0067\u0072\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0065\u0074\u0020\u006c\u0065\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c\u0020\u006f\u0075\u0020\u0064\u0065\u0020\u0070\u0072\u006f\u006a\u0065\u0074\u002e 
+dataset.widgets.notPublished.why.reason2=\u0050\u0065\u0072\u006d\u0065\u0074\u0020\u0061\u0075\u0078\u0020\u0061\u0075\u0074\u0072\u0065\u0073\u0020\u0064\u0065\u0020\u0070\u0061\u0072\u0063\u006f\u0075\u0072\u0069\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0061\u0069\u006e\u0073\u0069\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0073\u0061\u006e\u0073\u0020\u0071\u0075\u0069\u0074\u0074\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c\u0020\u006f\u0075\u0020\u0064\u0065\u0020\u0070\u0072\u006f\u006a\u0065\u0074\u002e +dataset.widgets.notPublished.how.header=\u0043\u006f\u006d\u006d\u0065\u006e\u0074\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073 +dataset.widgets.notPublished.how.tip1=\u0050\u006f\u0075\u0072\u0020\u0070\u006f\u0075\u0076\u006f\u0069\u0072\u0020\u0075\u0074\u0069\u006c\u0069\u0073\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u002c\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0065\u0074\u0020\u0076\u006f\u0073\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u006f\u0069\u0076\u0065\u006e\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0073\u002e 
+dataset.widgets.notPublished.how.tip2=\u0053\u0075\u0069\u0074\u0065\u0020\u00e0\u0020\u006c\u0061\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u002c\u0020\u006c\u0065\u0020\u0063\u006f\u0064\u0065\u0020\u0073\u0065\u0072\u0061\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0020\u0073\u0075\u0072\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0070\u0061\u0067\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u0075\u0069\u0073\u0073\u0069\u0065\u007a\u0020\u006c\u0065\u0020\u0063\u006f\u0070\u0069\u0065\u0072\u0020\u0065\u0074\u0020\u006c\u0027\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c\u0020\u006f\u0075\u0020\u0064\u0065\u0020\u0070\u0072\u006f\u006a\u0065\u0074\u002e +dataset.widgets.notPublished.how.tip3=\u0041\u0076\u0065\u007a\u002d\u0076\u006f\u0075\u0073\u0020\u0075\u006e\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u004f\u0070\u0065\u006e\u0053\u0063\u0068\u006f\u006c\u0061\u0072\u003f\u0020\u0053\u0069\u0020\u006f\u0075\u0069\u002c\u0020\u0061\u0070\u0070\u0072\u0065\u006e\u0065\u007a\u002d\u0065\u006e\u0020\u0064\u0061\u0076\u0061\u006e\u0074\u0061\u0067\u0065\u0020\u0073\u0075\u0072\u0020\u006c\u0027\u0061\u006a\u006f\u0075\u0074\u0020\u0064\u0065\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u0020\u003d\u0020\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0023\u0061\u0064\u0064\
u0069\u006e\u0067\u002d\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u002d\u0074\u006f\u002d\u0061\u006e\u002d\u006f\u0070\u0065\u006e\u0073\u0063\u0068\u006f\u006c\u0061\u0072\u002d\u0077\u0065\u0062\u0073\u0069\u0074\u0065\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0041\u0064\u0064\u0069\u006e\u0067\u0020\u0057\u0069\u0064\u0067\u0065\u0074\u0073\u0020\u0074\u006f\u0020\u0061\u006e\u0020\u004f\u0070\u0065\u006e\u0053\u0063\u0068\u006f\u006c\u0061\u0072\u0020\u0057\u0065\u0062\u0073\u0069\u0074\u0065\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0069\u0063\u0069\u003c\u002f\u0061\u003e\u002e +dataset.widgets.notPublished.getStarted=\u0050\u006f\u0075\u0072\u0020\u0064\u00e9\u0062\u0075\u0074\u0065\u0072\u002c\u0020\u0070\u0075\u0062\u006c\u0069\u0065\u007a\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0065\u006e\u0020\u0073\u0061\u0076\u006f\u0069\u0072\u0020\u0064\u0061\u0076\u0061\u006e\u0074\u0061\u0067\u0065\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0061\u0020\u0073\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0023\u0074\u0068\u0065\u006d\u0065\u002d\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0054\u0068\u0065\u006d\u0065\u0020\u002b\u0020\u0057\u0069\u0064\u0067\u0065\u0074\u0073\u0020\u002d\u0020\u0044\u0
061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0074\u0068\u00e8\u006d\u0065\u0020\u0065\u0074\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u003c\u002f\u0061\u003e\u0020\u0064\u0075\u0020\u0067\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u002e +dataset.widgets.editAdvanced=\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u006f\u0070\u0074\u0069\u006f\u006e\u0073\u0020\u0061\u0076\u0061\u006e\u0063\u00e9\u0065\u0073 +dataset.widgets.editAdvanced.tip=\u003c\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u004f\u0070\u0074\u0069\u006f\u006e\u0073\u0020\u0061\u0076\u0061\u006e\u0063\u00e9\u0065\u0073\u003c\u002f\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0020\u0026\u0023\u0031\u0035\u0030\u003b\u0020\u004f\u0070\u0074\u0069\u006f\u006e\u0073\u0020\u0073\u0075\u0070\u0070\u006c\u00e9\u006d\u0065\u006e\u0074\u0061\u0069\u0072\u0065\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u006f\u006e\u0066\u0069\u0067\u0075\u0072\u0065\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0020\u0073\u0075\u0072\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c\u0020\u006f\u0075\u0020\u0064\u0065\u0020\u0070\u0072\u006f\u006a\u0065\u0074\u002e 
+dataset.widgets.tip=\u0043\u006f\u0070\u0069\u0065\u007a\u0020\u0065\u0074\u0020\u0063\u006f\u006c\u006c\u0065\u007a\u0020\u0063\u0065\u0020\u0063\u006f\u0064\u0065\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u0063\u006f\u0064\u0065\u0020\u0048\u0054\u004d\u004c\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0065\u006e\u0020\u0073\u0061\u0076\u006f\u0069\u0072\u0020\u0064\u0061\u0076\u0061\u006e\u0074\u0061\u0067\u0065\u0020\u0073\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u002c\u0020\u0063\u006f\u006e\u0073\u0075\u006c\u0074\u0065\u007a\u0020\u006c\u0061\u0020\u0073\u0065\u0063\u0074\u0069\u006f\u006e\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0023\u0074\u0068\u0065\u006d\u0065\u002d\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0054\u0068\u0065\u006d\u0065\u0020\u002b\u0020\u0057\u0069\u0064\u0067\u0065\u0074\u0073\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0054\u0068\u00e8\u006d\u0065\u0020\u0065\u0074\u0020\u0077\u0069\u0064\u0067\u0065\u0074\u0073\u003c\u002f\u0061\u003e\u0020\u0064\u0075\u0020\u0067\u0075\u0069\u0064\u0065\u0020\u0064\u0027\u0075\u0074\u0069\u006c\u0069\u0073\u0061\u0074\u0069\u006f\u006e\u002e 
+dataset.widgets.citation.txt=\u0043\u0069\u0074\u0061\u0074\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.widgets.citation.tip=\u0041\u006a\u006f\u0075\u0074\u0065\u007a\u0020\u006c\u0061\u0020\u0072\u00e9\u0066\u00e9\u0072\u0065\u006e\u0063\u0065\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c\u0020\u006f\u0075\u0020\u0064\u0065\u0020\u0070\u0072\u006f\u006a\u0065\u0074\u002e +dataset.widgets.datasetFull.txt=\u0045\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.widgets.datasetFull.tip=\u0050\u0065\u0072\u006d\u0065\u0074\u0020\u0061\u0075\u0078\u0020\u0076\u0069\u0073\u0069\u0074\u0065\u0075\u0072\u0073\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u0064\u0027\u00ea\u0074\u0072\u0065\u0020\u0065\u006e\u0020\u006d\u0065\u0073\u0075\u0072\u0065\u0020\u0064\u0027\u0061\u0066\u0066\u0069\u0063\u0068\u0065\u0072\u0020\u0076\u006f\u0073\u0020\u006a\u0065\u0075\u0078\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0064\u0065\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u0072\u0020\u0064\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u002c\u0020\u0065\u0074\u0063\u002e +dataset.widgets.advanced.popup.header=\u0057\u0069\u0064\u0067\u0065\u0074\u0073\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u004f\u0070\u0074\u0069\u006f\u006e\u0073\u0020\u0061\u0076\u0061\u006e\u0063\u00e9\u0065\u0073 
+dataset.widgets.advanced.prompt=\u0045\u0078\u0070\u00e9\u0064\u0069\u0065\u0072\u0020\u0076\u0065\u0072\u0073\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c\u0020\u006c\u0027\u0055\u0052\u004c\u0020\u0070\u00e9\u0072\u0065\u006e\u006e\u0065\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0072\u00e9\u0066\u00e9\u0072\u0065\u006e\u0063\u0065\u0020\u0062\u0069\u0062\u006c\u0069\u006f\u0067\u0072\u0061\u0070\u0068\u0069\u0071\u0075\u0065\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataset.widgets.advanced.url.label=\u0055\u0052\u004c\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c +dataset.widgets.advanced.url.watermark=\u0068\u0074\u0074\u0070\u003a\u002f\u002f\u0077\u0077\u0077\u002e\u0065\u0078\u0065\u006d\u0070\u006c\u0065\u002e\u0063\u006f\u006d\u002f\u006e\u006f\u006d\u002d\u0064\u0065\u002d\u006c\u0061\u002d\u0070\u0061\u0067\u0065 +dataset.widgets.advanced.invalid.message=\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0073\u0061\u0069\u0073\u0069\u0072\u0020\u0075\u006e\u0020\u0055\u0052\u004c\u0020\u0076\u0061\u006c\u0069\u0064\u0065 +dataset.widgets.advanced.success.message=\u004d\u0069\u0073\u0065\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u0020\u0072\u00e9\u0075\u0073\u0073\u0069\u0065\u0020\u0064\u0065\u0020\u006c\u0027\u0055\u0052\u004c\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c 
+dataset.widgets.advanced.failure.message=\u004c\u0027\u0055\u0052\u004c\u0020\u0064\u0075\u0020\u0073\u0069\u0074\u0065\u0020\u0077\u0065\u0062\u0020\u0070\u0065\u0072\u0073\u006f\u006e\u006e\u0065\u006c\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u00e9\u0074\u00e9\u0020\u006d\u0069\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u0020\u0064\u0061\u006e\u0073\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataset.thumbnailsAndWidget.breadcrumbs.title=\u0056\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u002b\u0020\u0057\u0069\u0064\u0067\u0065\u0074\u0073 +dataset.thumbnailsAndWidget.thumbnails.title=\u0056\u0069\u0067\u006e\u0065\u0074\u0074\u0065 +dataset.thumbnailsAndWidget.widgets.title=\u0057\u0069\u0064\u0067\u0065\u0074\u0073 +dataset.thumbnailsAndWidget.thumbnailImage=\u0049\u006d\u0061\u0067\u0065\u0020\u0064\u0065\u0020\u006c\u0061\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065 +dataset.thumbnailsAndWidget.thumbnailImage.title=\u004c\u0065\u0020\u006c\u006f\u0067\u006f\u0020\u006f\u0075\u0020\u006c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0027\u0069\u006d\u0061\u0067\u0065\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0073\u006f\u0075\u0068\u0061\u0069\u0074\u0065\u007a\u0020\u0076\u006f\u0069\u0072\u0020\u0061\u0066\u0066\u0069\u0063\u0068\u0065\u0072\u0020\u0063\u006f\u006d\u006d\u0065\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e 
+dataset.thumbnailsAndWidget.thumbnailImage.tip=\u004c\u0065\u0073\u0020\u0074\u0079\u0070\u0065\u0073\u0020\u0064\u0027\u0069\u006d\u0061\u0067\u0065\u0073\u0020\u0070\u0072\u0069\u0073\u0065\u0073\u0020\u0065\u006e\u0020\u0063\u0068\u0061\u0072\u0067\u0065\u0020\u0073\u006f\u006e\u0074\u0020\u004a\u0050\u0047\u002c\u0020\u0054\u0049\u0046\u0020\u006f\u0075\u0020\u0050\u004e\u0047\u0020\u0065\u0074\u0020\u006e\u0065\u0020\u0064\u006f\u0069\u0076\u0065\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u00ea\u0074\u0072\u0065\u0020\u0073\u0075\u0070\u00e9\u0072\u0069\u0065\u0075\u0072\u0073\u0020\u00e0\u0020\u007b\u0030\u007d\u0020\u004b\u006f\u002e\u0020\u004c\u0061\u0020\u0074\u0061\u0069\u006c\u006c\u0065\u0020\u0064\u0027\u0061\u0066\u0066\u0069\u0063\u0068\u0061\u0067\u0065\u0020\u006d\u0061\u0078\u0069\u006d\u0061\u006c\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0069\u006d\u0061\u0067\u0065\u0020\u0065\u006e\u0020\u0074\u0061\u006e\u0074\u0020\u0071\u0075\u0065\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0064\u0027\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0065\u0073\u0074\u0020\u0064\u0065\u0020\u0034\u0038\u0020\u0070\u0069\u0078\u0065\u006c\u0073\u0020\u0064\u0065\u0020\u006c\u0061\u0072\u0067\u0065\u0075\u0072\u0020\u0070\u0061\u0072\u0020\u0034\u0038\u0020\u0070\u0069\u0078\u0065\u006c\u0073\u0020\u0064\u0065\u0020\u0068\u0061\u0075\u0074\u002e +dataset.thumbnailsAndWidget.thumbnailImage.default=\u0056\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0070\u0061\u0072\u0020\u0064\u00e9\u0066\u0061\u0075\u0074 +dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u007a\u0020\u006c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065 
+dataset.thumbnailsAndWidget.thumbnailImage.selectThumbnail=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u007a\u0020\u006c\u0061\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065 +dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable.title=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u007a\u0020\u0075\u006e\u0065\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0070\u0061\u0072\u006d\u0069\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0027\u0069\u006d\u0061\u0067\u0065\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0073\u0020\u0070\u0072\u006f\u0076\u0065\u006e\u0061\u006e\u0074\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +dataset.thumbnailsAndWidget.thumbnailImage.uploadNew=\u0054\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u0072\u0020\u0075\u006e\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 
+dataset.thumbnailsAndWidget.thumbnailImage.uploadNew.title=\u0054\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u0072\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0069\u006d\u0061\u0067\u0065\u0020\u0065\u006e\u0020\u0074\u0061\u006e\u0074\u0020\u0071\u0075\u0065\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002c\u0020\u0071\u0075\u0069\u0020\u0073\u0065\u0072\u0061\u0020\u0073\u0074\u006f\u0063\u006b\u00e9\u0020\u0073\u00e9\u0070\u0061\u0072\u00e9\u006d\u0065\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0070\u0070\u0061\u0072\u0074\u0065\u006e\u0061\u006e\u0074\u0020\u00e0\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +dataset.thumbnailsAndWidget.thumbnailImage.upload=\u0054\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u0072\u0020\u0075\u006e\u0065\u0020\u0069\u006d\u0061\u0067\u0065 +dataset.thumbnailsAndWidget.thumbnailImage.upload.invalidMsg=\u004c\u0027\u0069\u006d\u0061\u0067\u0065\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u0070\u0075\u0020\u00ea\u0074\u0072\u0065\u0020\u0074\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u00e9\u0065\u002e\u0020\u0056\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0072\u00e9\u0065\u0073\u0073\u0061\u0079\u0065\u0072\u0020\u0061\u0076\u0065\u0063\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u004a\u0050\u0047\u002c\u0020\u0054\u0049\u0046\u0020\u006f\u0075\u0020\u0050\u004e\u0047\u002e 
+dataset.thumbnailsAndWidget.success=\u0056\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006d\u0069\u0073\u0065\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u002e +dataset.thumbnailsAndWidget.removeThumbnail=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u0072\u0020\u006c\u0061\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065 +dataset.thumbnailsAndWidget.removeThumbnail.tip=\u0056\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u007a\u0020\u0071\u0075\u0065\u0020\u006c\u0061\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0065\u0074\u0020\u006e\u006f\u006e\u0020\u0070\u0061\u0073\u0020\u006c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0027\u0069\u006d\u0061\u0067\u0065\u0020\u0069\u006e\u0063\u006c\u0075\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u0050\u006f\u0075\u0072\u0020\u0063\u0065\u0020\u0066\u0061\u0069\u0072\u0065\u002c\u0020\u0061\u0063\u0063\u00e9\u0064\u0065\u007a\u0020\u00e0\u0020\u006c\u0061\u0020\u0070\u0061\u0067\u0065\u0020\u004d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u006c\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u002e +dataset.thumbnailsAndWidget.availableThumbnails=\u0056\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0073\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0073 
+dataset.thumbnailsAndWidget.availableThumbnails.tip=\u0053\u00e9\u006c\u0065\u0063\u0074\u0069\u006f\u006e\u006e\u0065\u007a\u0020\u0075\u006e\u0065\u0020\u0076\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u00e0\u0020\u0070\u0061\u0072\u0074\u0069\u0072\u0020\u0064\u0027\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u0072\u006f\u0076\u0065\u006e\u0061\u006e\u0074\u0020\u0064\u0065\u0020\u0076\u006f\u0074\u0072\u0065\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u0052\u0065\u0076\u0065\u006e\u0069\u0072\u0020\u0065\u006e\u0073\u0075\u0069\u0074\u0065\u0020\u00e0\u0020\u006c\u0061\u0020\u0070\u0061\u0067\u0065\u0020\u0056\u0069\u0067\u006e\u0065\u0074\u0074\u0065\u0020\u002b\u0020\u0057\u0069\u0064\u0067\u0065\u0074\u0073\u0020\u0070\u006f\u0075\u0072\u0020\u0065\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u0072\u0020\u0076\u006f\u0073\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0073\u002e +# file.xhtml=\u0023\u0020\u0066\u0069\u006c\u0065\u002e\u0078\u0068\u0074\u006d\u006c +file.share.fileShare=\u0050\u0061\u0072\u0074\u0061\u0067\u0065\u0072\u0020\u006c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +file.share.fileShare.tip=\u0050\u0061\u0072\u0074\u0061\u0067\u0065\u0072\u0020\u0063\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0073\u0075\u0072\u0020\u0076\u006f\u0073\u0020\u006d\u00e9\u0064\u0069\u0061\u0073\u0020\u0073\u006f\u0063\u0069\u0061\u0075\u0078\u0020\u0070\u0072\u00e9\u0066\u00e9\u0072\u00e9\u0073\u002e +file.share.fileShare.shareText=\u0041\u0066\u0066\u0069\u0063\u0068\u0065\u0072\u0020\u0063\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e +file.title.label=\u0054\u0069\u0074\u0072\u0065 
+file.citation.label=\u0052\u00e9\u0066\u00e9\u0072\u0065\u006e\u0063\u0065\u0020\u0062\u0069\u0062\u006c\u0069\u006f\u0067\u0072\u0061\u0070\u0068\u0069\u0071\u0075\u0065 +file.cite.downloadBtn=\u0043\u0069\u0074\u0065\u0072\u0020\u006c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +file.general.metadata.label=\u004d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0067\u00e9\u006e\u00e9\u0072\u0061\u006c\u0065\u0073 +file.description.label=\u0044\u0065\u0073\u0063\u0072\u0069\u0070\u0074\u0069\u006f\u006e +file.tags.label=\u004c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073 +file.lastupdated.label=\u0044\u0065\u0072\u006e\u0069\u00e8\u0072\u0065\u0020\u006d\u0069\u0073\u0065\u0020\u00e0\u0020\u006a\u006f\u0075\u0072 +file.DatasetVersion=\u0056\u0065\u0072\u0073\u0069\u006f\u006e +file.metadataTab.fileMetadata.header=\u004d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +file.metadataTab.fileMetadata.persistentid.label=\u0049\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0070\u00e9\u0072\u0065\u006e\u006e\u0065\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +file.metadataTab.fileMetadata.downloadUrl.label=\u0055\u0052\u004c\u0020\u0064\u0065\u0020\u0074\u00e9\u006c\u00e9\u0063\u0068\u0061\u0072\u0067\u0065\u006d\u0065\u006e\u0074 +file.metadataTab.fileMetadata.unf.label=\u0055\u004e\u0046 +file.metadataTab.fileMetadata.size.label=\u0054\u0061\u0069\u006c\u006c\u0065 +file.metadataTab.fileMetadata.type.label=\u0043\u0061\u0074\u00e9\u0067\u006f\u0072\u0069\u0065 +file.metadataTab.fileMetadata.description.label=\u0044\u0065\u0073\u0063\u0072\u0069\u0070\u0074\u0069\u006f\u006e +file.metadataTab.fileMetadata.publicationDate.label=\u0044\u0061\u0074\u0065\u0020\u0064\u0065\u0020\u0070\u0075\u0062\u006c\u0069\u0063\u0061\u0074\u0069\u006f\u006e 
+file.metadataTab.fileMetadata.depositDate.label=\u0044\u0061\u0074\u0065\u0020\u0064\u0065\u0020\u0064\u00e9\u0070\u00f4\u0074 +file.metadataTab.fitsMetadata.header=\u004d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0046\u0049\u0054\u0053 +file.metadataTab.provenance.header=\u0050\u0072\u006f\u0076\u0065\u006e\u0061\u006e\u0063\u0065\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +file.metadataTab.provenance.body=\u0049\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0020\u0073\u0075\u0072\u0020\u006c\u0061\u0020\u0070\u0072\u006f\u0076\u0065\u006e\u0061\u006e\u0063\u0065\u0020\u0064\u0065\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u00e0\u0020\u0076\u0065\u006e\u0069\u0072\u0020\u0064\u0061\u006e\u0073\u0020\u0075\u006e\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0075\u006c\u0074\u00e9\u0072\u0069\u0065\u0075\u0072\u0065\u005c\u0075\u0032\u0030\u0032\u0036 +file.versionDifferences.noChanges=\u0041\u0075\u0063\u0075\u006e\u0020\u0063\u0068\u0061\u006e\u0067\u0065\u006d\u0065\u006e\u0074\u0020\u0061\u0073\u0073\u006f\u0063\u0069\u00e9\u0020\u00e0\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e +file.versionDifferences.fileNotInVersion=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u006e\u006f\u006e\u0020\u0069\u006e\u0063\u006c\u0075\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e +file.versionDifferences.actionChanged=\u0043\u0068\u0061\u006e\u0067\u00e9 +file.versionDifferences.actionAdded=\u0041\u006a\u006f\u0075\u0074\u00e9 +file.versionDifferences.actionRemoved=\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u00e9 +file.versionDifferences.actionReplaced=\u0052\u0065\u006d\u0070\u006c\u0061\u0063\u00e9 
+file.versionDifferences.fileMetadataGroupTitle=\u004d\u00e9\u0074\u0061\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +file.versionDifferences.fileTagsGroupTitle=\u004c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +file.versionDifferences.descriptionDetailTitle=\u0044\u0065\u0073\u0063\u0072\u0069\u0070\u0074\u0069\u006f\u006e +file.versionDifferences.fileNameDetailTitle=\u004e\u006f\u006d\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072 +file.versionDifferences.fileAccessTitle=\u0041\u0063\u0063\u00e8\u0073\u0020\u0061\u0075\u0078\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073 +file.versionDifferences.fileRestricted=\u0041\u0063\u0063\u00e8\u0073\u0020\u0072\u00e9\u0073\u0065\u0072\u0076\u00e9 +file.versionDifferences.fileUnrestricted=\u0041\u0063\u0063\u00e8\u0073\u0020\u0073\u0061\u006e\u0073\u0020\u0072\u0065\u0073\u0074\u0072\u0069\u0063\u0074\u0069\u006f\u006e\u0073 +file.versionDifferences.fileGroupTitle=\u0046\u0069\u0063\u0068\u0069\u0065\u0072 +# File Ingest +ingest.csv.invalidHeader=Invalid header row. One of the cells is empty. +ingest.csv.lineMismatch=Mismatch between line counts in first and final passes!, {0} found on first pass, but {1} found on second. +ingest.csv.recordMismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found. +ingest.csv.nullStream=Stream can't be null. 
+# editdatafile.xhtml=\u0023\u0020\u0065\u0064\u0069\u0074\u0064\u0061\u0074\u0061\u0066\u0069\u006c\u0065\u002e\u0078\u0068\u0074\u006d\u006c +# editFilesFragment.xhtml=\u0023\u0020\u0065\u0064\u0069\u0074\u0046\u0069\u006c\u0065\u0073\u0046\u0072\u0061\u0067\u006d\u0065\u006e\u0074\u002e\u0078\u0068\u0074\u006d\u006c +file.edit.error.file_exceeds_limit=\u0043\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u00e9\u0070\u0061\u0073\u0073\u0065\u0020\u006c\u0061\u0020\u0074\u0061\u0069\u006c\u006c\u0065\u0020\u006c\u0069\u006d\u0069\u0074\u0065\u002e +# File metadata error=\u0023\u0020\u0046\u0069\u006c\u0065\u0020\u006d\u0065\u0074\u0061\u0064\u0061\u0074\u0061\u0020\u0065\u0072\u0072\u006f\u0072 +file.metadata.datafiletag.not_tabular=\u0056\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0064\u0065\u0020\u006c\u0069\u0062\u0065\u006c\u006c\u00e9\u0073\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0074\u0061\u0062\u0075\u006c\u0061\u0069\u0072\u0065\u0073\u0020\u00e0\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u006e\u006f\u006e\u0020\u0074\u0061\u0062\u0075\u006c\u0061\u0069\u0072\u0065\u002e +# File Edit Success=\u0023\u0020\u0046\u0069\u006c\u0065\u0020\u0045\u0064\u0069\u0074\u0020\u0053\u0075\u0063\u0063\u0065\u0073\u0073 +file.message.editSuccess=\u0043\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u006d\u0069\u0073\u0020\u00e0\u0020\u006a\u006f\u0075\u0072\u002e +file.message.deleteSuccess=The file has been deleted. 
+file.message.replaceSuccess=\u0043\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u00e9\u002e +# File Add/Replace operation messages=\u0023\u0020\u0046\u0069\u006c\u0065\u0020\u0041\u0064\u0064\u002f\u0052\u0065\u0070\u006c\u0061\u0063\u0065\u0020\u006f\u0070\u0065\u0072\u0061\u0074\u0069\u006f\u006e\u0020\u006d\u0065\u0073\u0073\u0061\u0067\u0065\u0073 +file.addreplace.file_size_ok=\u004c\u0061\u0020\u0074\u0061\u0069\u006c\u006c\u0065\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0065\u0073\u0074\u0020\u0061\u0070\u0070\u0072\u006f\u0070\u0072\u0069\u00e9\u0065\u002e +file.addreplace.error.file_exceeds_limit=\u004c\u0061\u0020\u0074\u0061\u0069\u006c\u006c\u0065\u0020\u0064\u0065\u0020\u0063\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0028\u007b\u0030\u007d\u0029\u0020\u0064\u00e9\u0070\u0061\u0073\u0073\u0065\u0020\u006c\u0061\u0020\u006c\u0069\u006d\u0069\u0074\u0065\u0020\u0064\u0065\u0020\u0074\u0061\u0069\u006c\u006c\u0065\u0020\u0064\u0065\u0020\u007b\u0031\u007d\u0020\u006f\u0063\u0074\u0065\u0074\u0028\u0073\u0029\u002e +file.addreplace.error.dataset_is_null=\u004c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u006e\u0075\u006c\u002e +file.addreplace.error.dataset_id_is_null=\u004c\u0027\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u006e\u0075\u006c\u002e 
+find.dataset.error.dataset_id_is_null=\u004c\u0027\u0061\u0063\u0063\u00e8\u0073\u0020\u00e0\u0020\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0062\u0061\u0073\u00e9\u0020\u0073\u0075\u0072\u0020\u0075\u006e\u0020\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0070\u00e9\u0072\u0065\u006e\u006e\u0065\u0020\u0072\u0065\u0071\u0075\u0069\u0065\u0072\u0074\u0020\u0071\u0075\u0027\u0075\u006e\u0020\u0070\u0061\u0072\u0061\u006d\u00e8\u0074\u0072\u0065\u0020\u0064\u0065\u0020\u0072\u0065\u0071\u0075\u00ea\u0074\u0065\u0020\u007b\u0030\u007d\u0020\u0073\u006f\u0069\u0074\u0020\u0070\u0072\u00e9\u0073\u0065\u006e\u0074\u002e +find.dataset.error.dataset.not.found.persistentId=\u004c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0062\u0061\u0073\u00e9\u0020\u0073\u0075\u0072\u0020\u006c\u0027\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0070\u00e9\u0072\u0065\u006e\u006e\u0065\u0020\u007b\u0030\u007d\u0020\u0065\u0073\u0074\u0020\u0069\u006e\u0074\u0072\u006f\u0075\u0076\u0061\u0062\u006c\u0065\u002e +find.dataset.error.dataset.not.found.id=\u004c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0076\u0065\u0063\u0020\u006c\u0027\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u007b\u0030\u007d\u0020\u0065\u0073\u0074\u0020\u0069\u006e\u0074\u0072\u006f\u0075\u0076\u0061\u0062\u006c\u0065\u002e 
+find.dataset.error.dataset.not.found.bad.id=\u004e\u0075\u006d\u00e9\u0072\u006f\u0020\u0064\u0027\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0069\u006e\u0063\u006f\u0072\u0072\u0065\u0063\u0074\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u007b\u0030\u007d\u002e +file.addreplace.error.dataset_id_not_found=\u0041\u0075\u0063\u0075\u006e\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u006e\u0027\u0061\u0020\u00e9\u0074\u00e9\u0020\u0074\u0072\u006f\u0075\u0076\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u005c\u0075\u0030\u0030\u0041\u0030\u003a +file.addreplace.error.no_edit_dataset_permission=\u0056\u006f\u0075\u0073\u0020\u006e\u0027\u0061\u0076\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u006c\u0061\u0020\u0070\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0020\u0064\u0065\u0020\u006d\u006f\u0064\u0069\u0066\u0069\u0065\u0072\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +file.addreplace.error.filename_undetermined=\u004c\u0065\u0020\u006e\u006f\u006d\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u00e9\u0074\u0061\u0062\u006c\u0069\u002e +file.addreplace.error.file_content_type_undetermined=\u004c\u0065\u0020\u0074\u0079\u0070\u0065\u0020\u0064\u0065\u0020\u0063\u006f\u006e\u0074\u0065\u006e\u0075\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u00e9\u0074\u0061\u0062\u006c\u0069\u002e 
+file.addreplace.error.file_upload_failed=\u004c\u0065\u0020\u0074\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u0065\u006d\u0065\u006e\u0074\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0061\u0020\u00e9\u0063\u0068\u006f\u0075\u00e9\u002e +file.addreplace.error.duplicate_file=\u0043\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0065\u0078\u0069\u0073\u0074\u0065\u0020\u0064\u00e9\u006a\u00e0\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020 +file.addreplace.error.existing_file_to_replace_id_is_null=\u004c\u0027\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u0020\u0064\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0065\u0078\u0069\u0073\u0074\u0061\u006e\u0074\u0020\u00e0\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u0072\u0020\u0064\u006f\u0069\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0066\u006f\u0075\u0072\u006e\u0069\u002e +file.addreplace.error.existing_file_to_replace_not_found_by_id=\u0046\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0065\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u006d\u0065\u006e\u0074\u0020\u006e\u006f\u006e\u0020\u0074\u0072\u006f\u0075\u0076\u00e9\u002e\u0020\u0041\u0075\u0063\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u006e\u0027\u0061\u0020\u00e9\u0074\u00e9\u0020\u0074\u0072\u006f\u0075\u0076\u00e9\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0027\u0069\u0064\u0065\u006e\u0074\u0069\u0066\u0069\u0061\u006e\u0074\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020\u007b\u0030\u007d +file.addreplace.error.existing_file_to_replace_is_null=\u004c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u00e0\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u0072\u0020\u006e\u0065\u0020\u0070\u0065\u0075\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u006e\u0075\u006c\u002e 
+file.addreplace.error.existing_file_to_replace_not_in_dataset=\u004c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u00e0\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u0072\u0020\u006e\u0027\u0061\u0070\u0070\u0061\u0072\u0074\u0069\u0065\u006e\u0074\u0020\u0070\u0061\u0073\u0020\u00e0\u0020\u0063\u0065\u0074\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +file.addreplace.error.existing_file_not_in_latest_published_version=\u0056\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u0072\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0071\u0075\u0069\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0070\u0061\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0065\u0020\u0064\u0065\u0072\u006e\u0069\u0065\u0072\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002e\u0020\u0028\u004c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0065\u0073\u0074\u0020\u006e\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u0020\u006f\u0075\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0073\u0075\u0070\u0070\u0072\u0069\u006d\u00e9\u0020\u0064\u0027\u0075\u006e\u0065\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u0020\u0070\u0072\u00e9\u0063\u00e9\u0064\u0065\u006e\u0074\u0065\u002e\u0029 +file.addreplace.content_type.header=\u0054\u0079\u0070\u0065\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0069\u0066\u0066\u00e9\u0072\u0065\u006e\u0074 
+file.addreplace.error.replace.new_file_has_different_content_type=\u004c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0027\u006f\u0072\u0069\u0067\u0069\u006e\u0065\u0020\u0028\u007b\u0030\u007d\u0029\u0020\u0065\u0074\u0020\u006c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0065\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u006d\u0065\u006e\u0074\u0020\u0028\u007b\u0031\u007d\u0029\u0020\u0073\u006f\u006e\u0074\u0020\u0064\u0065\u0073\u0020\u0074\u0079\u0070\u0065\u0073\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u0064\u0069\u0066\u0066\u00e9\u0072\u0065\u006e\u0074\u0073\u002e +file.addreplace.error.replace.new_file_same_as_replacement=\u0056\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u0072\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0061\u0076\u0065\u0063\u0020\u0065\u0078\u0061\u0063\u0074\u0065\u006d\u0065\u006e\u0074\u0020\u006c\u0065\u0020\u006d\u00ea\u006d\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e +file.addreplace.error.unpublished_file_cannot_be_replaced=\u0056\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u0072\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u006e\u006f\u006e\u0020\u0070\u0075\u0062\u006c\u0069\u00e9\u002e\u0020\u0053\u0075\u0070\u0070\u0072\u0069\u006d\u0065\u007a\u002d\u006c\u0065\u0020\u0061\u0075\u0020\u006c\u0069\u0065\u0075\u0020\u0064\u0065\u0020\u006c\u0065\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u0072\u002e 
+file.addreplace.error.ingest_create_file_err=\u0055\u006e\u0065\u0020\u0065\u0072\u0072\u0065\u0075\u0072\u0020\u0073\u0027\u0065\u0073\u0074\u0020\u0070\u0072\u006f\u0064\u0075\u0069\u0074\u0065\u0020\u006c\u006f\u0072\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u0061\u006a\u006f\u0075\u0074\u0020\u0064\u0075\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u002e +file.addreplace.error.initial_file_list_empty=\u0055\u006e\u0065\u0020\u0065\u0072\u0072\u0065\u0075\u0072\u0020\u0073\u0027\u0065\u0073\u0074\u0020\u0070\u0072\u006f\u0064\u0075\u0069\u0074\u0065\u0020\u0065\u0074\u0020\u006c\u0065\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u00e9\u0074\u00e9\u0020\u0061\u006a\u006f\u0075\u0074\u00e9\u002e +file.addreplace.error.initial_file_list_more_than_one=\u0056\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0070\u006f\u0075\u0076\u0065\u007a\u0020\u0070\u0061\u0073\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u0072\u0020\u0075\u006e\u0020\u0073\u0065\u0075\u006c\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0070\u0061\u0072\u0020\u0070\u006c\u0075\u0073\u0069\u0065\u0075\u0072\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u002e\u0020\u004c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0061\u0076\u0065\u007a\u0020\u0074\u00e9\u006c\u00e9\u0076\u0065\u0072\u0073\u00e9\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0069\u006e\u0067\u00e9\u0072\u00e9\u0020\u0064\u0061\u006e\u0073\u0020\u0070\u006c\u0075\u0073\u0069\u0065\u0075\u0072\u0073\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u002e 
+file.addreplace.error.final_file_list_empty=\u0049\u006c\u0020\u006e\u0027\u0079\u0020\u0061\u0020\u0070\u0061\u0073\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0073\u0020\u00e0\u0020\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u002e\u0020\u0028\u0043\u0065\u0074\u0074\u0065\u0020\u0065\u0072\u0072\u0065\u0075\u0072\u0020\u006e\u0065\u0020\u0064\u0065\u0076\u0072\u0061\u0069\u0074\u0020\u0070\u0061\u0073\u0020\u0073\u0065\u0020\u0070\u0072\u006f\u0064\u0075\u0069\u0072\u0065\u0020\u0073\u0069\u0020\u006c\u0061\u0020\u0073\u00e9\u0071\u0075\u0065\u006e\u0063\u0065\u0020\u0064\u0065\u0073\u0020\u00e9\u0074\u0061\u0070\u0065\u0073\u0020\u0061\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u0073\u0070\u0065\u0063\u0074\u00e9\u0065\u002e\u0029 +file.addreplace.error.only_replace_operation=\u0043\u0065\u0063\u0069\u0020\u006e\u0065\u0020\u0064\u0065\u0076\u0072\u0061\u0069\u0074\u0020\u00ea\u0074\u0072\u0065\u0020\u0061\u0070\u0070\u0065\u006c\u00e9\u0020\u0071\u0075\u0065\u0020\u0070\u006f\u0075\u0072\u0020\u006c\u0065\u0073\u0020\u006f\u0070\u00e9\u0072\u0061\u0074\u0069\u006f\u006e\u0073\u0020\u0064\u0065\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u0065\u006d\u0065\u006e\u0074\u0020\u0064\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0021 +file.addreplace.error.failed_to_remove_old_file_from_dataset=\u0049\u006d\u0070\u006f\u0073\u0073\u0069\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0072\u0065\u0074\u0069\u0072\u0065\u0072\u0020\u0075\u006e\u0020\u0061\u006e\u0063\u0069\u0065\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0064\u0075\u0020\u006e\u006f\u0075\u0076\u0065\u006c\u0020\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0076\u0065\u0072\u0073\u0069\u006f\u006e\u006e\u00e9\u002e 
+file.addreplace.error.add.add_file_error=\u0049\u006d\u0070\u006f\u0073\u0073\u0069\u0062\u006c\u0065\u0020\u0064\u0027\u0061\u006a\u006f\u0075\u0074\u0065\u0072\u0020\u0075\u006e\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u00e0\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e +file.addreplace.error.phase2_called_early_no_new_files=\u0055\u006e\u0065\u0020\u0065\u0072\u0072\u0065\u0075\u0072\u0020\u0073\u0027\u0065\u0073\u0074\u0020\u0070\u0072\u006f\u0064\u0075\u0069\u0074\u0065\u0020\u006c\u006f\u0072\u0073\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0072\u0065\u0067\u0069\u0073\u0074\u0072\u0065\u006d\u0065\u006e\u0074\u0020\u0064\u0065\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u002e\u0020\u0041\u0075\u0063\u0075\u006e\u0020\u006e\u006f\u0075\u0076\u0065\u0061\u0075\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u006e\u0027\u0061\u0020\u00e9\u0074\u00e9\u0020\u0074\u0072\u006f\u0075\u0076\u00e9\u002e +file.addreplace.success.add=\u004c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0061\u0020\u0062\u0069\u0065\u006e\u0020\u00e9\u0074\u00e9\u0020\u0061\u006a\u006f\u0075\u0074\u00e9\u0021 +file.addreplace.success.replace=\u004c\u0065\u0020\u0066\u0069\u0063\u0068\u0069\u0065\u0072\u0020\u0061\u0020\u0062\u0069\u0065\u006e\u0020\u00e9\u0074\u00e9\u0020\u0072\u0065\u006d\u0070\u006c\u0061\u0063\u00e9\u0021 +file.addreplace.error.auth=\u004c\u0061\u0020\u0063\u006c\u00e9\u0020\u0041\u0050\u0049\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0070\u0061\u0073\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u002e 
+file.addreplace.error.invalid_datafile_tag=\u004c\u0069\u0062\u0065\u006c\u006c\u00e9\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0074\u0061\u0062\u0075\u006c\u0061\u0069\u0072\u0065\u0073\u0020\u006e\u006f\u006e\u0020\u0076\u0061\u006c\u0069\u0064\u0065\u005c\u0075\u0030\u0030\u0041\u0030\u003a\u0020 +# 500.xhtml=\u0023\u0020\u0035\u0030\u0030\u002e\u0078\u0068\u0074\u006d\u006c +error.500.page.title=\u0035\u0030\u0030\u0020\u002d\u0020\u0045\u0072\u0072\u0065\u0075\u0072\u0020\u0069\u006e\u0074\u0065\u0072\u006e\u0065\u0020\u0064\u0075\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072 +error.500.message=\u003c\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0045\u0072\u0072\u0065\u0075\u0072\u0020\u0069\u006e\u0074\u0065\u0072\u006e\u0065\u0020\u0064\u0075\u0020\u0073\u0065\u0072\u0076\u0065\u0075\u0072\u003c\u002f\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0020\u002d\u0020\u0055\u006e\u0065\u0020\u0065\u0072\u0072\u0065\u0075\u0072\u0020\u0069\u006e\u0061\u0074\u0074\u0065\u006e\u0064\u0075\u0065\u0020\u0073\u0027\u0065\u0073\u0074\u0020\u0070\u0072\u006f\u0064\u0075\u0069\u0074\u0065\u002c\u0020\u0061\u0075\u0063\u0075\u006e\u0065\u0020\u0069\u006e\u0066\u006f\u0072\u006d\u0061\u0074\u0069\u006f\u006e\u0020\u0073\u0075\u0070\u0070\u006c\u00e9\u006d\u0065\u006e\u0074\u0061\u0069\u0072\u0065\u0020\u006e\u0027\u0065\u0073\u0074\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u002e\u0020 +# 404.xhtml= +error.404.page.title=\u0034\u0030\u0034\u0020\u002d\u0020\u0050\u0061\u0067\u0065\u0020\u006e\u006f\u006e\u0020\u0074\u0072\u006f\u0075\u0076\u00e9\u0065 
+error.404.message=\u003c\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0050\u0061\u0067\u0065\u0020\u006e\u006f\u006e\u0020\u0074\u0072\u006f\u0075\u0076\u00e9\u0065\u003c\u002f\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0020\u002d\u0020\u004c\u0061\u0020\u0070\u0061\u0067\u0065\u0020\u0071\u0075\u0065\u0020\u0076\u006f\u0075\u0073\u0020\u0063\u0068\u0065\u0072\u0063\u0068\u0065\u007a\u0020\u006e\u0027\u0061\u0020\u0070\u0061\u0073\u0020\u00e9\u0074\u00e9\u0020\u0074\u0072\u006f\u0075\u0076\u00e9\u0065\u002e\u0020 +# 403.xhtml=\u0023\u0020\u0034\u0030\u0033\u002e\u0078\u0068\u0074\u006d\u006c +error.403.page.title=\u0034\u0030\u0033\u0020\u002d\u0020\u004e\u006f\u006e\u0020\u0061\u0075\u0074\u006f\u0072\u0069\u0073\u00e9 +error.403.message=\u003c\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u004e\u006f\u006e\u0020\u0061\u0075\u0074\u006f\u0072\u0069\u0073\u00e9\u003c\u002f\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0020\u002d\u0020\u0056\u006f\u0075\u0073\u0020\u006e\u0027\u00ea\u0074\u0065\u0073\u0020\u0070\u0061\u0073\u0020\u0061\u0075\u0074\u006f\u0072\u0069\u0073\u00e9\u0020\u00e0\u0020\u0076\u006f\u0069\u0072\u0020\u0063\u0065\u0074\u0074\u0065\u0020\u0070\u0061\u0067\u0065\u002e +# general error - support message= +error.support.message=\u0053\u0069\u0020\u0076\u006f\u0075\u0073\u0020\u0070\u0065\u006e\u0073\u0065\u007a\u0020\u0071\u0075\u0027\u0069\u006c\u0020\u0073\u0027\u0061\u0067\u0069\u0074\u0020\u0064\u0027\u0075\u006e\u0065\u0020\u0065\u0072\u0072\u0065\u0075\u0072\u002c\u0020\u0076\u0065\u0075\u0069\u006c\u006c\u0065\u007a\u0020\u0063\u006f\u006e\u0074\u0061\u0063\u0074\u0065\u0072\u0020\u007b\u0030\u007d\u0020\u0070\u006f\u0075\u0072\u0020\u006f\u0062\u0074\u0065\u006e\u0069\u0072\u0020\u0064\u0065\u0020\u006c\u0027\u0061\u0069\u0064\u0065\u002e +# citation-frame.xhtml= 
+citationFrame.banner.message=\u0053\u0069\u0020\u006c\u0065\u0020\u0073\u0069\u0074\u0065\u0020\u0063\u0069\u002d\u0064\u0065\u0073\u0073\u006f\u0075\u0073\u0020\u006e\u0065\u0020\u0073\u0065\u0020\u0063\u0068\u0061\u0072\u0067\u0065\u0020\u0070\u0061\u0073\u002c\u0020\u006c\u0065\u0073\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073\u0020\u0061\u0072\u0063\u0068\u0069\u0076\u00e9\u0065\u0073\u0020\u0073\u006f\u006e\u0074\u0020\u0064\u0069\u0073\u0070\u006f\u006e\u0069\u0062\u006c\u0065\u0073\u0020\u0064\u0061\u006e\u0073\u0020\u007b\u0030\u007d\u0020\u007b\u0031\u007d\u002e\u0020\u007b\u0032\u007d +citationFrame.banner.message.here=\u0069\u0063\u0069 +citationFrame.banner.closeIcon=\u0046\u0065\u0072\u006d\u0065\u0072\u0020\u0063\u0065\u0020\u006d\u0065\u0073\u0073\u0061\u0067\u0065\u002c\u0020\u0061\u006c\u006c\u0065\u0072\u0020\u0064\u0061\u006e\u0073\u0020\u006c\u0027\u0065\u006e\u0073\u0065\u006d\u0062\u006c\u0065\u0020\u0064\u0065\u0020\u0064\u006f\u006e\u006e\u00e9\u0065\u0073 +citationFrame.banner.countdownMessage=\u0043\u0065\u0020\u006d\u0065\u0073\u0073\u0061\u0067\u0065\u0020\u0073\u0065\u0020\u0066\u0065\u0072\u006d\u0065\u0072\u0061\u0020\u0064\u0061\u006e\u0073\u0020 +citationFrame.banner.countdownMessage.seconds=\u0073\u0065\u0063\u006f\u006e\u0064\u0065\u0073 +# Friendly AuthenticationProvider names=\u0023\u0020\u0046\u0072\u0069\u0065\u006e\u0064\u006c\u0079\u0020\u0041\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0050\u0072\u006f\u0076\u0069\u0064\u0065\u0072\u0020\u006e\u0061\u006d\u0065\u0073 +authenticationProvider.name.builtin=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +authenticationProvider.name.null=\u0028\u004c\u0065\u0020\u0066\u006f\u0075\u0072\u006e\u0069\u0073\u0073\u0065\u0075\u0072\u0020\u0065\u0073\u0074\u0020\u0069\u006e\u0063\u006f\u006e\u006e\u0075\u0029 +authenticationProvider.name.github=\u0047\u0069\u0074\u0048\u0075\u0062 
+authenticationProvider.name.google=\u0047\u006f\u006f\u0067\u006c\u0065 +authenticationProvider.name.orcid=\u004f\u0052\u0043\u0069\u0044 +authenticationProvider.name.orcid-sandbox=\u0042\u0061\u0063\u0020\u00e0\u0020\u0073\u0061\u0062\u006c\u0065\u0020\u004f\u0052\u0043\u0069\u0044 +authenticationProvider.name.shib=\u0053\u0068\u0069\u0062\u0062\u006f\u006c\u0065\u0074\u0068 +ingest.csv.invalidHeader=Invalid header row. One of the cells is empty. +ingest.csv.lineMismatch=Mismatch between line counts in first and final passes!, {0} found on first pass, but {1} found on second. +ingest.csv.recordMismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found. +ingest.csv.nullStream=Stream can't be null. +\u0043\u0068\u0061\u006d\u0070\u0020\u006f\u0062\u006c\u0069\u0067\u0061\u0074\u006f\u0069\u0072\u0065 diff --git a/dataversedock/lang.properties/Bundle_ua.properties b/dataversedock/lang.properties/Bundle_ua.properties new file mode 100644 index 0000000..0c33ddd --- /dev/null +++ b/dataversedock/lang.properties/Bundle_ua.properties @@ -0,0 +1,1746 @@ +dataverse=Dataverse +newDataverse=Новий Dataverse +hostDataverse=Головний Dataverse +dataverses=Dataverses +passwd=Пароль +dataset=Набір даних +datasets=Набори даних +newDataset=Новий набір даних +files=Файли +file=Файл +restricted=Обмежений +restrictedaccess=Обмежений Доступ +find=Знайти +search=Шукати +unpublished=Не опубліковано +cancel=Скасувати +ok=OK +saveChanges=Зберегти зміни +acceptTerms=Погодитись +submit=Надати +signup=Зареєструватися +login=Увійти +email=Електронна пошта +account=Акаунт +requiredField=Обов'язкове поле +new=Новий +identifier=Ідентифікатор +description=Опис +subject=Тема +close=Закрити +preview=Попередній перегляд +continue=Продовжити +name=Ім'я +institution=Установа +position=Позиція +affiliation=Приналежність +createDataverse=Створити Dataverse +remove=Видалити +done=Зроблено +editor=Помічник +manager=Менеджер +curator=Куратор +explore=Дослідити 
+download=Завантажити +deaccession=Припинення дії +share=Поділитись +link=Посилання +linked=Пов'язані +harvested=Заготовлені +apply=Застосувати +add=Додати +delete=Стирати +yes=Так +no=Ні +previous=Попередній +next=Наступний +first=Перший +last=Останній +more=Більше... +less=Менше... +select=Вибрати... +selectedFiles=Вибрані Файли +htmlAllowedTitle=Дозволені HTML Теги +htmlAllowedMsg=Це поле підтримує лише певні HTML теги. +htmlAllowedTags=, ,
                        &lt;a&gt;, &lt;b&gt;, &lt;blockquote&gt;, &lt;br&gt;, &lt;code&gt;, &lt;del&gt;, &lt;dd&gt;, &lt;dl&gt;, &lt;dt&gt;, &lt;em&gt;, &lt;hr&gt;, &lt;h1&gt;-&lt;h3&gt;, &lt;i&gt;, &lt;img&gt;, &lt;kbd&gt;, &lt;li&gt;, &lt;ol&gt;, &lt;p&gt;, &lt;pre&gt;, &lt;s&gt;, &lt;sup&gt;, &lt;sub&gt;, &lt;strong&gt;, &lt;strike&gt;, &lt;ul&gt;
                            + +# dataverse_header.xhtml +header.status.header=Статус +header.search.title=Шукати всі dataverses... +header.about=Про +header.support=Підтримка +header.guides=Довідники +header.guides.user=Довідник користувача +header.guides.developer=Довідник розробника +header.guides.installation=Керівництво по встановленню +header.guides.api=Довідник API +header.guides.admin= Довідник адміна +header.signUp=Зареєструватися +header.logOut=Вийти +header.accountInfo=Дані про акаунт +header.dashboard=Панель інструментів +header.user.selectTab.dataRelated=Мої дані +header.user.selectTab.notifications=Сповіщення +header.user.selectTab.accountInfo=Інформація про акаунт +header.user.selectTab.groupsAndRoles=Групи + Ролі +header.user.selectTab.apiToken=API токен + +# dataverse_template.xhtml +head.meta.description=Tвін Dataverse Project - це програма з відкритим кодом для обміну, цитування та архівування даних. Dataverse забезпечує надійну інфраструктуру для розпорядників даних для розміщення та архівування даних, +надаючи дослідникам простий спосіб поділитися та отримати вдячність за свої дані. +body.skip=Перейти до основного змісту + +# dataverse_footer.xhtml +footer.copyright=Авторське право © {0} +footer.widget.datastored=Дані зберігаються в {0}. +footer.widget.login=Увійти до +footer.privacyPolicy=Політика конфіденційності +footer.poweredby=Працює на +footer.dataverseProject=The Dataverse Project + +# messages.xhtml +messages.error=Помилка +messages.success=Успіх! +messages.info=Інформація +messages.validation=Помилка перевірки +messages.validation.msg=Необхідні поля були пропущені або виникла помилка перевірки. Будь ласка, прокрутіть вниз, щоб переглянути деталі. 
+ +# contactFormFragment.xhtml +contact.header=Contact {0} +contact.dataverse.header=Електронна пошта Dataverse Contact +contact.dataset.header=Електронна пошта Dataset Contact +contact.to=До +contact.support=Підтримка +contact.from=Від +contact.from.required=Потрібна електронна адреса користувача. +contact.from.invalid=Електронна пошта недійсна. +contact.subject=Тема +contact.subject.required=Потрібна тема +contact.subject.selectTab.top=Вибрати тему... +contact.subject.selectTab.support=Питання до підтримки +contact.subject.selectTab.dataIssue=Data Issue +contact.msg=Повідомлення +contact.msg.required=Текст повідомлення обов'язковий. +contact.send=Відправити повідомлення +contact.question=Будь ласка, заповніть це, щоб довести, що ви не робот. +contact.sum.required=Потрібне значення +contact.sum.invalid=Неправильна сума, будь ласка, спробуйте ще раз. +contact.sum.converterMessage=Будь ласка, введіть номер. +contact.contact=Контакт + +# dataverseuser.xhtml +account.info=Інформація про акаунт +account.edit=Редагувати акаунт +account.apiToken=API Token +user.isShibUser=Інформація про акаунт не може бути змінена при вході через акаунт установи. +user.helpShibUserMigrateOffShibBeforeLink=Залишаєте свою установу? Будь ласка зв'яжіться з нами +user.helpShibUserMigrateOffShibAfterLink=для допомоги. +user.helpOAuthBeforeLink=Ваш Dataverse акаунт використовує {0} для входу. Якщо ви зацікавлені в зміні методів входу, зв'яжіться з нами +user.helpOAuthAfterLink=Для допомоги. +user.lostPasswdTip=Якщо ви загубили або забули свій пароль, введіть своє ім'я користувача або електронну адресу нижче та натисніть кнопку "Надіслати". Ми надішлемо вам електронний лист із вашим новим паролем. +user.dataRelatedToMe=Мої дані +wasCreatedIn=, створено в +wasCreatedTo=, додано до +wasSubmittedForReview=, було подано на розгляд для опублікування в +wasPublished=, опубліковано в +wasReturnedByReviewer=, був повернений куратором +# TODO: Confirm that "toReview" can be deleted. 
+toReview=Не забудьте опублікувати його або відправити назад автору! +worldMap.added=Набір даних містив шар даних WorldMap. +# Bundle file editors, please note that "notification.welcome" is used in a unit test. +notification.welcome=Ласкаво просимо до {0}! Почніть з додавання чи пошуку даних. Є питання? Перевірте {1}. Хочете протестувати функції Dataverse? Скористайтесь {2}. Також +перевірте ваш вітальний лист, щоб підтвердити свою адресу. +notification.demoSite=Демо-сайт +notification.requestFileAccess=Доступ до файлу, який потрібен для набору даних: {0}. + + + + + +notification.grantFileAccess= Доступ надано для файлів у наборі даних: {0}. +notification.rejectFileAccess=Доступ відхилено для запитаних файлів у наборі даних: {0}. +notification.createDataverse={0} створено в {1} . Щоб дізнатись більше про те, що ви можете зробити з dataverse, перегляньте {2}. +notification.dataverse.management.title= Управління Dataverse - Довідник користувача Dataverse +notification.createDataset={0} створено в {1}. Щоб дізнатись більше про те, що ви можете зробити з набором даних, перегляньте {2}. +notification.dataset.management.title= Управління набором даних - Довідник користувача набором даних +notification.wasSubmittedForReview={0} було подано на розгляд для опублікування в {1}. Не забудьте його опублікувати або відправити назад автору \! +notification.wasReturnedByReviewer={0} було повернено куратором {1}. +notification.wasPublished={0} опубліковано в {1}. +notification.worldMap.added={0}, До набору даних доданий шар даних WorldMap. +notification.maplayer.deletefailed= Не вдалося видалити шар карти, пов'язаний з файлом обмеженого доступу {0} з WorldMap. Будь ласка, повторіть спробу, або зв'яжіться з WorldMap тв/або Dataverse підтримка (Набір даних: {1}) +notification.generic.objectDeleted= dataverse, набір даних або файл для цього сповіщення були видалені. +notification.access.granted.dataverse= Ви отримали роль {0} для {1}. 
+notification.access.granted.dataset= Ви отримали роль {0} для {1}. +notification.access.granted.datafile= Ви отримали роль {0} для файлу в {1}. +notification.access.granted.fileDownloader.additionalDataverse={0} Тепер у вас є доступ до всіх опублікованих обмежених і необмежених файлів у даному Dataverse. +notification.access.granted.fileDownloader.additionalDataset={0} Тепер у вас є доступ до всіх опублікованих обмежених і необмежених файлів у цьому наборі даних . +notification.access.revoked.dataverse= Ви були вилучені з ролі в {0}. +notification.access.revoked.dataset= Ви були вилучені з ролі в {0}. +notification.access.revoked.datafile= Ви були вилучені з ролі в {0}. +notification.checksumfail= Помилка при перевірці контрольної суми одного або кількох файлів у вашому завантаженні для набору даних {0}. Будь ласка, повторно запустіть сценарій завантаження. Якщо проблема не зникне, зв'яжіться з службою підтримки. +notification.mail.import.filesystem= Набір даних {2} ({0}/dataset.xhtml?persistentId={1}) успішно завантажено та підтверджено. +notification.import.filesystem= Набір даних {1} успішно завантажено та підтверджено. +notification.import.checksum={1}, до набору даних додано контрольні суми файлів за допомогою пакетного завдання. +removeNotification= Видалити сповіщення +groupAndRoles.manageTips= Тут ви можете отримати доступ до всіх груп, до яких ви належите, і до управління ними, а також призначеними ролями. +user.signup.tip= Чому необхідно мати Dataverse акаунт? Щоб створити власний dataverse, налаштувати його, додати набори даних або замовити доступ до обмежених файлів. +user.signup.otherLogInOptions.tip= Ви також можете створити Dataverse акаунт за допомогою одного з наших параметрів входу . +user.username.illegal.tip= Від 2 до 60 символів, і можна використовувати "a-z", "0-9", "_" для вашого імені користувача. +user.username= Ім'я користувача +user.username.taken= Це ім'я вже використовується. 
+user.username.invalid= Це ім'я користувача містить недійсний символ або не відповідає довжині (2-60 символів). +user.username.valid= Створіть дійсне ім'я користувача довжиною від 2 до 60 символів, що містить букви (a-z), цифри (0-9), риски (-), підкреслення (_) та крапки (.). +user.noPasswd= Немає пароля +user.currentPasswd= поточний пароль +user.currentPasswd.tip= Будь ласка, введіть поточний пароль для цього акаунту. +user.passwd.illegal.tip= Пароль повинен містити не менше 6 символів, включати в себе одну літеру та одну цифру, а також можуть бути використані спеціальні символи. +user.rePasswd= Повторно введіть пароль. +user.rePasswd.tip= Повторно введіть пароль, який ви вказали вище +user.firstName= ім'я +user.firstName.tip= Ім'я або назва, які ви хочете використовувати для цього акаунту. +user.lastName= Прізвище +user.lastName.tip= Прізвище, яке ви хочете використовувати для цього акаунту. +user.email.tip= дійсна адреса електронної пошти, до якої ви маєте доступ, щоб можна було з вами зв'язатися. +user.email.taken= Ця електронна адреса вже прийнята. +user.affiliation.tip= Установа, до якої ви належите. +user.position=Посада +user.position.tip= Ваша роль чи звання в установі, до якої ви належите; наприклад: персонал, факультет, студент і т. д. +user.acccountterms= Загальні умови користування +user.acccountterms.tip= Загальні положення та умови користування програмою та послугами. +user.acccountterms.required= Будь ласка, зробіть відмітку про те, що ви погоджуєтесь із Загальними умовами користування. +user.acccountterms.iagree= Я прочитав та погоджуюсь із загальними умовами користування Dataverse, як зазначено вище. +user.createBtn= Створити акаунт. + +user.updatePassword.welcome= Ласкаво просимо до Dataverse {0}, {1} +user.updatePassword.warning= Після випуску нашої нової версії Datavlesh 4.0, вимоги до пароля та Загальних умов використання оновлено. 
Оскільки ви використовуєте Dataverse вперше після оновлення, вам потрібно створити новий пароль і погодитися з новими загальними умовами користування. +user.updatePassword.password={0} +authenticationProvidersAvailable.tip={0} Немає активних постачальників аутентифікації {1} Якщо ви системний адміністратор, будь ласка, увімкніть її за допомогою API. {2} Якщо ви не є системним адміністратором, будь ласка, зв'яжіться з тим, хто відповідає за вашу установу. +passwdVal.passwdReq.title= Ваш пароль повинен містити: +passwdVal.passwdReq.goodStrength = паролі, що складаються із щонайменше {0} символів, не пілягають усім іншим вимогам. +passwdVal.passwdReq.lengthReq = Щонайменше {0} символів +passwdVal.passwdReq.characteristicsReq = Щонайменше 1 символ з {0} наступних видів: +passwdVal.passwdReq.notInclude = Він не може включати: +passwdVal.passwdReq.consecutiveDigits =Більше ніж {0} цифр у рядку +passwdVal.passwdReq.dictionaryWords = Слова зі словника +passwdVal.passwdReq.unknownPasswordRule = Невідомо, зверніться до свого адміністратора. +#printf syntax used to pass to passay library синтаксис, який використовується для переходу до бібліотеки +passwdVal.expireRule.errorCode =Не дійсний +passwdVal.expireRule.errorMsg = Термін паролю закінчився % 1 $ s днів і він є недійсним. +passwdVal.goodStrengthRule.errorMsg = Примітка: Паролі з довжиною символів %1$s або більше завжди дійсні. +passwdVal.goodStrengthRule.errorCode =NO_GOODSTRENGTH не дійсний +passwdVal.passwdReset.resetLinkTitle = Посилання для зміни пароля +passwdVal.passwdReset.resetLinkDesc = Ваше посилання для для зміни пароля недійсне. +passwdVal.passwdReset.valBlankLog = новий пароль порожній. +passwdVal.passwdReset.valFacesError = Помилка паролю +passwdVal.passwdReset.valFacesErrorDesc = Будь ласка, введіть новий пароль для свого акаунту. +passwdVal.passwdValBean.warnDictionaryRead = Словник був встановлений, але такого слова там немає. 
+passwdVal.passwdValBean.warnDictionaryObj =PwDictionaries словники не встановлені, і файл паролю за замовчуванням не знайдено: +passwdVal.passwdValBean.warnSetStrength = Значення PwGoodStrength {0} конкурує зі значенням PwMinLength {1} і додається до {2} +#loginpage.xhtml +login.System= Система входу +login.forgot.text= Забули пароль? +login.builtin= Акаунт Dataverse +login.institution= Акаунт установи +login.institution.blurb= Увійдіть або зареєструйтеся за допомогою акаунту своєї установи — дізнайтеся більшe . +login.institution.support.beforeLink= Залишаєте свою установу? Будь ласка зв'яжіться з +login.institution.support.afterLink= для допомоги. +login.builtin.credential.usernameOrEmail= Ім'я користувача / електронна пошта +login.builtin.credential.password= Пароль +login.builtin.invalidUsernameEmailOrPassword= Введене ім'я користувача, електронна адреса або пароль недійсні. Потрібна допомога для доступу до вашого акаунту? +# як ми здійснюємо помилку пароля? Через помилку оновлення пароля? Див. +https://github.com/IQSS/dataverse/pull/2922 +login.error= Помилка перевірки імені користувача, електронної адреси або пароля. Будь ласка спробуйте ще раз. Якщо проблема не зникне, зв'яжіться з адміністратором. +user.error.cannotChangePassword= Вибачте, ваш пароль не може бути змінений. Будь ласка, зв'яжіться зі своїм системним адміністратором. +user.error.wrongPassword= Вибачте, невірний пароль. +login.button= Увійти з {0} +login.button.orcid= Створіть або підключіть ваш ORCID + +# authentication providers постачальники аутентифікації +auth.providers.title= Інші опції +auth.providers.tip= Ви можете перетворити Dataverse акаунт, щоб скористатись однією із наведених вище оцій. Learn more. 
+auth.providers.title.builtin= Ім'я користувача / електронна пошта +auth.providers.title.shib= Ваше установа +auth.providers.title.orcid=ORCID +auth.providers.title.google=Google +auth.providers.title.github=GitHub +auth.providers.blurb= Увійдіть або зареєструйтесь у своєму {0} акаунті — learn more. Виникли проблеми? Будь ласка, зв'яжіться з {3}для допомоги. +auth.providers.persistentUserIdName.orcid=ORCID iD +auth.providers.persistentUserIdName.github=ID +auth.providers.persistentUserIdTooltip.orcid=ORCID надає постійний цифровий ідентифікатор, який відрізняє вас від інших дослідників. +auth.providers.persistentUserIdTooltip.github=GitHub призначає унікальний номер для кожного користувача. +auth.providers.orcid.insufficientScope=Dataverse не було надано дозвіл на читання користувацьких даних з ORCID. +# Friendly AuthenticationProvider names зручні для користувачів постачальники аутентифікації +authenticationProvider.name.builtin=Dataverse +authenticationProvider.name.null=(постачальник невідомий) +authenticationProvider.name.github=GitHub +authenticationProvider.name.google=Google +authenticationProvider.name.orcid=ORCiD +authenticationProvider.name.orcid-sandbox=ORCiD Sandbox +authenticationProvider.name.shib=Shibboleth + +#confirmemail.xhtml +confirmEmail.pageTitle= Перевірка електронної пошти +confirmEmail.submitRequest= Підтвердити електронну пошту +confirmEmail.submitRequest.success= Верифікаційний електронний лист надіслано на адресу {0}. Зверніть увагу, що посилання на підтвердження закінчиться після {1}. +confirmEmail.details.success= Електронна адреса підтверджена! +confirmEmail.details.failure= Ми не змогли підтвердити адресу вашої електронної пошти. Перейдіть на сторінку "Інформація про акаунт" та натисніть кнопку "Підтвердити електронну адресу". 
+confirmEmail.details.goToAccountPageButton= Перейти до інформації про акаунт +confirmEmail.notVerified= Не підтверджено +confirmEmail.verified= підтверджено + +#shib.xhtml +shib.btn.convertAccount= Перетворити акаунт +shib.btn.createAccount= Створити акаунт +shib.askToConvert= Ви хочете перетворити свій Dataverse акаунт, щоб завжди використовувати логін вашої установи? + +# Bundle file editors, please note that "shib.welcomeExistingUserMessageDefaultInstitution" is used in a unit test = Логін Вашої установи для {0} відповідає електронній адресі, яка вже використовується для Dataverse акаунту. Введіть свій поточний Dataverse пароль нижче, щоб ваш існуючий Dataverse акаунт можна було перетворити для використовування логіну вашої установи. Після перетворення вам потрібно буде використовувати лише логін вашої установи. + +# Bundle file editors, please note that "shib.welcomeExistingUserMessageDefaultInstitution" is used in a unit test +shib.welcomeExistingUserMessageDefaultInstitution=ваша установа +shib.dataverseUsername= Ім'я користувача Dataverse +shib.currentDataversePassword=Поточний пароль Dataverse +shib.accountInformation= Інформація про акаунт +shib.offerToCreateNewAccount= Ця інформація надається вашою установою та буде використана для створення вашого Dataverse акаунту. +shib.passwordRejected= Помилка перевірки - Ваш акаунт можна конвертувати, лише якщо ви введете правильний пароль для вашого існуючого акаунту. + +# oauth2/firstLogin.xhtml +oauth2.btn.convertAccount = Перетворити існуючий акаунт +oauth2.btn.createAccount = Створити новий акаунт +oauth2.askToConvert= Ви хочете перетворити свій Dataverse акаунт, щоб завжди використовувати логін вашої установи? +oauth2.welcomeExistingUserMessage= Логін вашої установи для {0} відповідає електронній адресі, яка вже використовується для Dataverse акаунту. Введіть свій поточний Dataverse пароль нижче, щоб ваш існуючий Dataverse акаунт можна було перетворити для використовування логіну вашої установи. 
Після перетворення вам потрібно буде використовувати лише логін вашої установи. + + +oauth2.welcomeExistingUserMessageDefaultInstitution= ваша установа +oauth2.dataverseUsername= Ім'я користувача Dataverse +oauth2.currentDataversePassword= Поточний пароль Dataverse +oauth2.chooseUsername= Ім'я користувача: +oauth2.passwordRejected=Validation Error - неправильне ім'я користувача або пароль. +# oauth2.newAccount.title= Створення акаунту +oauth2.newAccount.welcomeWithName= Ласкаво просимо до Dataverse, {0} +oauth2.newAccount.welcomeNoName= Ласкаво просимо до Dataverse +# oauth2.newAccount.email=Email +# oauth2.newAccount.email.tip=Dataverse використовує цю електронну адресу, щоб повідомляти вас про проблеми, пов'язані з вашими даними. +oauth2.newAccount.suggestedEmails= Рекомендовані електронні адреси: +oauth2.newAccount.username= Ім'я користувача +oauth2.newAccount.username.tip= Це ім'я буде вашим унікальним ідентифікатором користувача Dataverse. +oauth2.newAccount.explanation= Ця інформація надається {0} і буде використана для створення вашого {1} акаунту. Щоб знову ввійти, вам необхідно скористатися опцією входу {0}. +oauth2.newAccount.suggestConvertInsteadOfCreate= Якщо у вас вже є акаунт {0}, вам необхідно буде конвертувати ваш акаунт. + +# oauth2.newAccount.tabs.convertAccount= Перетворити існуючий акаунт +oauth2.newAccount.buttons.convertNewAccount= Перетворити акаунт +oauth2.newAccount.emailTaken= Електронна адреса вже прийнята. Замість цього, подумайте про об'єднання відповідного акаунту. +oauth2.newAccount.emailOk= Електронна адреса OK. +oauth2.newAccount.emailInvalid= Недійсна електронна адреса. +# oauth2.newAccount.usernameTaken= Ім'я користувача вже прийнято. +# oauth2.newAccount.usernameOk= Ім'я користувача OK. 
+ +# oauth2/convert.xhtml +# oauth2.convertAccount.title=Перетворення акаунту +oauth2.convertAccount.explanation= Будь ласка, введіть своє {0} ім'я користувача акаунту або електронну адресу та пароль, щоб конвертувати ваш акаунт в опцію {1} логіну. Додаткова інформація про перетворення вашого акаунту. +oauth2.convertAccount.username=Існуюче ім'я користувача: +oauth2.convertAccount.password=Пароль +oauth2.convertAccount.authenticationFailed= Підтвердження не виконане - невірне ім'я користувача або пароль. +oauth2.convertAccount.buttonTitle= Перетворити акаунт +oauth2.convertAccount.success= Ваш Dataverse акаунт тепер зв'язаний з вашим акаунтом {0}. + +# oauth2/callback.xhtml +oauth2.callback.page.title=OAuth Зворотний виклик +oauth2.callback.message= Помилка підтвердження - Dataverse не може автентифікувати ваш ORCID логін. Будь ласка, переконайтеся, що ви авторизуєте свій ORCID акаунт для підключення до Dataverse. Для отримання додаткової інформації про запитувану інформацію див. User Guide. + +# tab on dataverseuser.xhtml +apitoken.title=API маркер (Token) +apitoken.message= ВашAPI маркер відображається нижче після його створення. Перегляньте наш {0} Довідник з API {1}, щоб дізнатись більше про те, як використовувати свій API маркер з Dataverse APIs. +apitoken.notFound=API маркер для {0} не був створений. 
+apitoken.generateBtn=Створити маркер +apitoken.regenerateBtn= Відтворити маркер + +#dashboard.xhtml +dashboard.title= Панель інструментів +dashboard.card.harvestingclients.header= Збір клієнтів +dashboard.card.harvestingclients.btn.manage= Керування клієнтами +dashboard.card.harvestingclients.clients = {0, вибір, 0 # Клієнти | 1 # Клієнт | 2 | Клієнти} +dashboard.card.harvestingclients.datasets = {0, вибір, 0 # Набори даних | 1 # Набір даних | 2 # Набори даних } +dashboard.card.harvestingserver.header = Збір серверів +dashboard.card.harvestingserver.enabled = включений сервер OAI +dashboard.card.harvestingserver.disabled = Сервер OAI вимкнено +dashboard.card.harvestingserver.status = Статус +{0, вибір, 0 # Набори | 1 # Набір | 2 # Набори } +dashboard.card.harvestingserver.btn.manage = Управління сервером +dashboard.card.metadataexport.header = Експорт метаданих +dashboard.card.metadataexport.message = Експорт метаданих набору даних доступний лише через API {0}. Дізнайтеся більше в {0} {1} Довіднику API {2}. + +#harvestclients.xhtml +harvestclients.title= Керування збором клієнтів +harvestclients.toptip= - Можна запланувати проведення збору на певний час або за вимогою. Збір можна розпочати тут або через API REST. +harvestclients.noClients.label= Клієнти не скомпоновані. +harvestclients.noClients.why.header= Що таке збір? +harvestclients.noClients.why.reason1= Збір - це процес обміну метаданими з іншими репозиторіями. У зборі клієнт, ваша Datavese збирає записи метаданих з віддалених джерел. Це можуть бути інші зразки Datavers або інші архіви, які підтримують OAI-PMH - стандартний протокол збору. +harvestclients.noClients.why.reason2= Зібрані записи метаданих шукають користувачі. Посилання на зібраний набір даних в результатах пошуку переносить користувача до оригінального репозиторію. Зібрані набори даних не можна редагувати у вашій установці Dataverse. 
+harvestclients.noClients.how.header= Як користуватися збором +harvestclients.noClients.how.tip1= Щоб збирати метадані, створюється Harvesting Client та компонується для кожного віддаленого репозиторію. Зауважте, що при зборі клієнтів вам необхідно буде вибрати існуючу локальну програму dataverse для прийому зібраних наборів даних. +harvestclients.noClients.how.tip2=Заготовлені записи можна зберігати синхронно з оригінальним репозиторієм за допомогою регулярних додаткових оновлень, наприклад, щодня або щотижня. Альтернативно, збір можна проводити за запитом, з цієї сторінки або через REST API. +harvestclients.noClients.getStarted= Щоб розпочати, натисніть кнопку "Додати клієнта" вище. Щоб дізнатись більше про збір, відвідайте розділ Довідника користувача Harvesting section +harvestclients.btn.add= Додати клієнта +harvestclients.tab.header.name=Псевдонім +harvestclients.tab.header.url=URL +harvestclients.tab.header.lastrun= Останній запуск +harvestclients.tab.header.lastresults = останні результати +harvestclients.tab.header.action = Дії +harvestclients.tab.header.action.btn.run=Виконати збір +harvestclients.tab.header.action.btn.edit= Редагувати +harvestclients.tab.header.action.btn.delete= Видалити +harvestclients.tab.header.action.btn.delete.dialog.header= Видалити збір клієнтів +harvestclients.tab.header.action.btn.delete.dialog.warning= Ви впевнені, що хочете видалити збір клієнтів "{0}"? Видалення клієнта видаляє всі набори даних зібрані з цього віддаленого сервера. +harvestclients.tab.header.action.btn.delete.dialog.tip= Зауважте, що ця операція може зайняти деякий час для обробки, залежно від кількості зібраних наборів даних. +harvestclients.tab.header.action.delete.infomessage= Збір клієнта видаляється. Зауважте, що це може зайняти деякий час, залежно від кількості завантаженого контенту. +harvestclients.actions.runharvest.success= Успішно розпочато асинхронний збір для клієнта "{0}". Будь ласка, перезавантажте сторінку, щоб перевірити результати збору). 
+harvestclients.newClientDialog.step1= Крок 1 з 4 - Інформація про клієнта +harvestclients.newClientDialog.title.new= Створити клієнта, що збирає +harvestclients.newClientDialog.help= Налаштувати клієнта на збір контенту з віддаленого сервера. +harvestclients.newClientDialog.nickname= Псевдонім +harvestclients.newClientDialog.nickname.helptext= складається з букв, цифр, підкреслення (_) і рисок (-). +harvestclients.newClientDialog.nickname.required= Поле "псевдонім клієнта" не може бути порожнім! +harvestclients.newClientDialog.nickname.invalid= Псевдонім може містити лише літери, цифри, підкреслення (_) і риски (-); і не повинен мати більше 30 символів. +harvestclients.newClientDialog.nickname.alreadyused= Цей псевдонім вже використовується. +harvestclients.newClientDialog.type = Протокол серверний +harvestclients.newClientDialog.type.helptext = Наразі підтримується лише протокол сервера OAI. +harvestclients.newClientDialog.type.OAI = OAI +harvestclients.newClientDialog.type.Nesstar = Несстар +harvestclients.newClientDialog.url = URL-адреса сервера +harvestclients.newClientDialog.url.tip = URL ресурсу збору. +harvestclients.newClientDialog.url.watermark= Віддалений хост-сервер, http://... +harvestclients.newClientDialog.url.helptext.notvalidated=URL ресурсу збору. Після натискання кнопки "Далі" ми спробуємо встановити з'єднання з сервером, щоб перевірити, що він працює, і отримати додаткову інформацію про його можливості. +harvestclients.newClientDialog.url.required= Вимагається дійсна адреса сервера для збору. +harvestclients.newClientDialog.url.invalid= Недійсна URL-адреса. Не вдалося встановити з'єднання та отримати дійсну відповідь сервера. harvestclients.newClientDialog.url.noresponse= Не вдається встановити з'єднання з сервером. +harvestclients.newClientDialog.url.badresponse= Недійсна відповідь з сервера. 
+harvestclients.newClientDialog.dataverse=Локальна Dataverse +harvestclients.newClientDialog.dataverse.tip=Dataverse, що прийматиме набори даних, зібрані з цього віддаленого ресурсу. +harvestclients.newClientDialog.dataverse.menu.enterName= Введіть ім'я Dataverse. +harvestclients.newClientDialog.dataverse.menu.header= Назва Dataverse (установа), ім'я. harvestclients.newClientDialog.dataverse.menu.invalidMsg= Жодних співпадінь не знайдено. +harvestclients.newClientDialog.dataverse.required= Ви повинні вибрати існуючу dataversе для цього клієнта. +harvestclients.newClientDialog.step2 = Крок 2 з 4 - Формат +harvestclients.newClientDialog.oaiSets = Набір OAI +harvestclients.newClientDialog.oaiSets.tip= Збиральні набори, запропоновані цим OAI сервером. +harvestclients.newClientDialog.oaiSets.noset=Немає. +harvestclients.newClientDialog.oaiSets.helptext= Вибір опції "немає"приведе до збору стандартного набору, визначеного сервером. Часто це буде весь контент по всіх субнаборах. +harvestclients.newClientDialog.oaiSets.helptext.noset= Цей OAI сервер не підтримує названі набори. Буде зібраний весь контент, запропонований сервером. +harvestclients.newClientDialog.oaiMetadataFormat = Формат метаданих +harvestclients.newClientDialog.oaiMetadataFormat.tip = Формати метаданих, що пропонуються віддаленим сервером. +harvestclients.newClientDialog.oaiMetadataFormat.required= Будь ласка, виберіть формат метаданих для збору з цього архіву. +harvestclients.newClientDialog.step3= Крок 3 з 4 - Графік. +harvestclients.newClientDialog.schedule= Графік. +harvestclients.newClientDialog.schedule.tip= Налаштуйте щоденний або щотижневий автоматичний запуск збору. +harvestclients.newClientDialog.schedule.time.none.helptext= Залиште збір незапланованим для запуску лише на вимогу. 
+harvestclients.newClientDialog.schedule.none = Немає +harvestclients.newClientDialog.schedule.daily = Щодня +harvestclients.newClientDialog.schedule.weekly=Щотижня +harvestclients.newClientDialog.schedule.time=Час +harvestclients.newClientDialog.schedule.day=День +harvestclients.newClientDialog.schedule.time.am=До полудня +harvestclients.newClientDialog.schedule.time.pm=Після полудня +harvestclients.newClientDialog.schedule.time.helptext= Час планується за вашим місцевим часом. +harvestclients.newClientDialog.btn.create= Створити клієнта +harvestclients.newClientDialog.success= Успішно створений клієнт для збору "{0}". +harvestclients.newClientDialog.step4= Крок 4 з 4 - Відображення +harvestclients.newClientDialog.harvestingStyle= Тип архіву. +harvestclients.newClientDialog.harvestingStyle.tip= Тип віддаленого архіву. +harvestclients.newClientDialog.harvestingStyle.helptext=Виберіть тип архіву, який найкраще описує цей віддалений сервер, щоб правильно застосовувати правила та стилі форматування для зібраних метаданих подібно тому як вони відображаються в результатах пошуку. Зауважте, що неправильний вибір типу віддаленого архіву може призвести до неповних записів у результатах пошуку і неможливості перенаправлення користувача до архівного джерела даних. +harvestclients.viewEditDialog.title= Редагувати клієнта збору. +harvestclients.viewEditDialog.archiveUrl= URL адреса архіву. +harvestclients.viewEditDialog.archiveUrl.tip= URL адреса архіву, що обслуговує зібрані цим клієнтом дані, яка використовується в результатах пошуку для посилань на оригінальні +джерела зібраного контенту. +harvestclients.viewEditDialog.archiveUrl.helptext= Редагувати, якщо ця URL-адреса відрізняється від URL-адреси сервера. +harvestclients.viewEditDialog.archiveDescription=Опис архіву. +harvestclients.viewEditDialog.archiveDescription.tip= Опис архівного джерела зібраного контенту, відображеного в результатах пошуку. 
+harvestclients.viewEditDialog.archiveDescription.default.generic= Цей набір даних збирається від наших партнерів. Натиснувши на посилання, ви перейдете безпосередньо до архівного джерела даних. +harvestclients.viewEditDialog.btn.save= Зберегти зміни +harvestclients.newClientDialog.title.edit=Редагувати групу {0} + +#harvestset.xhtml +harvestserver.title= Управління сервером збору. +harvestserver.toptip= - Визначити групи локальних наборів даних, які будуть доступні для збору віддаленими клієнтами. +harvestserver.service.label=OAI сервер +harvestserver.service.enabled= Підключено. +harvestserver.service.disabled= Відключено. +harvestserver.service.disabled.msg= Сервер збору наразі відключений. +harvestserver.service.empty= Набори не скомпоновані. +harvestserver.service.enable.success= Служба OAI успішно підключена. +harvestserver.noSets.why.header= Що таке сервер збору? +harvestserver.noSets.why.reason1= Збір - це процес обміну метаданими з іншими репозиторіями. У процесі збору сервер , ваша Dataverse може зробити деякі з локальних метаданих набору даних доступними для віддалених клієнтів. Це можуть бути інші зразки Dataverse або будь-які інші клієнти, які підтримують протокол збору OAI-PMH. +harvestserver.noSets.why.reason2= Можна збирати лише опубліковані, необмежені набори даних у вашій Datavese. Віддалені клієнти зазвичай зберігають свої записи в синхронному режимі за допомогою запланованих покрокових щоденних або щотижневих оновлень, що дозволяє мінімізувати навантаження на ваш сервер. Зауважте, що збираються лише метадані. Зазвичай, віддалені збирачі не намагаються самостійно завантажувати файли даних. +harvestserver.noSets.how.header= Як запустити сервер збору? +harvestserver.noSets.how.tip1= Можна підключити чи відключити сервер збору на цій сторінці. +harvestserver.noSets.how.tip2= Після активації служби ви можете визначити сукупність локальних наборів даних, які будуть доступні для віддалених збирачів OAI Sets . 
Набори визначаються пошуковими запитами (наприклад, назва автора: king; або parentId: 1234 - щоб вибрати всі набори даних, що належать вказаній dataverse; або dsPersistentId: "doi: 1234 /" щоб вибрати всі набори даних за допомогою вказаного постійного ідентифікатора). Для отримання додаткової інформації про пошукові запити зверніться до розділу "Пошук API" у Довіднику користувача Dataverse. +harvestserver.noSets.getStarted= Щоб почати, підключіть сервер OAI та натисніть кнопку Add Set. Щоб дізнатись більше про збір, перейдіть на сторінку Harvesting. +harvestserver.btn.add= Додати набір +harvestserver.tab.header.spec=OAI setSpec +harvestserver.tab.header.description= Опис +harvestserver.tab.header.definition= Керувальний запит. +harvestserver.tab.header.stats= набори даних. +harvestserver.tab.col.stats.empty= Немає записів (порожній набір). +harvestserver.tab.col.stats.results={0} {0, choice, 0# набори даних |1# набір даних |2# набори даних } ({1} {1, choice, 0#записи|1#запис|2#записи} експортовано, {2} позначено як видалений) +harvestserver.tab.header.action= Дії +harvestserver.tab.header.action.btn.export=Запустити експорт +harvestserver.actions.runreexport.success= Успішно розпочато асинхронний реекспорт для OAI набору "{0}" (будь ласка, перезавантажте сторінку, щоб перевірити перебіг експорту). +harvestserver.tab.header.action.btn.edit = Редагувати +harvestserver.tab.header.action.btn.delete = Видалити + +harvestserver.tab.header.action.btn.delete.dialog.header= Видалити набір збору. +harvestserver.tab.header.action.btn.delete.dialog.tip= Ви впевнені, що хочете видалити OAI набір "{0}"? Ви не зможете скасувати видалення! +harvestserver.tab.header.action.delete.infomessage= Вибраний набір збору видаляється. (це може зайняти кілька хвилин). +harvestserver.newSetDialog.title.new=Створити набір збору. +harvestserver.newSetDialog.help= Визначити комплект локальних наборів даних, доступних для віддалених збирачів. 
+harvestserver.newSetDialog.setspec = Ім'я / OAI setSpec +harvestserver.newSetDialog.setspec.tip = Унікальне ім'я (OAI setSpec), що ідентифікує цей набір. +harvestserver.newSetDialog.setspec.helptext= Складається з букв, цифр, підкреслення (_) і рисок (-). +harvestserver.editSetDialog.setspec.helptext= Ім'я не можна змінювати після створення набору. +harvestserver.newSetDialog.setspec.required= Ім'я (OAI setSpec) не може бути порожнім! +harvestserver.newSetDialog.setspec.invalid= Ім'я (OAI setSpec) може складатися тільки з букв, цифр, підкреслення (_) і рисок (-). +harvestserver.newSetDialog.setspec.alreadyused = Ця назва набору (OAI setSpec) вже використовується. +harvestserver.newSetDialog.setdescription = Опис. +harvestserver.newSetDialog.setdescription.tip= Надайте короткий опис цього OAI набору. +harvestserver.newSetDialog.setdescription.required= Поле опису не може бути порожнім! +harvestserver.newSetDialog.setquery= Керувальний запит. +harvestserver.newSetDialog.setquery.tip= Пошуковий запит, який визначає зміст набору даних. +harvestserver.newSetDialog.setquery.helptext=Приклад запиту: authorName:king +harvestserver.newSetDialog.setquery.required= Не можна залишати поле пошукового запиту пустим! +harvestserver.newSetDialog.setquery.results= Пошуковий запит дав {0} наборів даних! +harvestserver.newSetDialog.setquery.empty= ПОПЕРЕДЖЕННЯ: пошуковий запит не дав результатів! +harvestserver.newSetDialog.btn.create= Створити набір +harvestserver.newSetDialog.success= Успішно створений набір збору "{0}". +harvestserver.viewEditDialog.title= Редагувати набір збору +harvestserver.viewEditDialog.btn.save = Зберегти зміни. + + +#dashboard-users.xhtml +dashboard.card.users = Користувачі +dashboard.card.users.header = Інформаційна панель - Перелік користувачів +dashboard.card.users.super = Суперкористувачі +dashboard.card.users.manage = Керувати користувачами +dashboard.card.users.message = Перелік користувачів та керування ними. 
+dashboard.list_users.searchTerm.watermark = Пошук цих користувачів ... +dashboard.list_users.tbl_header.userId = ідентифікатор +dashboard.list_users.tbl_header.userIdentifier = Ім'я користувача +dashboard.list_users.tbl_header.name = Ім'я +dashboard.list_users.tbl_header.lastName = Прізвище +dashboard.list_users.tbl_header.firstName = Ім'я +dashboard.list_users.tbl_header.email = Електронна пошта +dashboard.list_users.tbl_header.affiliation= Приналежність до установи +dashboard.list_users.tbl_header.roles = Ролі +dashboard.list_users.tbl_header.position = Посада +dashboard.list_users.tbl_header.isSuperuser = Суперкористувач +dashboard.list_users.tbl_header.authProviderFactoryAlias = Аутентифікація +dashboard.list_users.tbl_header.createdTime = Створено час +dashboard.list_users.tbl_header.lastLoginTime = Час останнього входу +dashboard.list_users.tbl_header.lastApiUseTime = Час останнього використання API +dashboard.list_users.tbl_header.roles.removeAll = Видалити все +dashboard.list_users.tbl_header.roles.removeAll.header = Видалити всі ролі +dashboard.list_users.tbl_header.roles.removeAll.confirmationText = Ви впевнені, що хочете видалити всі ролі для користувача {0}? +dashboard.list_users.removeAll.message.success = Усі ролі для користувача {0} видалені. +dashboard.list_users.removeAll.message.failure = Не вдалося видалити ролі для користувача {0}. +dashboard.list_users.toggleSuperuser=Редагувати статус суперкористувача. +dashboard.list_users.toggleSuperuser.confirmationText.add = Ви впевнені, що хочете активувати статус суперкористувача для користувача {0}? +dashboard.list_users.toggleSuperuser.confirmationText.remove = Ви впевнені, що хочете вимкнути статус суперкористувача для користувача {0}? +dashboard.list_users.toggleSuperuser.confirm = Продовжити +dashboard.list_users.api.auth.invalid_apikey = Ключ API недійсний. +dashboard.list_users.api.auth.not_superuser = Заборонений. Ви повинні бути суперкористувачем. 
+ +#MailServiceBean.java +notification.email.create.dataverse.subject={0}: Ваша dataverse створена. +notification.email.create.dataset.subject={0}: Ваш набір даних створений. +notification.email.request.file.access.subject={0}: Запит про доступ до файлу з обмеженим доступом. +notification.email.grant.file.access.subject={0}: Вам надано доступ до обмеженого файлу. +notification.email.rejected.file.access.subject={0}: Ваш запит про доступ до обмеженого файлу відхилено. +notification.email.update.maplayer={0}: Шар WorldMap доданий до набору даних. +notification.email.maplayer.deletefailed.subject={0}: не вдалося видалити шар WorldMap. +notification.email.maplayer.deletefailed.text= Ми не змогли видалити шар WorldMap, пов'язаний із обмеженим файлом {0}, та всі відповідні дані, які ще можуть залишатися загальнодоступними на сайті WorldMap. Будь ласка, повторіть спробу, або зв'яжіться зі службою підтримки WorldMap та / або Dataverse. (Набір даних: {1}) +notification.email.submit.dataset.subject={0}: Ваш набір даних подано на розгляд. +notification.email.publish.dataset.subject={0}: Ваш набір даних опубліковано. +notification.email.returned.dataset.subject={0}: Ваш набір даних повернуто. +notification.email.create.account.subject={0}: Ваш акаунт створено. +notification.email.assign.role.subject = {0}: вам призначено роль +notification.email.revoke.role.subject = {0}: вашу роль скасовано. +notification.email.verifyEmail.subject={0}: підтвердити свою електронну адресу. +notification.email.greeting=Привіт, \n +# Bundle file editors, please note that "notification.email.welcome" is used in a unit test +notification.email.welcome= Ласкаво просимо до {0}! Почніть з додавання чи пошуку даних. Є питання? Зверніться до Довідника користувача {1}/{2}/user/ або зв'яжіться з {3} в {4} щоб отримати допомогу. + +notification.email.welcomeConfirmEmailAddOn=\n\n Перевірте адресу своєї електронної пошти на {0}. Зверніть увагу, що посилання на підтвердження закінчиться після {1}. 
Надішліть ще один електронний лист для перевірки, відвідавши вашу сторінку з акаунтом. +notification.email.requestFileAccess= Запит про доступ до файлу набору даних: {0}. Керування дозволами в {1}. +notification.email.grantFileAccess= Доступ надано для файлів у наборі даних: {0} (переглянути на {1}). +notification.email.rejectFileAccess= Ваш запит про доступ було відхилено для запитаних файлів у наборі даних: {0} (перегляд на {1}). Якщо у вас є якісь питання про те, чому +запит відхилили, ви можете зв'язатися з власником набору даних, використовуючи посилання "Контакт" у верхньому правому куті сторінки набору даних. +# Bundle file editors, please note that "notification.email.createDataverse" is used in a unit test +notification.email.createDataverse= Ваша нова dataverse під назвою {0} (перегляд на {1}) була створена в {2} (перегляд {3}). Щоб дізнатись більше про те, що ви можете зробити з вашою dataverse, перегляньте розділ "Управління Datavese - Довідник користувача" на {4}/{5}/user/dataverse-management.html . +# Bundle file editors, please note that "notification.email.createDataset" is used in a unit test +notification.email.createDataset=Ваш новий набір даних під назвою {0} (перегляд на {1}) був створений у {2} (перегляд {3}). Щоб дізнатися більше про те, що ви можете зробити з набором даних, перегляньте "Управління набором даних" - Довідник користувача на {4}/{5} /user/dataset-management.html. +notification.email.wasSubmittedForReview={0} (див. на {1}) було подано на розгляд для опублікування в {2} (перегляд на {3}). Не забудьте його опублікувати або надіслати назад +автору\! +notification.email.wasReturnedByReviewer={0} (перегляд на {1}) був повернений куратором {2} (перегляд у {3}). +notification.email.wasPublished={0} (перегляд на {1}) опубліковано в {2} (перегляд на {3}). + notification.email.worldMap.added={0} (перегдяд на {1}) до нього додані дані шарів WorldMap. 
+notification.email.closing=\n\nДякуємо вам,\n{0} +notification.email.assignRole= Ви зараз {0} для {1} "{2}" (перегляд у {3}). +notification.email.revokeRole= Одна з ваших ролей для {0} "{1}" була скасована (перегляд на {2}). +notification.email.changeEmail= Привіт, {0}. {1}\n\nЗв'яжіться з нами, якщо ви не планували цю зміну або вам потрібна допомога. +hours=години +hour= година +minutes= хвилини +minute= хвилина +notification.email.checksumfail.subject={0}: Помилка при перевірці контрольної суми у вашому завантаженні. +notification.email.import.filesystem.subject= Набір даних {0} успішно завантажено та підтверджено. +notification.email.import.checksum.subject={0}: Перевірка контрольної суми вашого файлу завершена. + +# passwordreset.xhtml +pageTitle.passwdReset.pre= Скидання пароля акаунта. +passwdReset.token=маркер: +passwdReset.userLookedUp= користувач шукав: +passwdReset.emailSubmitted= електронна пошта доставлена: +passwdReset.details={0} Скидання пароля {1} - Щоб розпочати процес скидання пароля, введіть адресу своєї електронної пошти. +passwdReset.submitRequest= Надіслати запит на пароль. +passwdReset.successSubmit.tip= Якщо ця електронна пошта пов'язана з акаунтом, то нею вам надішлють додаткові інструкції щодо {0}. +passwdReset.debug=DEBUG налагодження +passwdReset.resetUrl= URL скидання. +passwdReset.noEmail.tip= Насправді жодного електронного листа не було надіслано, оскільки користувача не вдалося знайти за вказаною електронною адресою {0}, але ми про це не вказуємо, тому що ми не зловмисні користувачі, що використовують форму, щоб визначити, чи пов'язаний акаунт з електронною адресою. +passwdReset.illegalLink.tip= Посилання на зміну Вашого пароля недійсне. Якщо вам потрібно скинути пароль, {0} натисніть тут {1}, щоб попросити про повторне скидання вашого паролю. +passwdReset.newPasswd.details={0} Скидання пароля {1} \u2013 Наші вимоги до пароля змінилися. 
Будь ласка, виберіть сильний пароль, який відповідає критеріям вказаним нижче. +passwdReset.newPasswd = Новий пароль +passwdReset.rePasswd = Повторіть пароль +passwdReset.resetBtn = Скидання пароля + + +# dataverse.xhtml +dataverse.title= Проект, відділ, університет, професор або журнал, для яких ця dataverse міститиме дані. +dataverse.enterName= Введіть ім'я ... +dataverse.host.title= dataverse, що містить ці дані. +dataverse.identifier.title= Коротке ім'я, яке використовується для URL-адреси цієї dataverse. +dataverse.affiliation.title= Організація, з якою пов'язана ця dataverse. +dataverse.category=Категорія. +dataverse.category.title= Тип, який найбільше відображає цю dataverse. +dataverse.type.selectTab.top=Виберіть одну... +dataverse.type.selectTab.researchers= Дослідник +dataverse.type.selectTab.researchProjects= Дослідницький проект. +dataverse.type.selectTab.journals=Журнал. +dataverse.type.selectTab.organizationsAndInsitutions= Організація або установа. +dataverse.type.selectTab.teachingCourses= Навчальний курс. +dataverse.type.selectTab.uncategorized=Різне. +dataverse.type.selectTab.researchGroup=Дослідницька група. +dataverse.type.selectTab.laboratory= Лабораторія. +dataverse.type.selectTab.department=Відділ. +dataverse.description.title= Резюме, що описує мету, характер чи обсяг даної dataverse. +dataverse.email=Email +dataverse.email.title= Адреса (и) електронної пошти контакту (ів) для dataverse. +dataverse.share.dataverseShare= Поділитися Dataverse. +dataverse.share.dataverseShare.tip= Поділіться цією Dataverse у своїх улюблених соціальних мережах. +dataverse.share.dataverseShare.shareText= Перегляньте цю dataverse. +dataverse.subject.title= Тема (и), про які йдеться у цій dataverse. +dataverse.metadataElements= поля метаданих. +dataverse.metadataElements.tip= Виберіть поля метаданих для використання в шаблонах набору даних і при додаванні набору даних до цієї dataverse. +dataverse.metadataElements.from.tip= Використовуйте поля метаданих з {0}. 
+dataverse.resetModifications= Скидання модифікацій. +dataverse.resetModifications.text= Ви впевнені, що хочете скинути вибрані поля метаданих? Якщо ви це зробите, будь-які зроблені вами налаштування (приховані, обов'язкові, необов'язкові) більше не з'являться. +dataverse.field.required=(обов'язкові) +dataverse.field.example1= (Приклади: +dataverse.field.example2=) +dataverse.field.set.tip=[+] Перегляньте поля і налаштуйте їх як приховані, обов'язкові або необов'язкові. +dataverse.field.set.view=[+] Переглянути поля +dataverse.field.requiredByDataverse=Обов'язкове для Dataverse +dataverse.facetPickList.text= Огляд / пошук аспектів +dataverse.facetPickList.tip= Виберіть поля метаданих, які будуть використовуватися як аспекти для перегляду наборів даних dataverses у цій dataverse. +dataverse.facetPickList.facetsFromHost.text= Використовуйте перегляд / пошук аспектів з {0} +dataverse.facetPickList.metadataBlockList.all= Усі поля метаданих +dataverse.edit= Редагувати +dataverse.option.generalInfo= Загальна інформація +dataverse.option.themeAndWidgets= Тема + Віджети +dataverse.option.featuredDataverse= Характерні Dataverses +dataverse.option.permissions= Дозволи +dataverse.option.dataverseGroups=Групи +dataverse.option.datasetTemplates=Шаблони набору даних +dataverse.option.datasetGuestbooks= Набори даних гостьових книг +dataverse.option.deleteDataverse= Видалити Dataverse +dataverse.publish.btn=Опублікувати +dataverse.publish.header= Опублікувати Dataverse +dataverse.nopublished=Немає опублікованих Dataverses +dataverse.nopublished.tip= Для використання цієї функції ви повинні мати принаймні одну опубліковану dataverse. +dataverse.contact=Електронна адреса Dataverse +dataset.link= Посилання на набір даних. 
+dataverse.link= Посилання на Dataverse +dataverse.link.btn.tip= Посилання на Вашу Dataverse +dataverse.link.yourDataverses=Ваша {0, choice, 1#Dataverse|2#Dataverses} +dataverse.link.save= Зберегти пов'язані Dataverse +dataset.link.save= Зберегти пов'язані набори даних +dataverse.link.dataverse.choose= Виберіть, який із ваших datavers ви хотіли б пов'язати з цією dataverse. +dataverse.link.dataset.choose= Виберіть, який із ваших наборів даних ви хотіли б пов'язати з цим набором даних. +dataverse.link.no.choice= У вас є одна dataverse, до якої ви можете додати пов'язані dataverses і набори даних. +dataverse.link.no.linkable= Щоб мати змогу пов'язати dataverse або набір даних, вам потрібно мати власну dataverse. Щоб розпочати, натисніть кнопку Додати дані на домашній сторінці. +dataverse.link.no.linkable.remaining= Ви вже пов'язали всі ваші прийнятні dataverses. +dataverse.savedsearch.link= Пошук посилань +dataverse.savedsearch.searchquery= Пошук +dataverse.savedsearch.filterQueries=Аспекти +dataverse.savedsearch.save= Зберегти пов'язаний пошук +dataverse.savedsearch.dataverse.choose= Виберіть, до якої з ваших dataverse ви хотіли б прив'язати цей пошук. +dataverse.savedsearch.no.choice= У вас є одна dataverse, до якої ви можете додати збережений пошук. + +# Bundle file editors, please note that "dataverse.savedsearch.save.success" is used in a unit test +dataverse.saved.search.success= Збережений пошук був успішно пов'язаний з {0}. +dataverse.saved.search.failure= Збережений пошук не вдалося пов'язати. +dataverse.linked.success= {0} успішно пов'язано з {1}. +dataverse.linked.success.wait= {0} успішно пов'язано з {1}. Будь ласка, зачекайте, щоб з'явився його контент. +dataverse.linked.internalerror={0} успішно пов'язано з {1} але контент не відображатиметься, доки внутрішня помилка не буде виправлена. +dataverse.page.pre= Попередній +dataverse.page.next=наступний +dataverse.byCategory=Dataverses за категорією. 
+dataverse.displayFeatured= Відображати вибрані нижче dataverses на домашній сторінці для цієї dataverse. +dataverse.selectToFeature= Виберіть dataverses, які потрібно показати на домашній сторінці цієї dataverse. +dataverse.publish.tip= Ви впевнені, що хочете опублікувати свою dataverse? Як тільки ви це зробите, вона має залишатися опублікованою. +dataverse.publish.failed.tip= Ця dataverse не може бути опублікованою, тому що її dataverse не була опублікована. +dataverse.publish.failed= Не можу опублікувати dataverse. +dataverse.publish.success= Ваша dataverse зараз є загальнодоступною. +dataverse.publish.failure= Цю dataverse неможливо опублікувати. +dataverse.delete.tip= Ви впевнені, що хочете видалити свою dataverse? Ви не можете відновити цю dataverse. +dataverse.delete=Видалити Dataverse +dataverse.delete.success= Ваша dataverse видалена. +dataverse.delete.failure= Цю dataverse неможливо видалити. +# Редактори файлів пакетів, зверніть увагу на те, що "dataverse.create.success" використовується в тестовому модулі, оскільки воно неперевершене з двома параметрами +dataverse.create.success= Ви успішно створили свою dataverse! Щоб дізнатись більше про те, що ви можете зробити з вашою dataverse, перегляньте User Guide. +dataverse.create.failure= Цю dataverse неможливо створити. +dataverse.create.authenticatedUsersOnly= Тільки автентифіковані користувачі можуть створювати dataverses. +dataverse.update.success= Ви успішно оновили свою dataverse! +dataverse.update.failure= Цю dataverse неможливо оновити. + +# rolesAndPermissionsFragment.xhtml + +# advanced.xhtml +advanced.search.header.dataverses=Dataverses +advanced.search.dataverses.name.tip= Проект, відділ, університет, професор або журнал, для яких ця dataverse міститиме дані. +advanced.search.dataverses.affiliation.tip= Організація, з якою пов'язана ця Dataverse. +advanced.search.dataverses.description.tip=Резюме, що описує мету, характер чи сферу застосування цієї Dataverse. 
+advanced.search.dataverses.subject.tip= Категорії тем специфічні для домену, які актуальні для цієї Datavese. +advanced.search.header.datasets=набори даних. +advanced.search.header.files=Файли. +advanced.search.files.name.tip= Ім'я, яке призначено для ідентифікації файлу. +advanced.search.files.description.tip= Резюме, що описує файл та його змінні. +advanced.search.files.fileType=Тип файлу. +advanced.search.files.fileType.tip= Розширення для файлу, наприклад CSV, zip, Stata, R, PDF, JPEG тощо. +advanced.search.files.variableName= Назва змінної. +advanced.search.files.variableName.tip= Назва графи змінної в системі даних. +advanced.search.files.variableLabel= Позначка змінної +advanced.search.files.variableLabel.tip= Короткий опис змінної. + +# search-include-fragment.xhtml +dataverse.search.advancedSearch= Розширений пошук +dataverse.search.input.watermark= Шукати цю dataverse... +account.search.input.watermark= Шукати ці дані... +dataverse.search.btn.find= Знайти. +dataverse.results.btn.addData=Додати дані. +dataverse.results.btn.addData.newDataverse=Нова Dataverse +dataverse.results.btn.addData.newDataset=Новий набір даних +dataverse.results.dialog.addDataGuest.header= Додати дані. +dataverse.results.dialog.addDataGuest.msg= Вам потрібно увійти щоб створити dataverse або додати набір даних. +dataverse.results.dialog.addDataGuest.msg.signup= Вам потрібно Зареєструватися або Увійдіть , щоб створити Dataverse або додати набір даних. +dataverse.results.types.dataverses=Dataverses +dataverse.results.types.datasets= набори даних +dataverse.results.types.files=Файли. +# Bundle file editors, please note that "dataverse.results.empty.zero" is used in a unit test +dataverse.results.empty.zero= Немає dataverses, наборів даних, чи файлів, які відповідають вашому пошуку. Будь ласка, спробуйте новий пошук, використовуючи інші або більш широкі терміни. 
Ви також можете переглянути для довідки: +the search guide +# Bundle file editors, please note that "dataverse.results.empty.hidden" is used in a unit test +dataverse.results.empty.hidden= Немає результатів пошуку на основі того, як ви звузили пошук. Ви можете переглянути для довідки: search guide . +dataverse.results.empty.browse.guest.zero=Ця dataverse наразі не має dataverses, наборів даних або файлів. Будь ласка, спробуйте ввійти щоб дізнатися, чи можете ви додати до нього. +dataverse.results.empty.browse.guest.hidden= Ця dataverse не має dataverses. Будь ласка, спробуйте увійти щоб дізнатися, чи можете ви додати до нього. +dataverse.results.empty.browse.loggedin.noperms.zero= Ця dataverse наразі не має dataverses, наборів даних або файлів. Ви можете скористатися кнопкою Email Dataverse Contact, розташованою вище, щоб запитати про цю dataverse або попросити про доступ до цієї dataverse. +dataverse.results.empty.browse.loggedin.noperms.hidden= Ця dataverse не має dataverses. +dataverse.results.empty.browse.loggedin.perms.zero= Ця dataverse наразі не має dataverses, наборів даних або файлів. Ви можете додати їх, скориставшись кнопкою Add Data (Додати дані) на цій сторінці. +account.results.empty.browse.loggedin.perms.zero= У вас немає dataverses, наборів даних або файлів пов'язаних з вашим акаунтом. Ви можете додати dataverse або набір даних, натиснувши кнопку Додати Дані. Докладніше про додавання даних див. у +User Guide. +dataverse.results.empty.browse.loggedin.perms.hidden= Ця dataverse не має dataverses. Ви можете додати їх, скориставшись кнопкою Add Data (Додати Дані) на цій сторінці. +dataverse.results.empty.link.technicalDetails= Більше технічних подробиць. +dataverse.search.facet.error= Виникла помилка з параметрами пошуку. Будь ласка, Очистіть свій пошук і повторіть спробу. 
+dataverse.results.count.toofresults= від {0} до {1} з {2} {2, choice, 0#Результатів|1#Результат|2#Результати} +dataverse.results.paginator.current=(поточний) +dataverse.results.btn.sort=Сортувати +dataverse.results.btn.sort.option.nameAZ=Назва (A-Z) +dataverse.results.btn.sort.option.nameZA= Назва (Z-A) +dataverse.results.btn.sort.option.newest= Найновіший +dataverse.results.btn.sort.option.oldest= Найстаріший +dataverse.results.btn.sort.option.relevance= релевантність +dataverse.results.cards.foundInMetadata= Знайдено в полях метаданих: +dataverse.results.cards.files.tabularData= Табличні дані +dataverse.results.solrIsDown= Будь ласка, зверніть увагу: через внутрішню помилку перегляд та пошук недоступні. +dataverse.theme.title=Тема. +dataverse.theme.inheritCustomization.title= Для цієї dataverse використовуйте ті самі теми, що і в батьківській dataverse. +dataverse.theme.inheritCustomization.label= Успадковуйте тему +dataverse.theme.inheritCustomization.checkbox= Успадковуйте тему з {0} +dataverse.theme.logo=Лого +dataverse.theme.logo.tip= Підтримувані типи зображень - це JPG, TIF або PNG, і їх розмір не повинен перевищувати 500 КБ. Максимальний розмір зображення для файлу зображення в темі dataverse складає 940 пікселів завширшки і 120 пікселів заввишки. +dataverse.theme.logo.format=Формат лого. 
+dataverse.theme.logo.format.selectTab.square= квадратний +dataverse.theme.logo.format.selectTab.rectangle=прямокутний +dataverse.theme.logo.alignment= Вирівнювання логотипу +dataverse.theme.logo.alignment.selectTab.left= ліворуч +dataverse.theme.logo.alignment.selectTab.center=по центру +dataverse.theme.logo.alignment.selectTab.right=праворуч +dataverse.theme.logo.backColor= Колір фону логотипа +dataverse.theme.logo.image.upload= Завантажити зображення +dataverse.theme.tagline= слоган +dataverse.theme.website=Веб-сайт +dataverse.theme.linkColor= Колір посилання +dataverse.theme.txtColor= Колір тексту +dataverse.theme.backColor= Колір фону +dataverse.theme.success= Ви успішно оновили тему для цієї dataverse! +dataverse.theme.failure= Тема dataverse не була оновлена. +dataverse.theme.logo.image=Зображення лого. +dataverse.theme.logo.image.title= Файл логотипу або зображення, який ви хочете відобразити в заголовку цієї dataverse. +dataverse.theme.logo.image.uploadNewFile= Завантажити новий файл +dataverse.theme.logo.image.invalidMsg= Зображення неможливо завантажити. Будь ласка, повторіть спробу за допомогою файлу JPG, TIF або PNG. +dataverse.theme.logo.image.uploadImgFile= Завантажити файл зображення. +dataverse.theme.logo.format.title= Форма для файлу логотипу або зображення, який ви завантажуєте для цієї dataverse. +dataverse.theme.logo.format.selectTab.square2=квадрат +dataverse.theme.logo.format.selectTab.rectangle2= прямокутник +dataverse.theme.logo.alignment.title= Де логотип або зображення повинні відображатися у заголовку. +dataverse.theme.logo.alignment.selectTab.left2= ліворуч +dataverse.theme.logo.alignment.selectTab.center2= по центру +dataverse.theme.logo.alignment.selectTab.right2= праворуч +dataverse.theme.logo.backColor.title= Виберіть колір фону логотипу цієї dataverse. +dataverse.theme.headerColor= Кольори заголовків +dataverse.theme.headerColor.tip= Кольори, які ви обираєте для дизайну заголовка цієї dataverse. 
+dataverse.theme.backColor.title= Колір для області заголовка, що містить зображення, слоган, URL-адресу та текст. +dataverse.theme.linkColor.title= Колір для відображення посилання. +dataverse.theme.txtColor.title= Колір тексту заголовка та назва. Колір слогану та назви цієї dataverse. +dataverse.theme.tagline.title= Фраза або речення, що описує цю dataverse. +dataverse.theme.tagline.tip= Вкажіть слоган, що містить 140 символів або менше. +dataverse.theme.website.title= URL для вашого особистого веб-сайту, установи чи будь-якого веб-сайту, що стосується цієї dataverse. +dataverse.theme.website.tip=. На Веб-сайт буде посилання за слоганом. Щоб веб-сайт був у переліку, ви також повинні надати слоган. +dataverse.theme.website.watermark= Ваш особистий сайт, http://... +dataverse.theme.website.invalidMsg=Недійсна URL. +dataverse.theme.disabled= Тема для основної dataverse була в адміністративному порядку деактивована за допомогою налаштування бази даних: DisableRootDataveseTheme. +dataverse.widgets.title= віджети +dataverse.widgets.notPublished.why.header= Чому використовуються віджети? +dataverse.widgets.notPublished.why.reason1= Збільшує видимість ваших даних, дозволяючи вставляти ваші dataverse та набори даних у ваш персональний або проектний веб-сайт. +dataverse.widgets.notPublished.why.reason2= Дозволяє іншим переглядати ваші dataverse та набори даних, не виходячи з вашого особистого або проектного веб-сайту. +dataverse.widgets.notPublished.how.header= Як використовувати віджети? +dataverse.widgets.notPublished.how.tip1= Для використання віджетів, ваші dataverse та набори даних повинні бути опубліковані. + +dataverse.widgets.notPublished.how.tip2= Після публікації буде доступний код на цій сторінці, щоб ви могли копіювати та додавати до вашого особистого або проектного веб-сайту. +dataverse.widgets.notPublished.how.tip3= У вас є веб-сайт OpenScholar? Якщо так, дізнайтеся більше про додавання Dataverse віджетів до вашого веб-сайту here. 
+dataverse.widgets.notPublished.getStarted= Щоб розпочати, опублікуйте свою dataverse. Щоб дізнатися більше про віджети, відвідайте розділ Довідника користувача Theme + Widgets section of the User Guide. +dataverse.widgets.tip= Скопіюйте та вставте цей код у HTML на своєму сайті. Щоб дізнатися більше про віджети, відвідайте розділ Довідника користувача Theme + Widgets. +dataverse.widgets.searchBox.txt= Поле пошуку Dataverse +dataverse.widgets.searchBox.tip= Додайте шлях, щоб відвідувачі вашого веб-сайту могли шукати Dataverse. +dataverse.widgets.dataverseListing.txt= складання списків Dataverse . +dataverse.widgets.dataverseListing.tip= Додайте шлях, щоб відвідувачі вашого веб-сайту могли бачити ваші dataverses та набори даних, сортувати або переглядати їх. +dataverse.widgets.advanced.popup.header= Додаткові параметри віджетів. +dataverse.widgets.advanced.prompt= Надішліть перелік наборів даних постійних URL-адрес на ваш особистий веб-сайт. Сторінка, яку ви заявляєте як URL-адреса персонального веб-сайту, повинна містити фрагмент коду для віджета Dataverse Listing. +dataverse.widgets.advanced.url.label= URL-адреса персонального веб-сайту +dataverse.widgets.advanced.url.watermark=http://www.example.com/page-name +dataverse.widgets.advanced.invalid.message= Будь ласка, введіть дійсну URL-адресу +dataverse.widgets.advanced.success.message= Успішно оновлено URL-адресу вашого особистого веб-сайту. +dataverse.widgets.advanced.failure.message= URL- адреса особистого dataverse веб-сайту не оновлена. 
+ +# permissions-manage.xhtml +dataverse.permissions.title= Дозволи +dataverse.permissions.dataset.title= Дозволи набору даних +dataverse.permissions.access.accessBtn= Редагувати доступ +dataverse.permissions.usersOrGroups= Користувачі / Групи +dataverse.permissions.usersOrGroups.assignBtn= Призначити ролі користувачам / групам +dataverse.permissions.usersOrGroups.createGroupBtn= Створити групу +dataverse.permissions.usersOrGroups.description= Усі користувачі та групи, які мають доступ до вашої dataverse. +dataverse.permissions.usersOrGroups.tabHeader.userOrGroup= Ім'я користувача / групи (приналежність) +dataverse.permissions.usersOrGroups.tabHeader.id=ID +dataverse.permissions.usersOrGroups.tabHeader.role= Роль +dataverse.permissions.usersOrGroups.tabHeader.action=дія +dataverse.permissions.usersOrGroups.assignedAt= Роль призначена в {0} +dataverse.permissions.usersOrGroups.removeBtn= Видалити призначену роль +dataverse.permissions.usersOrGroups.removeBtn.confirmation= Ви впевнені, що хочете видалити це призначення ролі? + +dataverse.permissions.roles=Ролі +dataverse.permissions.roles.add= Додати нову роль +dataverse.permissions.roles.description= Всі ролі, налаштовані у вашій dataverse, які ви можете призначити користувачам і групам. +dataverse.permissions.roles.edit= Редагувати роль +dataverse.permissions.roles.copy= Копіювати роль + + +# permissions-manage-files.xhtml +dataverse.permissionsFiles.title= Дозволи на файли обмеженого доступу +dataverse.permissionsFiles.usersOrGroups= Користувачі / Групи +dataverse.permissionsFiles.usersOrGroups.assignBtn= Надати доступ користувачам / групам +dataverse.permissionsFiles.usersOrGroups.description= Усі користувачі та групи, які мають доступ до файлів обмеженого доступу в цьому наборі даних. 
+dataverse.permissionsFiles.usersOrGroups.tabHeader.userOrGroup= Ім'я користувача / групи (приналежність) +dataverse.permissionsFiles.usersOrGroups.tabHeader.id=ID +dataverse.permissionsFiles.usersOrGroups.tabHeader.email= Електронна пошта +dataverse.permissionsFiles.usersOrGroups.tabHeader.files= Файли +dataverse.permissionsFiles.usersOrGroups.tabHeader.access=Доступ +dataverse.permissionsFiles.usersOrGroups.file= Файл +dataverse.permissionsFiles.usersOrGroups.files= Файли +dataverse.permissionsFiles.usersOrGroups.invalidMsg= Немає користувачів або груп, які мають доступ до файлів обмеженого доступу у цьому наборі даних. +dataverse.permissionsFiles.files= файли обмеженого доступу +dataverse.permissionsFiles.files.label={0, choice, 0# Обмежені файли |1# Обмежений файл |2# Обмежені файли } +dataverse.permissionsFiles.files.description= Всі файли обмеженого доступу в цьому наборі даних +dataverse.permissionsFiles.files.tabHeader.fileName= Ім'я файлу +dataverse.permissionsFiles.files.tabHeader.roleAssignees= Користувачі / Групи +dataverse.permissionsFiles.files.tabHeader.access= Доступ +dataverse.permissionsFiles.files.tabHeader.publishedRestrictedState=Опубліковано +dataverse.permissionsFiles.files.tabHeader.draftRestrictedState=Проект +dataverse.permissionsFiles.files.deleted= Видалено +dataverse.permissionsFiles.files.public=загальнодоступний +dataverse.permissionsFiles.files.restricted= Обмежений +dataverse.permissionsFiles.files.roleAssignee= Користувач / Група +dataverse.permissionsFiles.files.roleAssignees= Користувачі / Групи +dataverse.permissionsFiles.files.roleAssignees.label={0, choice, 0# Користувачі / Групи |1# Користувач / Група |2# Користувачі / Групи } +dataverse.permissionsFiles.files.assignBtn= Призначити доступ +dataverse.permissionsFiles.files.invalidMsg= У цьому наборі даних нема файлів обмеженого доступу. 
+dataverse.permissionsFiles.files.requested= Запитані файли +dataverse.permissionsFiles.files.selected=Вибір {0} з {1} {2} +dataverse.permissionsFiles.viewRemoveDialog.header= Доступ до файлу +dataverse.permissionsFiles.viewRemoveDialog.removeBtn= Видалити доступ. +dataverse.permissionsFiles.viewRemoveDialog.removeBtn.confirmation= Ви впевнені, що хочете видалити доступ до цього файлу? Після видалення доступу користувач або група більше не зможе завантажити цей файл. + +dataverse.permissionsFiles.assignDialog.header= Надати доступ до файлу +dataverse.permissionsFiles.assignDialog.description= Надати доступ до файлу користувачам і групам. +dataverse.permissionsFiles.assignDialog.userOrGroup= Користувачі / Групи +dataverse.permissionsFiles.assignDialog.userOrGroup.enterName= Введіть ім'я користувача / групи +dataverse.permissionsFiles.assignDialog.userOrGroup.invalidMsg= Відповідностей не знайдено. +dataverse.permissionsFiles.assignDialog.userOrGroup.requiredMsg= Будь ласка, виберіть принаймні одного користувача або групу. +dataverse.permissionsFiles.assignDialog.fileName= Ім'я файлу +dataverse.permissionsFiles.assignDialog.grantBtn=Надати +dataverse.permissionsFiles.assignDialog.rejectBtn=Відхилити. + +# permissions-configure.xhtml +dataverse.permissions.accessDialog.header= Редагувати доступ +dataverse.permissions.description= Конфігурація поточного доступу до вашої dataverse. +dataverse.permissions.tip= Виберіть: всі чи лише окремі користувачі зможуть додавати до цієї dataverse, натиснувши кнопку Редагувати доступ. +dataverse.permissions.Q1= Хто може додавати до цієї dataverse? +dataverse.permissions.Q1.answer1= Кожен хто додає до цієї dataverse, повинен мати доступ. +dataverse.permissions.Q1.answer2= Кожен, хто має Dataverse акаунт, може додавати суб- dataverses. +dataverse.permissions.Q1.answer3= Кожен, хто має Dataverse акаунт, може додавати набори даних. 
+dataverse.permissions.Q1.answer4= Кожен, хто має Dataverse акаунт, може додавати суб- dataverses та набори даних.
+dataverse.permissions.Q2= Коли користувач додає новий набір даних до цієї dataverse, яку роль слід автоматично присвоювати їм в цьому наборі даних?
+dataverse.permissions.Q2.answer.editor.description=- редагувати метадані, завантажувати файли та редагувати файли, редагувати Умови, гостьову книгу, подавати набори даних для перегляду.
+dataverse.permissions.Q2.answer.manager.description=- редагувати метадані, завантажувати файли та редагувати файли, редагувати Умови, гостьову книгу, обмеження доступу до файлів (доступ до файлів + використання).
+dataverse.permissions.Q2.answer.curator.description=- редагувати метадані, завантажувати файли та редагувати файли, редагувати Умови, гостьову книгу, обмеження доступу до файлів (доступ до файлів + використання), редагування Дозволів / Призначення ролей + публікування.
+
+# roles-assign.xhtml
+dataverse.permissions.usersOrGroups.assignDialog.header= Призначити роль
+dataverse.permissions.usersOrGroups.assignDialog.description= Надати права користувачам та групам, призначивши їм роль.
+dataverse.permissions.usersOrGroups.assignDialog.userOrGroup= Користувачі / Групи
+dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.enterName= Введіть ім'я користувача / групи
+dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.invalidMsg= Відповідностей не знайдено.
+dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.requiredMsg= Будь ласка, виберіть принаймні одного користувача або групу.
+dataverse.permissions.usersOrGroups.assignDialog.role.description= Це дозволи, пов'язані з обраною роллю.
+dataverse.permissions.usersOrGroups.assignDialog.role.warning= Призначення ролі {0} означає, що користувач(і) також матиме роль {0}, застосовну до всіх {1} у цьому {2}.
+dataverse.permissions.usersOrGroups.assignDialog.role.requiredMsg= Будь ласка, виберіть роль, яку потрібно призначити. + +# roles-edit.xhtml +dataverse.permissions.roles.header= Редагувати роль +dataverse.permissions.roles.name= Назва ролі +dataverse.permissions.roles.name.title= Ввести назву для ролі. +dataverse.permissions.roles.id= Ідентифікатор +dataverse.permissions.roles.id.title= Ввести ім'я псевдоніму +dataverse.permissions.roles.description.title= Описати роль (максимум 1000 символів). +dataverse.permissions.roles.description.counter={0} символів залишається +dataverse.permissions.roles.roleList.header= дозволи на ролі +dataverse.permissions.roles.roleList.authorizedUserOnly= Дозволи з інформаційною піктограмою вказують дії, які можуть виконувати користувачі не зареєстровані в Dataverse. + +# explicitGroup-new-dialog.xhtml +dataverse.permissions.explicitGroupEditDialog.title.new= Створити групу +dataverse.permissions.explicitGroupEditDialog.title.edit= Редагувати групу {0} +dataverse.permissions.explicitGroupEditDialog.help= Додавати користувачів або інші групи до цієї групи. +dataverse.permissions.explicitGroupEditDialog.groupIdentifier= Ідентифікатор групи +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.tip= Коротке ім'я, яке використовується для ідентифікатора цієї групи. +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.required= Ідентифікатор групи не може бути порожнім +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.invalid= Ідентифікатор групи може містити лише літери, цифри, символи підкреслення (_) та рисок (-) +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.helpText= Містить лише літери, цифри, символи підкреслення (_) та рисок (-). +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.taken= Ідентифікатор групи, який вже використовується у цій dataverse. 
+dataverse.permissions.explicitGroupEditDialog.groupName= Ім'я групи
+dataverse.permissions.explicitGroupEditDialog.groupName.required= Ім'я групи не може бути порожнім.
+dataverse.permissions.explicitGroupEditDialog.groupDescription= Опис
+dataverse.permissions.explicitGroupEditDialog.roleAssigneeName= Користувач / група
+dataverse.permissions.explicitGroupEditDialog.roleAssigneeNames= Користувачі / Групи
+dataverse.permissions.explicitGroupEditDialog.createGroup= Створити групу
+
+# manage-templates.xhtml
+dataset.manageTemplates.pageTitle= Управляти шаблонами наборів даних
+dataset.manageTemplates.select.txt= Включити шаблони з {0}
+dataset.manageTemplates.createBtn= Створити шаблон набору даних
+dataset.manageTemplates.saveNewTerms= Зберегти шаблон набору даних
+dataset.manageTemplates.noTemplates.why.header= Чому використовуються шаблони?
+dataset.manageTemplates.noTemplates.why.reason1= Шаблони корисні, коли у вас є кілька наборів даних з однаковою інформацією в багатьох полях метаданих, яку ви б не хотіли вводити вручну.
+dataset.manageTemplates.noTemplates.why.reason2= Шаблони можна використовувати для введення інструкцій для тих, хто завантажує набори даних у вашу dataverse, якщо ви хочете, щоб поле метаданих заповнювалось певним способом.
+dataset.manageTemplates.noTemplates.how.header= Як використовувати шаблони
+dataset.manageTemplates.noTemplates.how.tip1= Шаблони створені на рівні dataverse, можна видаляти (щоб вони не відображалися для майбутніх наборів даних); їх встановлюють за замовчуванням (необов'язково), і можна копіювати. Тому вам не потрібно починати все спочатку при створенні нового шаблону зі схожими метаданими з іншого шаблону. Видалення шаблону не впливає на набори даних, які вже використали цей шаблон.
+dataset.manageTemplates.noTemplates.how.tip2= Будь ласка, зауважте, що можна вибрати, які поля метаданих є прихованими, обов'язковими або необов'язковими, на сторінці General Information.
+dataset.manageTemplates.noTemplates.getStarted= Щоб розпочати, натисніть кнопку "Створити шаблон набору даних" зверху. Щоб дізнатись більше про шаблони, перейдіть на сторінку Dataset Templates Довідник користувача Dataverse. +dataset.manageTemplates.tab.header.templte= Назва шаблону +dataset.manageTemplates.tab.header.date= Створено дату +dataset.manageTemplates.tab.header.usage= Використання +dataset.manageTemplates.tab.header.action= Дія +dataset.manageTemplates.tab.action.btn.makeDefault=Зробити налаштування за замовчуванням +dataset.manageTemplates.tab.action.btn.default= за замовчуванням +dataset.manageTemplates.tab.action.btn.view= Переглянути +dataset.manageTemplates.tab.action.btn.copy = Копіювати +dataset.manageTemplates.tab.action.btn.edit = Редагувати +dataset.manageTemplates.tab.action.btn.edit.metadata = Метадані +dataset.manageTemplates.tab.action.btn.edit.terms = Умови +dataset.manageTemplates.tab.action.btn.delete = Видалити +dataset.manageTemplates.tab.action.btn.delete.dialog.tip = Ви впевнені, що хочете видалити цей шаблон? Новий набір даних не зможе використовувати цей шаблон. +dataset.manageTemplates.tab.action.btn.delete.dialog.header = Видалити шаблон +dataset.manageTemplates.tab.action.btn.view.dialog.header = Попередній перегляд шаблону набору даних +dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate = Шаблон набору даних +dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate.title = Шаблон набору даних, який автоматично підставляє інформацію у поля форми. +dataset.manageTemplates.tab.action.noedit.createdin = Шаблон, створений в {0} +dataset.manageTemplates.delete.usedAsDefault= Це є шаблон за замовчуванням для наступної (их) dataverse(s). Його також буде видалено за замовчуванням. +dataset.manageTemplates.info.message.notEmptyTable= Створюйте, розмножуйте, редагуйте, переглядайте або видаляйте шаблони набору даних. 
Створіть шаблон набору даних для заповнення полів метаданих стандартними значеннями (як наприклад: приналежність автора), щоб допомогти користувачам створювати набори даних у цій dataverse. Ви також можете додати довідковий текст безпосередньо в поля метаданих, щоб надати користувачам більше інформації про те, що можна додавати до цих полів метаданих. + +# metadataFragment.xhtml + +# template.xhtml +dataset.template.name.tip = Назва шаблону набору даних. +dataset.template.returnBtn = Повернутися до керування шаблонами +dataset.template.name.title = Введіть унікальне ім'я для шаблону. +template.asterisk.tip= Зірочки позначають поля метаданих, які користувачі повинні заповнити під час додавання набору даних до цієї dataverse. +dataset.template.popup.create.title= Створити шаблон +dataset.template.popup.create.text= Ви хочете додати умови та / або доступ за замовчуванням? +dataset.create.add.terms= Зберегти + Додати умови + +# manage-groups.xhtml +dataverse.manageGroups.pageTitle= Керувати групами Dataverse +dataverse.manageGroups.createBtn= Створити групу +dataverse.manageGroups.noGroups.why.header= Чому використовують групи? +dataverse.manageGroups.noGroups.why.reason1= Групи дозволяють призначати ролі та дозволи одночасно для багатьох користувачів. +dataverse.manageGroups.noGroups.why.reason2= Ви можете використовувати групи для управління декількома різними типами користувачів (студенти, співробітники, і т.д). +dataverse.manageGroups.noGroups.how.header= Як використовувати групи +dataverse.manageGroups.noGroups.how.tip1= Група може містити як користувачів, так і інші групи. +dataverse.manageGroups.noGroups.how.tip2= Ви можете призначити дозволи для групи у вікні "Дозволи". +datavese.manageGroups.noGroups.getStarted = Щоб розпочати, натисніть кнопку "Створити групу" вгорі. 
+datavese.manageGroups.tab.header.name = Назва групи +datavese.manageGroups.tab.header.id = Ідентифікатор групи +datavese.manageGroups.tab.header.membership = Членство +datavese.manageGroups.tab.header.action = Дія +datavese.manageGroups.tab.action.btn.view = Перегляд +datavese.manageGroups.tab.action.btn.copy = Копіювати +datavese.manageGroups.tab.action.btn.enable = Увімкнути +datavlesh.manageGroups.tab.action.btn.disable = Вимкнути +datavese.manageGroups.tab.action.btn.edit = Редагувати +datavese.manageGroups.tab.action.btn.viewCollectedData = Переглянути зібрані дані +datavlesh.manageGroups.tab.action.btn.delete = Видалити +datavese.manageGroups.tab.action.btn.delete.dialog.header = Видалити групу +datavese.manageGroups.tab.action.btn.delete.dialog.tip = Ви впевнені, що хочете видалити цю групу? Ви не можете відновити групу. +datavese.manageGroups.tab.action.btn.view.dialog.header = Група Dataverse +datavese.manageGroups.tab.action.btn.view.dialog.group = Назва групи +datavlesh.manageGroups.tab.action.btn.view.dialog.groupView.name = Ім'я учасника +datavlesh.manageGroups.tab.action.btn.view.dialog.groupView.type = Тип учасника +datavese.manageGroups.tab.action.btn.view.dialog.groupView.action = Дія +datavese.manageGroups.tab.action.btn.view.dialog.groupView.delete = Видалити +datavese.manageGroups.tab.action.btn.view.dialog.groupMembers = Члени групи +datavese.manageGroups.tab.action.btn.view.dialog.enterName = Ввести ім'я користувача / групи +dataverse.manageGroups.tab.action.btn.view.dialog.invalidMsg= Відповідностей не знайдено. + + +# manage-guestbooks.xhtml +dataset.manageGuestbooks.pageTitle= Керувати гостьовими книгами наборів даних +dataset.manageGuestbooks.include= Включити гостьові книги з {0} +dataset.manageGuestbooks.createBtn= Створити гостьову книгу набору даних +dataset.manageGuestbooks.download.all.responses= Завантажити всі відповіді +dataset.manageGuestbooks.download.responses= Завантажити відповідь. 
+dataset.manageGuestbooks.noGuestbooks.why.header= Чому використовують гостьові книги?
+dataset.manageGuestbooks.noGuestbooks.why.reason1= Гостьові книги дозволяють збирати дані про те, хто завантажує файли з ваших наборів даних. Ви можете вирішити збирати інформацію про акаунт (ім'я користувача, ім'я та прізвище, приналежність тощо), а також створювати власні запитання (наприклад, "Для чого ви плануєте використовувати ці дані?")
+dataset.manageGuestbooks.noGuestbooks.why.reason2= Ви можете завантажувати дані, зібрані з активованих гостьових книг, щоб мати можливість зберігати їх за межами Dataverse.
+dataset.manageGuestbooks.noGuestbooks.how.header= Як використовувати гостьові книги
+dataset.manageGuestbooks.noGuestbooks.how.tip1= Гостьову книгу можна використовувати для багатьох наборів даних, але лише одну гостьову книгу можна використовувати для одного набору даних.
+dataset.manageGuestbooks.noGuestbooks.how.tip2= Користувачі отримують відповіді на запитання у вільному текстовому форматі або вибирають з декількох варіантів.
+dataset.manageGuestbooks.noGuestbooks.getStarted= Щоб розпочати, натисніть кнопку "Створити гостьову книгу набору даних" угорі. Щоб дізнатись більше про Гостьові книги, відвідайте сторінку Довідник користувача Dataset Guestbook.
+dataset.manageGuestbooks.tab.header.name = Назва гостьової книги +dataset.manageGuestbooks.tab.header.date = Створено дату +dataset.manageGuestbooks.tab.header.usage = Використання +dataset.manageGuestbooks.tab.header.responses = Відповіді +dataset.manageGuestbooks.tab.header.action = Дія +dataset.manageGuestbooks.tab.action.btn.view = Попередній перегляд +dataset.manageGuestbooks.tab.action.btn.copy = Копіювати +dataset.manageGuestbooks.tab.action.btn.enable = Увімкнути +dataset.manageGuestbooks.tab.action.btn.disable = Вимкнути +dataset.manageGuestbooks.tab.action.btn.edit = Редагувати +dataset.manageGuestbooks.tab.action.btn.preview = Попередній перегляд +dataset.manageGuestbooks.tab.action.btn.viewCollectedData = Переглянути відповіді +dataset.manageGuestbooks.tab.action.btn.delete = Видалити +dataset.manageGuestbooks.tab.action.btn.delete.dialog.header = Видалити гостьову книгу +dataset.manageGuestbooks.tab.action.btn.delete.dialog.tip = Ви впевнені, що хочете видалити цю гостьову книгу? Ви не можете відновити гостьову книгу. +dataset.manageGuestbooks.tab.action.btn.view.dialog.header = Перегляд гостьової книги +dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook.title = Після завантаження файлів, гостьова книга запитує наступну інформацію. +dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook = Назва гостьової книги +dataset.manageGuestbooks.tab.action.btn.viewCollectedData.dialog.header= Зібрані дані у гостьовій книзі набору даних +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData.title = Дані користувача, зібрані в гостьовій книзі. +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData = Зібрані дані +dataset.manageGuestbooks.tab.action.noedit.createdin = Гостьова книга створена в {0} +dataset.manageGuestbooks.message.deleteSuccess = Гостьову книгу видалено. +dataset.manageGuestbooks.message.deleteFailure = Гостьову книгу неможливо видалити. 
+dataset.manageGuestbooks.message.editSuccess = Гостьову книгу оновлено.
+dataset.manageGuestbooks.message.editFailure = Гостьову книгу неможливо оновити.
+dataset.manageGuestbooks.message.enableSuccess = Гостьову книгу увімкнено.
+dataset.manageGuestbooks.message.enableFailure = Гостьову книгу не вдалося увімкнути.
+dataset.manageGuestbooks.message.disableSuccess = Гостьову книгу вимкнено.
+dataset.manageGuestbooks.message.disableFailure = Гостьову книгу неможливо вимкнути.
+dataset.manageGuestbooks.tip.title= Керувати гостьовими книгами наборів даних
+dataset.manageGuestbooks.tip.downloadascsv=Натисніть \"Завантажити всі відповіді\", щоб завантажити всі зібрані відповіді у гостьовій книзі для цієї dataverse у вигляді CSV файлу. \
+Для пошуку та аналізу ваших зібраних відповідей ми рекомендуємо імпортувати цей CSV файл в Excel, Google Таблиці або аналогічні програми.
+dataset.guestbooksResponses.dataset = Набір даних
+dataset.guestbooksResponses.date = Дата
+dataset.guestbooksResponses.type = Тип
+dataset.guestbooksResponses.file = Файл
+dataset.guestbooksResponses.tip.title = Відповіді гостьової книги
+dataset.guestbooksResponses.count.responses = {0} {0, choice, 0#Відповіді|1#Відповідь|2#Відповіді}
+dataset.guestbooksResponses.count.toofresults = від {0} до {1} з {2} {2, choice, 0#Відповіді|1#Відповідь|2#Відповіді}
+dataset.guestbooksResponses.tip.downloadascsv= Натисніть \"Завантажити відповіді\", щоб завантажити всі зібрані відповіді для цієї гостьової книги у вигляді CSV файлу. \
+Для пошуку та аналізу ваших зібраних відповідей ми рекомендуємо імпортувати цей CSV файл в Excel, Google Таблиці або аналогічні програми.
+dataset.guestbooksResponses.tooManyResponses.message= Примітка. У цій гостьовій книзі забагато відповідей для показу на цій сторінці. Нижче показані лише самі останні {0} відповіді. \
+Натисніть \"Завантажити відповіді\", щоб завантажити всі зібрані відповіді (загалом {1}) у вигляді CSV файлу.
+
+# guestbook-responses.xhtml
+dataset.guestbookResponses.pageTitle= Відповіді гостьової книги
+
+# guestbook.xhtml
+dataset.manageGuestbooks.guestbook.name = Назва гостьової книги
+dataset.manageGuestbooks.guestbook.name.tip = Введіть унікальну назву для цієї гостьової книги.
+dataset.manageGuestbooks.guestbook.dataCollected = Зібрані дані
+dataset.manageGuestbooks.guestbook.dataCollected.description= Відомості про Dataverse акаунт, які збиратимуться, коли користувач завантажить файл. Перевірте ті, які будуть вимагатися.
+
+dataset.manageGuestbooks.guestbook.customQuestions= Запитання користувачів
+dataset.manageGuestbooks.guestbook.accountInformation = Інформація про акаунт
+dataset.manageGuestbooks.guestbook.required = (обов'язкові)
+dataset.manageGuestbooks.guestbook.optional = (необов'язкові)
+dataset.manageGuestbooks.guestbook.customQuestions.description= Створіть власні запитання, щоб під час завантаження файлу користувачі надавали більше відомостей, ніж ті, що стосуються акаунту. \
+Питання можуть бути обов'язковими або необов'язковими, а відповіді можуть бути текстовими або довільними.
+dataset.manageGuestbooks.guestbook.customQuestions.questionType= Тип запитання
+dataset.manageGuestbooks.guestbook.customQuestions.questionText= Запитання у текстовому форматі
+dataset.manageGuestbooks.guestbook.customQuestions.responseOptions=Варіанти відповідей
+dataset.manageGuestbooks.guestbook.customQuestions.questionType.text= Текстові
+dataset.manageGuestbooks.guestbook.customQuestions.questionType.multiple=довільні
+
+# guestbookResponseFragment.xhtml
+dataset.guestbookResponse.guestbook.additionalQuestions = Додаткові запитання
+dataset.guestbookResponse.guestbook.responseTooLong = Обмежте відповідь 255 символами
+
+# dataset.xhtml
+dataset.configureBtn = Налаштувати
+dataset.pageTitle = Додати новий набір даних
+dataset.editBtn = Редагувати
+dataset.editBtn.itemLabel.upload = Файли (завантажити)
+dataset.editBtn.itemLabel.metadata = Метадані
+dataset.editBtn.itemLabel.terms = Умови
+dataset.editBtn.itemLabel.permissions = Дозволи
+dataset.editBtn.itemLabel.thumbnailsAndWidgets = Мініатюри + Віджети
+dataset.editBtn.itemLabel.privateUrl = Приватна URL-адреса
+dataset.editBtn.itemLabel.permissionsDataset = Набір даних
+dataset.editBtn.itemLabel.permissionsFile = Обмежені файли
+dataset.editBtn.itemLabel.deleteDataset = Видалити набір даних
+dataset.editBtn.itemLabel.deleteDraft= Видалити проектну версію
+dataset.editBtn.itemLabel.deaccession=Припинити дію набору даних
+dataset.exportBtn= Експортувати метадані
+dataset.exportBtn.itemLabel.ddi=DDI
+dataset.exportBtn.itemLabel.dublinCore=Дублінське ядро
+dataset.exportBtn.itemLabel.schemaDotOrg=Schema.org JSON-LD
+dataset.exportBtn.itemLabel.json=JSON
+metrics.title = Показники
+metrics.title.tip = Переглянути інформацію про інші показники
+metrics.comingsoon = Скоро з'явиться
+metrics.views= Перегляди
+metrics.downloads={0, choice, 0#Завантаження|1#Завантаження|2#Завантаження}
+metrics.citations= Цитати
+metrics.shares=Поширення
+dataset.publish.btn = Опублікувати
+dataset.publish.header = Опублікувати \
набір даних
+dataset.rejectBtn = Повернути автору
+dataset.submitBtn = Надіслати на розгляд
+dataset.disabledSubmittedBtn = Надіслано на розгляд
+dataset.submitMessage= Ви не зможете внести зміни до цього набору даних, поки він розглядається.
+dataset.submit.success= Ваш набір даних був поданий на розгляд
+dataset.inreview.infoMessage=\u2013 Цей набір даних в даний час розглядається перед публікацією.
+dataset.submit.failure= Не вдалося надіслати набір даних - {0}
+dataset.submit.failure.null= Неможливо надіслати на розгляд. Набір даних недійсний.
+dataset.submit.failure.isReleased= Остання версія набору даних вже випущена. Лише проектні версії можна надсилати на розгляд.
+dataset.submit.failure.inReview= Ви не зможете надіслати цей набір даних на розгляд, тому що він уже розглядається.
+dataset.rejectMessage= Повернути цей набір даних автору на коригування.
+dataset.rejectWatermark= Будь ласка, введіть причину повернення цього набору даних його автору(ам).
+dataset.reject.enterReason= Необхідно вказати причину повернення автору
+dataset.reject.enterReason.header= Обов'язковий запис
+dataset.reject.success= Цей набір даних надіслано назад автору.
+dataset.reject.failure= Не вдалося надіслати набір даних назад автору - {0}
+dataset.reject.datasetNull= Неможливо повернути набір даних автору(ам), оскільки він недійсний.
+dataset.reject.datasetNotInReview= Цей набір даних неможливо повернути автору(ам), оскільки остання версія не є на розгляді. Автор(и) має спершу натиснути "Надіслати на розгляд".
+dataset.publish.tip= Ви впевнені, що хочете опублікувати цей набір даних? Як тільки ви це зробите, він має залишатися опублікованим.
+dataset.publishBoth.tip= Після публікації цього набору він повинен залишатися опублікованим.
+dataset.unregistered.tip= Цей набір даних незареєстрований. Ми спробуємо зареєструвати його перед публікацією.
+dataset.republish.tip= Ви впевнені, що хочете ще раз опублікувати цей набір даних?
+dataset.selectVersionNumber= Виберіть: це поточне чи основне оновлення версії.
+dataset.majorRelease=основний випуск
+dataset.minorRelease=поточний випуск
+dataset.majorRelease.tip= Через характер змін поточного проекту це буде основним випуском ({0})
+dataset.mayNotBePublished= Неможливо опублікувати набір даних.
+dataset.mayNotPublish.administrator= Цей набір даних неможливо опублікувати, поки його {0} не опублікує його адміністратор.
+dataset.mayNotPublish.both= Цей набір даних не можна опублікувати, поки не буде опубліковано {0}. Ви хотіли б опублікувати обидва прямо зараз?
+dataset.mayNotPublish.twoGenerations= Цей набір даних неможливо опублікувати, поки не будуть опубліковані {0} і {1}.
+dataset.mayNotBePublished.both.button = Так, опублікувати обидва
+dataset.viewVersion.unpublished = Переглянути неопубліковану версію
+dataset.viewVersion.published = Переглянути опубліковану версію
+dataset.email.datasetContactBtn= Ел. пошта контактної особи набору даних
+dataset.email.hiddenMessage=
+dataset.email.messageSubject= Тема тестового повідомлення
+dataset.email.datasetLinkBtn.tip= Прив'яжіть набір даних до вашої Dataverse
+dataset.share.datasetShare= Поширити набір даних
+dataset.share.datasetShare.tip= Поширити цей набір даних у ваших улюблених соціальних мережах.
+dataset.share.datasetShare.shareText = Переглянути цей набір даних.
+dataset.locked.message = Цей набір даних заблоковано.
+dataset.locked.inReview.message = Надіслано на розгляд.
+dataset.publish.error= Цей набір даних неможливо опублікувати, тому що {0} служба наразі недоступна. Будь ласка, спробуйте ще раз. Проблема продовжує існувати?
+dataset.publish.error.doi= Цей набір даних неможливо опублікувати, оскільки оновлення DOI не вдалося.
+dataset.delete.error= Не вдається припинити дію набору даних, оскільки не вдалося оновити {0}.
+dataset.publish.worldMap.deleteConfirm= Будь ласка, зверніть увагу, що ваші дані та карта на WorldMap будуть видалені через зміни у файлах обмеженого доступу у версії набору даних, яку ви публікуєте. Ви хочете продовжити? +dataset.publish.workflow.inprogress= Опублікувати поточну незавершену версію +dataset.versionUI.draft=Проектна версія +dataset.versionUI.inReview=В процесі розгляду +dataset.versionUI.unpublished= Не опублікована +dataset.versionUI.deaccessioned=Припинена дія +dataset.cite.title.released= ПРОЕКТНУ ВЕРСІЮ замінять на V1 в цитаті, як тільки набір даних буде опубліковано. +dataset.cite.title.draft= ПРОЕКТНУ ВЕРСІЮ замінять на вибрану версію в цитаті (посиланні), як тільки набір даних буде опубліковано. +dataset.cite.title.deassessioned= ВИЛУЧЕНУ ВЕРСІЮ додано до цитування для цієї версії, оскільки вона більше недоступна. +dataset.cite.standards.tip= Дізнайтеся про Стандарти цитування даних . +dataset.cite.downloadBtn= Цитувати набір даних +dataset.cite.downloadBtn.xml=EndNote XML +dataset.cite.downloadBtn.ris=RIS +dataset.cite.downloadBtn.bib=BibTeX +dataset.create.authenticatedUsersOnly= Тільки автентифіковані користувачі можуть створювати набори даних. +dataset.deaccession.reason= Причини припинення дії (вилучення) +dataset.beAccessedAt = Набір даних тепер доступний за адресою: +dataset.descriptionDisplay.title = Опис +dataset.keywordDisplay.title = Ключове слово +dataset.subjectDisplay.title = Тема +dataset.contact.tip= Використовувати кнопку електронної пошти вгорі, щоб зв'язатися. +dataset.asterisk.tip= Зірочки позначають обов'язкові поля +dataset.message.uploadFiles= Завантажити файли наборів даних - Ви можете перетягувати файли зі свого робочого столу безпосередньо у віджет завантаження. +dataset.message.editMetadata= Редагувати метадані набору даних - додайте більше метаданих про цей набір даних, щоб допомогти іншим легко його знайти. 
+dataset.message.editTerms= Редагувати умови використання набору даних - оновіть умови використання цього набору даних. +dataset.message.locked.editNotAllowedInReview= Не вдається редагувати набір даних через його блокування, оскільки він перебуває "В процесі розгляду". + +dataset.message.locked.downloadNotAllowedInReview= Файл(и) набору даних неможливо завантажити через блокування набору даних, оскільки він перебуває "В процесі розгляду". +dataset.message.locked.downloadNotAllowed= Файл(и) набору даних неможливо завантажити через блокування набору даних. +dataset.message.locked.editNotAllowed= Набір даних неможливо редагувати через блокування набору даних. +dataset.message.createSuccess= Цей набір даних створено. +dataset.message.linkSuccess= {0} успішно пов'язано з {1}. +dataset.message.metadataSuccess= Метадані цього набору даних оновлено. +dataset.message.termsSuccess= Умови для цього набору даних оновлено. +dataset.message.filesSuccess= Файли для цього набору даних оновлено. +dataset.message.publishSuccess= Цей набір даних був опубліковано. +dataset.message.only.authenticatedUsers= Тільки автентифіковані користувачі можуть випускати набори даних. +dataset.message.deleteSuccess= Цей набір даних видалено. +dataset.message.bulkFileUpdateSuccess= Вибрані файли оновлено. +dataset.message.bulkFileDeleteSuccess= Вибрані файли видалено. +datasetVersion.message.deleteSuccess= Цей проект набору даних видалено. +datasetVersion.message.deaccessionSuccess= Вибрані версії припинено. +dataset.message.deaccessionSuccess= Цей набір даних припинено. +dataset.message.validationError= Помилка перевірки - обов'язкові поля були пропущені або виникла помилка перевірки. Будь ласка, прокрутіть вниз, щоб переглянути деталі. +dataset.message.publishFailure= Набір даних неможливо опублікувати. +dataset.message.metadataFailure= Метадані неможливо оновити. +dataset.message.filesFailure= Файли неможливо оновити. +dataset.message.bulkFileDeleteFailure= Вибрані файли неможливо видалити. 
+dataset.message.files.ingestFailure= Файл(и) неможливо використати.
+dataset.message.deleteFailure= Цей проект набору даних неможливо видалити.
+dataset.message.deaccessionFailure= Цей набір даних неможливо припинити.
+dataset.message.createFailure= Набір даних неможливо створити.
+dataset.message.termsFailure= Умови використання набору даних неможливо оновити.
+dataset.message.publicInstall= Доступ до файлів - Файли зберігаються на загальнодоступному сервері зберігання.
+dataset.metadata.publicationDate = Дата публікації
+dataset.metadata.publicationDate.tip = Дата публікації набору даних.
+dataset.metadata.persistentId = Постійний ідентифікатор набору даних
+dataset.metadata.persistentId.tip= Унікальний постійний ідентифікатор для набору даних, який може бути Handle або DOI в Dataverse.
+dataset.versionDifferences.termsOfUseAccess= Умови використання та доступ
+dataset.versionDifferences.termsOfUseAccessChanged= Умови використання / доступ змінено
+file.viewDiffDialog.restricted= Обмежено
+dataset.template.tip= Зміна шаблону очистить будь-які поля, в які ви могли вводити дані.
+dataset.noTemplate.label = Немає
+dataset.noSelectedFiles.header = Вибрати файли
+dataset.noSelectedFilesForDownload = Будь ласка, виберіть файл або файли для завантаження.
+dataset.noSelectedFilesForRequestAccess = Будь ласка, виберіть файл або файли для запиту доступу.
+dataset.noSelectedFilesForDelete = Будь ласка, виберіть файл або файли для видалення.
+dataset.noSelectedFilesForMetadataEdit= Будь ласка, виберіть файл або файли для редагування.
+
+dataset.noSelectedFilesForRestrict= Будь ласка, виберіть необмежений файл (и) для перетворення його в обмежений.
+dataset.noSelectedFilesForUnRestrict= Будь ласка, виберіть обмежений файл (и) для перетворення його в необмежений.
+dataset.inValidSelectedFilesForDownload= Вибрані файли обмеженого доступу
+dataset.noValidSelectedFilesForDownload= Вибрані файли обмеженого доступу не можна завантажити, оскільки вам не надано доступу.
+dataset.mixedSelectedFilesForDownload= Вибрані файли обмеженого доступу не можна завантажити, оскільки вам не надано доступу.
+dataset.downloadUnrestricted= Натисніть Continue (продовжити), щоб завантажити файли, до яких у вас є доступ для завантаження.
+dataset.requestAccessToRestrictedFiles= Ви можете зробити запит про доступ до обмежених файлів, натиснувши кнопку Запит доступу.
+dataset.privateurl.infoMessageAuthor= Приватна URL-адреса для неопублікованого набору даних - Приватно поширте доступ до цього набору даних до його опублікування: {0}
+dataset.privateurl.infoMessageReviewer= Приватна URL-адреса для неопублікованого набору даних - Цей неопублікований набір даних поширюється у приватному порядку. - Ви не зможете отримати доступ до нього при вході в ваш Dataverse акаунт.
+dataset.privateurl.header= Приватна URL-адреса для неопублікованого набору даних
+dataset.privateurl.tip= Використовуйте приватну URL-адресу для неопублікованого набору даних, щоб дозволити тим, хто не має Dataverse акаунтів отримати доступ до вашого неопублікованого набору даних. Для отримання додаткової інформації про функцію Private URL, будь ласка, зверніться до Довідника користувача.
+dataset.privateurl.absent= Приватна URL-адреса не створена.
+dataset.privateurl.createPrivateUrl= Створити приватну URL-адресу.
+dataset.privateurl.disablePrivateUrl= Вимкнути приватну URL-адресу.
+dataset.privateurl.disablePrivateUrlConfirm=Так, вимкнути приватну URL-адресу.
+dataset.privateurl.disableConfirmationText= Ви впевнені, що хочете вимкнути приватну URL-адресу? Якщо ви поширите приватну URL-адресу серед інших, вони більше не зможуть її використовувати для отримання доступу до вашого неопублікованого набору даних.
+dataset.privateurl.cannotCreate= Приватна URL-адреса може використовуватися тільки з неопублікованими версіями наборів даних.
+dataset.privateurl.roleassigeeTitle= Приватна URL-адреса ввімкнена.
+dataset.privateurl.createdSuccess= Успіх!
+dataset.privateurl.disabledSuccess= Ви успішно вимкнули приватну URL-адресу для цього неопублікованого набору даних. +dataset.privateurl.noPermToCreate= Для створення приватної URL-адреси потрібно мати такі дозволи: {0}. +file.count={0} {0, choice, 0#файлів|1#файл|2#файли} +file.count.selected={0} {0, choice, 0#файлів вибрано|1#файл вибраний|2#файли вибрані} +file.selectToAddBtn = Виберіть файли для додавання +file.selectToAdd.tipLimit = Ліміт завантаження файлу - {0} байт на файл. +file.selectToAdd.tipMoreInformation= Для отримання додаткової інформації про підтримувані формати файлів, будь ласка, зверніться до Довідник користувача. + + +file.selectToAdd.dragdropMsg= Перетягніть файли сюди. +file.createUploadDisabled= Після збереження набору даних ви можете завантажувати свої дані за допомогою кнопки "Завантажити файли" на сторінці набору даних. Для отримання додаткової інформації про підтримувані формати файлів, будь ласка, зверніться до Довідника користувача. +file.fromDropbox= Завантажити з Dropbox +file.fromDropbox.tip= Файли також можна завантажувати безпосередньо з Dropbox. +file.replace.original = оригінальний файл +file.editFiles = Редагувати файли +file.bulkUpdate = Масове оновлення +file.uploadFiles = Завантажити файли +file.replaceFile = Замінити файл +file.notFound.tip = Немає файлів у цьому наборі даних. +file.noSelectedFiles.tip = Немає вибраних файлів для відображення. +file.noUploadedFiles.tip = Файли, які ви завантажуєте, з'являться тут. +file.replace = Замінити +file.replaced.warning.header = Редагувати файл +file.replaced.warning.draft.warningMessage= Ви не можете замінити файл, який був замінений в проекті набору даних. Щоб замінити його іншим файлом, ви повинні видалити проект набору даних Зауважте, що це призведе до відхилення будь-яких інших змін у цьому проекті. +file.replaced.warning.previous.warningMessage= Ви не можете редагувати файл, який був замінений у попередній версії набору даних. 
Для того, щоб відредагувати його, потрібно перейти до самої останньої опублікованої версії файлу. +file.alreadyDeleted.previous.warningMessage= Цей файл вже видалений у поточній версії. Його не можна редагувати. +file.delete = Видалити +file.metadata = Метадані +file.deleted.success= Файли "{0}" будуть постійно видалятися з цієї версії цього набору даних, коли ви натискатимете кнопку Зберегти зміни. +file.deleted.replacement.success= Файл заміни видалено. +file.editAccess = Редагувати доступ +file.restrict = Обмежити +file.unrestrict= Не обмежити +file.restricted.success= Файли "{0}" будуть обмежені після натискання кнопки "Зберегти зміни". +file.download.header = Завантажити +file.download.subset.header = Завантажити піднабір даних +file.preview = Попередній перегляд: +file.previewMap = Попередній перегляд карти: +file.fileName = Ім'я файлу +file.type.tabularData = Табличні дані +file.originalChecksumType = Оригінальний файл {0} +file.checksum.exists.tip= Файл із цією контрольною сумою вже існує в наборі даних. +file.selectedThumbnail= Мініатюра +file.selectedThumbnail.tip= Мініатюра для цього файлу використовується як мініатюра за замовчуванням для набору даних. Натисніть кнопку "Додаткові параметри" іншого файлу, щоб вибрати цей файл. +file.cloudStorageAccess= Доступ до Cloud Storage (хмарного сховища) +file.cloudStorageAccess.tip= Ім'я контейнера для цього набору даних, необхідне для доступу до файлів у хмарному сховищі. + +file.cloudStorageAccess.help= Для безпосереднього доступу до цих даних в хмарному середовищі {2}, використовуйте ім'я контейнера в полі Cloud Storage Access нижче. Щоб дізнатись більше про хмарне середовище відвідайте розділ Довідника користувача Cloud Storage Access . +file.copy= Копіювати +file.compute= Обчислити +file.rsyncUpload.info= Виконайте ці кроки, щоб завантажити свої дані. Щоб дізнатись більше про процес завантаження та способи підготовки ваших даних, зверніться до Довідника користувача User Guide. 
+file.rsyncUpload.noScriptAvailable= Сценарій Rsync недоступний! +file.rsyncUpload.filesExist= Ви не можете завантажувати додаткові файли в цей набір даних. +file.rsyncUpload.step1= Переконайтеся, що ваші дані зберігаються в єдиному каталозі. Всі файли в цьому каталозі та його підкаталогах будуть завантажені у ваш набір даних. +file.rsyncUpload.step2= Завантажте цей сценарій завантаження файлів: +file.rsyncUpload.step2.downloadScriptButton= Завантажити сценарій. +file.rsyncUpload.step3= Відкрийте вікно терміналу в тому самому каталозі, в якому ви зберегли сценарій, і запустіть цю команду: bash ./{0} +file.rsyncUpload.step4= Дотримуйтесь інструкцій у сценарії. Він вимагатиме повний шлях (починаючи з "/") до каталогу, що містить ваші дані. Примітка: цей сценарій закінчиться через 7 днів. +file.rsyncUpload.inProgressMessage.summary= Завантажити файл DCM +file.rsyncUpload.inProgressMessage.details= Цей набір даних заблоковано, поки файли даних не будуть передані та перевірені. + + +file.metaData.dataFile.dataTab.variables = Змінні +file.metaData.dataFile.dataTab.observations = Спостереження +file.metaData.viewOnWorldMap = Дослідіть на WorldMap +file.addDescription = Додати опис файлу ... +file.tags = Теги +file.editTags = Редагувати теги +file.editTagsDialog.tip = Виберіть існуючі теги файлів або створіть нові теги для опису ваших файлів. Кожен файл може містити більше одного тегу. +file.editTagsDialog.select = Теги файлів +file.editTagsDialog.selectedTags = Вибрані теги +file.editTagsDialog.selectedTags.none = Не вибрано жодного тегу +file.editTagsDialog.add = Спеціальний тег файлу +file.editTagsDialog.add.tip= Створення нового тегу додасть його як опцію тегу для всіх файлів цього набору даних. +file.editTagsDialog.newName = Додати новий тег ... +dataset.removeUnusedFileTags.label = Видалити теги +dataset.removeUnusedFileTags.tip= Виберіть "видалити власні теги файлів", які не використовуються файлами в наборі даних. 
+dataset.removeUnusedFileTags.check = Видалити теги, які не використовуються +file.setThumbnail = Встановити мініатюру +file.setThumbnail.header = Встановити мініатюру набору даних +file.datasetThumbnail = Мініатюра набору даних +file.datasetThumbnail.tip= Виберіть "використовувати це зображення як мініатюру", що відображатиметься в результатах пошуку цього набору даних. +file.setThumbnail.confirmation= Ви впевнені, що хочете встановити це зображення як мініатюру набору даних? Зображення, завантажене як мініатюра, вже існує, і ця дія видалить його. +file.useThisIamge= Використовуйте це зображення як мініатюру зображення набору даних. +file.advancedOptions= Додаткові параметри +file.advancedIngestOptions= Додаткові параметри введення +file.assignedDataverseImage.success={0} збережено як мініатюра цього набору даних. +file.assignedTabFileTags.success= Теги успішно додані для {0}. +file.tabularDataTags= Теги табличних даних. +file.tabularDataTags.tip= Виберіть тег для опису типу (ів) даних, наприклад (дослідження, часові ряди, геопросторові тощо). +file.spss-savEncoding= мова кодування +file.spss-savEncoding.title= Виберіть мову, яка використовується для кодування цього файлу даних SPSS (sav). +file.spss-savEncoding.current= поточний вибір: +file.spss-porExtraLabels= Ярлики змінних +file.spss-porExtraLabels.title= Завантажте додатковий текстовий файл із додатковими ярликами змінних. +file.spss-porExtraLabels.selectToAddBtn= Виберіть файл для додавання +file.ingestFailed.header= Завантажте завершено з помилками +file.ingestFailed.message= Помилка введення табличних даних. +file.map=Карта +file.mapData=Дані Карти +file.mapData.worldMap=WorldMap +file.mapData.unpublished.header= Дані не опублікованію +file.mapData.unpublished.message= Для того, щоб картувати свої дані за допомогою WorldMap, ваші дані повинні бути опубліковані. Опублікуйте цей набір даних, а потім повторіть спробу функції " Дані Карти ". 
+file.downloadBtn.format.all= Усі формати файлів + інформація +file.downloadBtn.format.tab=З роздільниками табуляцією +file.downloadBtn.format.original= Формат оригінального файлу ({0}) +file.downloadBtn.format.rdata= Формат RData +file.downloadBtn.format.var= Метадані змінних +file.downloadBtn.format.citation = Цитата файлу даних +file.more.information.link = Посилання на більше інформації про файл для +file.requestAccess = Запит доступу +file.requestAccess.dialog.msg= Вам потрібно увійти щоб надіслати запит про доступ до цього файлу. +file.requestAccess.dialog.msg.signup= Вам потрібно Зареєструватися або увійти щоб надіслати запит про доступ до цього файлу. +file.accessRequested= Запит про доступ +file.restrictions= Обмеження файлів +file.restrictions.description= Обмежити доступ до опублікованих файлів, позначивши їх як обмежені. Надайте користувачам умови доступу та дозвольте їм замовити доступ. + +file.restrictions.worldmap.warning= Зверніть увагу, що, як тільки будуть опубліковані ваші зміни доступу до файлу, ваша карта на WorldMap буде видалена, а функція дослідити на WorldMap буде скасована. +file.ingestInProgress=Введення виконується... +file.dataFilesTab.metadata.header = Метадані +file.dataFilesTab.metadata.addBtn = Додати + Змінити метадані +file.dataFilesTab.terms.header = Умови +file.dataFilesTab.terms.editTermsBtn= Редагувати вимоги до умов використання +file.dataFilesTab.terms.list.termsOfUse.header= Умови використання +file.dataFilesTab.terms.list.termsOfUse.waiver= Дозвіл на відкритий доступ +file.dataFilesTab.terms.list.termsOfUse.waiver.title= Дозвіл на відкритий доступ повідомляє завантажувачам даних, як вони можуть використовувати цей набір даних. +file.dataFilesTab.terms.list.termsOfUse.waiver.txt=CC0 - "Відкритий доступ" +file.dataFilesTab.terms.list.termsOfUse.waiver.description= Набори даних будуть за замовчуванням CC0 public domain dedication . 
Відкрита ліцензія Творчої Співдружності (CC0) полегшує повторне використання та розширюваність дослідницьких даних. Ми сподіваємось, що завдяки цитуванням користувачі високо оцінять наші стандарти Community Norms а також хороші наукові практики. Якщо ви не можете надати наборам даних відкритий доступ, ви можете ввести "Спеціальні умови використання для наборів даних". +file.dataFilesTab.terms.list.termsOfUse.no.waiver.txt= Для цього набору даних відкритий доступ не вибраний. +file.dataFilesTab.terms.list.termsOfUse.waiver.txt.description= Ми сподіваємось, що завдяки цитуванням користувачі високо оцінять наші стандарти Community Norms а також хороші наукові практики. +Будь ласка, використовуйте вищезгадане цитування даних, утворене Dataverse. +file.dataFilesTab.terms.list.termsOfUse.waiver.select.CCO= Так, застосувати відкриту ліцензію CC0 "Відкритий доступ" +file.dataFilesTab.terms.list.termsOfUse.waiver.select.notCCO= Ні, не застосувати відкриту ліцензію CC0 "Відкритий доступ". +file.dataFilesTab.terms.list.termsOfUse.waiver.select.tip= Ось що побачать кінцеві користувачі на цьому наборі даних. +file.dataFilesTab.terms.list.termsOfUse.termsOfUse = Умови використання +file.dataFilesTab.terms.list.termsOfUse.termsOfUse.title = Описано, як ці дані можна використовувати у разі завантаження. +file.dataFilesTab.terms.list.termsOfUse.termsOfUse.description= Якщо ви не можете використовувати CC0 для наборів даних, ви можете встановити спеціальні умови користування. Ось приклад Угода про використання даних для наборів даних, які містять де-ідентифіковану інформацію. +file.dataFilesTab.terms.list.termsOfUse.addInfo= Додаткова інформація +file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration= Декларація конфіденційності +file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration.title= Вказує, чи потрібно підписувати декларацію конфіденційності для доступу до ресурсу. +file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions= Спеціальні дозволи. 
+file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions.title= Визначте, чи потрібні спеціальні дозволи для доступу до ресурсу (наприклад, чи потрібна форма і де отримати доступ до форми). +file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions= Обмеження. +file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions.title= Будь-які обмеження доступу або використання збірки наборів, такі як засвідчення конфіденційності або обмеження розповсюдження, слід вказати тут. Це можуть бути обмеження, застосовані автором, виробником або розповсюджувачем даних. Якщо дані обмежені лише для певного типу користувачів, вкажіть для якого саме. +file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements= Вимоги до цитування +file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements.title= Включити спеціальні /точні вимоги до цитування для даних, які повинні належним чином цитуватися в статтях або інших публікаціях, що базуються на аналізі даних. Щодо стандартних вимог до цитування даних, зверніться до Стандартів нашої Спільноти. +file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements= Вимоги до власника наборів даних. +file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements.title= Інформація про відповідальність користувачів за інформування власників даних, авторів та кураторів про використання їхніх даних шляхом надання посилань (цитат) на опубліковану роботу або надання копій рукописів. +file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions=Умови. +file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions.title= Будь-яка додаткова інформація, яка допоможе користувачеві зрозуміти умови доступу та використання набору даних. +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer=Дискламація +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer.title= Інформація про відповідальність користувачів за використання даних набору. 
+file.dataFilesTab.terms.list.termsOfAccess.header = Обмежені файли + Умови доступу +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles = Обмежені файли +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.title = Кількість файлів обмеженого доступу в цьому наборі даних +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=У цьому наборі даних {0} {0, choice, 0#обмежених файлів|1#обмежений файл|2#обмежених файлів}. +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess= Умови доступу +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess.title= Інформація про те, чи можуть користувачі отримати доступ до обмежених файлів у цьому наборі даних і яким чином. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess= Запит доступу +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title= Якщо цей пункт позначено "пташкою", користувачі можуть запитувати доступ до обмежених файлів у цьому наборі даних. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.request = Користувачі можуть запитувати доступ до файлів. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.notRequest = Користувачі не можуть запитувати доступ до файлів. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.enableBtn = Увімкнути запит доступу +file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace = Місце доступу до даних +file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace.title = Якщо дані зберігаються не тільки в Dataverse, то перелічіть місця сховища. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive = Оригінал архіву +file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive.title = Архів, з якого отримані дані. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus = Статус доступності. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus.title= Заява про доступність набору даних. 
Власнику, можливо, треба буде вказати, що набір даних недоступний на деякий час через заборону, заміну, очікувану нову редакцію і т. д. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess=Електронна пошта для доступу. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess.title= Якщо відрізняється від Ел. пошти Dataset Contact, то - це контактна особа або організація (включно з електронною поштою або повною адресою та номерем телефону, якщо є), яка контролює доступ до збірки. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection= Розмір збірки. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection.tip= Підсумок кількості фізичних файлів існуючих в наборі даних, що записує кількість файлів, які містять дані та зазначають, чи містить ця колекція машинозчитувану документацію та / або інші додаткові файли та інформацію, таку як код, словники даних, тлумачення даних або інструменти збору даних. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion= Завершення дослідження. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion.title= Зв'язок зібраних даних з кількістю даних, закодованих та збережених у наборі даних. Інформація про те, чому окремі елементи зібраної інформації не були включені в набір даних, або чому потрібно надати певний файл даних. +file.dataFilesTab.terms.list.guestbook = Гостьова книга +file.dataFilesTab.terms.list.guestbook.title = Інформація про користувача (наприклад, ім'я, електронна адреса, установа та посада) буде зібрана під час завантаження файлів. +file.dataFilesTab.terms.list.guestbook.noSelected.tip= Для цього набору даних не призначено жодної гостьової книги, вам не буде запропоновано надавати будь-яку інформацію про завантаження файлів. +file.dataFilesTab.terms.list.guestbook.noSelected.admin.tip = Немає гостьових книг у {0}, призначених для цього набору даних. 
+file.dataFilesTab.terms.list.guestbook.inUse.tip = Наступна гостьова книга запропонує користувачеві надавати додаткову інформацію під час завантаження файлу. +file.dataFilesTab.terms.list.guestbook.viewBtn = Перегляд гостьової книги +file.dataFilesTab.terms.list.guestbook.select.tip = Виберіть гостьову книгу, щоб користувач надавав додаткову інформацію під час завантаження файлу. +file.dataFilesTab.terms.list.guestbook.noAvailable.tip= У {0} немає увімкнених гостьових книг. Щоб створити гостьову книгу, поверніться до {0}, натисніть кнопку "Редагувати" та оберіть опцію "Гостьові книги наборів даних ". +file.dataFilesTab.terms.list.guestbook.clearBtn=Очистити вибір. +file.dataFilesTab.dataAccess= Доступ до даних. +file.dataFilesTab.dataAccess.info= Доступ до цього файлу даних можна отримати через вікно терміналу за допомогою команд нижче. Щоб отримати більше інформації про завантаження та перевірку даних, перегляньте наш Довідник користувача User Guide. . +file.dataFilesTab.dataAccess.info.draft= Файли даних недоступні, поки проект набору даних не буде опубліковано. Щоб отримати більше інформації про завантаження та перевірку даних, перегляньте Довідник користувача Dataverse User Guide. + +file.dataFilesTab.dataAccess.local.label = Локальний доступ +file.dataFilesTab.dataAccess.download.label = Завантаження доступу +file.dataFilesTab.dataAccess.verify.label = Перевірити дані +file.dataFilesTab.dataAccess.local.tooltip = Якщо ці дані локально доступні для вас, це їхній шлях до файлу. +file.dataFilesTab.dataAccess.download.tooltip = Завантажте ці дані з вибраного вами дзеркала, запустивши цю команду. +file.dataFilesTab.dataAccess.verify.tooltip = Ця команда запускає контрольну суму для перевірки цілісності завантажених даних. 
+ file.dataFilesTab.versions = Версії +file.dataFilesTab.versions.headers.dataset = Набір даних +file.dataFilesTab.versions.headers.summary = Резюме +file.dataFilesTab.versions.headers.contributors = Автори +file.dataFilesTab.versions.headers.published = Опубліковано +file.dataFilesTab.versions.viewDiffBtn = Переглянути відмінності +file.dataFilesTab.versions.citationMetadata = Метадані цитування: +file.dataFilesTab.versions.added = Додано +file.dataFilesTab.versions.removed = Вилучено +file.dataFilesTab.versions.changed = Змінено +file.dataFilesTab.versions.replaced = Замінено +file.dataFilesTab.versions.original = оригінал +file.dataFilesTab.versions.replacment = Заміна +file.dataFilesTab.versions.additionalCitationMetadata = Додаткові метадані цитування: +file.dataFilesTab.versions.description.draft= Це проектна версія. +file.dataFilesTab.versions.description.deaccessioned= Через те, що попередня версія була припинена, відсутні відмітки про відмінності для цієї опублікованої версії. +file.dataFilesTab.versions.description.firstPublished= Це перша опублікована версія. +file.dataFilesTab.versions.description.deaccessionedReason= Причина припинення дії: +file.dataFilesTab.versions.description.beAccessedAt = Набір даних тепер доступний за адресою: +file.dataFilesTab.versions.viewDetails.btn = Переглянути деталі +file.dataFilesTab.versions.widget.viewMoreInfo= Щоб переглянути додаткову інформацію про версії цього набору даних, а також редагувати його, якщо це ваш набір даних, відвідайте повна версія цього набору full version of this dataset на {2}. +file.deleteDialog.tip = Ви впевнені, що хочете видалити цей набір даних? Ви не можете відновити цей набір даних. +file.deleteDialog.header = Видалити набір даних. +file.deleteDraftDialog.tip=Are you sure you want to delete this draft version? You cannot undelete this draft. Ви впевнені, що хочете видалити цю проектну версію? Ви не можете відновити її. +file.deleteDraftDialog.header= Видалити проектну версію. 
+file.deleteFileDialog.tip= Файл(и) буде видалено після натискання кнопки Зберегти зміни в нижній частині цієї сторінки. +file.deleteFileDialog.immediate= Файл буде видалено після натискання кнопки "Видалити". +file.deleteFileDialog.multiple.immediate= Файли буде видалено після натискання кнопки "Видалити". +file.deleteFileDialog.header = Видалити файли +file.deleteFileDialog.failed.tip = Файли не будуть видалені з раніше опублікованих версій набору даних. +file.deaccessionDialog.tip= Щойно ви припинете дію цього набору даних, він більше не буде відкритим для загального перегляду. +file.deaccessionDialog.version=Версія. +file.deaccessionDialog.reason.question1= Яку(і) версію(ї) ви хочете припинити? +file.deaccessionDialog.reason.question2= Яка причина припинення дії версії? +file.deaccessionDialog.reason.selectItem.identifiable= В одному або декількох файлах є дані, які можна ідентифікувати. +file.deaccessionDialog.reason.selectItem.beRetracted= Наукову статтю було відхилено. +file.deaccessionDialog.reason.selectItem.beTransferred= Набір даних був перенесений до іншого сховища. +file.deaccessionDialog.reason.selectItem.IRB= запит про IRB. +file.deaccessionDialog.reason.selectItem.legalIssue= Правові умови чи Угода про використання даних. +file.deaccessionDialog.reason.selectItem.notValid = Недійсний набір даних +file.deaccessionDialog.reason.selectItem.other = Інша (Будь ласка, введіть причину в поле, вказане нижче). +file.deaccessionDialog.enterInfo= Будь ласка, введіть додаткову інформацію про причину припинення дії. +file.deaccessionDialog.leaveURL= Якщо це передбачено, залиште URL-адресу, де можна отримати доступ до цього набору даних після припинення дії. +file.deaccessionDialog.leaveURL.watermark=Альтернативний веб-сайт набору даних http://... +file.deaccessionDialog.deaccession.tip= Ви впевнені, що хочете припинити дію? Вибрана(і) версія(ї) більше не будуть доступні для загального перегляду. 
+file.deaccessionDialog.deaccessionDataset.tip= Ви впевнені, що хочете припинити дію цього набору даних? Він більше не буде доступним для загального перегляду. +file.deaccessionDialog.dialog.selectVersion.tip= Будь ласка, виберіть версію(ї) для припинення дії. +file.deaccessionDialog.dialog.selectVersion.header= Будь ласка, виберіть версію(ї). +file.deaccessionDialog.dialog.reason.tip= Будь ласка, виберіть причину припинення дії. +file.deaccessionDialog.dialog.reason.header= Будь ласка, виберіть "Причина". +file.deaccessionDialog.dialog.url.tip= Будь ласка, введіть дійсну URL-адресу надсилання. +file.deaccessionDialog.dialog.url.header = Недійсна URL-адреса +file.deaccessionDialog.dialog.textForReason.tip = Будь ласка, введіть текст для причини припинення дії. +file.deaccessionDialog.dialog.textForReason.header = Введіть додаткову інформацію +file.deaccessionDialog.dialog.limitChar.tip= Довжина тексту причини припинення дії не може перевищувати 1000 символів. +file.deaccessionDialog.dialog.limitChar.header= Обмежитись 1000 символами. +file.viewDiffDialog.header= Деталі відмінностей версій. +file.viewDiffDialog.dialog.warning=. Будь-ласка, оберіть дві версії, щоб переглянути відмінності. +file.viewDiffDialog.version = Версія +file.viewDiffDialog.lastUpdated = Останнє оновлення +file.viewDiffDialog.fileID = Ідентифікатор файлу +file.viewDiffDialog.fileName = Ім'я +file.viewDiffDialog.fileType = Тип +file.viewDiffDialog.fileSize = Розмір +file.viewDiffDialog.category = Теги (и) +file.viewDiffDialog.description = Опис +file.viewDiffDialog.fileReplaced = Файл замінено +file.viewDiffDialog.filesReplaced = Файл (и) замінено +file.viewDiffDialog.files.header = Файли +file.viewDiffDialog.msg.draftFound=  Це "проектна" версія. +file.viewDiffDialog.msg.draftNotFound="проектна" версія не знайдена. +file.viewDiffDialog.msg.versionFound= Це версія "{0}". +file.viewDiffDialog.msg.versionNotFound=Версія "{0}" не знайдена. 
+file.metadataTip=Metadata Tip: Рекомендації для метаданих: Після додавання набору даних натисніть кнопку Редагувати набір даних, щоб додати більше метаданих. +file.addBtn = Зберегти набір даних. +file.dataset.allFiles = Усі файли з цього набору даних. +file.downloadDialog.header = Умови набору даних. +file.downloadDialog.tip= Щоб продовжити, будь ласка, підтвердіть та/або заповніть інформацію, що вимагається нижче. +file.requestAccessTermsDialog.tip= Будь ласка, підтвердіть та/або заповніть інформацію, що вимагається нижче, для запиту доступу до файлів у цьому наборі даних. +file.search.placeholder = Шукати цей набір даних ... +file.results.btn.sort = Сортувати +file.results.btn.sort.option.nameAZ = Ім'я (A-Z) +file.results.btn.sort.option.nameZA = Ім'я (Z-A) +file.results.btn.sort.option.newest = Найновіший +file.results.btn.sort.option.oldest = Найстаріший +file.results.btn.sort.option.size = Розмір +file.results.btn.sort.option.type = Тип +file.compute.fileRestricted = Обмежений файл +file.compute.fileAccessDenied= Ви не можете обчислювати на основі цього обмеженого файлу, оскільки у вас немає дозволу на доступ до нього. +file.configure.Button= Налаштувати. +file.configure.launchMessage.details= Будь ласка, оновіть цю сторінку, як тільки ви закінчите налаштовувати свою. +dataset.compute.datasetCompute= Обчислення набору даних не підтримується. +dataset.compute.datasetAccessDenied= Ви не можете обчислювати на цьому наборі даних, оскільки у вас немає дозволу на доступ до всіх обмежених файлів. +dataset.compute.datasetComputeDisabled= Ви не можете обчислювати на цьому наборі даних, оскільки ця функція ще не увімкнена. Будь ласка, натисніть на файл, щоб отримати доступ до обчислювальних функцій. + +# dataset-widgets.xhtml +dataset.widgets.title = Мініатюра набору даних + Віджети +dataset.widgets.notPublished.why.header = Чому слід користуватися віджетами? 
+dataset.widgets.notPublished.why.reason1= Підвищують видимість ваших даних, дозволяючи вставляти ваші dataverse та набори даних на вашому особистиму або проектному веб-сайті. +dataset.widgets.notPublished.why.reason2= Дозволяє іншим переглядати ваші dataverse та набори даних, не виходячи з вашого особистого або проекту веб-сайту. +dataset.widgets.notPublished.how.header= Як використовувати віджети +dataset.widgets.notPublished.how.tip1=Для використання віджетів, ваші dataverse та набори даних повинні бути опубліковані. +dataset.widgets.notPublished.how.tip2= Після публікації код буде доступний на цій сторінці, щоб ви могли скопіювати та додати на ваш особистий або проектний веб-сайт. +dataset.widgets.notPublished.how.tip3= У вас є веб-сайт OpenScholar? Якщо так, дізнайтеся більше про додавання Dataverse віджетів до вашого веб-сайту тут here. + +dataset.widgets.notPublished.getStarted= Щоб розпочати, опублікуйте свій набір даних. Щоб дізнатися більше про віджети, відвідайте розділ Довідника користувачаWidgets. +dataset.widgets.editAdvanced= Редагувати додаткові параметри. +dataset.widgets.editAdvanced.tip=Advanced Options – Додаткові параметри налаштування вашого віджета на вашому особистому або проектному веб-сайті. +dataset.widgets.tip= Скопіюйте та вставте цей код у HTML на своєму сайті. Щоб дізнатися більше про віджети, відвідайте розділ Довідника користувача Widgets. +dataset.widgets.citation.txt= Цитування (посилання) набору даних. +dataset.widgets.citation.tip=Додайте посилання для свого набору даних на вашому особистому або проектному веб-сайті. +dataset.widgets.datasetFull.txt= Набір даних. +dataset.widgets.datasetFull.tip= Додайте шлях для відвідувачів на вашому веб-сайті, щоб вони мали змогу переглядати ваші набори даних, завантажувати файли тощо. +dataset.widgets.advanced.popup.header= Додаткові параметри віджета. +dataset.widgets.advanced.prompt= Надішліть постійні URL-адреси у посиланні вашого набору даних на ваш особистий веб-сайт. 
+dataset.widgets.advanced.url.label= URL- адреса особистого веб-сайту. +dataset.widgets.advanced.url.watermark=http://www.example.com/page-name +dataset.widgets.advanced.invalid.message = Будь ласка, введіть дійсну URL-адресу +dataset.widgets.advanced.success.message = Успішно оновлено вашу URL-адресу особистого веб-сайту. +dataset.widgets.advanced.failure.message= URL-адресу персонального dataverse веб-сайту не оновлено. +dataset.thumbnailsAndWidget.breadcrumbs.title= Мініатюра + Віджети +dataset.thumbnailsAndWidget.thumbnails.title= Мініатюра +dataset.thumbnailsAndWidget.widgets.title= Віджети +dataset.thumbnailsAndWidget.thumbnailImage= Зображення мініатюри. +dataset.thumbnailsAndWidget.thumbnailImage.title= Файл логотипу або зображення, який ви хочете показати як мініатюру цього набору даних. +dataset.thumbnailsAndWidget.thumbnailImage.tip= Підтримувані типи зображень - JPG, TIF або PNG, і їхній розмір не повинен перевищувати {0} Кб. Максимальний розмір зображення для файлу зображення у вигляді мініатюри набору даних 48 пікселів завширшки і 48 пікселів заввишки. +dataset.thumbnailsAndWidget.thumbnailImage.default = Іконка за замовчуванням +dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable = Виберіть доступний файл +dataset.thumbnailsAndWidget.thumbnailImage.selectThumbnail = Вибрати мініатюру +dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable.title= Виберіть мініатюру з тих, що доступні у вигляді файлів даних зображення, які належать вашому набору даних. +dataset.thumbnailsAndWidget.thumbnailImage.uploadNew= Завантажити новий файл. +dataset.thumbnailsAndWidget.thumbnailImage.uploadNew.title= Завантажте файл зображення як мініатюру вашого набору даних, який буде зберігатися окремо від файлів даних, що належать вашому набору даних. +dataset.thumbnailsAndWidget.thumbnailImage.upload = Завантажити зображення +dataset.thumbnailsAndWidget.thumbnailImage.upload.invalidMsg = Зображення неможливо завантажити. 
Будь ласка, повторіть спробу за допомогою файлу JPG, TIF або PNG. +dataset.thumbnailsAndWidget.success= Мініатюру набору даних оновлено. + +dataset.thumbnailsAndWidget.removeThumbnail= Видалити мініатюру. +dataset.thumbnailsAndWidget.removeThumbnail.tip=Ви видаляєте це зображення лише як мініатюру набору даних, не видаляючи його з вашого набору даних. Для цього перейдіть на сторінку Редагувати файли. +dataset.thumbnailsAndWidget.availableThumbnails= Доступні мініатюри. +dataset.thumbnailsAndWidget.availableThumbnails.tip= Виберіть мініатюру з файлів даних, які належать вашому набору даних. Поверніться на сторінку Мініатюри + віджети, щоб зберегти ваші зміни. + +# file.xhtml +file.share.fileShare=Поширте цей файл. +file.share.fileShare.tip= Поширте цей файл у ваших улюблених соціальних мережах. +file.share.fileShare.shareText = Переглянути цей файл. +file.title.label = Назва +file.citation.label = Цитата +file.cite.downloadBtn=Цитувати вайл даних. +file.general.metadata.label = Загальні метадані +file.description.label = Опис +file.tags.label = Теги +file.lastupdated.label = Останнє оновлення +file.DatasetVersion = Версія +file.metadataTab.fileMetadata.header = Метадані файлу +file.metadataTab.fileMetadata.persistentid.label= Постійний ідентифікатор файлу даних. +file.metadataTab.fileMetadata.downloadUrl.label = Завантажити URL-адресу +file.metadataTab.fileMetadata.unf.label = UNF +file.metadataTab.fileMetadata.size.label = Розмір +file.metadataTab.fileMetadata.type.label = Тип +file.metadataTab.fileMetadata.description.label = Опис +file.metadataTab.fileMetadata.publicationDate.label = Дата публікації +file.metadataTab.fileMetadata.depositDate.label = Дата введення +file.metadataTab.fitsMetadata.header=Метадані FITS. +file.metadataTab.provenance.header= Походження файлів. +file.metadataTab.provenance.body= Інформація про походження файлів, що надійде у пізнішому випуску ... +file.versionDifferences.noChanges= Немає змін, пов'язаних з цією версією. 
+file.versionDifferences.fileNotInVersion= Файл не включено до цієї версії. +file.versionDifferences.actionChanged = Змінено +file.versionDifferences.actionAdded = Додано +file.versionDifferences.actionRemoved = Вилучено +file.versionDifferences.actionReplaced = Замінено +file.versionDifferences.fileMetadataGroupTitle = Метадані файлу. +file.versionDifferences.fileTagsGroupTitle = Теги файлів +file.versionDifferences.descriptionDetailTitle = Опис +file.versionDifferences.fileNameDetailTitle = Ім'я файлу +file.versionDifferences.fileAccessTitle = Доступ до файлу +file.versionDifferences.fileRestricted = Обмежений +file.versionDifferences.fileUnrestricted = Необмежений +file.versionDifferences.fileGroupTitle = Файл + +# File Ingest Введення файлу +ingest.csv.invalidHeader= Недійсний рядок заголовка. Одна з клітин порожня. +ingest.csv.lineMismatch= Невідповідність між кількістю рядків у першій та останній передачах! {0} знайдено після першої передачі, але {1} знайдено після другої. +ingest.csv.recordMismatch= Невідповідність зчитування, рядок {0} файлу даних: {1} очікувані граничні значення, {2} знайдені. +ingest.csv.nullStream= Потік не може бути нульовим. + +# editdatafile.xhtml + +# editFilesFragment.xhtml +file.edit.error.file_exceeds_limit= Цей файл перевищує обмеження розміру. +# File metadata error Помилка метаданих файлів +file.metadata.datafiletag.not_tabular= Ви не можете додати теги табличних даних до не табличного файлу. + +# File Edit Success Успіх у редагування файлу +file.message.editSuccess= Цей файл оновлено. +file.message.deleteSuccess= Цей файл видалено. +file.message.replaceSuccess= Цей файл замінено. + +# File Add/Replace operation messages операційні повідомлення про Додавання/Заміну Файлу +file.addreplace.file_size_ok= Розмір файлу в допустимому діапазоні. +file.addreplace.error.file_exceeds_limit= Розмір цього файлу ({0}) перевищує максимальний розмір {1} байт. 
+file.addreplace.error.dataset_is_null = Набір даних не може бути нульовим. +file.addreplace.error.dataset_id_is_null = Ідентифікатор набору даних не може бути нульовим. +find.dataset.error.dataset_id_is_null= При доступі до набору даних на основі постійного ідентифікатора, повинен бути присутнім параметр запиту {0}. +find.dataset.error.dataset.not.found.persistentId= Набір даних з постійним ідентифікатором {0} не знайдено. +find.dataset.error.dataset.not.found.id= Набір даних з ідентифікатором {0} не знайдено. +find.dataset.error.dataset.not.found.bad.id=Невірний номер ідентифікатора набору даних: {0}. +file.addreplace.error.dataset_id_not_found= Не знайдено жодного набору даних для ідентифікатора: +file.addreplace.error.no_edit_dataset_permission= Ви не маєте дозволу редагувати цей набір даних. +file.addreplace.error.filename_undetermined= Ім'я файлу неможливо визначити. +file.addreplace.error.file_content_type_undetermined= Тип змісту файлу неможливо визначити. +file.addreplace.error.file_upload_failed = Не вдалося завантажити файл. +file.addreplace.error.duplicate_file = Цей файл вже існує у наборі даних. +file.addreplace.error.existing_file_to_replace_id_is_null= Необхідно вказати ідентифікатор існуючого файла для заміни. +file.addreplace.error.existing_file_to_replace_not_found_by_id= Файл заміни не знайдено. Не знайдено жодного файлу для ідентифікатора: {0}. +file.addreplace.error.existing_file_to_replace_is_null= Файл заміни не може бути нульовим. +file.addreplace.error.existing_file_to_replace_not_in_dataset= Файл заміни не належить до цього набору даних. +file.addreplace.error.existing_file_not_in_latest_published_version= Ви не можете замінити файл, якого немає в самому останньому опублікованому наборі даних. (Файл не опубліковано або видалено з попередньої версії.) +file.addreplace.content_type.header=Тип файлу відрізняється. 
+file.addreplace.error.replace.new_file_has_different_content_type= Оригінальний файл ({0}) та файл заміни ({1}) - це різні типи файлів. +file.addreplace.error.replace.new_file_same_as_replacement= Ви не можете замінити файл точно таким самим файлом. +file.addreplace.error.unpublished_file_cannot_be_replaced= Ви не можете замінити неопублікований файл. Будь ласка, видаліть його замість того, щоб замінити. +file.addreplace.error.ingest_create_file_err= Під час спроби додати новий файл сталася помилка. +file.addreplace.error.initial_file_list_empty= Виникла помилка, і новий файл не був доданий. +file.addreplace.error.initial_file_list_more_than_one= Ви не можете замінити один файл декількома файлами. Файл, який ви завантажили, надходить у декілька файлів. +file.addreplace.error.final_file_list_empty= Немає файлів для додавання. (Ця помилка не повинна траплятися, якщо кроки виконуються послідовно.) +file.addreplace.error.only_replace_operation= Це вимагається лише для операцій заміни файлів! +file.addreplace.error.failed_to_remove_old_file_from_dataset= Неможливо видалити старий файл із нової версії набору даних. +file.addreplace.error.add.add_file_error= Не вдалося додати файл до набору даних. +file.addreplace.error.phase2_called_early_no_new_files= Виникла помилка збереження набору даних - нові файли не знайдені. +file.addreplace.success.add = Файл успішно додано! +file.addreplace.success.replace = Файл успішно замінено! +file.addreplace.error.auth = Недійсний API Ключ. +file.addreplace.error.invalid_datafile_tag= Недійсний тег табличних даних: + +# 500.xhtml +error.500.page.title=500 Внутрішня помилка сервера +error.500.message=Internal Server Error - Виникла несподівана помилка, більше інформації немає. + +# 404.xhtml +error.404.page.title=404 не знайдено +error.404.message=Page Not Found - Сторінку, яку ви шукаєте, не знайдено. + +# 403.xhtml +error.403.page.title=403 не дозволено +error.403.message=Not Authorized - Вам не дозволено переглядати цю сторінку. 
+ +# general error - support message загальна помилка - повідомлення про підтримку +error.support.message= Якщо ви вважаєте, що це помилка, зверніться за допомогою до {0}. + +# citation-frame.xhtml +citationFrame.banner.message= Якщо наведений нижче сайт не завантажується, архівовані дані можна знайти в {0} {1}. {2} + +citationFrame.banner.message.here= тут +citationFrame.banner.closeIcon=Закрити це повідомлення, перейти до набору даних. +citationFrame.banner.countdownMessage= Це повідомлення закриється через... +citationFrame.banner.countdownMessage.seconds=секунд. + +# Friendly AuthenticationProvider names +authenticationProvider.name.builtin=Dataverse +authenticationProvider.name.null=(постачальник невідомий) +authenticationProvider.name.github=GitHub +authenticationProvider.name.google=Google +authenticationProvider.name.orcid=ORCiD +authenticationProvider.name.orcid-sandbox=ORCiD Sandbox +authenticationProvider.name.shib=Shibboleth +ingest.csv.invalidHeader= Недійсний рядок заголовка. Одна з клітин порожня. +ingest.csv.lineMismatch= Невідповідність між кількістю рядків у першій та останній передачі !, {0} знайдено після першої передачі, але {1} знайдено після другої. +ingest.csv.recordMismatch= Невідповідність зчитування, рядок {0} файлу даних: {1} очікувані граничні значення, {2} знайдені. +ingest.csv.nullStream= Потік не може бути нульовим. 
diff --git a/dataversedock/lang.properties/Bundle_ua.properties_utf b/dataversedock/lang.properties/Bundle_ua.properties_utf new file mode 100644 index 0000000..f0a7ea5 --- /dev/null +++ b/dataversedock/lang.properties/Bundle_ua.properties_utf @@ -0,0 +1,1747 @@ +dataverse=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +newDataverse=\u041d\u043e\u0432\u0438\u0439\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +hostDataverse=\u0413\u043e\u043b\u043e\u0432\u043d\u0438\u0439\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverses=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073 +passwd=\u041f\u0430\u0440\u043e\u043b\u044c +dataset=\u041d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445 +datasets=\u041d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445 +newDataset=\u041d\u043e\u0432\u0438\u0439\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445 +files=\u0424\u0430\u0439\u043b\u0438 +file=\u0424\u0430\u0439\u043b +restricted=\u041e\u0431\u043c\u0435\u0436\u0435\u043d\u0438\u0439 +restrictedaccess=\u041e\u0431\u043c\u0435\u0436\u0435\u043d\u0438\u0439\u0020\u0414\u043e\u0441\u0442\u0443\u043f +find=\u0417\u043d\u0430\u0439\u0442\u0438 +search=\u0428\u0443\u043a\u0430\u0442\u0438 +unpublished=\u041d\u0435\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u043e\u0432\u0430\u043d\u043e +cancel=\u0421\u043a\u0430\u0441\u0443\u0432\u0430\u0442\u0438 +ok=\u004f\u004b +saveChanges=\u0417\u0431\u0435\u0440\u0435\u0433\u0442\u0438\u0020\u0437\u043c\u0456\u043d\u0438 +acceptTerms=\u041f\u043e\u0433\u043e\u0434\u0438\u0442\u0438\u0441\u044c +submit=\u041d\u0430\u0434\u0430\u0442\u0438 +signup=\u0417\u0430\u0440\u0435\u0454\u0441\u0442\u0440\u0443\u0432\u0430\u0442\u0438\u0441\u044f +login=\u0423\u0432\u0456\u0439\u0442\u0438 +email=\u0415\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u043f\u043e\u0448\u0442\u0430 +account=\u0410\u043a\u0430\u0443\u043d\u0442 
+requiredField=\u041e\u0431\u043e\u0432\u0027\u044f\u0437\u043a\u043e\u0432\u0435\u0020\u043f\u043e\u043b\u0435 +new=\u041d\u043e\u0432\u0438\u0439 +identifier=\u0406\u0434\u0435\u043d\u0442\u0438\u0444\u0456\u043a\u0430\u0442\u043e\u0440 +description=\u041e\u043f\u0438\u0441 +subject=\u0422\u0435\u043c\u0430 +close=\u0417\u0430\u043a\u0440\u0438\u0442\u0438 +preview=\u041f\u043e\u043f\u0435\u0440\u0435\u0434\u043d\u0456\u0439\u0020\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u0434 +continue=\u041f\u0440\u043e\u0434\u043e\u0432\u0436\u0438\u0442\u0438 +name=\u0406\u043c\u0027\u044f +institution=\u0423\u0441\u0442\u0430\u043d\u043e\u0432\u0430 +position=\u041f\u043e\u0437\u0438\u0446\u0456\u044f +affiliation=\u041f\u0440\u0438\u043d\u0430\u043b\u0435\u0436\u043d\u0456\u0441\u0442\u044c +createDataverse=\u0421\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +remove=\u0412\u0438\u0434\u0430\u043b\u0438\u0442\u0438 +done=\u0417\u0440\u043e\u0431\u043b\u0435\u043d\u043e +editor=\u041f\u043e\u043c\u0456\u0447\u043d\u0438\u043a +manager=\u041c\u0435\u043d\u0435\u0434\u0436\u0435\u0440 +curator=\u041a\u0443\u0440\u0430\u0442\u043e\u0440 +explore=\u0414\u043e\u0441\u043b\u0456\u0434\u0438\u0442\u0438 +download=\u0417\u0430\u0432\u0430\u043d\u0442\u0430\u0436\u0438\u0442\u0438 +deaccession=\u041f\u0440\u0438\u043f\u0438\u043d\u0435\u043d\u043d\u044f\u0020\u0434\u0456\u0457 +share=\u041f\u043e\u0434\u0456\u043b\u0438\u0442\u0438\u0441\u044c +link=\u041f\u043e\u0441\u0438\u043b\u0430\u043d\u043d\u044f +linked=\u041f\u043e\u0432\u0027\u044f\u0437\u0430\u043d\u0456 +harvested=\u0417\u0430\u0433\u043e\u0442\u043e\u0432\u043b\u0435\u043d\u0456 +apply=\u0417\u0430\u0441\u0442\u043e\u0441\u0443\u0432\u0430\u0442\u0438 +add=\u0414\u043e\u0434\u0430\u0442\u0438 +delete=\u0421\u0442\u0438\u0440\u0430\u0442\u0438 +yes=\u0422\u0430\u043a +no=\u041d\u0456 +previous=\u041f\u043e\u043f\u0435\u0440\u0435\u0434\u043d\u0456\u0439 
+next=\u041d\u0430\u0441\u0442\u0443\u043f\u043d\u0438\u0439 +first=\u041f\u0435\u0440\u0448\u0438\u0439 +last=\u041e\u0441\u0442\u0430\u043d\u043d\u0456\u0439 +more=\u0411\u0456\u043b\u044c\u0448\u0435\u002e\u002e\u002e +less=\u041c\u0435\u043d\u0448\u0435\u002e\u002e\u002e +select=\u0412\u0438\u0431\u0440\u0430\u0442\u0438\u002e\u002e\u002e +selectedFiles=\u0412\u0438\u0431\u0440\u0430\u043d\u0456\u0020\u0424\u0430\u0439\u043b\u0438 +htmlAllowedTitle=\u0414\u043e\u0437\u0432\u043e\u043b\u0435\u043d\u0456\u0020\u0048\u0054\u004d\u004c\u0020\u0422\u0435\u0433\u0438 +htmlAllowedMsg=\u0426\u0435\u0020\u043f\u043e\u043b\u0435\u0020\u043f\u0456\u0434\u0442\u0440\u0438\u043c\u0443\u0454\u0020\u043b\u0438\u0448\u0435\u0020\u043f\u0435\u0432\u043d\u0456\u0020\u003c\u0073\u0070\u0061\u006e\u0020\u0063\u006c\u0061\u0073\u0073\u003d\u0022\u0074\u0065\u0078\u0074\u002d\u0069\u006e\u0066\u006f\u0020\u0070\u006f\u0070\u006f\u0076\u0065\u0072\u0048\u0054\u004d\u004c\u0022\u003e\u0048\u0054\u004d\u004c\u0020\u0442\u0435\u0433\u0438\u003c\u002f\u0073\u0070\u0061\u006e\u003e\u002e 
+htmlAllowedTags=\u003c\u0061\u003e\u002c\u0020\u003c\u0062\u003e\u002c\u0020\u003c\u0062\u006c\u006f\u0063\u006b\u0071\u0075\u006f\u0074\u0065\u003e\u002c\u0020\u003c\u0062\u0072\u003e\u002c\u0020\u003c\u0063\u006f\u0064\u0065\u003e\u002c\u0020\u003c\u0064\u0065\u006c\u003e\u002c\u0020\u003c\u0064\u0064\u003e\u002c\u0020\u003c\u0064\u006c\u003e\u002c\u0020\u003c\u0064\u0074\u003e\u002c\u0020\u003c\u0065\u006d\u003e\u002c\u0020\u003c\u0068\u0072\u003e\u002c\u0020\u003c\u0068\u0031\u003e\u002d\u003c\u0068\u0033\u003e\u002c\u0020\u003c\u0069\u003e\u002c\u0020\u003c\u0069\u006d\u0067\u003e\u002c\u0020\u003c\u006b\u0062\u0064\u003e\u002c\u0020\u003c\u006c\u0069\u003e\u002c\u0020\u003c\u006f\u006c\u003e\u002c\u0020\u003c\u0070\u003e\u002c\u0020\u003c\u0070\u0072\u0065\u003e\u002c\u0020\u003c\u0073\u003e\u002c\u0020\u003c\u0073\u0075\u0070\u003e\u002c\u0020\u003c\u0073\u0075\u0062\u003e\u002c\u0020\u003c\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u002c\u0020\u0020 +\u003c\u0073\u0074\u0072\u0069\u006b\u0065\u003e\u002c\u0020\u003c\u0075\u006c\u003e + +\u0023\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u005f\u0068\u0065\u0061\u0064\u0065\u0072\u002e\u0078\u0068\u0074\u006d\u006c +header.status.header=\u0421\u0442\u0430\u0442\u0443\u0441 +header.search.title=\u0428\u0443\u043a\u0430\u0442\u0438\u0020\u0432\u0441\u0456\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u002e\u002e\u002e +header.about=\u041f\u0440\u043e +header.support=\u041f\u0456\u0434\u0442\u0440\u0438\u043c\u043a\u0430 +header.guides=\u0414\u043e\u0432\u0456\u0434\u043d\u0438\u043a\u0438 +header.guides.user=\u0414\u043e\u0432\u0456\u0434\u043d\u0438\u043a\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430 +header.guides.developer=\u0414\u043e\u0432\u0456\u0434\u043d\u0438\u043a\u0020\u0440\u043e\u0437\u0440\u043e\u0431\u043d\u0438\u043a\u0430 
+header.guides.installation=\u041a\u0435\u0440\u0456\u0432\u043d\u0438\u0446\u0442\u0432\u043e\u0020\u043f\u043e\u0020\u0432\u0441\u0442\u0430\u043d\u043e\u0432\u043b\u0435\u043d\u043d\u044e +header.guides.api=\u0414\u043e\u0432\u0456\u0434\u043d\u0438\u043a\u0020\u0041\u0050\u0049 +header.guides.admin=\u0020\u0414\u043e\u0432\u0456\u0434\u043d\u0438\u043a\u0020\u0430\u0434\u043c\u0456\u043d\u0430 +header.signUp=\u0417\u0430\u0440\u0435\u0454\u0441\u0442\u0440\u0443\u0432\u0430\u0442\u0438\u0441\u044f +header.logOut=\u0412\u0438\u0439\u0442\u0438 +header.accountInfo=\u0414\u0430\u043d\u0456\u0020\u043f\u0440\u043e\u0020\u0430\u043a\u0430\u0443\u043d\u0442 +header.dashboard=\u041f\u0430\u043d\u0435\u043b\u044c\u0020\u0456\u043d\u0441\u0442\u0440\u0443\u043c\u0435\u043d\u0442\u0456\u0432 +header.user.selectTab.dataRelated=\u041c\u043e\u0457\u0020\u0434\u0430\u043d\u0456 +header.user.selectTab.notifications=\u0421\u043f\u043e\u0432\u0456\u0449\u0435\u043d\u043d\u044f +header.user.selectTab.accountInfo=\u0406\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u044f\u0020\u043f\u0440\u043e\u0020\u0430\u043a\u0430\u0443\u043d\u0442 +header.user.selectTab.groupsAndRoles=\u0413\u0440\u0443\u043f\u0438\u0020\u002b\u0020\u0420\u043e\u043b\u0456 +header.user.selectTab.apiToken=\u0041\u0050\u0049\u0020\u0442\u043e\u043a\u0435\u043d + +\u0023\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u005f\u0074\u0065\u006d\u0070\u006c\u0061\u0074\u0065\u002e\u0078\u0068\u0074\u006d\u006c 
+head.meta.description=\u0054\u0432\u0456\u043d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0050\u0072\u006f\u006a\u0065\u0063\u0074\u0020\u002d\u0020\u0446\u0435\u0020\u043f\u0440\u043e\u0433\u0440\u0430\u043c\u0430\u0020\u0437\u0020\u0432\u0456\u0434\u043a\u0440\u0438\u0442\u0438\u043c\u0020\u043a\u043e\u0434\u043e\u043c\u0020\u0434\u043b\u044f\u0020\u043e\u0431\u043c\u0456\u043d\u0443\u002c\u0020\u0446\u0438\u0442\u0443\u0432\u0430\u043d\u043d\u044f\u0020\u0442\u0430\u0020\u0430\u0440\u0445\u0456\u0432\u0443\u0432\u0430\u043d\u043d\u044f\u0020\u0434\u0430\u043d\u0438\u0445\u002e\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0437\u0430\u0431\u0435\u0437\u043f\u0435\u0447\u0443\u0454\u0020\u043d\u0430\u0434\u0456\u0439\u043d\u0443\u0020\u0456\u043d\u0444\u0440\u0430\u0441\u0442\u0440\u0443\u043a\u0442\u0443\u0440\u0443\u0020\u0434\u043b\u044f\u0020\u0020\u0440\u043e\u0437\u043f\u043e\u0440\u044f\u0434\u043d\u0438\u043a\u0456\u0432\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0434\u043b\u044f\u0020\u0440\u043e\u0437\u043c\u0456\u0449\u0435\u043d\u043d\u044f\u0020\u0442\u0430\u0020\u0430\u0440\u0445\u0456\u0432\u0443\u0432\u0430\u043d\u043d\u044f\u0020\u0434\u0430\u043d\u0438\u0445\u002c +\u043d\u0430\u0434\u0430\u044e\u0447\u0438\u0020\u0434\u043e\u0441\u043b\u0456\u0434\u043d\u0438\u043a\u0430\u043c\u0020\u043f\u0440\u043e\u0441\u0442\u0438\u0439\u0020\u0441\u043f\u043e\u0441\u0456\u0431\u0020\u043f\u043e\u0434\u0456\u043b\u0438\u0442\u0438\u0441\u044f\u0020\u0442\u0430\u0020\u043e\u0442\u0440\u0438\u043c\u0430\u0442\u0438\u0020\u0432\u0434\u044f\u0447\u043d\u0456\u0441\u0442\u044c\u0020\u0437\u0430\u0020\u0441\u0432\u043e\u0457\u0020\u0434\u0430\u043d\u0456\u002e +body.skip=\u041f\u0435\u0440\u0435\u0439\u0442\u0438\u0020\u0434\u043e\u0020\u043e\u0441\u043d\u043e\u0432\u043d\u043e\u0433\u043e\u0020\u0437\u043c\u0456\u0441\u0442\u0443 + 
+\u0023\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u005f\u0066\u006f\u006f\u0074\u0065\u0072\u002e\u0078\u0068\u0074\u006d\u006c +footer.copyright=\u0410\u0432\u0442\u043e\u0440\u0441\u044c\u043a\u0435\u0020\u043f\u0440\u0430\u0432\u043e\u0020\u0026\u0023\u0031\u0036\u0039\u003b\u0020\u007b\u0030\u007d +footer.widget.datastored=\u0414\u0430\u043d\u0456\u0020\u0437\u0431\u0435\u0440\u0456\u0433\u0430\u044e\u0442\u044c\u0441\u044f\u0020\u0432\u0020\u007b\u0030\u007d\u002e +footer.widget.login=\u0423\u0432\u0456\u0439\u0442\u0438\u0020\u0434\u043e\u0020 +footer.privacyPolicy=\u041f\u043e\u043b\u0456\u0442\u0438\u043a\u0430\u0020\u043a\u043e\u043d\u0444\u0456\u0434\u0435\u043d\u0446\u0456\u0439\u043d\u043e\u0441\u0442\u0456 +footer.poweredby=\u041f\u0440\u0430\u0446\u044e\u0454\u0020\u043d\u0430 +footer.dataverseProject=\u0054\u0068\u0065\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0050\u0072\u006f\u006a\u0065\u0063\u0074 + +\u0023\u0020\u006d\u0065\u0073\u0073\u0061\u0067\u0065\u0073\u002e\u0078\u0068\u0074\u006d\u006c +messages.error=\u041f\u043e\u043c\u0438\u043b\u043a\u0430 +messages.success=\u0423\u0441\u043f\u0456\u0445\u0021 +messages.info=\u0406\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u044f +messages.validation=\u041f\u043e\u043c\u0438\u043b\u043a\u0430\u0020\u043f\u0435\u0440\u0435\u0432\u0456\u0440\u043a\u0438 
+messages.validation.msg=\u041d\u0435\u043e\u0431\u0445\u0456\u0434\u043d\u0456\u0020\u043f\u043e\u043b\u044f\u0020\u0431\u0443\u043b\u0438\u0020\u043f\u0440\u043e\u043f\u0443\u0449\u0435\u043d\u0456\u0020\u0430\u0431\u043e\u0020\u0432\u0438\u043d\u0438\u043a\u043b\u0430\u0020\u043f\u043e\u043c\u0438\u043b\u043a\u0430\u0020\u043f\u0435\u0440\u0435\u0432\u0456\u0440\u043a\u0438\u002e\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u043f\u0440\u043e\u043a\u0440\u0443\u0442\u0456\u0442\u044c\u0020\u0432\u043d\u0438\u0437\u002c\u0020\u0449\u043e\u0431\u0020\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u043d\u0443\u0442\u0438\u0020\u0434\u0435\u0442\u0430\u043b\u0456\u002e + +\u0023\u0020\u0063\u006f\u006e\u0074\u0061\u0063\u0074\u0046\u006f\u0072\u006d\u0046\u0072\u0061\u0067\u006d\u0065\u006e\u0074\u002e\u0078\u0068\u0074\u006d\u006c +contact.header=\u0043\u006f\u006e\u0074\u0061\u0063\u0074\u0020\u007b\u0030\u007d +contact.dataverse.header=\u0415\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u043f\u043e\u0448\u0442\u0430\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0043\u006f\u006e\u0074\u0061\u0063\u0074 +contact.dataset.header=\u0415\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u043f\u043e\u0448\u0442\u0430\u0020\u0044\u0061\u0074\u0061\u0073\u0065\u0074\u0020\u0043\u006f\u006e\u0074\u0061\u0063\u0074 +contact.to=\u0414\u043e +contact.support=\u041f\u0456\u0434\u0442\u0440\u0438\u043c\u043a\u0430 +contact.from=\u0412\u0456\u0434 +contact.from.required=\u041f\u043e\u0442\u0440\u0456\u0431\u043d\u0430\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u0430\u0434\u0440\u0435\u0441\u0430\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u002e +contact.from.invalid=\u0415\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u043f\u043e\u0448\u0442\u0430\u0020\u043d\u0435\u0434\u0456\u0439\u0441\u043d\u0430\u002e 
+contact.subject=\u0422\u0435\u043c\u0430 +contact.subject.required=\u041f\u043e\u0442\u0440\u0456\u0431\u043d\u0430\u0020\u0442\u0435\u043c\u0430 +contact.subject.selectTab.top=\u0412\u0438\u0431\u0440\u0430\u0442\u0438\u0020\u0442\u0435\u043c\u0443\u002e\u002e\u002e +contact.subject.selectTab.support=\u041f\u0438\u0442\u0430\u043d\u043d\u044f\u0020\u0434\u043e\u0020\u043f\u0456\u0434\u0442\u0440\u0438\u043c\u043a\u0438 +contact.subject.selectTab.dataIssue=\u0044\u0061\u0074\u0061\u0020\u0049\u0073\u0073\u0075\u0065 +contact.msg=\u041f\u043e\u0432\u0456\u0434\u043e\u043c\u043b\u0435\u043d\u043d\u044f +contact.msg.required=\u0422\u0435\u043a\u0441\u0442\u0020\u043f\u043e\u0432\u0456\u0434\u043e\u043c\u043b\u0435\u043d\u043d\u044f\u0020\u043e\u0431\u043e\u0432\u0027\u044f\u0437\u043a\u043e\u0432\u0438\u0439\u002e +contact.send=\u0412\u0456\u0434\u043f\u0440\u0430\u0432\u0438\u0442\u0438\u0020\u043f\u043e\u0432\u0456\u0434\u043e\u043c\u043b\u0435\u043d\u043d\u044f +contact.question=\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u0437\u0430\u043f\u043e\u0432\u043d\u0456\u0442\u044c\u0020\u0446\u0435\u002c\u0020\u0449\u043e\u0431\u0020\u0434\u043e\u0432\u0435\u0441\u0442\u0438\u002c\u0020\u0449\u043e\u0020\u0432\u0438\u0020\u043d\u0435\u0020\u0440\u043e\u0431\u043e\u0442\u002e +contact.sum.required=\u041f\u043e\u0442\u0440\u0456\u0431\u043d\u0435\u0020\u0437\u043d\u0430\u0447\u0435\u043d\u043d\u044f +contact.sum.invalid=\u041d\u0435\u043f\u0440\u0430\u0432\u0438\u043b\u044c\u043d\u0430\u0020\u0441\u0443\u043c\u0430\u002c\u0020\u0431\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u0441\u043f\u0440\u043e\u0431\u0443\u0439\u0442\u0435\u0020\u0449\u0435\u0020\u0440\u0430\u0437\u002e +contact.sum.converterMessage=\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u0432\u0432\u0435\u0434\u0456\u0442\u044c\u0020\u043d\u043e\u043c\u0435\u0440\u002e +contact.contact=\u041a\u043e\u043d\u0442\u0430\u043a\u0442 + 
+\u0023\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0075\u0073\u0065\u0072\u002e\u0078\u0068\u0074\u006d\u006c +account.info=\u0406\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u044f\u0020\u043f\u0440\u043e\u0020\u0430\u043a\u0430\u0443\u043d\u0442 +account.edit=\u0420\u0435\u0434\u0430\u0433\u0443\u0432\u0430\u0442\u0438\u0020\u0430\u043a\u0430\u0443\u043d\u0442 +account.apiToken=\u0041\u0050\u0049\u0020\u0054\u006f\u006b\u0065\u006e +user.isShibUser=\u0406\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u044f\u0020\u043f\u0440\u043e\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0020\u043d\u0435\u0020\u043c\u043e\u0436\u0435\u0020\u0431\u0443\u0442\u0438\u0020\u0437\u043c\u0456\u043d\u0435\u043d\u0430\u0020\u043f\u0440\u0438\u0020\u0432\u0445\u043e\u0434\u0456\u0020\u0447\u0435\u0440\u0435\u0437\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0438\u002e +user.helpShibUserMigrateOffShibBeforeLink=\u0417\u0430\u043b\u0438\u0448\u0430\u0454\u0442\u0435\u0020\u0441\u0432\u043e\u044e\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0443\u003f\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u0020\u0437\u0432\u0027\u044f\u0436\u0456\u0442\u044c\u0441\u044f\u0020\u0437\u0020\u043d\u0430\u043c\u0438 +user.helpShibUserMigrateOffShibAfterLink=\u0434\u043b\u044f\u0020\u0434\u043e\u043f\u043e\u043c\u043e\u0433\u0438\u002e 
+user.helpOAuthBeforeLink=\u0412\u0430\u0448\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0454\u0020\u007b\u0030\u007d\u0020\u0434\u043b\u044f\u0020\u0432\u0445\u043e\u0434\u0443\u002e\u0020\u042f\u043a\u0449\u043e\u0020\u0432\u0438\u0020\u0437\u0430\u0446\u0456\u043a\u0430\u0432\u043b\u0435\u043d\u0456\u0020\u0432\u0020\u0437\u043c\u0456\u043d\u0456\u0020\u043c\u0435\u0442\u043e\u0434\u0456\u0432\u0020\u0432\u0445\u043e\u0434\u0443\u002c\u0020\u0437\u0432\u0027\u044f\u0436\u0456\u0442\u044c\u0441\u044f\u0020\u0437\u0020\u043d\u0430\u043c\u0438 +user.helpOAuthAfterLink=\u0414\u043b\u044f\u0020\u0434\u043e\u043f\u043e\u043c\u043e\u0433\u0438\u002e +user.lostPasswdTip=\u042f\u043a\u0449\u043e\u0020\u0432\u0438\u0020\u0437\u0430\u0433\u0443\u0431\u0438\u043b\u0438\u0020\u0430\u0431\u043e\u0020\u0437\u0430\u0431\u0443\u043b\u0438\u0020\u0441\u0432\u0456\u0439\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u002c\u0020\u0432\u0432\u0435\u0434\u0456\u0442\u044c\u0020\u0441\u0432\u043e\u0454\u0020\u0456\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0430\u0431\u043e\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0443\u0020\u0430\u0434\u0440\u0435\u0441\u0443\u0020\u043d\u0438\u0436\u0447\u0435\u0020\u0442\u0430\u0020\u043d\u0430\u0442\u0438\u0441\u043d\u0456\u0442\u044c\u0020\u043a\u043d\u043e\u043f\u043a\u0443\u0020\u0022\u041d\u0430\u0434\u0456\u0441\u043b\u0430\u0442\u0438\u0022\u002e\u0020\u041c\u0438\u0020\u043d\u0430\u0434\u0456\u0448\u043b\u0435\u043c\u043e\u0020\u0432\u0430\u043c\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0438\u0439\u0020\u043b\u0438\u0441\u0442\u0020\u0456\u0437\u0020\u0432\u0430\u0448\u0438\u043c\u0020\u043d\u043e\u0432\u0438\u043c\u0020\u043f\u0430\u0440\u043e\u043b\u0435\u043c\u002e 
+user.dataRelatedToMe=\u041c\u043e\u0457\u0020\u0434\u0430\u043d\u0456 +wasCreatedIn=\u002c\u0020\u0441\u0442\u0432\u043e\u0440\u0435\u043d\u043e\u0020\u0432 +wasCreatedTo=\u002c\u0020\u0434\u043e\u0434\u0430\u043d\u043e\u0020\u0434\u043e +wasSubmittedForReview=\u002c\u0020\u0431\u0443\u043b\u043e\u0020\u043f\u043e\u0434\u0430\u043d\u043e\u0020\u043d\u0430\u0020\u0440\u043e\u0437\u0433\u043b\u044f\u0434\u0020\u0434\u043b\u044f\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u0443\u0432\u0430\u043d\u043d\u044f\u0020\u0432 +wasPublished=\u002c\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u043e\u0432\u0430\u043d\u043e\u0020\u0432 +wasReturnedByReviewer=\u002c\u0020\u0431\u0443\u0432\u0020\u043f\u043e\u0432\u0435\u0440\u043d\u0435\u043d\u0438\u0439\u0020\u043a\u0443\u0440\u0430\u0442\u043e\u0440\u043e\u043c +\u0023\u0020\u0054\u004f\u0044\u004f\u003a\u0020\u0043\u006f\u006e\u0066\u0069\u0072\u006d\u0020\u0074\u0068\u0061\u0074\u0020\u0022\u0074\u006f\u0052\u0065\u0076\u0069\u0065\u0077\u0022\u0020\u0063\u0061\u006e\u0020\u0062\u0065\u0020\u0064\u0065\u006c\u0065\u0074\u0065\u0064\u002e +toReview=\u041d\u0435\u0020\u0437\u0430\u0431\u0443\u0434\u044c\u0442\u0435\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u0443\u0432\u0430\u0442\u0438\u0020\u0439\u043e\u0433\u043e\u0020\u0430\u0431\u043e\u0020\u0432\u0456\u0434\u043f\u0440\u0430\u0432\u0438\u0442\u0438\u0020\u043d\u0430\u0437\u0430\u0434\u0020\u0430\u0432\u0442\u043e\u0440\u0443\u0021 +worldMap.added=\u041d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u043c\u0456\u0441\u0442\u0438\u0432\u0020\u0448\u0430\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u002e 
+\u0023\u0020\u0042\u0075\u006e\u0064\u006c\u0065\u0020\u0066\u0069\u006c\u0065\u0020\u0065\u0064\u0069\u0074\u006f\u0072\u0073\u002c\u0020\u0070\u006c\u0065\u0061\u0073\u0065\u0020\u006e\u006f\u0074\u0065\u0020\u0074\u0068\u0061\u0074\u0020\u0022\u006e\u006f\u0074\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u002e\u0077\u0065\u006c\u0063\u006f\u006d\u0065\u0022\u0020\u0069\u0073\u0020\u0075\u0073\u0065\u0064\u0020\u0069\u006e\u0020\u0061\u0020\u0075\u006e\u0069\u0074\u0020\u0074\u0065\u0073\u0074\u002e +notification.welcome=\u041b\u0430\u0441\u043a\u0430\u0432\u043e\u0020\u043f\u0440\u043e\u0441\u0438\u043c\u043e\u0020\u0434\u043e\u0020\u007b\u0030\u007d\u0021\u0020\u041f\u043e\u0447\u043d\u0456\u0442\u044c\u0020\u0437\u0020\u0434\u043e\u0434\u0430\u0432\u0430\u043d\u043d\u044f\u0020\u0447\u0438\u0020\u043f\u043e\u0448\u0443\u043a\u0443\u0020\u0434\u0430\u043d\u0438\u0445\u002e\u0020\u0404\u0020\u043f\u0438\u0442\u0430\u043d\u043d\u044f\u003f\u0020\u041f\u0435\u0440\u0435\u0432\u0456\u0440\u0442\u0435\u0020\u007b\u0031\u007d\u002e\u0020\u0425\u043e\u0447\u0435\u0442\u0435\u0020\u043f\u0440\u043e\u0442\u0435\u0441\u0442\u0443\u0432\u0430\u0442\u0438\u0020\u0444\u0443\u043d\u043a\u0446\u0456\u0457\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u003f\u0020\u0421\u043a\u043e\u0440\u0438\u0441\u0442\u0430\u0439\u0442\u0435\u0441\u044c\u0020\u007b\u0032\u007d\u002e\u0020\u0422\u0430\u043a\u043e\u0436\u0020\u0020 +\u043f\u0435\u0440\u0435\u0432\u0456\u0440\u0442\u0435\u0020\u0432\u0430\u0448\u0020\u0432\u0456\u0442\u0430\u043b\u044c\u043d\u0438\u0439\u0020\u043b\u0438\u0441\u0442\u002c\u0020\u0449\u043e\u0431\u0020\u043f\u0456\u0434\u0442\u0432\u0435\u0440\u0434\u0438\u0442\u0438\u0020\u0441\u0432\u043e\u044e\u0020\u0430\u0434\u0440\u0435\u0441\u0443\u002e +notification.demoSite=\u0414\u0435\u043c\u043e\u002d\u0441\u0430\u0439\u0442 
+notification.requestFileAccess=\u0414\u043e\u0441\u0442\u0443\u043f\u0020\u0434\u043e\u0020\u0444\u0430\u0439\u043b\u0443\u002c\u0020\u044f\u043a\u0438\u0439\u0020\u043f\u043e\u0442\u0440\u0456\u0431\u0435\u043d\u0020\u0434\u043b\u044f\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u0020\u0434\u0430\u043d\u0438\u0445\u003a\u0020\u007b\u0030\u007d\u002e + + + + + +notification.grantFileAccess=\u0020\u0414\u043e\u0441\u0442\u0443\u043f\u0020\u043d\u0430\u0434\u0430\u043d\u043e\u0020\u0434\u043b\u044f\u0020\u0444\u0430\u0439\u043b\u0456\u0432\u0020\u0443\u0020\u043d\u0430\u0431\u043e\u0440\u0456\u0020\u0434\u0430\u043d\u0438\u0445\u003a\u0020\u007b\u0030\u007d\u002e +notification.rejectFileAccess=\u0414\u043e\u0441\u0442\u0443\u043f\u0020\u0432\u0456\u0434\u0445\u0438\u043b\u0435\u043d\u043e\u0020\u0434\u043b\u044f\u0020\u0437\u0430\u043f\u0438\u0442\u0430\u043d\u0438\u0445\u0020\u0444\u0430\u0439\u043b\u0456\u0432\u0020\u0443\u0020\u043d\u0430\u0431\u043e\u0440\u0456\u0020\u0434\u0430\u043d\u0438\u0445\u003a\u0020\u007b\u0030\u007d\u002e +notification.createDataverse=\u007b\u0030\u007d\u0020\u0441\u0442\u0432\u043e\u0440\u0435\u043d\u043e\u0020\u0432\u0020\u007b\u0031\u007d\u0020\u002e\u0020\u0429\u043e\u0431\u0020\u0434\u0456\u0437\u043d\u0430\u0442\u0438\u0441\u044c\u0020\u0431\u0456\u043b\u044c\u0448\u0435\u0020\u043f\u0440\u043e\u0020\u0442\u0435\u002c\u0020\u0449\u043e\u0020\u0432\u0438\u0020\u043c\u043e\u0436\u0435\u0442\u0435\u0020\u0437\u0440\u043e\u0431\u0438\u0442\u0438\u0020\u0437\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u043d\u044c\u0442\u0435\u0020\u007b\u0032\u007d\u002e\u0020 
+notification.dataverse.management.title=\u0020\u0423\u043f\u0440\u0430\u0432\u043b\u0456\u043d\u043d\u044f\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u002d\u0020\u0414\u043e\u0432\u0456\u0434\u043d\u0438\u043a\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020 +notification.createDataset=\u007b\u0030\u007d\u0020\u0441\u0442\u0432\u043e\u0440\u0435\u043d\u043e\u0020\u0432\u0020\u007b\u0031\u007d\u002e\u0020\u0429\u043e\u0431\u0020\u0434\u0456\u0437\u043d\u0430\u0442\u0438\u0441\u044c\u0020\u0431\u0456\u043b\u044c\u0448\u0435\u0020\u043f\u0440\u043e\u0020\u0442\u0435\u002c\u0020\u0449\u043e\u0020\u0432\u0438\u0020\u043c\u043e\u0436\u0435\u0442\u0435\u0020\u0437\u0440\u043e\u0431\u0438\u0442\u0438\u0020\u0437\u0020\u043d\u0430\u0431\u043e\u0440\u043e\u043c\u0020\u0434\u0430\u043d\u0438\u0445\u002c\u0020\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u043d\u044c\u0442\u0435\u0020\u007b\u0032\u007d\u002e +notification.dataset.management.title=\u0020\u0423\u043f\u0440\u0430\u0432\u043b\u0456\u043d\u043d\u044f\u0020\u043d\u0430\u0431\u043e\u0440\u043e\u043c\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u002d\u0020\u0414\u043e\u0432\u0456\u0434\u043d\u0438\u043a\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u043d\u0430\u0431\u043e\u0440\u043e\u043c\u0020\u0434\u0430\u043d\u0438\u0445 
+notification.wasSubmittedForReview=\u007b\u0030\u007d\u0020\u0431\u0443\u043b\u043e\u0020\u043f\u043e\u0434\u0430\u043d\u043e\u0020\u043d\u0430\u0020\u0440\u043e\u0437\u0433\u043b\u044f\u0434\u0020\u0434\u043b\u044f\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u0443\u0432\u0430\u043d\u043d\u044f\u0020\u0432\u0020\u007b\u0031\u007d\u002e\u0020\u041d\u0435\u0020\u0437\u0430\u0431\u0443\u0434\u044c\u0442\u0435\u0020\u0439\u043e\u0433\u043e\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u0443\u0432\u0430\u0442\u0438\u0020\u0430\u0431\u043e\u0020\u0432\u0456\u0434\u043f\u0440\u0430\u0432\u0438\u0442\u0438\u0020\u043d\u0430\u0437\u0430\u0434\u0020\u0430\u0432\u0442\u043e\u0440\u0443\u0020\u005c\u0021\u0020 +notification.wasReturnedByReviewer=\u007b\u0030\u007d\u0020\u0431\u0443\u043b\u043e\u0020\u043f\u043e\u0432\u0435\u0440\u043d\u0435\u043d\u043e\u0020\u043a\u0443\u0440\u0430\u0442\u043e\u0440\u043e\u043c\u0020\u007b\u0031\u007d\u002e +notification.wasPublished=\u007b\u0030\u007d\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u043e\u0432\u0430\u043d\u043e\u0020\u0432\u0020\u007b\u0031\u007d\u002e +notification.worldMap.added=\u007b\u0030\u007d\u002c\u0020\u0414\u043e\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0434\u043e\u0434\u0430\u043d\u0438\u0439\u0020\u0448\u0430\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u002e 
+notification.maplayer.deletefailed=\u0020\u041d\u0435\u0020\u0432\u0434\u0430\u043b\u043e\u0441\u044f\u0020\u0432\u0438\u0434\u0430\u043b\u0438\u0442\u0438\u0020\u0448\u0430\u0440\u0020\u043a\u0430\u0440\u0442\u0438\u002c\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u043d\u0438\u0439\u0020\u0437\u0020\u0444\u0430\u0439\u043b\u043e\u043c\u0020\u043e\u0431\u043c\u0435\u0436\u0435\u043d\u043e\u0433\u043e\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u0443\u0020\u007b\u0030\u007d\u0020\u0437\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u002e\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u043f\u043e\u0432\u0442\u043e\u0440\u0456\u0442\u044c\u0020\u0441\u043f\u0440\u043e\u0431\u0443\u002c\u0020\u0430\u0431\u043e\u0020\u0437\u0432\u0027\u044f\u0436\u0456\u0442\u044c\u0441\u044f\u0020\u0437\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u0020\u0442\u0432\u002f\u0430\u0431\u043e\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0020\u043f\u0456\u0434\u0442\u0440\u0438\u043c\u043a\u0430\u0020\u0028\u041d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u003a\u0020\u007b\u0031\u007d\u0029 +notification.generic.objectDeleted=\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0430\u0431\u043e\u0020\u0444\u0430\u0439\u043b\u0020\u0434\u043b\u044f\u0020\u0446\u044c\u043e\u0433\u043e\u0020\u0441\u043f\u043e\u0432\u0456\u0449\u0435\u043d\u043d\u044f\u0020\u0431\u0443\u043b\u0438\u0020\u0432\u0438\u0434\u0430\u043b\u0435\u043d\u0456\u002e +notification.access.granted.dataverse=\u0020\u0412\u0438\u0020\u043e\u0442\u0440\u0438\u043c\u0430\u043b\u0438\u0020\u0440\u043e\u043b\u044c\u0020\u007b\u0030\u007d\u0020\u0434\u043b\u044f\u0020\u007b\u0031\u007d\u002e 
+notification.access.granted.dataset=\u0020\u0412\u0438\u0020\u043e\u0442\u0440\u0438\u043c\u0430\u043b\u0438\u0020\u0440\u043e\u043b\u044c\u0020\u007b\u0030\u007d\u0020\u0434\u043b\u044f\u0020\u007b\u0031\u007d\u002e +notification.access.granted.datafile=\u0020\u0412\u0438\u0020\u043e\u0442\u0440\u0438\u043c\u0430\u043b\u0438\u0020\u0440\u043e\u043b\u044c\u0020\u007b\u0030\u007d\u0020\u0434\u043b\u044f\u0020\u0444\u0430\u0439\u043b\u0443\u0020\u0432\u0020\u007b\u0031\u007d\u002e\u0020\u0020\u0020 +notification.access.granted.fileDownloader.additionalDataverse=\u007b\u0030\u007d\u0020\u0422\u0435\u043f\u0435\u0440\u0020\u0443\u0020\u0432\u0430\u0441\u0020\u0454\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u0020\u0434\u043e\u0020\u0432\u0441\u0456\u0445\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u043e\u0432\u0430\u043d\u0438\u0445\u0020\u043e\u0431\u043c\u0435\u0436\u0435\u043d\u0438\u0445\u0020\u0456\u0020\u043d\u0435\u043e\u0431\u043c\u0435\u0436\u0435\u043d\u0438\u0445\u0020\u0444\u0430\u0439\u043b\u0456\u0432\u0020\u0443\u0020\u0434\u0430\u043d\u043e\u043c\u0443\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +notification.access.granted.fileDownloader.additionalDataset=\u007b\u0030\u007d\u0020\u0422\u0435\u043f\u0435\u0440\u0020\u0443\u0020\u0432\u0430\u0441\u0020\u0454\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u0020\u0434\u043e\u0020\u0432\u0441\u0456\u0445\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u043e\u0432\u0430\u043d\u0438\u0445\u0020\u043e\u0431\u043c\u0435\u0436\u0435\u043d\u0438\u0445\u0020\u0456\u0020\u043d\u0435\u043e\u0431\u043c\u0435\u0436\u0435\u043d\u0438\u0445\u0020\u0444\u0430\u0439\u043b\u0456\u0432\u0020\u0443\u0020\u0446\u044c\u043e\u043c\u0443\u0020\u043d\u0430\u0431\u043e\u0440\u0456\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u002e 
+notification.access.revoked.dataverse=\u0020\u0412\u0438\u0020\u0431\u0443\u043b\u0438\u0020\u0432\u0438\u043b\u0443\u0447\u0435\u043d\u0456\u0020\u0437\u0020\u0440\u043e\u043b\u0456\u0020\u0432\u0020\u007b\u0030\u007d\u002e +notification.access.revoked.dataset=\u0020\u0412\u0438\u0020\u0431\u0443\u043b\u0438\u0020\u0432\u0438\u043b\u0443\u0447\u0435\u043d\u0456\u0020\u0437\u0020\u0440\u043e\u043b\u0456\u0020\u0432\u0020\u007b\u0030\u007d\u002e +notification.access.revoked.datafile=\u0020\u0412\u0438\u0020\u0431\u0443\u043b\u0438\u0020\u0432\u0438\u043b\u0443\u0447\u0435\u043d\u0456\u0020\u0437\u0020\u0440\u043e\u043b\u0456\u0020\u0432\u0020\u007b\u0030\u007d\u002e +notification.checksumfail=\u0020\u041f\u043e\u043c\u0438\u043b\u043a\u0430\u0020\u043f\u0440\u0438\u0020\u043f\u0435\u0440\u0435\u0432\u0456\u0440\u0446\u0456\u0020\u043a\u043e\u043d\u0442\u0440\u043e\u043b\u044c\u043d\u043e\u0457\u0020\u0441\u0443\u043c\u0438\u0020\u043e\u0434\u043d\u043e\u0433\u043e\u0020\u0430\u0431\u043e\u0020\u043a\u0456\u043b\u044c\u043a\u043e\u0445\u0020\u0444\u0430\u0439\u043b\u0456\u0432\u0020\u0443\u0020\u0432\u0430\u0448\u043e\u043c\u0443\u0020\u0437\u0430\u0432\u0430\u043d\u0442\u0430\u0436\u0435\u043d\u043d\u0456\u0020\u0434\u043b\u044f\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u007b\u0030\u007d\u002e\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u043f\u043e\u0432\u0442\u043e\u0440\u043d\u043e\u0020\u0437\u0430\u043f\u0443\u0441\u0442\u0456\u0442\u044c\u0020\u0441\u0446\u0435\u043d\u0430\u0440\u0456\u0439\u0020\u0437\u0430\u0432\u0430\u043d\u0442\u0430\u0436\u0435\u043d\u043d\u044f\u002e\u0020\u042f\u043a\u0449\u043e\u0020\u043f\u0440\u043e\u0431\u043b\u0435\u043c\u0430\u0020\u043d\u0435\u0020\u0437\u043d\u0438\u043a\u043d\u0435\u002c\u0020\u0437\u0432\u0027\u044f\u0436\u0456\u0442\u044c\u0441\u044f\u0020\u0437\u0020\u0441\u043b\u0443\u0436\u0431\u043e\u044e\u0020\u043f\u0456\u0434\u0442\u0440\u0438\u
043c\u043a\u0438\u002e +notification.mail.import.filesystem=\u0020\u041d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u007b\u0032\u007d\u0020\u0028\u007b\u0030\u007d\u002f\u0064\u0061\u0074\u0061\u0073\u0065\u0074\u002e\u0078\u0068\u0074\u006d\u006c\u003f\u0070\u0065\u0072\u0073\u0069\u0073\u0074\u0065\u006e\u0074\u0049\u0064\u003d\u007b\u0031\u007d\u0029\u0020\u0443\u0441\u043f\u0456\u0448\u043d\u043e\u0020\u0437\u0430\u0432\u0430\u043d\u0442\u0430\u0436\u0435\u043d\u043e\u0020\u0442\u0430\u0020\u043f\u0456\u0434\u0442\u0432\u0435\u0440\u0434\u0436\u0435\u043d\u043e\u002e +notification.import.filesystem=\u0020\u041d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u0064\u0061\u0074\u0061\u0073\u0065\u0074\u002e\u0078\u0068\u0074\u006d\u006c\u003f\u0070\u0065\u0072\u0073\u0069\u0073\u0074\u0065\u006e\u0074\u0049\u0064\u003d\u007b\u0030\u007d\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u007b\u0031\u007d\u0022\u003e\u007b\u0031\u007d\u003c\u002f\u0061\u003e\u0020\u0443\u0441\u043f\u0456\u0448\u043d\u043e\u0020\u0437\u0430\u0432\u0430\u043d\u0442\u0430\u0436\u0435\u043d\u043e\u0020\u0442\u0430\u0020\u043f\u0456\u0434\u0442\u0432\u0435\u0440\u0434\u0436\u0435\u043d\u043e\u002e 
+notification.import.checksum=\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u0064\u0061\u0074\u0061\u0073\u0065\u0074\u002e\u0078\u0068\u0074\u006d\u006c\u003f\u0070\u0065\u0072\u0073\u0069\u0073\u0074\u0065\u006e\u0074\u0049\u0064\u003d\u007b\u0030\u007d\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u007b\u0031\u007d\u0022\u003e\u007b\u0031\u007d\u003c\u002f\u0061\u003e\u002c\u0020\u0434\u043e\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0434\u043e\u0434\u0430\u043d\u043e\u0020\u043a\u043e\u043d\u0442\u0440\u043e\u043b\u044c\u043d\u0456\u0020\u0441\u0443\u043c\u0438\u0020\u0444\u0430\u0439\u043b\u0456\u0432\u0020\u0437\u0430\u0020\u0434\u043e\u043f\u043e\u043c\u043e\u0433\u043e\u044e\u0020\u043f\u0430\u043a\u0435\u0442\u043d\u043e\u0433\u043e\u0020\u0437\u0430\u0432\u0434\u0430\u043d\u043d\u044f\u002e +removeNotification=\u0020\u0412\u0438\u0434\u0430\u043b\u0438\u0442\u0438\u0020\u0441\u043f\u043e\u0432\u0456\u0449\u0435\u043d\u043d\u044f +groupAndRoles.manageTips=\u0020\u0422\u0443\u0442\u0020\u0432\u0438\u0020\u043c\u043e\u0436\u0435\u0442\u0435\u0020\u043e\u0442\u0440\u0438\u043c\u0430\u0442\u0438\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u0020\u0434\u043e\u0020\u0432\u0441\u0456\u0445\u0020\u0433\u0440\u0443\u043f\u002c\u0020\u0434\u043e\u0020\u044f\u043a\u0438\u0445\u0020\u0432\u0438\u0020\u043d\u0430\u043b\u0435\u0436\u0438\u0442\u0435\u002c\u0020\u0456\u0020\u0434\u043e\u0020\u0443\u043f\u0440\u0430\u0432\u043b\u0456\u043d\u043d\u044f\u0020\u043d\u0438\u043c\u0438\u002c\u0020\u0430\u0020\u0442\u0430\u043a\u043e\u0436\u0020\u043f\u0440\u0438\u0437\u043d\u0430\u0447\u0435\u043d\u0438\u043c\u0438\u0020\u0440\u043e\u043b\u044f\u043c\u0438\u002e 
+user.signup.tip=\u0020\u0427\u043e\u043c\u0443\u0020\u043d\u0435\u043e\u0431\u0445\u0456\u0434\u043d\u043e\u0020\u043c\u0430\u0442\u0438\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u003f\u0020\u0429\u043e\u0431\u0020\u0441\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0432\u043b\u0430\u0441\u043d\u0438\u0439\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u043d\u0430\u043b\u0430\u0448\u0442\u0443\u0432\u0430\u0442\u0438\u0020\u0439\u043e\u0433\u043e\u002c\u0020\u0434\u043e\u0434\u0430\u0442\u0438\u0020\u043d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0430\u0431\u043e\u0020\u0437\u0430\u043c\u043e\u0432\u0438\u0442\u0438\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u0020\u0434\u043e\u0020\u043e\u0431\u043c\u0435\u0436\u0435\u043d\u0438\u0445\u0020\u0444\u0430\u0439\u043b\u0456\u0432\u002e +user.signup.otherLogInOptions.tip=\u0020\u0412\u0438\u0020\u0442\u0430\u043a\u043e\u0436\u0020\u043c\u043e\u0436\u0435\u0442\u0435\u0020\u0441\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0020\u0437\u0430\u0020\u0434\u043e\u043f\u043e\u043c\u043e\u0433\u043e\u044e\u0020\u043e\u0434\u043d\u043e\u0433\u043e\u0020\u0437\u0020\u043d\u0430\u0448\u0438\u0445\u0020\u043f\u0430\u0440\u0430\u043c\u0435\u0442\u0440\u0456\u0432\u0020\u0432\u0445\u043e\u0434\u0443\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u006c\u006f\u0067\u0069\u006e\u0070\u0061\u0067\u0065\u002e\u0078\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u004c\u006f\u0067\u0020\u0049\u006e\u0022\u003e\u0020\u003c\u002f\u0061\u003e\u002e 
+user.username.illegal.tip=\u0020\u0412\u0456\u0434\u0020\u0032\u0020\u0434\u043e\u0020\u0036\u0030\u0020\u0441\u0438\u043c\u0432\u043e\u043b\u0456\u0432\u002c\u0020\u0456\u0020\u043c\u043e\u0436\u043d\u0430\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0432\u0430\u0442\u0438\u0020\u0020\u0022\u0061\u002d\u007a\u0022\u002c\u0020\u0022\u0030\u002d\u0039\u0022\u002c\u0020\u0022\u005f\u0022\u0020\u0434\u043b\u044f\u0020\u0432\u0430\u0448\u043e\u0433\u043e\u0020\u0456\u043c\u0435\u043d\u0456\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u002e\u0020 +user.username=\u0020\u0406\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430 +user.username.taken=\u0020\u0426\u0435\u0020\u0456\u043c\u0027\u044f\u0020\u0432\u0436\u0435\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0454\u0442\u044c\u0441\u044f\u002e +user.username.invalid=\u0020\u0426\u0435\u0020\u0456\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u043c\u0456\u0441\u0442\u0438\u0442\u044c\u0020\u043d\u0435\u0434\u0456\u0439\u0441\u043d\u0438\u0439\u0020\u0441\u0438\u043c\u0432\u043e\u043b\u0020\u0430\u0431\u043e\u0020\u043d\u0435\u0020\u0432\u0456\u0434\u043f\u043e\u0432\u0456\u0434\u0430\u0454\u0020\u0434\u043e\u0432\u0436\u0438\u043d\u0456\u0020\u0028\u0032\u002d\u0036\u0030\u0020\u0441\u0438\u043c\u0432\u043e\u043b\u0456\u0432\u0029\u002e 
+user.username.valid=\u0020\u0421\u0442\u0432\u043e\u0440\u0456\u0442\u044c\u0020\u0434\u0456\u0439\u0441\u043d\u0435\u0020\u0456\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0434\u043e\u0432\u0436\u0438\u043d\u043e\u044e\u0020\u0432\u0456\u0434\u0020\u0032\u0020\u0434\u043e\u0020\u0036\u0030\u0020\u0441\u0438\u043c\u0432\u043e\u043b\u0456\u0432\u002c\u0020\u0449\u043e\u0020\u043c\u0456\u0441\u0442\u0438\u0442\u044c\u0020\u0431\u0443\u043a\u0432\u0438\u0020\u0028\u0061\u002d\u007a\u0029\u002c\u0020\u0446\u0438\u0444\u0440\u0438\u0020\u0028\u0030\u002d\u0039\u0029\u002c\u0020\u0440\u0438\u0441\u043a\u0438\u0020\u0028\u002d\u0029\u002c\u0020\u043f\u0456\u0434\u043a\u0440\u0435\u0441\u043b\u0435\u043d\u043d\u044f\u0020\u0028\u005f\u0029\u0020\u0442\u0430\u0020\u043a\u0440\u0430\u043f\u043a\u0438\u0020\u0028\u002e\u0029\u002e +user.noPasswd=\u0020\u041d\u0435\u043c\u0430\u0454\u0020\u043f\u0430\u0440\u043e\u043b\u044f +user.currentPasswd=\u0020\u043f\u043e\u0442\u043e\u0447\u043d\u0438\u0439\u0020\u043f\u0430\u0440\u043e\u043b\u044c +user.currentPasswd.tip=\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u0432\u0432\u0435\u0434\u0456\u0442\u044c\u0020\u043f\u043e\u0442\u043e\u0447\u043d\u0438\u0439\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u0020\u0434\u043b\u044f\u0020\u0446\u044c\u043e\u0433\u043e\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0443\u002e 
+user.passwd.illegal.tip=\u0020\u041f\u0430\u0440\u043e\u043b\u044c\u0020\u043f\u043e\u0432\u0438\u043d\u0435\u043d\u0020\u043c\u0456\u0441\u0442\u0438\u0442\u0438\u0020\u043d\u0435\u0020\u043c\u0435\u043d\u0448\u0435\u0020\u0036\u0020\u0441\u0438\u043c\u0432\u043e\u043b\u0456\u0432\u002c\u0020\u0432\u043a\u043b\u044e\u0447\u0430\u0442\u0438\u0020\u0432\u0020\u0441\u0435\u0431\u0435\u0020\u043e\u0434\u043d\u0443\u0020\u043b\u0456\u0442\u0435\u0440\u0443\u0020\u0442\u0430\u0020\u043e\u0434\u043d\u0443\u0020\u0446\u0438\u0444\u0440\u0443\u002c\u0020\u0430\u0020\u0442\u0430\u043a\u043e\u0436\u0020\u043c\u043e\u0436\u0443\u0442\u044c\u0020\u0431\u0443\u0442\u0438\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u0430\u043d\u0456\u0020\u0441\u043f\u0435\u0446\u0456\u0430\u043b\u044c\u043d\u0456\u0020\u0441\u0438\u043c\u0432\u043e\u043b\u0438\u002e +user.rePasswd=\u0020\u041f\u043e\u0432\u0442\u043e\u0440\u043d\u043e\u0020\u0432\u0432\u0435\u0434\u0456\u0442\u044c\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u002e +user.rePasswd.tip=\u0020\u041f\u043e\u0432\u0442\u043e\u0440\u043d\u043e\u0020\u0432\u0432\u0435\u0434\u0456\u0442\u044c\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u002c\u0020\u044f\u043a\u0438\u0439\u0020\u0432\u0438\u0020\u0432\u043a\u0430\u0437\u0430\u043b\u0438\u0020\u0432\u0438\u0449\u0435 +user.firstName=\u0020\u0456\u043c\u0027\u044f +user.firstName.tip=\u0020\u0406\u043c\u0027\u044f\u0020\u0430\u0431\u043e\u0020\u043d\u0430\u0437\u0432\u0430\u002c\u0020\u044f\u043a\u0456\u0020\u0432\u0438\u0020\u0445\u043e\u0447\u0435\u0442\u0435\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0432\u0430\u0442\u0438\u0020\u0434\u043b\u044f\u0020\u0446\u044c\u043e\u0433\u043e\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0443\u002e +user.lastName=\u0020\u041f\u0440\u0456\u0437\u0432\u0438\u0449\u0435 
+user.lastName.tip=\u0020\u041f\u0440\u0456\u0437\u0432\u0438\u0449\u0435\u002c\u0020\u044f\u043a\u0435\u0020\u0432\u0438\u0020\u0445\u043e\u0447\u0435\u0442\u0435\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0432\u0430\u0442\u0438\u0020\u0434\u043b\u044f\u0020\u0446\u044c\u043e\u0433\u043e\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0443\u002e +user.email.tip=\u0020\u0434\u0456\u0439\u0441\u043d\u0430\u0020\u0430\u0434\u0440\u0435\u0441\u0430\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u043e\u0457\u0020\u043f\u043e\u0448\u0442\u0438\u002c\u0020\u0434\u043e\u0020\u044f\u043a\u043e\u0457\u0020\u0432\u0438\u0020\u043c\u0430\u0454\u0442\u0435\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u002c\u0020\u0449\u043e\u0431\u0020\u043c\u043e\u0436\u043d\u0430\u0020\u0431\u0443\u043b\u043e\u0020\u0437\u0020\u0020\u0432\u0430\u043c\u0438\u0020\u0437\u0432\u0027\u044f\u0437\u0430\u0442\u0438\u0441\u044f\u002e +user.email.taken=\u0020\u0426\u044f\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u0430\u0434\u0440\u0435\u0441\u0430\u0020\u0432\u0436\u0435\u0020\u043f\u0440\u0438\u0439\u043d\u044f\u0442\u0430\u002e +user.affiliation.tip=\u0020\u0423\u0441\u0442\u0430\u043d\u043e\u0432\u0430\u002c\u0020\u0434\u043e\u0020\u044f\u043a\u043e\u0457\u0020\u0432\u0438\u0020\u043d\u0430\u043b\u0435\u0436\u0438\u0442\u0435\u002e +user.position=\u041f\u043e\u0441\u0430\u0434\u0430 
+user.position.tip=\u0020\u0412\u0430\u0448\u0430\u0020\u0440\u043e\u043b\u044c\u0020\u0447\u0438\u0020\u0437\u0432\u0430\u043d\u043d\u044f\u0020\u0432\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0456\u002c\u0020\u0434\u043e\u0020\u044f\u043a\u043e\u0457\u0020\u0432\u0438\u0020\u043d\u0430\u043b\u0435\u0436\u0438\u0442\u0435\u003b\u0020\u043d\u0430\u043f\u0440\u0438\u043a\u043b\u0430\u0434\u003a\u0020\u043f\u0435\u0440\u0441\u043e\u043d\u0430\u043b\u002c\u0020\u0444\u0430\u043a\u0443\u043b\u044c\u0442\u0435\u0442\u002c\u0020\u0441\u0442\u0443\u0434\u0435\u043d\u0442\u0020\u0456\u0020\u0442\u002e\u0020\u0434\u002e +user.acccountterms=\u0020\u0417\u0430\u0433\u0430\u043b\u044c\u043d\u0456\u0020\u0443\u043c\u043e\u0432\u0438\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u043d\u043d\u044f +user.acccountterms.tip=\u0020\u0417\u0430\u0433\u0430\u043b\u044c\u043d\u0456\u0020\u043f\u043e\u043b\u043e\u0436\u0435\u043d\u043d\u044f\u0020\u0442\u0430\u0020\u0443\u043c\u043e\u0432\u0438\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u043d\u043d\u044f\u0020\u043f\u0440\u043e\u0433\u0440\u0430\u043c\u043e\u044e\u0020\u0442\u0430\u0020\u043f\u043e\u0441\u043b\u0443\u0433\u0430\u043c\u0438\u002e +user.acccountterms.required=\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u0437\u0440\u043e\u0431\u0456\u0442\u044c\u0020\u0432\u0456\u0434\u043c\u0456\u0442\u043a\u0443\u0020\u043f\u0440\u043e\u0020\u0442\u0435\u002c\u0020\u0449\u043e\u0020\u0432\u0438\u0020\u043f\u043e\u0433\u043e\u0434\u0436\u0443\u0454\u0442\u0435\u0441\u044c\u0020\u0456\u0437\u0020\u0417\u0430\u0433\u0430\u043b\u044c\u043d\u0438\u043c\u0438\u0020\u0443\u043c\u043e\u0432\u0430\u043c\u0438\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u043d\u043d\u044f\u002e 
+user.acccountterms.iagree=\u0020\u042f\u0020\u043f\u0440\u043e\u0447\u0438\u0442\u0430\u0432\u0020\u0442\u0430\u0020\u043f\u043e\u0433\u043e\u0434\u0436\u0443\u044e\u0441\u044c\u0020\u0456\u0437\u0020\u0437\u0430\u0433\u0430\u043b\u044c\u043d\u0438\u043c\u0438\u0020\u0443\u043c\u043e\u0432\u0430\u043c\u0438\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u043d\u043d\u044f\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u044f\u043a\u0020\u0437\u0430\u0437\u043d\u0430\u0447\u0435\u043d\u043e\u0020\u0432\u0438\u0449\u0435\u002e +user.createBtn=\u0020\u0421\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u002e + +user.updatePassword.welcome=\u0020\u041b\u0430\u0441\u043a\u0430\u0432\u043e\u0020\u043f\u0440\u043e\u0441\u0438\u043c\u043e\u0020\u0434\u043e\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u007b\u0030\u007d\u002c\u0020\u007b\u0031\u007d +user.updatePassword.warning=\u0020\u041f\u0456\u0441\u043b\u044f\u0020\u0432\u0438\u043f\u0443\u0441\u043a\u0443\u0020\u043d\u0430\u0448\u043e\u0457\u0020\u043d\u043e\u0432\u043e\u0457\u0020\u0432\u0435\u0440\u0441\u0456\u0457\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0034\u002e\u0030\u002c\u0020\u0432\u0438\u043c\u043e\u0433\u0438\u0020\u0434\u043e\u0020\u043f\u0430\u0440\u043e\u043b\u044f\u0020\u0442\u0430\u0020\u0417\u0430\u0433\u0430\u043b\u044c\u043d\u0438\u0445\u0020\u0443\u043c\u043e\u0432\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u0430\u043d\u043d\u044f\u0020\u043e\u043d\u043e\u0432\u043b\u0435\u043d\u043e\u002e\u0020\u041e\u0441\u043a\u0456\u043b\u044c\u043a\u0438\u0020\u0432\u0438\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0454\u0442\u0435\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0432\u043f\u0435\u0440\u0448\u0435\u0020\u043f\u0456\u0441\u043b\u044f\u0020\u043e\u043d\u043e\u0432\u043b\u0435\u043d\u043d\u044f\u002c\u0020\u0432\u0430\u04
3c\u0020\u043f\u043e\u0442\u0440\u0456\u0431\u043d\u043e\u0020\u0441\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u043d\u043e\u0432\u0438\u0439\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u0020\u0456\u0020\u043f\u043e\u0433\u043e\u0434\u0438\u0442\u0438\u0441\u044f\u0020\u0437\u0020\u043d\u043e\u0432\u0438\u043c\u0438\u0020\u0437\u0430\u0433\u0430\u043b\u044c\u043d\u0438\u043c\u0438\u0020\u0443\u043c\u043e\u0432\u0430\u043c\u0438\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u043d\u043d\u044f\u002e +user.updatePassword.password=\u007b\u0030\u007d +authenticationProvidersAvailable.tip=\u007b\u0030\u007d\u0020\u041d\u0435\u043c\u0430\u0454\u0020\u0430\u043a\u0442\u0438\u0432\u043d\u0438\u0445\u0020\u043f\u043e\u0441\u0442\u0430\u0447\u0430\u043b\u044c\u043d\u0438\u043a\u0456\u0432\u0020\u0430\u0443\u0442\u0435\u043d\u0442\u0438\u0444\u0456\u043a\u0430\u0446\u0456\u0457\u0020\u007b\u0031\u007d\u0020\u042f\u043a\u0449\u043e\u0020\u0432\u0438\u0020\u0441\u0438\u0441\u0442\u0435\u043c\u043d\u0438\u0439\u0020\u0430\u0434\u043c\u0456\u043d\u0456\u0441\u0442\u0440\u0430\u0442\u043e\u0440\u002c\u0020\u0431\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u0443\u0432\u0456\u043c\u043a\u043d\u0456\u0442\u044c\u0020\u0457\u0457\u0020\u0437\u0430\u0020\u0434\u043e\u043f\u043e\u043c\u043e\u0433\u043e\u044e\u0020\u0041\u0050\u0049\u002e\u0020\u007b\u0032\u007d\u0020\u042f\u043a\u0449\u043e\u0020\u0432\u0438\u0020\u043d\u0435\u0020\u0454\u0020\u0441\u0438\u0441\u0442\u0435\u043c\u043d\u0438\u043c\u0020\u0430\u0434\u043c\u0456\u043d\u0456\u0441\u0442\u0440\u0430\u0442\u043e\u0440\u043e\u043c\u002c\u0020\u0431\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u0437\u0432\u0027\u044f\u0436\u0456\u0442\u044c\u0441\u044f\u0020\u0437\u0020\u0442\u0438\u043c\u002c\u0020\u0445\u0442\u043e\u0020\u0432\u0456\u0434\u043f\u043e\u0432\u0456\u0434\u0430\u0454\u0020\u0437\u0430\u0020\u0432\u0430\u0448\u0443\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u
0432\u0443\u002e +passwdVal.passwdReq.title=\u0020\u0412\u0430\u0448\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u0020\u043f\u043e\u0432\u0438\u043d\u0435\u043d\u0020\u043c\u0456\u0441\u0442\u0438\u0442\u0438\u003a +passwdVal.passwdReq.goodStrength =\u0020\u043f\u0430\u0440\u043e\u043b\u0456\u002c\u0020\u0449\u043e\u0020\u0441\u043a\u043b\u0430\u0434\u0430\u044e\u0442\u044c\u0441\u044f\u0020\u0456\u0437\u0020\u0449\u043e\u043d\u0430\u0439\u043c\u0435\u043d\u0448\u0435\u0020\u007b\u0030\u007d\u0020\u0441\u0438\u043c\u0432\u043e\u043b\u0456\u0432\u002c\u0020\u043d\u0435\u0020\u043f\u0456\u043b\u044f\u0433\u0430\u044e\u0442\u044c\u0020\u0443\u0441\u0456\u043c\u0020\u0456\u043d\u0448\u0438\u043c\u0020\u0432\u0438\u043c\u043e\u0433\u0430\u043c\u002e +passwdVal.passwdReq.lengthReq =\u0020\u0429\u043e\u043d\u0430\u0439\u043c\u0435\u043d\u0448\u0435\u0020\u007b\u0030\u007d\u0020\u0441\u0438\u043c\u0432\u043e\u043b\u0456\u0432 +passwdVal.passwdReq.characteristicsReq =\u0020\u0429\u043e\u043d\u0430\u0439\u043c\u0435\u043d\u0448\u0435\u0020\u0031\u0020\u0441\u0438\u043c\u0432\u043e\u043b\u0020\u0437\u0020\u007b\u0030\u007d\u0020\u043d\u0430\u0441\u0442\u0443\u043f\u043d\u0438\u0445\u0020\u0432\u0438\u0434\u0456\u0432\u003a +passwdVal.passwdReq.notInclude =\u0020\u0412\u0456\u043d\u0020\u043d\u0435\u0020\u043c\u043e\u0436\u0435\u0020\u0432\u043a\u043b\u044e\u0447\u0430\u0442\u0438\u003a +passwdVal.passwdReq.consecutiveDigits =\u0411\u0456\u043b\u044c\u0448\u0435\u0020\u043d\u0456\u0436\u0020\u007b\u0030\u007d\u0020\u0446\u0438\u0444\u0440\u0020\u0443\u0020\u0440\u044f\u0434\u043a\u0443 +passwdVal.passwdReq.dictionaryWords =\u0020\u0421\u043b\u043e\u0432\u0430\u0020\u0437\u0456\u0020\u0441\u043b\u043e\u0432\u043d\u0438\u043a\u0430 +passwdVal.passwdReq.unknownPasswordRule 
=\u0020\u041d\u0435\u0432\u0456\u0434\u043e\u043c\u043e\u002c\u0020\u0437\u0432\u0435\u0440\u043d\u0456\u0442\u044c\u0441\u044f\u0020\u0434\u043e\u0020\u0441\u0432\u043e\u0433\u043e\u0020\u0430\u0434\u043c\u0456\u043d\u0456\u0441\u0442\u0440\u0430\u0442\u043e\u0440\u0430\u002e +\u0023\u0070\u0072\u0069\u006e\u0074\u0066\u0020\u0073\u0079\u006e\u0074\u0061\u0078\u0020\u0075\u0073\u0065\u0064\u0020\u0074\u006f\u0020\u0070\u0061\u0073\u0073\u0020\u0074\u006f\u0020\u0070\u0061\u0073\u0073\u0061\u0079\u0020\u006c\u0069\u0062\u0072\u0061\u0072\u0079\u0020\u0441\u0438\u043d\u0442\u0430\u043a\u0441\u0438\u0441\u002c\u0020\u044f\u043a\u0438\u0439\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0454\u0442\u044c\u0441\u044f\u0020\u0434\u043b\u044f\u0020\u043f\u0435\u0440\u0435\u0445\u043e\u0434\u0443\u0020\u0434\u043e\u0020\u0431\u0456\u0431\u043b\u0456\u043e\u0442\u0435\u043a\u0438\u0020 +passwdVal.expireRule.errorCode =\u041d\u0435\u0020\u0434\u0456\u0439\u0441\u043d\u0438\u0439 +passwdVal.expireRule.errorMsg =\u0020\u0422\u0435\u0440\u043c\u0456\u043d\u0020\u043f\u0430\u0440\u043e\u043b\u044e\u0020\u0437\u0430\u043a\u0456\u043d\u0447\u0438\u0432\u0441\u044f\u0020\u0025\u0031\u0024\u0073\u0020\u0434\u043d\u0456\u0432\u0020\u0456\u0020\u0432\u0456\u043d\u0020\u0454\u0020\u043d\u0435\u0434\u0456\u0439\u0441\u043d\u0438\u043c\u002e +passwdVal.goodStrengthRule.errorMsg =\u0020\u041f\u0440\u0438\u043c\u0456\u0442\u043a\u0430\u003a\u0020\u041f\u0430\u0440\u043e\u043b\u0456\u0020\u0437\u0020\u0434\u043e\u0432\u0436\u0438\u043d\u043e\u044e\u0020\u0441\u0438\u043c\u0432\u043e\u043b\u0456\u0432\u0020\u0025\u0031\u0024\u0073\u0020\u0430\u0431\u043e\u0020\u0431\u0456\u043b\u044c\u0448\u0435\u0020\u0437\u0430\u0432\u0436\u0434\u0438\u0020\u0434\u0456\u0439\u0441\u043d\u0456\u002e +passwdVal.goodStrengthRule.errorCode
=\u004e\u004f\u005f\u0047\u004f\u004f\u0044\u0053\u0054\u0052\u0045\u004e\u0047\u0054\u0048\u0020\u043d\u0435\u0020\u0434\u0456\u0439\u0441\u043d\u0438\u0439 +passwdVal.passwdReset.resetLinkTitle =\u0020\u041f\u043e\u0441\u0438\u043b\u0430\u043d\u043d\u044f\u0020\u0434\u043b\u044f\u0020\u0437\u043c\u0456\u043d\u0438\u0020\u043f\u0430\u0440\u043e\u043b\u044f +passwdVal.passwdReset.resetLinkDesc =\u0020\u0412\u0430\u0448\u0435\u0020\u043f\u043e\u0441\u0438\u043b\u0430\u043d\u043d\u044f\u0020\u0434\u043b\u044f\u0020\u0437\u043c\u0456\u043d\u0438\u0020\u043f\u0430\u0440\u043e\u043b\u044f\u0020\u043d\u0435\u0434\u0456\u0439\u0441\u043d\u0435\u002e +passwdVal.passwdReset.valBlankLog =\u0020\u043d\u043e\u0432\u0438\u0439\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u0020\u043f\u043e\u0440\u043e\u0436\u043d\u0456\u0439\u002e +passwdVal.passwdReset.valFacesError =\u0020\u041f\u043e\u043c\u0438\u043b\u043a\u0430\u0020\u043f\u0430\u0440\u043e\u043b\u044e +passwdVal.passwdReset.valFacesErrorDesc =\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u0432\u0432\u0435\u0434\u0456\u0442\u044c\u0020\u043d\u043e\u0432\u0438\u0439\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u0020\u0434\u043b\u044f\u0020\u0441\u0432\u043e\u0433\u043e\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0443\u002e +passwdVal.passwdValBean.warnDictionaryRead =\u0020\u0421\u043b\u043e\u0432\u043d\u0438\u043a\u0020\u0431\u0443\u0432\u0020\u0432\u0441\u0442\u0430\u043d\u043e\u0432\u043b\u0435\u043d\u0438\u0439\u002c\u0020\u0430\u043b\u0435\u0020\u0442\u0430\u043a\u043e\u0433\u043e\u0020\u0441\u043b\u043e\u0432\u0430\u0020\u0442\u0430\u043c\u0020\u043d\u0435\u043c\u0430\u0454\u002e +passwdVal.passwdValBean.warnDictionaryObj
=\u0050\u0077\u0044\u0069\u0063\u0074\u0069\u006f\u006e\u0061\u0072\u0069\u0065\u0073\u0020\u0441\u043b\u043e\u0432\u043d\u0438\u043a\u0438\u0020\u043d\u0435\u0020\u0432\u0441\u0442\u0430\u043d\u043e\u0432\u043b\u0435\u043d\u0456\u002c\u0020\u0456\u0020\u0444\u0430\u0439\u043b\u0020\u043f\u0430\u0440\u043e\u043b\u044e\u0020\u0437\u0430\u0020\u0437\u0430\u043c\u043e\u0432\u0447\u0443\u0432\u0430\u043d\u043d\u044f\u043c\u0020\u043d\u0435\u0020\u0437\u043d\u0430\u0439\u0434\u0435\u043d\u043e\u003a +passwdVal.passwdValBean.warnSetStrength =\u0020\u0417\u043d\u0430\u0447\u0435\u043d\u043d\u044f\u0020\u0050\u0077\u0047\u006f\u006f\u0064\u0053\u0074\u0072\u0065\u006e\u0067\u0074\u0068\u0020\u007b\u0030\u007d\u0020\u043a\u043e\u043d\u043a\u0443\u0440\u0443\u0454\u0020\u0437\u0456\u0020\u0437\u043d\u0430\u0447\u0435\u043d\u043d\u044f\u043c\u0020\u0050\u0077\u004d\u0069\u006e\u004c\u0065\u006e\u0067\u0074\u0068\u0020\u007b\u0031\u007d\u0020\u0456\u0020\u0434\u043e\u0434\u0430\u0454\u0442\u044c\u0441\u044f\u0020\u0434\u043e\u0020\u007b\u0032\u007d +\u0023\u006c\u006f\u0067\u0069\u006e\u0070\u0061\u0067\u0065\u002e\u0078\u0068\u0074\u006d\u006c +login.System=\u0020\u0421\u0438\u0441\u0442\u0435\u043c\u0430\u0020\u0432\u0445\u043e\u0434\u0443 +login.forgot.text=\u0020\u0417\u0430\u0431\u0443\u043b\u0438\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u003f +login.builtin=\u0020\u0410\u043a\u0430\u0443\u043d\u0442\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020 +login.institution=\u0020\u0410\u043a\u0430\u0443\u043d\u0442\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0438 
+login.institution.blurb=\u0020\u0423\u0432\u0456\u0439\u0434\u0456\u0442\u044c\u0020\u0430\u0431\u043e\u0020\u0437\u0430\u0440\u0435\u0454\u0441\u0442\u0440\u0443\u0439\u0442\u0435\u0441\u044f\u0020\u0437\u0430\u0020\u0434\u043e\u043f\u043e\u043c\u043e\u0433\u043e\u044e\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0443\u0020\u0441\u0432\u043e\u0454\u0457\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0438\u0020\u0026\u006d\u0064\u0061\u0073\u0068\u003b\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0061\u0063\u0063\u006f\u0075\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0434\u0456\u0437\u043d\u0430\u0439\u0442\u0435\u0441\u044f\u0020\u0431\u0456\u043b\u044c\u0448\u0065\u0020\u0020\u003c\u002f\u0061\u003e\u002e\u0020 +login.institution.support.beforeLink=\u0020\u0417\u0430\u043b\u0438\u0448\u0430\u0454\u0442\u0435\u0020\u0441\u0432\u043e\u044e\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0443\u003f\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u0020\u0437\u0432\u0027\u044f\u0436\u0456\u0442\u044c\u0441\u044f\u0020\u0437 +login.institution.support.afterLink=\u0020\u0434\u043b\u044f\u0020\u0434\u043e\u043f\u043e\u043c\u043e\u0433\u0438\u002e +login.builtin.credential.usernameOrEmail=\u0020\u0406\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u002f\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u043f\u043e\u0448\u0442\u0430 +login.builtin.credential.password=\u0020\u041f\u0430\u0440\u043e\u043b\u044c 
+login.builtin.invalidUsernameEmailOrPassword=\u0020\u0412\u0432\u0435\u0434\u0435\u043d\u0435\u0020\u0456\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u002c\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u0430\u0434\u0440\u0435\u0441\u0430\u0020\u0430\u0431\u043e\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u0020\u043d\u0435\u0434\u0456\u0439\u0441\u043d\u0456\u002e\u0020\u041f\u043e\u0442\u0440\u0456\u0431\u043d\u0430\u0020\u0434\u043e\u043f\u043e\u043c\u043e\u0433\u0430\u0020\u0434\u043b\u044f\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u0443\u0020\u0434\u043e\u0020\u0432\u0430\u0448\u043e\u0433\u043e\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0443\u003f +\u0023\u0020\u044f\u043a\u0020\u043c\u0438\u0020\u0437\u0434\u0456\u0439\u0441\u043d\u044e\u0454\u043c\u043e\u0020\u043f\u043e\u043c\u0438\u043b\u043a\u0443\u0020\u043f\u0430\u0440\u043e\u043b\u044f\u003f\u0020\u0427\u0435\u0440\u0435\u0437\u0020\u043f\u043e\u043c\u0438\u043b\u043a\u0443\u0020\u043e\u043d\u043e\u0432\u043b\u0435\u043d\u043d\u044f\u0020\u043f\u0430\u0440\u043e\u043b\u044f\u003f\u0020\u0414\u0438\u0432\u002e +\u0068\u0074\u0074\u0070\u0073\u003a\u002f\u002f\u0067\u0069\u0074\u0068\u0075\u0062\u002e\u0063\u006f\u006d\u002f\u0049\u0051\u0053\u0053\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002f\u0070\u0075\u006c\u006c\u002f\u0032\u0039\u0032\u0032 
+login.error=\u0020\u041f\u043e\u043c\u0438\u043b\u043a\u0430\u0020\u043f\u0435\u0440\u0435\u0432\u0456\u0440\u043a\u0438\u0020\u0456\u043c\u0435\u043d\u0456\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u002c\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u043e\u0457\u0020\u0430\u0434\u0440\u0435\u0441\u0438\u0020\u0430\u0431\u043e\u0020\u043f\u0430\u0440\u043e\u043b\u044f\u002e\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u0020\u0441\u043f\u0440\u043e\u0431\u0443\u0439\u0442\u0435\u0020\u0449\u0435\u0020\u0440\u0430\u0437\u002e\u0020\u042f\u043a\u0449\u043e\u0020\u043f\u0440\u043e\u0431\u043b\u0435\u043c\u0430\u0020\u043d\u0435\u0020\u0437\u043d\u0438\u043a\u043d\u0435\u002c\u0020\u0437\u0432\u0027\u044f\u0436\u0456\u0442\u044c\u0441\u044f\u0020\u0437\u0020\u0430\u0434\u043c\u0456\u043d\u0456\u0441\u0442\u0440\u0430\u0442\u043e\u0440\u043e\u043c\u002e +user.error.cannotChangePassword=\u0020\u0412\u0438\u0431\u0430\u0447\u0442\u0435\u002c\u0020\u0432\u0430\u0448\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u0020\u043d\u0435\u0020\u043c\u043e\u0436\u0435\u0020\u0431\u0443\u0442\u0438\u0020\u0437\u043c\u0456\u043d\u0435\u043d\u0438\u0439\u002e\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u0437\u0432\u0027\u044f\u0436\u0456\u0442\u044c\u0441\u044f\u0020\u0437\u0456\u0020\u0441\u0432\u043e\u0457\u043c\u0020\u0441\u0438\u0441\u0442\u0435\u043c\u043d\u0438\u043c\u0020\u0430\u0434\u043c\u0456\u043d\u0456\u0441\u0442\u0440\u0430\u0442\u043e\u0440\u043e\u043c\u002e +user.error.wrongPassword=\u0020\u0412\u0438\u0431\u0430\u0447\u0442\u0435\u002c\u0020\u043d\u0435\u0432\u0456\u0440\u043d\u0438\u0439\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u002e +login.button=\u0020\u0423\u0432\u0456\u0439\u0442\u0438\u0020\u0437\u0020\u007b\u0030\u007d 
+login.button.orcid=\u0020\u0421\u0442\u0432\u043e\u0440\u0456\u0442\u044c\u0020\u0430\u0431\u043e\u0020\u043f\u0456\u0434\u043a\u043b\u044e\u0447\u0456\u0442\u044c\u0020\u0432\u0430\u0448\u0020\u004f\u0052\u0043\u0049\u0044 + +\u0023\u0020\u0061\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0020\u0070\u0072\u006f\u0076\u0069\u0064\u0065\u0072\u0073\u0020\u043f\u043e\u0441\u0442\u0430\u0447\u0430\u043b\u044c\u043d\u0438\u043a\u0438\u0020\u0430\u0443\u0442\u0435\u043d\u0442\u0438\u0444\u0456\u043a\u0430\u0446\u0456\u0457 +auth.providers.title=\u0020\u0406\u043d\u0448\u0456\u0020\u043e\u043f\u0446\u0456\u0457 +auth.providers.tip=\u0020\u0412\u0438\u0020\u043c\u043e\u0436\u0435\u0442\u0435\u0020\u043f\u0435\u0440\u0435\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u002c\u0020\u0449\u043e\u0431\u0020\u0441\u043a\u043e\u0440\u0438\u0441\u0442\u0430\u0442\u0438\u0441\u044c\u0020\u043e\u0434\u043d\u0456\u0454\u044e\u0020\u0456\u0437\u0020\u043d\u0430\u0432\u0435\u0434\u0435\u043d\u0438\u0445\u0020\u0432\u0438\u0449\u0435\u0020\u043e\u043f\u0446\u0456\u0439\u002e\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0061\u0063\u0063\u006f\u0075\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u004c\u0065\u0061\u0072\u006e\u0020\u006d\u006f\u0072\u0065\u003c\u002f\u0061\u003e\u002e +auth.providers.title.builtin=\u0020\u0406\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u002f\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u043f\u043e\u0448\u0442\u0430 +auth.providers.title.shib=\u0020\u0412\u0430\u0448\u0430\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0430 
+auth.providers.title.orcid=\u004f\u0052\u0043\u0049\u0044 +auth.providers.title.google=\u0047\u006f\u006f\u0067\u006c\u0065 +auth.providers.title.github=\u0047\u0069\u0074\u0048\u0075\u0062 +auth.providers.blurb=\u0020\u0423\u0432\u0456\u0439\u0434\u0456\u0442\u044c\u0020\u0430\u0431\u043e\u0020\u0437\u0430\u0440\u0435\u0454\u0441\u0442\u0440\u0443\u0439\u0442\u0435\u0441\u044c\u0020\u0443\u0020\u0441\u0432\u043e\u0454\u043c\u0443\u0020\u007b\u0030\u007d\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0456\u0020\u0026\u006d\u0064\u0061\u0073\u0068\u003b\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0031\u007d\u002f\u007b\u0032\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0061\u0063\u0063\u006f\u0075\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u006c\u0065\u0061\u0072\u006e\u0020\u006d\u006f\u0072\u0065\u003c\u002f\u0061\u003e\u002e\u0020\u0412\u0438\u043d\u0438\u043a\u043b\u0438\u0020\u043f\u0440\u043e\u0431\u043b\u0435\u043c\u0438\u003f\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u0437\u0432\u0027\u044f\u0436\u0456\u0442\u044c\u0441\u044f\u0020\u0437\u0020\u007b\u0033\u007d\u0434\u043b\u044f\u0020\u0434\u043e\u043f\u043e\u043c\u043e\u0433\u0438\u002e +auth.providers.persistentUserIdName.orcid=\u004f\u0052\u0043\u0049\u0044\u0020\u0069\u0044 +auth.providers.persistentUserIdName.github=\u0049\u0044 
+auth.providers.persistentUserIdTooltip.orcid=\u004f\u0052\u0043\u0049\u0044\u0020\u043d\u0430\u0434\u0430\u0454\u0020\u043f\u043e\u0441\u0442\u0456\u0439\u043d\u0438\u0439\u0020\u0446\u0438\u0444\u0440\u043e\u0432\u0438\u0439\u0020\u0456\u0434\u0435\u043d\u0442\u0438\u0444\u0456\u043a\u0430\u0442\u043e\u0440\u002c\u0020\u044f\u043a\u0438\u0439\u0020\u0432\u0456\u0434\u0440\u0456\u0437\u043d\u044f\u0454\u0020\u0432\u0430\u0441\u0020\u0432\u0456\u0434\u0020\u0456\u043d\u0448\u0438\u0445\u0020\u0434\u043e\u0441\u043b\u0456\u0434\u043d\u0438\u043a\u0456\u0432\u002e +auth.providers.persistentUserIdTooltip.github=\u0047\u0069\u0074\u0048\u0075\u0062\u0020\u043f\u0440\u0438\u0437\u043d\u0430\u0447\u0430\u0454\u0020\u0443\u043d\u0456\u043a\u0430\u043b\u044c\u043d\u0438\u0439\u0020\u043d\u043e\u043c\u0435\u0440\u0020\u0434\u043b\u044f\u0020\u043a\u043e\u0436\u043d\u043e\u0433\u043e\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u002e +auth.providers.orcid.insufficientScope=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u043d\u0435\u0020\u0431\u0443\u043b\u043e\u0020\u043d\u0430\u0434\u0430\u043d\u043e\u0020\u0434\u043e\u0437\u0432\u0456\u043b\u0020\u043d\u0430\u0020\u0447\u0438\u0442\u0430\u043d\u043d\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0446\u044c\u043a\u0438\u0445\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0437\u0020\u004f\u0052\u0043\u0049\u0044\u002e\u0020 +\u0023\u0020\u0046\u0072\u0069\u0065\u006e\u0064\u006c\u0079\u0020\u0041\u0075\u0074\u0068\u0065\u006e\u0074\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u0050\u0072\u006f\u0076\u0069\u0064\u0065\u0072\u0020\u006e\u0061\u006d\u0065\u0073\u0020\u0437\u0440\u0443\u0447\u043d\u0456\u0020\u0434\u043b\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0456\u0432\u0020\u043f\u043e\u0441\u0442\u0430\u0447\u0430\u043b\u044c\u043d\u0438\u043a\u0438\u0020\u0430\u0443\u0442\u0435\u043d\u0442\u0438\u0444\u0456\u043a\u0430\u0446\u0456\u0457 
+authenticationProvider.name.builtin=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +authenticationProvider.name.null=\u0028\u043f\u043e\u0441\u0442\u0430\u0447\u0430\u043b\u044c\u043d\u0438\u043a\u0020\u043d\u0435\u0432\u0456\u0434\u043e\u043c\u0438\u0439\u0029 +authenticationProvider.name.github=\u0047\u0069\u0074\u0048\u0075\u0062 +authenticationProvider.name.google=\u0047\u006f\u006f\u0067\u006c\u0065 +authenticationProvider.name.orcid=\u004f\u0052\u0043\u0069\u0044 +authenticationProvider.name.orcid-sandbox=\u004f\u0052\u0043\u0069\u0044\u0020\u0053\u0061\u006e\u0064\u0062\u006f\u0078 +authenticationProvider.name.shib=\u0053\u0068\u0069\u0062\u0062\u006f\u006c\u0065\u0074\u0068 + +\u0023\u0063\u006f\u006e\u0066\u0069\u0072\u006d\u0065\u006d\u0061\u0069\u006c\u002e\u0078\u0068\u0074\u006d\u006c +confirmEmail.pageTitle=\u0020\u041f\u0435\u0440\u0435\u0432\u0456\u0440\u043a\u0430\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u043e\u0457\u0020\u043f\u043e\u0448\u0442\u0438\u0020 +confirmEmail.submitRequest=\u0020\u041f\u0456\u0434\u0442\u0432\u0435\u0440\u0434\u0438\u0442\u0438\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0443\u0020\u043f\u043e\u0448\u0442\u0443 
+confirmEmail.submitRequest.success=\u0020\u0412\u0435\u0440\u0438\u0444\u0456\u043a\u0430\u0446\u0456\u0439\u043d\u0438\u0439\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0438\u0439\u0020\u043b\u0438\u0441\u0442\u0020\u043d\u0430\u0434\u0456\u0441\u043b\u0430\u043d\u043e\u0020\u043d\u0430\u0020\u0430\u0434\u0440\u0435\u0441\u0443\u0020\u007b\u0030\u007d\u002e\u0020\u0417\u0432\u0435\u0440\u043d\u0456\u0442\u044c\u0020\u0443\u0432\u0430\u0433\u0443\u002c\u0020\u0449\u043e\u0020\u043f\u043e\u0441\u0438\u043b\u0430\u043d\u043d\u044f\u0020\u043d\u0430\u0020\u043f\u0456\u0434\u0442\u0432\u0435\u0440\u0434\u0436\u0435\u043d\u043d\u044f\u0020\u0437\u0430\u043a\u0456\u043d\u0447\u0438\u0442\u044c\u0441\u044f\u0020\u043f\u0456\u0441\u043b\u044f\u0020\u007b\u0031\u007d\u002e +confirmEmail.details.success=\u0020\u0415\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u0430\u0434\u0440\u0435\u0441\u0430\u0020\u043f\u0456\u0434\u0442\u0432\u0435\u0440\u0434\u0436\u0435\u043d\u0430\u0021 +confirmEmail.details.failure=\u0020\u041c\u0438\u0020\u043d\u0435\u0020\u0437\u043c\u043e\u0433\u043b\u0438\u0020\u043f\u0456\u0434\u0442\u0432\u0435\u0440\u0434\u0438\u0442\u0438\u0020\u0430\u0434\u0440\u0435\u0441\u0443\u0020\u0432\u0430\u0448\u043e\u0457\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u043e\u0457\u0020\u043f\u043e\u0448\u0442\u0438\u002e\u0020\u041f\u0435\u0440\u0435\u0439\u0434\u0456\u0442\u044c\u0020\u043d\u0430\u0020\u0441\u0442\u043e\u0440\u0456\u043d\u043a\u0443\u0020\u0022\u0406\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u044f\u0020\u043f\u0440\u043e\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0022\u0020\u0442\u0430\u0020\u043d\u0430\u0442\u0438\u0441\u043d\u0456\u0442\u044c\u0020\u043a\u043d\u043e\u043f\u043a\u0443\u0020\u0022\u041f\u0456\u0434\u0442\u0432\u0435\u0440\u0434\u0438\u0442\u0438\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0443\u0020\u0430\u0434\u0440\u0435\u0441\u0443\u0022\u002e 
+confirmEmail.details.goToAccountPageButton=\u0020\u041f\u0435\u0440\u0435\u0439\u0442\u0438\u0020\u0434\u043e\u0020\u0456\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u0457\u0020\u043f\u0440\u043e\u0020\u0430\u043a\u0430\u0443\u043d\u0442 +confirmEmail.notVerified=\u0020\u041d\u0435\u0020\u043f\u0456\u0434\u0442\u0432\u0435\u0440\u0434\u0436\u0435\u043d\u043e +confirmEmail.verified=\u0020\u043f\u0456\u0434\u0442\u0432\u0435\u0440\u0434\u0436\u0435\u043d\u043e + +\u0023\u0073\u0068\u0069\u0062\u002e\u0078\u0068\u0074\u006d\u006c +shib.btn.convertAccount=\u0020\u041f\u0435\u0440\u0435\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0430\u043a\u0430\u0443\u043d\u0442 +shib.btn.createAccount=\u0020\u0421\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0430\u043a\u0430\u0443\u043d\u0442 +shib.askToConvert=\u0020\u0412\u0438\u0020\u0445\u043e\u0447\u0435\u0442\u0435\u0020\u043f\u0435\u0440\u0435\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0441\u0432\u0456\u0439\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u002c\u0020\u0449\u043e\u0431\u0020\u0437\u0430\u0432\u0436\u0434\u0438\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0432\u0430\u0442\u0438\u0020\u043b\u043e\u0433\u0456\u043d\u0020\u0432\u0430\u0448\u043e\u0457\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0438\u003f + +# Bundle file editors, please note that "shib.welcomeExistingUserMessageDefaultInstitution" is used in a unit test 
+shib.welcomeExistingUserMessage=\u0020\u041b\u043e\u0433\u0456\u043d\u0020\u0412\u0430\u0448\u043e\u0457\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0438\u0020\u0434\u043b\u044f\u0020\u007b\u0030\u007d\u0020\u0432\u0456\u0434\u043f\u043e\u0432\u0456\u0434\u0430\u0454\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0456\u0439\u0020\u0430\u0434\u0440\u0435\u0441\u0456\u002c\u0020\u044f\u043a\u0430\u0020\u0432\u0436\u0435\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0454\u0442\u044c\u0441\u044f\u0020\u0434\u043b\u044f\u0020\u0044\u0061\u0074\u0061\u0076\u0435\u0072\u0073\u0065\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0443\u002e\u0020\u0412\u0432\u0435\u0434\u0456\u0442\u044c\u0020\u0441\u0432\u0456\u0439\u0020\u043f\u043e\u0442\u043e\u0447\u043d\u0438\u0439\u0020\u0044\u0061\u0074\u0061\u0076\u0435\u0072\u0073\u0065\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u0020\u043d\u0438\u0436\u0447\u0435\u002c\u0020\u0449\u043e\u0431\u0020\u0432\u0430\u0448\u0020\u0456\u0441\u043d\u0443\u044e\u0447\u0438\u0439\u0020\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0020\u043c\u043e\u0436\u043d\u0430\u0020\u0431\u0443\u043b\u043e\u0020\u043f\u0435\u0440\u0435\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0434\u043b\u044f\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0432\u0430\u043d\u043d\u044f\u0020\u043b\u043e\u0433\u0456\u043d\u0443\u0020\u0432\u0430\u0448\u043e\u0457\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0438\u002e\u0020\u041f\u0456\u0441\u043b\u044f\u0020\u043f\u0435\u0440\u0435\u0442\u0432\u043e\u0440\u0435\u043d\u043d\u044f\u0020\u0432\u0430\u043c\u0020\u043f\u043e\u0442\u0440\u0456\u0431\u043d\u043e\u0020\u0431\u0443\u0434\u0435\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0432\u0430\u0442\u0438\u0020\u043b\u0438\u0448\u0435\u0020\u043b\u043e\u0433\u0456\u043d\u0020\u0432\u0430\u0448\u043e\u0457\u0020\u0443\u0441\u0442\u0430\u043d\u043e\
u0432\u0438\u002e + +\u0023\u0020\u0042\u0075\u006e\u0064\u006c\u0065\u0020\u0066\u0069\u006c\u0065\u0020\u0065\u0064\u0069\u0074\u006f\u0072\u0073\u002c\u0020\u0070\u006c\u0065\u0061\u0073\u0065\u0020\u006e\u006f\u0074\u0065\u0020\u0074\u0068\u0061\u0074\u0020\u0022\u0073\u0068\u0069\u0062\u002e\u0077\u0065\u006c\u0063\u006f\u006d\u0065\u0045\u0078\u0069\u0073\u0074\u0069\u006e\u0067\u0055\u0073\u0065\u0072\u004d\u0065\u0073\u0073\u0061\u0067\u0065\u0044\u0065\u0066\u0061\u0075\u006c\u0074\u0049\u006e\u0073\u0074\u0069\u0074\u0075\u0074\u0069\u006f\u006e\u0022\u0020\u0069\u0073\u0020\u0075\u0073\u0065\u0064\u0020\u0069\u006e\u0020\u0061\u0020\u0075\u006e\u0069\u0074\u0020\u0074\u0065\u0073\u0074 +shib.welcomeExistingUserMessageDefaultInstitution=\u0432\u0430\u0448\u0430\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0430 +shib.dataverseUsername=\u0020\u0406\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020 +shib.currentDataversePassword=\u041f\u043e\u0442\u043e\u0447\u043d\u0438\u0439\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020 +shib.accountInformation=\u0020\u0406\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u044f\u0020\u043f\u0440\u043e\u0020\u0430\u043a\u0430\u0443\u043d\u0442 
+shib.offerToCreateNewAccount=\u0020\u0426\u044f\u0020\u0456\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u044f\u0020\u043d\u0430\u0434\u0430\u0454\u0442\u044c\u0441\u044f\u0020\u0432\u0430\u0448\u043e\u044e\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u043e\u044e\u0020\u0442\u0430\u0020\u0431\u0443\u0434\u0435\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u0430\u043d\u0430\u0020\u0434\u043b\u044f\u0020\u0441\u0442\u0432\u043e\u0440\u0435\u043d\u043d\u044f\u0020\u0432\u0430\u0448\u043e\u0433\u043e\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0443\u002e +shib.passwordRejected=\u003c\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0020\u041f\u043e\u043c\u0438\u043b\u043a\u0430\u0020\u043f\u0435\u0440\u0435\u0432\u0456\u0440\u043a\u0438\u0020\u003c\u002f\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0020\u002d\u0020\u0412\u0430\u0448\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0020\u043c\u043e\u0436\u043d\u0430\u0020\u043a\u043e\u043d\u0432\u0435\u0440\u0442\u0443\u0432\u0430\u0442\u0438\u002c\u0020\u043b\u0438\u0448\u0435\u0020\u044f\u043a\u0449\u043e\u0020\u0432\u0438\u0020\u0432\u0432\u0435\u0434\u0435\u0442\u0435\u0020\u043f\u0440\u0430\u0432\u0438\u043b\u044c\u043d\u0438\u0439\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u0020\u0434\u043b\u044f\u0020\u0432\u0430\u0448\u043e\u0433\u043e\u0020\u0456\u0441\u043d\u0443\u044e\u0447\u043e\u0433\u043e\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0443\u002e + +\u0023\u0020\u006f\u0061\u0075\u0074\u0068\u0032\u002f\u0066\u0069\u0072\u0073\u0074\u004c\u006f\u0067\u0069\u006e\u002e\u0078\u0068\u0074\u006d\u006c +oauth2.btn.convertAccount =\u0020\u041f\u0435\u0440\u0435\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0456\u0441\u043d\u0443\u044e\u0447\u0438\u0439\u0020\u0430\u043a\u0430\u0443\u043d\u0442 +oauth2.btn.createAccount =\u0020\u0421\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u043d\u043e\u0432\u0438\u0439\u0020\u0430\u043a\u0430\u0443\u043d\u0442 
+oauth2.askToConvert=\u0020\u0412\u0438\u0020\u0445\u043e\u0447\u0435\u0442\u0435\u0020\u043f\u0435\u0440\u0435\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0441\u0432\u0456\u0439\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u002c\u0020\u0449\u043e\u0431\u0020\u0437\u0430\u0432\u0436\u0434\u0438\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0432\u0430\u0442\u0438\u0020\u043b\u043e\u0433\u0456\u043d\u0020\u0432\u0430\u0448\u043e\u0457\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0438\u003f +oauth2.welcomeExistingUserMessage=\u0020\u041b\u043e\u0433\u0456\u043d\u0020\u0432\u0430\u0448\u043e\u0457\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0438\u0020\u0434\u043b\u044f\u0020\u007b\u0030\u007d\u0020\u0432\u0456\u0434\u043f\u043e\u0432\u0456\u0434\u0430\u0454\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0456\u0439\u0020\u0430\u0434\u0440\u0435\u0441\u0456\u002c\u0020\u044f\u043a\u0430\u0020\u0432\u0436\u0435\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0454\u0442\u044c\u0441\u044f\u0020\u0434\u043b\u044f\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0443\u002e\u0020\u0412\u0432\u0435\u0434\u0456\u0442\u044c\u0020\u0441\u0432\u0456\u0439\u0020\u043f\u043e\u0442\u043e\u0447\u043d\u0438\u0439\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u0020\u043d\u0438\u0436\u0447\u0435\u002c\u0020\u0449\u043e\u0431\u0020\u0432\u0430\u0448\u0020\u0456\u0441\u043d\u0443\u044e\u0447\u0438\u0439\u0020\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0020\u043c\u043e\u0436\u043d\u0430\u0020\u0431\u0443\u043b\u043e\u0020\u043f\u0435\u0440\u0435\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0434\u043b\u044f\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0432\u043
0\u043d\u043d\u044f\u0020\u043b\u043e\u0433\u0456\u043d\u0443\u0020\u0432\u0430\u0448\u043e\u0457\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0438\u002e\u0020\u041f\u0456\u0441\u043b\u044f\u0020\u043f\u0435\u0440\u0435\u0442\u0432\u043e\u0440\u0435\u043d\u043d\u044f\u0020\u0432\u0430\u043c\u0020\u043f\u043e\u0442\u0440\u0456\u0431\u043d\u043e\u0020\u0431\u0443\u0434\u0435\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0432\u0430\u0442\u0438\u0020\u043b\u0438\u0448\u0435\u0020\u043b\u043e\u0433\u0456\u043d\u0020\u0432\u0430\u0448\u043e\u0457\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0438\u002e + + +oauth2.welcomeExistingUserMessageDefaultInstitution=\u0020\u0432\u0430\u0448\u0430\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0430 +oauth2.dataverseUsername=\u0020\u0406\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +oauth2.currentDataversePassword=\u0020\u041f\u043e\u0442\u043e\u0447\u043d\u0438\u0439\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +oauth2.chooseUsername=\u0020\u0406\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u003a\u0020 +oauth2.passwordRejected=\u003c\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0056\u0061\u006c\u0069\u0064\u0061\u0074\u0069\u006f\u006e\u0020\u0045\u0072\u0072\u006f\u0072\u003c\u002f\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0020\u002d\u0020\u043d\u0435\u043f\u0440\u0430\u0432\u0438\u043b\u044c\u043d\u0435\u0020\u0456\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0430\u0431\u043e\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u002e +# oauth2.newAccount.title=\u0020\u0421\u0442\u0432\u043e\u0440\u0435\u043d\u043d\u044f\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0443 
+oauth2.newAccount.welcomeWithName=\u0020\u041b\u0430\u0441\u043a\u0430\u0432\u043e\u0020\u043f\u0440\u043e\u0441\u0438\u043c\u043e\u0020\u0434\u043e\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u007b\u0030\u007d +oauth2.newAccount.welcomeNoName=\u0020\u041b\u0430\u0441\u043a\u0430\u0432\u043e\u0020\u043f\u0440\u043e\u0441\u0438\u043c\u043e\u0020\u0434\u043e\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +# oauth2.newAccount.email=\u0045\u006d\u0061\u0069\u006c +# oauth2.newAccount.email.tip=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0454\u0020\u0446\u044e\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0443\u0020\u0430\u0434\u0440\u0435\u0441\u0443\u002c\u0020\u0449\u043e\u0431\u0020\u043f\u043e\u0432\u0456\u0434\u043e\u043c\u043b\u044f\u0442\u0438\u0020\u0432\u0430\u0441\u0020\u043f\u0440\u043e\u0020\u043f\u0440\u043e\u0431\u043b\u0435\u043c\u0438\u002c\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u043d\u0456\u0020\u0437\u0020\u0432\u0430\u0448\u0438\u043c\u0438\u0020\u0434\u0430\u043d\u0438\u043c\u0438\u002e +oauth2.newAccount.suggestedEmails=\u0020\u0420\u0435\u043a\u043e\u043c\u0435\u043d\u0434\u043e\u0432\u0430\u043d\u0456\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0456\u0020\u0430\u0434\u0440\u0435\u0441\u0438\u003a +oauth2.newAccount.username=\u0020\u0406\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430 +oauth2.newAccount.username.tip=\u0020\u0426\u0435\u0020\u0456\u043c\u0027\u044f\u0020\u0431\u0443\u0434\u0435\u0020\u0432\u0430\u0448\u0438\u043c\u0020\u0443\u043d\u0456\u043a\u0430\u043b\u044c\u043d\u0438\u043c\u0020\u0456\u0434\u0435\u043d\u0442\u0438\u0444\u0456\u043a\u0430\u0442\u043e\u0440\u043e\u043c\u0020\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e 
+oauth2.newAccount.explanation=\u0020\u0426\u044f\u0020\u0456\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u044f\u0020\u043d\u0430\u0434\u0430\u0454\u0442\u044c\u0441\u044f\u0020\u007b\u0030\u007d\u0020\u0456\u0020\u0431\u0443\u0434\u0435\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u0430\u043d\u0430\u0020\u0434\u043b\u044f\u0020\u0441\u0442\u0432\u043e\u0440\u0435\u043d\u043d\u044f\u0020\u0432\u0430\u0448\u043e\u0433\u043e\u0020\u007b\u0031\u007d\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0443\u002e\u0020\u0429\u043e\u0431\u0020\u0437\u043d\u043e\u0432\u0443\u0020\u0432\u0432\u0456\u0439\u0442\u0438\u002c\u0020\u0432\u0430\u043c\u0020\u043d\u0435\u043e\u0431\u0445\u0456\u0434\u043d\u043e\u0020\u0441\u043a\u043e\u0440\u0438\u0441\u0442\u0430\u0442\u0438\u0441\u044f\u0020\u043e\u043f\u0446\u0456\u0454\u044e\u0020\u0432\u0445\u043e\u0434\u0443\u0020\u007b\u0030\u007d\u002e +oauth2.newAccount.suggestConvertInsteadOfCreate=\u0020\u042f\u043a\u0449\u043e\u0020\u0443\u0020\u0432\u0430\u0441\u0020\u0432\u0436\u0435\u0020\u0454\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0020\u007b\u0030\u007d\u002c\u0020\u0432\u0430\u043c\u0020\u043d\u0435\u043e\u0431\u0445\u0456\u0434\u043d\u043e\u0020\u0431\u0443\u0434\u0435\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u006f\u0061\u0075\u0074\u0068\u0032\u002f\u0063\u006f\u006e\u0076\u0065\u0072\u0074\u002e\u0078\u0068\u0074\u006d\u006c\u0022\u003e\u0020\u043a\u043e\u043d\u0432\u0435\u0440\u0442\u0443\u0432\u0430\u0442\u0438\u0020\u0432\u0430\u0448\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u002e\u0020\u003c\u002f\u0061\u003e + +# oauth2.newAccount.tabs.convertAccount=\u0020\u041f\u0435\u0440\u0435\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0456\u0441\u043d\u0443\u044e\u0447\u0438\u0439\u0020\u0430\u043a\u0430\u0443\u043d\u0442 +oauth2.newAccount.buttons.convertNewAccount=\u0020\u041f\u0435\u0440\u0435\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0430\u043a\u0430\u0443\u043d\u0442 
+oauth2.newAccount.emailTaken=\u0020\u0415\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u0430\u0434\u0440\u0435\u0441\u0430\u0020\u0432\u0436\u0435\u0020\u043f\u0440\u0438\u0439\u043d\u044f\u0442\u0430\u002e\u0020\u0417\u0430\u043c\u0456\u0441\u0442\u044c\u0020\u0446\u044c\u043e\u0433\u043e\u002c\u0020\u043f\u043e\u0434\u0443\u043c\u0430\u0439\u0442\u0435\u0020\u043f\u0440\u043e\u0020\u043e\u0431\u0027\u0454\u0434\u043d\u0430\u043d\u043d\u044f\u0020\u0432\u0456\u0434\u043f\u043e\u0432\u0456\u0434\u043d\u043e\u0433\u043e\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0443\u002e +oauth2.newAccount.emailOk=\u0020\u0415\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u0430\u0434\u0440\u0435\u0441\u0430\u0020\u004f\u004b\u002e +oauth2.newAccount.emailInvalid=\u0020\u041d\u0435\u0434\u0456\u0439\u0441\u043d\u0430\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u0430\u0434\u0440\u0435\u0441\u0430\u002e +# oauth2.newAccount.usernameTaken=\u0020\u0406\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0432\u0436\u0435\u0020\u043f\u0440\u0438\u0439\u043d\u044f\u0442\u043e\u002e +# oauth2.newAccount.usernameOk=\u0020\u0406\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u004f\u004b\u002e + +\u0023\u0020\u006f\u0061\u0075\u0074\u0068\u0032\u002f\u0063\u006f\u006e\u0076\u0065\u0072\u0074\u002e\u0078\u0068\u0074\u006d\u006c +# oauth2.convertAccount.title=\u041f\u0435\u0440\u0435\u0442\u0432\u043e\u0440\u0435\u043d\u043d\u044f\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0443 
+oauth2.convertAccount.explanation=\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u0432\u0432\u0435\u0434\u0456\u0442\u044c\u0020\u0441\u0432\u043e\u0454\u0020\u007b\u0030\u007d\u0020\u0456\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0443\u0020\u0430\u0431\u043e\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0443\u0020\u0430\u0434\u0440\u0435\u0441\u0443\u0020\u0442\u0430\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u002c\u0020\u0449\u043e\u0431\u0020\u043a\u043e\u043d\u0432\u0435\u0440\u0442\u0443\u0432\u0430\u0442\u0438\u0020\u0432\u0430\u0448\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0020\u0432\u0020\u043e\u043f\u0446\u0456\u044e\u0020\u007b\u0031\u007d\u0020\u043b\u043e\u0433\u0456\u043d\u0443\u002e\u0020\u003c\u0061\u0020\ +href=\u0022\u007b\u0032\u007d\u002f\u007b\u0033\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0061\u0063\u0063\u006f\u0075\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0020\u0414\u043e\u0434\u0430\u0442\u043a\u043e\u0432\u0430\u0020\u0456\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u044f\u0020\u003c\u002f\u0061\u003e\u0020\u043f\u0440\u043e\u0020\u043f\u0435\u0440\u0435\u0442\u0432\u043e\u0440\u0435\u043d\u043d\u044f\u0020\u0432\u0430\u0448\u043e\u0433\u043e\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0443\u002e +oauth2.convertAccount.username=\u0406\u0441\u043d\u0443\u044e\u0447\u0435\u0020\u0456\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u003a +oauth2.convertAccount.password=\u041f\u0430\u0440\u043e\u043b\u044c 
+oauth2.convertAccount.authenticationFailed=\u0020\u041f\u0456\u0434\u0442\u0432\u0435\u0440\u0434\u0436\u0435\u043d\u043d\u044f\u0020\u043d\u0435\u0020\u0432\u0438\u043a\u043e\u043d\u0430\u043d\u0435\u0020\u002d\u0020\u043d\u0435\u0432\u0456\u0440\u043d\u0435\u0020\u0456\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0430\u0431\u043e\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u002e +oauth2.convertAccount.buttonTitle=\u0020\u041f\u0435\u0440\u0435\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0430\u043a\u0430\u0443\u043d\u0442 +oauth2.convertAccount.success=\u0020\u0412\u0430\u0448\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0020\u0442\u0435\u043f\u0435\u0440\u0020\u0437\u0432\u0027\u044f\u0437\u0430\u043d\u0438\u0439\u0020\u0437\u0020\u0432\u0430\u0448\u0438\u043c\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u043e\u043c\u0020\u007b\u0030\u007d\u002e +\u0020 +\u0023\u0020\u006f\u0061\u0075\u0074\u0068\u0032\u002f\u0063\u0061\u006c\u006c\u0062\u0061\u0063\u006b\u002e\u0078\u0068\u0074\u006d\u006c +oauth2.callback.page.title=\u004f\u0041\u0075\u0074\u0068\u0020\u0417\u0432\u043e\u0440\u043e\u0442\u043d\u0438\u0439\u0020\u0432\u0438\u043a\u043b\u0438\u043a 
+oauth2.callback.message=\u003c\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0020\u041f\u043e\u043c\u0438\u043b\u043a\u0430\u0020\u043f\u0456\u0434\u0442\u0432\u0435\u0440\u0434\u0436\u0435\u043d\u043d\u044f\u0020\u003c\u002f\u0073\u0074\u0072\u006f\u006e\u0067\u003e\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u043d\u0435\u0020\u043c\u043e\u0436\u0435\u0020\u0430\u0432\u0442\u0435\u043d\u0442\u0438\u0444\u0456\u043a\u0443\u0432\u0430\u0442\u0438\u0020\u0432\u0430\u0448\u0020\u004f\u0052\u0043\u0049\u0044\u0020\u043b\u043e\u0433\u0456\u043d\u002e\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u043f\u0435\u0440\u0435\u043a\u043e\u043d\u0430\u0439\u0442\u0435\u0441\u044f\u002c\u0020\u0449\u043e\u0020\u0432\u0438\u0020\u0430\u0432\u0442\u043e\u0440\u0438\u0437\u0443\u0454\u0442\u0435\u0020\u0441\u0432\u0456\u0439\u0020\u004f\u0052\u0043\u0049\u0044\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0020\u0434\u043b\u044f\u0020\u043f\u0456\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u043d\u044f\u0020\u0434\u043e\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0020\u0414\u043b\u044f\u0020\u043e\u0442\u0440\u0438\u043c\u0430\u043d\u043d\u044f\u0020\u0434\u043e\u0434\u0430\u0442\u043a\u043e\u0432\u043e\u0457\u0020\u0456\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u0457\u0020\u043f\u0440\u043e\u0020\u0437\u0430\u043f\u0438\u0442\u0443\u0432\u0430\u043d\u0443\u0020\u0456\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u044e\u0020\u0434\u0438\u0432\u002e\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0061\u0063\u0063\u006f\u0075\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0023\u006f\u0072\u0063\u0069\u0064\u002d\u006c\u006f\u0067\u002d\u0069\u006e\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u004f\u0052\u0043\u0049\u0044\u0020\u004c\u006f\u0067\u0020\u0049\u006e\u0020\u002d\u0020\u0044\u0061\u0074\u0061\
u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u0020\u003d\u0020\u0022\u005f\u0020\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u003c\u002f\u0061\u003e\u002e + +\u0023\u0020\u0074\u0061\u0062\u0020\u006f\u006e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0075\u0073\u0065\u0072\u002e\u0078\u0068\u0074\u006d\u006c +apitoken.title=\u0041\u0050\u0049\u0020\u043c\u0430\u0440\u043a\u0435\u0440\u0020\u0028\u0054\u006f\u006b\u0065\u006e\u0029 +apitoken.message=\u0020\u0412\u0430\u0448\u0041\u0050\u0049\u0020\u043c\u0430\u0440\u043a\u0435\u0440\u0020\u0432\u0456\u0434\u043e\u0431\u0440\u0430\u0436\u0430\u0454\u0442\u044c\u0441\u044f\u0020\u043d\u0438\u0436\u0447\u0435\u0020\u043f\u0456\u0441\u043b\u044f\u0020\u0439\u043e\u0433\u043e\u0020\u0441\u0442\u0432\u043e\u0440\u0435\u043d\u043d\u044f\u002e\u0020\u0020\u041f\u0435\u0440\u0435\u0433\u043b\u044f\u043d\u044c\u0442\u0435\u0020\u043d\u0430\u0448\u0020\u007b\u0030\u007d\u0020\u0414\u043e\u0432\u0456\u0434\u043d\u0438\u043a\u0020\u0437\u0020\u0041\u0050\u0049\u0020\u007b\u0031\u007d\u002c\u0020\u0449\u043e\u0431\u0020\u0434\u0456\u0437\u043d\u0430\u0442\u0438\u0441\u044c\u0020\u0431\u0456\u043b\u044c\u0448\u0435\u0020\u043f\u0440\u043e\u0020\u0442\u0435\u002c\u0020\u044f\u043a\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0432\u0430\u0442\u0438\u0020\u0441\u0432\u0456\u0439\u0020\u0020\u0041\u0050\u0049\u0020\u0020\u043c\u0430\u0440\u043a\u0435\u0440\u0020\u0437\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0041\u0050\u0049\u0073\u002e\u0020 +apitoken.notFound=\u0041\u0050\u0049\u0020\u043c\u0430\u0440\u043a\u0435\u0440\u0020\u0434\u043b\u044f\u0020\u007b\u0030\u007d\u0020\u043d\u0435\u0020\u0431\u0443\u0432\u0020\u0441\u0442\u0432\u043e\u0440\u0435\u043d\u0438\u0439\u002e 
+apitoken.generateBtn=\u0421\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u043c\u0430\u0440\u043a\u0435\u0440 +apitoken.regenerateBtn=\u0020\u0412\u0456\u0434\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u043c\u0430\u0440\u043a\u0435\u0440 + +\u0023\u0064\u0061\u0073\u0068\u0062\u006f\u0061\u0072\u0064\u002e\u0078\u0068\u0074\u006d\u006c +dashboard.title=\u0020\u041f\u0430\u043d\u0435\u043b\u044c\u0020\u0456\u043d\u0441\u0442\u0440\u0443\u043c\u0435\u043d\u0442\u0456\u0432 +dashboard.card.harvestingclients.header=\u0020\u0417\u0431\u0456\u0440\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0456\u0432 +dashboard.card.harvestingclients.btn.manage=\u0020\u041a\u0435\u0440\u0443\u0432\u0430\u043d\u043d\u044f\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0430\u043c\u0438 +dashboard.card.harvestingclients.clients =\u0020\u007b\u0030\u002c\u0020\u0432\u0438\u0431\u0456\u0440\u002c\u0020\u0030\u0020\u0023\u0020\u041a\u043b\u0456\u0454\u043d\u0442\u0438\u0020\u007c\u0020\u0031\u0020\u0023\u0020\u041a\u043b\u0456\u0454\u043d\u0442\u0020\u007c\u0020\u0032\u0020\u007c\u0020\u041a\u043b\u0456\u0454\u043d\u0442\u0438\u007d +dashboard.card.harvestingclients.datasets =\u0020\u007b\u0030\u002c\u0020\u0432\u0438\u0431\u0456\u0440\u002c\u0020\u0030\u0020\u0023\u0020\u041d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u007c\u0020\u0031\u0020\u0023\u0020\u041d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u007c\u0020\u0032\u0020\u0023\u0020\u041d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u007d +dashboard.card.harvestingserver.header =\u0020\u0417\u0431\u0456\u0440\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u0456\u0432 +dashboard.card.harvestingserver.enabled =\u0020\u0432\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u0439\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u0020\u004f\u0041\u0049 +dashboard.card.harvestingserver.disabled 
=\u0020\u0421\u0435\u0440\u0432\u0435\u0440\u0020\u004f\u0041\u0049\u0020\u0432\u0438\u043c\u043a\u043d\u0435\u043d\u043e +dashboard.card.harvestingserver.status =\u0020\u0421\u0442\u0430\u0442\u0443\u0441 +\u007b\u0030\u002c\u0020\u0432\u0438\u0431\u0456\u0440\u002c\u0020\u0030\u0020\u0023\u0020\u041d\u0430\u0431\u043e\u0440\u0438\u0020\u007c\u0020\u0031\u0020\u0023\u0020\u041d\u0430\u0431\u0456\u0440\u0020\u007c\u0020\u0032\u0020\u0023\u0020\u041d\u0430\u0431\u043e\u0440\u0438\u0020\u007d +dashboard.card.harvestingserver.btn.manage =\u0020\u0423\u043f\u0440\u0430\u0432\u043b\u0456\u043d\u043d\u044f\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u043e\u043c +dashboard.card.metadataexport.header =\u0020\u0415\u043a\u0441\u043f\u043e\u0440\u0442\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u0445 +dashboard.card.metadataexport.message =\u0020\u0415\u043a\u0441\u043f\u043e\u0440\u0442\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u0445\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u043d\u0438\u0439\u0020\u043b\u0438\u0448\u0435\u0020\u0447\u0435\u0440\u0435\u0437\u0020\u0041\u0050\u0049\u0020\u007b\u0030\u007d\u002e\u0020\u0414\u0456\u0437\u043d\u0430\u0439\u0442\u0435\u0441\u044f\u0020\u0431\u0456\u043b\u044c\u0448\u0435\u0020\u0432\u0020\u007b\u0030\u007d\u0020\u007b\u0031\u007d\u0020\u0414\u043e\u0432\u0456\u0434\u043d\u0438\u043a\u0443\u0020\u0041\u0050\u0049\u0020\u007b\u0032\u007d\u002e + +\u0023\u0068\u0061\u0072\u0076\u0065\u0073\u0074\u0063\u006c\u0069\u0065\u006e\u0074\u0073\u002e\u0078\u0068\u0074\u006d\u006c +harvestclients.title=\u0020\u041a\u0435\u0440\u0443\u0432\u0430\u043d\u043d\u044f\u0020\u0437\u0431\u043e\u0440\u043e\u043c\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0456\u0432 
+harvestclients.toptip=\u0020\u002d\u0020\u041c\u043e\u0436\u043d\u0430\u0020\u0437\u0430\u043f\u043b\u0430\u043d\u0443\u0432\u0430\u0442\u0438\u0020\u043f\u0440\u043e\u0432\u0435\u0434\u0435\u043d\u043d\u044f\u0020\u0437\u0431\u043e\u0440\u0443\u0020\u043d\u0430\u0020\u043f\u0435\u0432\u043d\u0438\u0439\u0020\u0447\u0430\u0441\u0020\u0430\u0431\u043e\u0020\u0437\u0430\u0020\u0432\u0438\u043c\u043e\u0433\u043e\u044e\u002e\u0020\u0417\u0431\u0456\u0440\u0020\u043c\u043e\u0436\u043d\u0430\u0020\u0440\u043e\u0437\u043f\u043e\u0447\u0430\u0442\u0438\u0020\u0442\u0443\u0442\u0020\u0430\u0431\u043e\u0020\u0447\u0435\u0440\u0435\u0437\u0020\u0041\u0050\u0049\u0020\u0052\u0045\u0053\u0054\u002e +harvestclients.noClients.label=\u0020\u041a\u043b\u0456\u0454\u043d\u0442\u0438\u0020\u043d\u0435\u0020\u0441\u043a\u043e\u043c\u043f\u043e\u043d\u043e\u0432\u0430\u043d\u0456\u002e +harvestclients.noClients.why.header=\u0020\u0429\u043e\u0020\u0442\u0430\u043a\u0435\u0020\u0437\u0431\u0456\u0440\u003f +harvestclients.noClients.why.reason1=\u0020\u0417\u0431\u0456\u0440\u0020\u002d\u0020\u0446\u0435\u0020\u043f\u0440\u043e\u0446\u0435\u0441\u0020\u043e\u0431\u043c\u0456\u043d\u0443\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u043c\u0438\u0020\u0437\u0020\u0456\u043d\u0448\u0438\u043c\u0438\u0020\u0440\u0435\u043f\u043e\u0437\u0438\u0442\u043e\u0440\u0456\u044f\u043c\u0438\u002e\u0020\u0423\u0020\u0437\u0431\u043e\u0440\u0456\u0020\u003c\u0062\u003e\u003c\u0069\u003e\u043a\u043b\u0456\u0454\u043d\u0442\u003c\u002f\u0020\u0069\u003e\u003c\u002f\u0020\u0062\u003e\u002c\u0020\u0432\u0430\u0448\u0430\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0073\u0065\u0020\u0437\u0431\u0438\u0440\u0430\u0454\u0020\u0437\u0430\u043f\u0438\u0441\u0438\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u0445\u0020\u0437\u0020\u0432\u0456\u0434\u0434\u0430\u043b\u0435\u043d\u0438\u0445\u0020\u0434\u0436\u0435\u0440\u0435\u043b\u002e\u0020\u0426\u0435\u0020\u043c\u043e\u0436\u0443\u0442\u044c\
u0020\u0431\u0443\u0442\u0438\u0020\u0456\u043d\u0448\u0456\u0020\u0437\u0440\u0430\u0437\u043a\u0438\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0020\u0430\u0431\u043e\u0020\u0456\u043d\u0448\u0456\u0020\u0430\u0440\u0445\u0456\u0432\u0438\u002c\u0020\u044f\u043a\u0456\u0020\u043f\u0456\u0434\u0442\u0440\u0438\u043c\u0443\u044e\u0442\u044c\u0020\u004f\u0041\u0049\u002d\u0050\u004d\u0048\u0020\u002d\u0020\u0441\u0442\u0430\u043d\u0434\u0430\u0440\u0442\u043d\u0438\u0439\u0020\u043f\u0440\u043e\u0442\u043e\u043a\u043e\u043b\u0020\u0437\u0431\u043e\u0440\u0443\u002e +harvestclients.noClients.why.reason2=\u0020\u0417\u0456\u0431\u0440\u0430\u043d\u0456\u0020\u0437\u0430\u043f\u0438\u0441\u0438\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u0445\u0020\u0448\u0443\u043a\u0430\u044e\u0442\u044c\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0456\u002e\u0020\u041f\u043e\u0441\u0438\u043b\u0430\u043d\u043d\u044f\u0020\u043d\u0430\u0020\u0437\u0456\u0431\u0440\u0430\u043d\u0438\u0439\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0432\u0020\u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u0442\u0430\u0445\u0020\u043f\u043e\u0448\u0443\u043a\u0443\u0020\u043f\u0435\u0440\u0435\u043d\u043e\u0441\u0438\u0442\u044c\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0434\u043e\u0020\u043e\u0440\u0438\u0433\u0456\u043d\u0430\u043b\u044c\u043d\u043e\u0433\u043e\u0020\u0440\u0435\u043f\u043e\u0437\u0438\u0442\u043e\u0440\u0456\u044e\u002e\u0020\u0417\u0456\u0431\u0440\u0430\u043d\u0456\u0020\u043d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u043d\u0435\u0020\u043c\u043e\u0436\u043d\u0430\u0020\u0440\u0435\u0434\u0430\u0433\u0443\u0432\u0430\u0442\u0438\u0020\u0443\u0020\u0432\u0430\u0448\u0456\u0439\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0446\u0456\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e 
+harvestclients.noClients.how.header=\u0020\u042f\u043a\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0442\u0438\u0441\u044f\u0020\u0437\u0431\u043e\u0440\u043e\u043c +harvestclients.noClients.how.tip1=\u0020\u0429\u043e\u0431\u0020\u0437\u0431\u0438\u0440\u0430\u0442\u0438\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0456\u002c\u0020\u0441\u0442\u0432\u043e\u0440\u044e\u0454\u0442\u044c\u0441\u044f\u0020\u003c\u0069\u003e\u0048\u0061\u0072\u0076\u0065\u0073\u0074\u0069\u006e\u0067\u0020\u0043\u006c\u0069\u0065\u006e\u0074\u003c\u002f\u0069\u003e\u0020\u0442\u0430\u0020\u043a\u043e\u043c\u043f\u043e\u043d\u0443\u0454\u0442\u044c\u0441\u044f\u0020\u0434\u043b\u044f\u0020\u043a\u043e\u0436\u043d\u043e\u0433\u043e\u0020\u0432\u0456\u0434\u0434\u0430\u043b\u0435\u043d\u043e\u0433\u043e\u0020\u0440\u0435\u043f\u043e\u0437\u0438\u0442\u043e\u0440\u0456\u044e\u002e\u0020\u0417\u0430\u0443\u0432\u0430\u0436\u0442\u0435\u002c\u0020\u0449\u043e\u0020\u043f\u0440\u0438\u0020\u0437\u0431\u043e\u0440\u0456\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0456\u0432\u0020\u0432\u0430\u043c\u0020\u043d\u0435\u043e\u0431\u0445\u0456\u0434\u043d\u043e\u0020\u0431\u0443\u0434\u0435\u0020\u0432\u0438\u0431\u0440\u0430\u0442\u0438\u0020\u0456\u0441\u043d\u0443\u044e\u0447\u0443\u0020\u043b\u043e\u043a\u0430\u043b\u044c\u043d\u0443\u0020\u043f\u0440\u043e\u0433\u0440\u0430\u043c\u0443\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0434\u043b\u044f\u0020\u043f\u0440\u0438\u0439\u043e\u043c\u0443\u0020\u0437\u0456\u0431\u0440\u0430\u043d\u0438\u0445\u0020\u043d\u0430\u0431\u043e\u0440\u0456\u0432\u0020\u0434\u0430\u043d\u0438\u0445\u002e 
+harvestclients.noClients.how.tip2=\u0417\u0430\u0433\u043e\u0442\u043e\u0432\u043b\u0435\u043d\u0456\u0020\u0437\u0430\u043f\u0438\u0441\u0438\u0020\u043c\u043e\u0436\u043d\u0430\u0020\u0437\u0431\u0435\u0440\u0456\u0433\u0430\u0442\u0438\u0020\u0441\u0438\u043d\u0445\u0440\u043e\u043d\u043d\u043e\u0020\u0437\u0020\u043e\u0440\u0438\u0433\u0456\u043d\u0430\u043b\u044c\u043d\u0438\u043c\u0020\u0440\u0435\u043f\u043e\u0437\u0438\u0442\u043e\u0440\u0456\u0454\u043c\u0020\u0437\u0430\u0020\u0434\u043e\u043f\u043e\u043c\u043e\u0433\u043e\u044e\u0020\u0440\u0435\u0433\u0443\u043b\u044f\u0440\u043d\u0438\u0445\u0020\u0434\u043e\u0434\u0430\u0442\u043a\u043e\u0432\u0438\u0445\u0020\u043e\u043d\u043e\u0432\u043b\u0435\u043d\u044c\u002c\u0020\u043d\u0430\u043f\u0440\u0438\u043a\u043b\u0430\u0434\u002c\u0020\u0449\u043e\u0434\u043d\u044f\u0020\u0430\u0431\u043e\u0020\u0449\u043e\u0442\u0438\u0436\u043d\u044f\u002e\u0020\u0410\u043b\u044c\u0442\u0435\u0440\u043d\u0430\u0442\u0438\u0432\u043d\u043e\u002c\u0020\u0437\u0431\u0456\u0440\u0020\u043c\u043e\u0436\u043d\u0430\u0020\u043f\u0440\u043e\u0432\u043e\u0434\u0438\u0442\u0438\u0020\u0437\u0430\u0020\u0437\u0430\u043f\u0438\u0442\u043e\u043c\u002c\u0020\u0437\u0020\u0446\u0456\u0454\u0457\u0020\u0441\u0442\u043e\u0440\u0456\u043d\u043a\u0438\u0020\u0430\u0431\u043e\u0020\u0447\u0435\u0440\u0435\u0437\u0020\u0052\u0045\u0053\u0054\u0020\u0041\u0050\u0049\u002e 
+harvestclients.noClients.getStarted=\u0020\u0429\u043e\u0431\u0020\u0440\u043e\u0437\u043f\u043e\u0447\u0430\u0442\u0438\u002c\u0020\u043d\u0430\u0442\u0438\u0441\u043d\u0456\u0442\u044c\u0020\u043a\u043d\u043e\u043f\u043a\u0443\u0020\u0022\u0414\u043e\u0434\u0430\u0442\u0438\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0430\u0022\u0020\u0432\u0438\u0449\u0435\u002e\u0020\u0429\u043e\u0431\u0020\u0434\u0456\u0437\u043d\u0430\u0442\u0438\u0441\u044c\u0020\u0431\u0456\u043b\u044c\u0448\u0435\u0020\u043f\u0440\u043e\u0020\u0437\u0431\u0456\u0440\u002c\u0020\u0432\u0456\u0434\u0432\u0456\u0434\u0430\u0439\u0442\u0435\u0020\u0440\u043e\u0437\u0434\u0456\u043b\u0020\u0414\u043e\u0432\u0456\u0434\u043d\u0438\u043a\u0430\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0069\u006e\u0064\u0065\u0078\u002e\u0068\u0074\u006d\u006c\u0023\u0069\u006e\u0064\u0065\u0078\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0048\u0061\u0072\u0076\u0065\u0073\u0074\u0069\u006e\u0067\u0020\u002d\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0048\u0061\u0072\u0076\u0065\u0073\u0074\u0069\u006e\u0067\u003c\u002f\u0061\u003e\u0020\u0073\u0065\u0063\u0074\u0069\u006f\u006e +harvestclients.btn.add=\u0020\u0414\u043e\u0434\u0430\u0442\u0438\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0430 +harvestclients.tab.header.name=\u041f\u0441\u0435\u0432\u0434\u043e\u043d\u0456\u043c +harvestclients.tab.header.url=\u0055\u0052\u004c +harvestclients.tab.header.lastrun=\u0020\u041e\u0441\u0442\u0430\u043d\u043d\u0456\u0439\u0020\u0437\u0430\u043f\u0443\u0441\u043a +harvestclients.tab.header.lastresults 
=\u0020\u043e\u0441\u0442\u0430\u043d\u043d\u0456\u0020\u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u0442\u0438 +harvestclients.tab.header.action =\u0020\u0414\u0456\u0457 +harvestclients.tab.header.action.btn.run=\u0412\u0438\u043a\u043e\u043d\u0430\u0442\u0438\u0020\u0437\u0431\u0456\u0440 +harvestclients.tab.header.action.btn.edit=\u0020\u0420\u0435\u0434\u0430\u0433\u0443\u0432\u0430\u0442\u0438 +harvestclients.tab.header.action.btn.delete=\u0020\u0412\u0438\u0434\u0430\u043b\u0438\u0442\u0438 +harvestclients.tab.header.action.btn.delete.dialog.header=\u0020\u0412\u0438\u0434\u0430\u043b\u0438\u0442\u0438\u0020\u0437\u0431\u0456\u0440\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0456\u0432 +harvestclients.tab.header.action.btn.delete.dialog.warning=\u0020\u0412\u0438\u0020\u0432\u043f\u0435\u0432\u043d\u0435\u043d\u0456\u002c\u0020\u0449\u043e\u0020\u0445\u043e\u0447\u0435\u0442\u0435\u0020\u0432\u0438\u0434\u0430\u043b\u0438\u0442\u0438\u0020\u0437\u0431\u0456\u0440\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0456\u0432\u0020\u0022\u007b\u0030\u007d\u0022\u003f\u0020\u0412\u0438\u0434\u0430\u043b\u0435\u043d\u043d\u044f\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0430\u0020\u0432\u0438\u0434\u0430\u043b\u044f\u0454\u0020\u0432\u0441\u0456\u0020\u043d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0437\u0456\u0431\u0440\u0430\u043d\u0456\u0020\u0437\u0020\u0446\u044c\u043e\u0433\u043e\u0020\u0432\u0456\u0434\u0434\u0430\u043b\u0435\u043d\u043e\u0433\u043e\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u0430\u002e 
+harvestclients.tab.header.action.btn.delete.dialog.tip=\u0020\u0417\u0430\u0443\u0432\u0430\u0436\u0442\u0435\u002c\u0020\u0449\u043e\u0020\u0446\u044f\u0020\u043e\u043f\u0435\u0440\u0430\u0446\u0456\u044f\u0020\u043c\u043e\u0436\u0435\u0020\u0437\u0430\u0439\u043d\u044f\u0442\u0438\u0020\u0434\u0435\u044f\u043a\u0438\u0439\u0020\u0447\u0430\u0441\u0020\u0434\u043b\u044f\u0020\u043e\u0431\u0440\u043e\u0431\u043a\u0438\u002c\u0020\u0437\u0430\u043b\u0435\u0436\u043d\u043e\u0020\u0432\u0456\u0434\u0020\u043a\u0456\u043b\u044c\u043a\u043e\u0441\u0442\u0456\u0020\u0437\u0456\u0431\u0440\u0430\u043d\u0438\u0445\u0020\u043d\u0430\u0431\u043e\u0440\u0456\u0432\u0020\u0434\u0430\u043d\u0438\u0445\u002e +harvestclients.tab.header.action.delete.infomessage=\u0020\u0417\u0431\u0456\u0440\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0430\u0020\u0432\u0438\u0434\u0430\u043b\u044f\u0454\u0442\u044c\u0441\u044f\u002e\u0020\u0417\u0430\u0443\u0432\u0430\u0436\u0442\u0435\u002c\u0020\u0449\u043e\u0020\u0446\u0435\u0020\u043c\u043e\u0436\u0435\u0020\u0437\u0430\u0439\u043d\u044f\u0442\u0438\u0020\u0434\u0435\u044f\u043a\u0438\u0439\u0020\u0447\u0430\u0441\u002c\u0020\u0437\u0430\u043b\u0435\u0436\u043d\u043e\u0020\u0432\u0456\u0434\u0020\u043a\u0456\u043b\u044c\u043a\u043e\u0441\u0442\u0456\u0020\u0437\u0430\u0432\u0430\u043d\u0442\u0430\u0436\u0435\u043d\u043e\u0433\u043e\u0020\u043a\u043e\u043d\u0442\u0435\u043d\u0442\u0443\u002e 
+harvestclients.actions.runharvest.success=\u0020\u0423\u0441\u043f\u0456\u0448\u043d\u043e\u0020\u0440\u043e\u0437\u043f\u043e\u0447\u0430\u0442\u043e\u0020\u0430\u0441\u0438\u043d\u0445\u0440\u043e\u043d\u043d\u0438\u0439\u0020\u0437\u0431\u0456\u0440\u0020\u0434\u043b\u044f\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0430\u0020\u0022\u007b\u0030\u007d\u0022\u002e\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u043f\u0435\u0440\u0435\u0437\u0430\u0432\u0430\u043d\u0442\u0430\u0436\u0442\u0435\u0020\u0441\u0442\u043e\u0440\u0456\u043d\u043a\u0443\u002c\u0020\u0449\u043e\u0431\u0020\u043f\u0435\u0440\u0435\u0432\u0456\u0440\u0438\u0442\u0438\u0020\u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u0442\u0438\u0020\u0437\u0431\u043e\u0440\u0443\u0029\u002e +harvestclients.newClientDialog.step1=\u0020\u041a\u0440\u043e\u043a\u0020\u0031\u0020\u0437\u0020\u0034\u0020\u002d\u0020\u0406\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u044f\u0020\u043f\u0440\u043e\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0430 +harvestclients.newClientDialog.title.new=\u0020\u0421\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0430\u002c\u0020\u0449\u043e\u0020\u0437\u0431\u0438\u0440\u0430\u0454 +harvestclients.newClientDialog.help=\u0020\u041d\u0430\u043b\u0430\u0448\u0442\u0443\u0432\u0430\u0442\u0438\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0430\u0020\u043d\u0430\u0020\u0437\u0431\u0456\u0440\u0020\u043a\u043e\u043d\u0442\u0435\u043d\u0442\u0443\u0020\u0437\u0020\u0432\u0456\u0434\u0434\u0430\u043b\u0435\u043d\u043e\u0433\u043e\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u0430\u002e +harvestclients.newClientDialog.nickname=\u0020\u041f\u0441\u0435\u0432\u0434\u043e\u043d\u0456\u043c 
+harvestclients.newClientDialog.nickname.helptext=\u0020\u0441\u043a\u043b\u0430\u0434\u0430\u0454\u0442\u044c\u0441\u044f\u0020\u0437\u0020\u0431\u0443\u043a\u0432\u002c\u0020\u0446\u0438\u0444\u0440\u002c\u0020\u043f\u0456\u0434\u043a\u0440\u0435\u0441\u043b\u0435\u043d\u043d\u044f\u0020\u0028\u005f\u0029\u0020\u0456\u0020\u0440\u0438\u0441\u043e\u043a\u0020\u0028\u002d\u0029\u002e +harvestclients.newClientDialog.nickname.required=\u0020\u041f\u043e\u043b\u0435\u0020\u0022\u043f\u0441\u0435\u0432\u0434\u043e\u043d\u0456\u043c\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0430\u0022\u0020\u043d\u0435\u0020\u043c\u043e\u0436\u0435\u0020\u0431\u0443\u0442\u0438\u0020\u043f\u043e\u0440\u043e\u0436\u043d\u0456\u043c\u0021 +harvestclients.newClientDialog.nickname.invalid=\u0020\u041f\u0441\u0435\u0432\u0434\u043e\u043d\u0456\u043c\u0020\u043c\u043e\u0436\u0435\u0020\u043c\u0456\u0441\u0442\u0438\u0442\u0438\u0020\u043b\u0438\u0448\u0435\u0020\u043b\u0456\u0442\u0435\u0440\u0438\u002c\u0020\u0446\u0438\u0444\u0440\u0438\u002c\u0020\u043f\u0456\u0434\u043a\u0440\u0435\u0441\u043b\u0435\u043d\u043d\u044f\u0020\u0028\u005f\u0029\u0020\u0456\u0020\u0440\u0438\u0441\u043a\u0438\u0020\u0028\u002d\u0029\u003b\u0020\u0456\u0020\u043d\u0435\u0020\u043f\u043e\u0432\u0438\u043d\u0435\u043d\u0020\u043c\u0430\u0442\u0438\u0020\u0431\u0456\u043b\u044c\u0448\u0435\u0020\u0033\u0030\u0020\u0441\u0438\u043c\u0432\u043e\u043b\u0456\u0432\u002e +harvestclients.newClientDialog.nickname.alreadyused=\u0020\u0426\u0435\u0439\u0020\u043f\u0441\u0435\u0432\u0434\u043e\u043d\u0456\u043c\u0020\u0432\u0436\u0435\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0454\u0442\u044c\u0441\u044f\u002e +harvestclients.newClientDialog.type =\u0020\u041f\u0440\u043e\u0442\u043e\u043a\u043e\u043b\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u043d\u0438\u0439\u0020 +harvestclients.newClientDialog.type.helptext 
=\u0020\u041d\u0430\u0440\u0430\u0437\u0456\u0020\u043f\u0456\u0434\u0442\u0440\u0438\u043c\u0443\u0454\u0442\u044c\u0441\u044f\u0020\u043b\u0438\u0448\u0435\u0020\u043f\u0440\u043e\u0442\u043e\u043a\u043e\u043b\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u0430\u0020\u004f\u0041\u0049\u002e +harvestclients.newClientDialog.type.OAI =\u0020\u004f\u0041\u0049 +harvestclients.newClientDialog.type.Nesstar =\u0020\u041d\u0435\u0441\u0441\u0442\u0430\u0440 +harvestclients.newClientDialog.url =\u0020\u0055\u0052\u004c\u002d\u0430\u0434\u0440\u0435\u0441\u0430\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u0430 +harvestclients.newClientDialog.url.tip =\u0020\u0055\u0052\u004c\u0020\u0440\u0435\u0441\u0443\u0440\u0441\u0443\u0020\u0437\u0431\u043e\u0440\u0443\u002e +harvestclients.newClientDialog.url.watermark=\u0020\u0412\u0456\u0434\u0434\u0430\u043b\u0435\u043d\u0438\u0439\u0020\u0445\u043e\u0441\u0442\u002d\u0441\u0435\u0440\u0432\u0435\u0440\u002c\u0020\u0068\u0074\u0074\u0070\u003a\u002f\u002f\u002e\u002e\u002e 
+harvestclients.newClientDialog.url.helptext.notvalidated=\u0055\u0052\u004c\u0020\u0440\u0435\u0441\u0443\u0440\u0441\u0443\u0020\u0437\u0431\u043e\u0440\u0443\u002e\u0020\u041f\u0456\u0441\u043b\u044f\u0020\u043d\u0430\u0442\u0438\u0441\u043a\u0430\u043d\u043d\u044f\u0020\u043a\u043d\u043e\u043f\u043a\u0438\u0020\u0022\u0414\u0430\u043b\u0456\u0022\u0020\u043c\u0438\u0020\u0441\u043f\u0440\u043e\u0431\u0443\u0454\u043c\u043e\u0020\u0432\u0441\u0442\u0430\u043d\u043e\u0432\u0438\u0442\u0438\u0020\u0437\u0027\u0454\u0434\u043d\u0430\u043d\u043d\u044f\u0020\u0437\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u043e\u043c\u002c\u0020\u0449\u043e\u0431\u0020\u043f\u0435\u0440\u0435\u0432\u0456\u0440\u0438\u0442\u0438\u002c\u0020\u0449\u043e\u0020\u0432\u0456\u043d\u0020\u043f\u0440\u0430\u0446\u044e\u0454\u002c\u0020\u0456\u0020\u043e\u0442\u0440\u0438\u043c\u0430\u0442\u0438\u0020\u0434\u043e\u0434\u0430\u0442\u043a\u043e\u0432\u0443\u0020\u0456\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u044e\u0020\u043f\u0440\u043e\u0020\u0439\u043e\u0433\u043e\u0020\u043c\u043e\u0436\u043b\u0438\u0432\u043e\u0441\u0442\u0456\u002e\u0020 +harvestclients.newClientDialog.url.required=\u0020\u0412\u0438\u043c\u0430\u0433\u0430\u0454\u0442\u044c\u0441\u044f\u0020\u0434\u0456\u0439\u0441\u043d\u0430\u0020\u0430\u0434\u0440\u0435\u0441\u0430\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u0430\u0020\u0434\u043b\u044f\u0020\u0437\u0431\u043e\u0440\u0443\u002e 
+harvestclients.newClientDialog.url.invalid=\u0020\u041d\u0435\u0434\u0456\u0439\u0441\u043d\u0430\u0020\u0055\u0052\u004c\u002d\u0430\u0434\u0440\u0435\u0441\u0430\u002e\u0020\u041d\u0435\u0020\u0432\u0434\u0430\u043b\u043e\u0441\u044f\u0020\u0432\u0441\u0442\u0430\u043d\u043e\u0432\u0438\u0442\u0438\u0020\u0437\u0027\u0454\u0434\u043d\u0430\u043d\u043d\u044f\u0020\u0442\u0430\u0020\u043e\u0442\u0440\u0438\u043c\u0430\u0442\u0438\u0020\u0434\u0456\u0439\u0441\u043d\u0443\u0020\u0432\u0456\u0434\u043f\u043e\u0432\u0456\u0434\u044c\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u0430\u002e\u0020\u0068\u0061\u0072\u0076\u0065\u0073\u0074\u0063\u006c\u0069\u0065\u006e\u0074\u0073\u002e\u006e\u0065\u0077\u0043\u006c\u0069\u0065\u006e\u0074\u0044\u0069\u0061\u006c\u006f\u0067\u002e\u0075\u0072\u006c\u002e\u006e\u006f\u0072\u0065\u0073\u0070\u006f\u006e\u0073\u0065\u003d\u0020\u041d\u0435\u0020\u0432\u0434\u0430\u0454\u0442\u044c\u0441\u044f\u0020\u0432\u0441\u0442\u0430\u043d\u043e\u0432\u0438\u0442\u0438\u0020\u0437\u0027\u0454\u0434\u043d\u0430\u043d\u043d\u044f\u0020\u0437\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u043e\u043c\u002e +harvestclients.newClientDialog.url.badresponse=\u0020\u041d\u0435\u0434\u0456\u0439\u0441\u043d\u0430\u0020\u0432\u0456\u0434\u043f\u043e\u0432\u0456\u0434\u044c\u0020\u0437\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u0430\u002e +harvestclients.newClientDialog.dataverse=\u041b\u043e\u043a\u0430\u043b\u044c\u043d\u0430\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 
+harvestclients.newClientDialog.dataverse.tip=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u0449\u043e\u0020\u043f\u0440\u0438\u0439\u043c\u0430\u0442\u0438\u043c\u0435\u0020\u043d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445\u002c\u0020\u0437\u0456\u0431\u0440\u0430\u043d\u0456\u0020\u0437\u0020\u0446\u044c\u043e\u0433\u043e\u0020\u0432\u0456\u0434\u0434\u0430\u043b\u0435\u043d\u043e\u0433\u043e\u0020\u0440\u0435\u0441\u0443\u0440\u0441\u0443\u002e +harvestclients.newClientDialog.dataverse.menu.enterName=\u0020\u0412\u0432\u0435\u0434\u0456\u0442\u044c\u0020\u0456\u043c\u0027\u044f\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0020 +harvestclients.newClientDialog.dataverse.menu.header=\u0020\u041d\u0430\u0437\u0432\u0430\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0028\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0430\u0029\u002c\u0020\u0456\u043c\u0027\u044f\u002e\u0020\u0068\u0061\u0072\u0076\u0065\u0073\u0074\u0063\u006c\u0069\u0065\u006e\u0074\u0073\u002e\u006e\u0065\u0077\u0043\u006c\u0069\u0065\u006e\u0074\u0044\u0069\u0061\u006c\u006f\u0067\u002e\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u006d\u0065\u006e\u0075\u002e\u0069\u006e\u0076\u0061\u006c\u0069\u0064\u004d\u0073\u0067\u003d\u0020\u0416\u043e\u0434\u043d\u0438\u0445\u0020\u0441\u043f\u0456\u0432\u043f\u0430\u0434\u0456\u043d\u044c\u0020\u043d\u0435\u0020\u0437\u043d\u0430\u0439\u0434\u0435\u043d\u043e\u002e +harvestclients.newClientDialog.dataverse.required=\u0020\u0412\u0438\u0020\u043f\u043e\u0432\u0438\u043d\u043d\u0456\u0020\u0432\u0438\u0431\u0440\u0430\u0442\u0438\u0020\u0456\u0441\u043d\u0443\u044e\u0447\u0443\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0435\u0020\u0434\u043b\u044f\u0020\u0446\u044c\u043e\u0433\u043e\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0430\u002e +harvestclients.newClientDialog.step2 
=\u0020\u041a\u0440\u043e\u043a\u0020\u0032\u0020\u0437\u0020\u0034\u0020\u002d\u0020\u0424\u043e\u0440\u043c\u0430\u0442 +harvestclients.newClientDialog.oaiSets =\u0020\u041d\u0430\u0431\u0456\u0440\u0020\u004f\u0041\u0049 +harvestclients.newClientDialog.oaiSets.tip=\u0020\u0417\u0431\u0438\u0440\u0430\u043b\u044c\u043d\u0456\u0020\u043d\u0430\u0431\u043e\u0440\u0438\u002c\u0020\u0437\u0430\u043f\u0440\u043e\u043f\u043e\u043d\u043e\u0432\u0430\u043d\u0456\u0020\u0446\u0438\u043c\u0020\u004f\u0041\u0049\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u043e\u043c\u002e +harvestclients.newClientDialog.oaiSets.noset=\u041d\u0435\u043c\u0430\u0454\u002e +harvestclients.newClientDialog.oaiSets.helptext=\u0020\u0412\u0438\u0431\u0456\u0440\u0020\u043e\u043f\u0446\u0456\u0457\u0020\u0022\u043d\u0435\u043c\u0430\u0454\u0022\u043f\u0440\u0438\u0432\u0435\u0434\u0435\u0020\u0434\u043e\u0020\u0437\u0431\u043e\u0440\u0443\u0020\u0441\u0442\u0430\u043d\u0434\u0430\u0440\u0442\u043d\u043e\u0433\u043e\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u002c\u0020\u0432\u0438\u0437\u043d\u0430\u0447\u0435\u043d\u043e\u0433\u043e\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u043e\u043c\u002e\u0020\u0427\u0430\u0441\u0442\u043e\u0020\u0446\u0435\u0020\u0431\u0443\u0434\u0435\u0020\u0432\u0435\u0441\u044c\u0020\u043a\u043e\u043d\u0442\u0435\u043d\u0442\u0020\u043f\u043e\u0020\u0432\u0441\u0456\u0445\u0020\u0441\u0443\u0431\u043d\u0430\u0431\u043e\u0440\u0430\u0445\u002e 
+harvestclients.newClientDialog.oaiSets.helptext.noset=\u0020\u0426\u0435\u0439\u0020\u004f\u0041\u0049\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u0020\u043d\u0435\u0020\u043f\u0456\u0434\u0442\u0440\u0438\u043c\u0443\u0454\u0020\u043d\u0430\u0437\u0432\u0430\u043d\u0456\u0020\u043d\u0430\u0431\u043e\u0440\u0438\u002e\u0020\u0411\u0443\u0434\u0435\u0020\u0437\u0456\u0431\u0440\u0430\u043d\u0438\u0439\u0020\u0432\u0435\u0441\u044c\u0020\u043a\u043e\u043d\u0442\u0435\u043d\u0442\u002c\u0020\u0437\u0430\u043f\u0440\u043e\u043f\u043e\u043d\u043e\u0432\u0430\u043d\u0438\u0439\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u043e\u043c\u002e +harvestclients.newClientDialog.oaiMetadataFormat =\u0020\u0424\u043e\u0440\u043c\u0430\u0442\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u0445 +harvestclients.newClientDialog.oaiMetadataFormat.tip =\u0020\u0424\u043e\u0440\u043c\u0430\u0442\u0438\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u0445\u002c\u0020\u0449\u043e\u0020\u043f\u0440\u043e\u043f\u043e\u043d\u0443\u044e\u0442\u044c\u0441\u044f\u0020\u0432\u0456\u0434\u0434\u0430\u043b\u0435\u043d\u0438\u043c\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u043e\u043c\u002e +harvestclients.newClientDialog.oaiMetadataFormat.required=\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u0432\u0438\u0431\u0435\u0440\u0456\u0442\u044c\u0020\u0444\u043e\u0440\u043c\u0430\u0442\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u0445\u0020\u0434\u043b\u044f\u0020\u0437\u0431\u043e\u0440\u0443\u0020\u0437\u0020\u0446\u044c\u043e\u0433\u043e\u0020\u0430\u0440\u0445\u0456\u0432\u0443\u002e +harvestclients.newClientDialog.step3=\u0020\u041a\u0440\u043e\u043a\u0020\u0033\u0020\u0437\u0020\u0034\u0020\u002d\u0020\u0413\u0440\u0430\u0444\u0456\u043a\u002e +harvestclients.newClientDialog.schedule=\u0020\u0413\u0440\u0430\u0444\u0456\u043a\u002e 
+harvestclients.newClientDialog.schedule.tip=\u0020\u041d\u0430\u043b\u0430\u0448\u0442\u0443\u0439\u0442\u0435\u0020\u0449\u043e\u0434\u0435\u043d\u043d\u0438\u0439\u0020\u0430\u0431\u043e\u0020\u0449\u043e\u0442\u0438\u0436\u043d\u0435\u0432\u0438\u0439\u0020\u0430\u0432\u0442\u043e\u043c\u0430\u0442\u0438\u0447\u043d\u0438\u0439\u0020\u0437\u0430\u043f\u0443\u0441\u043a\u0020\u0437\u0431\u043e\u0440\u0443\u002e +harvestclients.newClientDialog.schedule.time.none.helptext=\u0020\u0417\u0430\u043b\u0438\u0448\u0442\u0435\u0020\u0437\u0431\u0456\u0440\u0020\u043d\u0435\u0437\u0430\u043f\u043b\u0430\u043d\u043e\u0432\u0430\u043d\u0438\u043c\u0020\u0434\u043b\u044f\u0020\u0437\u0430\u043f\u0443\u0441\u043a\u0443\u0020\u043b\u0438\u0448\u0435\u0020\u043d\u0430\u0020\u0432\u0438\u043c\u043e\u0433\u0443\u002e +harvestclients.newClientDialog.schedule.none =\u0020\u041d\u0435\u043c\u0430\u0454 +harvestclients.newClientDialog.schedule.daily =\u0020\u0429\u043e\u0434\u043d\u044f +harvestclients.newClientDialog.schedule.weekly=\u0429\u043e\u0442\u0438\u0436\u043d\u044f +harvestclients.newClientDialog.schedule.time=\u0427\u0430\u0441 +harvestclients.newClientDialog.schedule.day=\u0414\u0435\u043d\u044c +harvestclients.newClientDialog.schedule.time.am=\u0414\u043e\u0020\u043f\u043e\u043b\u0443\u0434\u043d\u044f +harvestclients.newClientDialog.schedule.time.pm=\u041f\u0456\u0441\u043b\u044f\u0020\u043f\u043e\u043b\u0443\u0434\u043d\u044f +harvestclients.newClientDialog.schedule.time.helptext=\u0020\u0427\u0430\u0441\u0020\u043f\u043b\u0430\u043d\u0443\u0454\u0442\u044c\u0441\u044f\u0020\u0437\u0430\u0020\u0432\u0430\u0448\u0438\u043c\u0020\u043c\u0456\u0441\u0446\u0435\u0432\u0438\u043c\u0020\u0447\u0430\u0441\u043e\u043c\u002e +harvestclients.newClientDialog.btn.create=\u0020\u0421\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0430 
+harvestclients.newClientDialog.success=\u0020\u0423\u0441\u043f\u0456\u0448\u043d\u043e\u0020\u0441\u0442\u0432\u043e\u0440\u0435\u043d\u0438\u0439\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0020\u0434\u043b\u044f\u0020\u0437\u0431\u043e\u0440\u0443\u0020\u0022\u007b\u0030\u007d\u0022\u002e +harvestclients.newClientDialog.step4=\u0020\u041a\u0440\u043e\u043a\u0020\u0034\u0020\u0437\u0020\u0034\u0020\u002d\u0020\u0412\u0456\u0434\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u043d\u044f +harvestclients.newClientDialog.harvestingStyle=\u0020\u0422\u0438\u043f\u0020\u0430\u0440\u0445\u0456\u0432\u0443\u002e +harvestclients.newClientDialog.harvestingStyle.tip=\u0020\u0422\u0438\u043f\u0020\u0432\u0456\u0434\u0434\u0430\u043b\u0435\u043d\u043e\u0433\u043e\u0020\u0430\u0440\u0445\u0456\u0432\u0443\u002e +harvestclients.newClientDialog.harvestingStyle.helptext=\u0412\u0438\u0431\u0435\u0440\u0456\u0442\u044c\u0020\u0442\u0438\u043f\u0020\u0430\u0440\u0445\u0456\u0432\u0443\u002c\u0020\u044f\u043a\u0438\u0439\u0020\u043d\u0430\u0439\u043a\u0440\u0430\u0449\u0435\u0020\u043e\u043f\u0438\u0441\u0443\u0454\u0020\u0446\u0435\u0439\u0020\u0432\u0456\u0434\u0434\u0430\u043b\u0435\u043d\u0438\u0439\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u002c\u0020\u0449\u043e\u0431\u0020\u043f\u0440\u0430\u0432\u0438\u043b\u044c\u043d\u043e\u0020\u0437\u0430\u0441\u0442\u043e\u0441\u043e\u0432\u0443\u0432\u0430\u0442\u0438\u0020\u043f\u0440\u0430\u0432\u0438\u043b\u0430\u0020\u0442\u0430\u0020\u0441\u0442\u0438\u043b\u0456\u0020\u0444\u043e\u0440\u043c\u0430\u0442\u0443\u0432\u0430\u043d\u043d\u044f\u0020\u0434\u043b\u044f\u0020\u0437\u0456\u0431\u0440\u0430\u043d\u0438\u0445\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u0445\u0020\u043f\u043e\u0434\u0456\u0431\u043d\u043e\u0020\u0442\u043e\u043c\u0443\u0020\u044f\u043a\u0020\u0432\u043e\u043d\u0438\u0020\u0432\u0456\u0434\u043e\u0431\u0440\u0430\u0436\u0430\u044e\u0442\u044c\u0441\u044f\u0020\u0432\u0020\u0440\u0435\u0437\u0443\u043b\u0
44c\u0442\u0430\u0442\u0430\u0445\u0020\u043f\u043e\u0448\u0443\u043a\u0443\u002e\u0020\u0417\u0430\u0443\u0432\u0430\u0436\u0442\u0435\u002c\u0020\u0449\u043e\u0020\u043d\u0435\u043f\u0440\u0430\u0432\u0438\u043b\u044c\u043d\u0438\u0439\u0020\u0432\u0438\u0431\u0456\u0440\u0020\u0442\u0438\u043f\u0443\u0020\u0432\u0456\u0434\u0434\u0430\u043b\u0435\u043d\u043e\u0433\u043e\u0020\u0430\u0440\u0445\u0456\u0432\u0443\u0020\u043c\u043e\u0436\u0435\u0020\u043f\u0440\u0438\u0437\u0432\u0435\u0441\u0442\u0438\u0020\u0434\u043e\u0020\u043d\u0435\u043f\u043e\u0432\u043d\u0438\u0445\u0020\u0437\u0430\u043f\u0438\u0441\u0456\u0432\u0020\u0443\u0020\u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u0442\u0430\u0445\u0020\u043f\u043e\u0448\u0443\u043a\u0443\u0020\u0456\u0020\u043d\u0435\u043c\u043e\u0436\u043b\u0438\u0432\u043e\u0441\u0442\u0456\u0020\u043f\u0435\u0440\u0435\u043d\u0430\u043f\u0440\u0430\u0432\u043b\u0435\u043d\u043d\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0434\u043e\u0020\u0430\u0440\u0445\u0456\u0432\u043d\u043e\u0433\u043e\u0020\u0434\u0436\u0435\u0440\u0435\u043b\u0430\u0020\u0434\u0430\u043d\u0438\u0445\u002e +harvestclients.viewEditDialog.title=\u0020\u0420\u0435\u0434\u0430\u0433\u0443\u0432\u0430\u0442\u0438\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0430\u0020\u0437\u0431\u043e\u0440\u0443\u002e\u0020 +harvestclients.viewEditDialog.archiveUrl=\u0020\u0055\u0052\u004c\u0020\u0430\u0434\u0440\u0435\u0441\u0430\u0020\u0430\u0440\u0445\u0456\u0432\u0443\u002e 
+harvestclients.viewEditDialog.archiveUrl.tip=\u0020\u0055\u0052\u004c\u0020\u0430\u0434\u0440\u0435\u0441\u0430\u0020\u0430\u0440\u0445\u0456\u0432\u0443\u002c\u0020\u0449\u043e\u0020\u043e\u0431\u0441\u043b\u0443\u0433\u043e\u0432\u0443\u0454\u0020\u0437\u0456\u0431\u0440\u0430\u043d\u0456\u0020\u0446\u0438\u043c\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u043e\u043c\u0020\u0434\u0430\u043d\u0456\u002c\u0020\u044f\u043a\u0430\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0454\u0442\u044c\u0441\u044f\u0020\u0432\u0020\u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u0442\u0430\u0445\u0020\u043f\u043e\u0448\u0443\u043a\u0443\u0020\u0434\u043b\u044f\u0020\u043f\u043e\u0441\u0438\u043b\u0430\u043d\u044c\u0020\u043d\u0430\u0020\u043e\u0440\u0438\u0433\u0456\u043d\u0430\u043b\u044c\u043d\u0456 +\u0434\u0436\u0435\u0440\u0435\u043b\u0430\u0020\u0437\u0456\u0431\u0440\u0430\u043d\u043e\u0433\u043e\u0020\u043a\u043e\u043d\u0442\u0435\u043d\u0442\u0443\u002e\u0020 +harvestclients.viewEditDialog.archiveUrl.helptext=\u0020\u0420\u0435\u0434\u0430\u0433\u0443\u0432\u0430\u0442\u0438\u002c\u0020\u044f\u043a\u0449\u043e\u0020\u0446\u044f\u0020\u0055\u0052\u004c\u002d\u0430\u0434\u0440\u0435\u0441\u0430\u0020\u0432\u0456\u0434\u0440\u0456\u0437\u043d\u044f\u0454\u0442\u044c\u0441\u044f\u0020\u0432\u0456\u0434\u0020\u0055\u0052\u004c\u002d\u0430\u0434\u0440\u0435\u0441\u0438\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u0430\u002e +harvestclients.viewEditDialog.archiveDescription=\u041e\u043f\u0438\u0441\u0020\u0430\u0440\u0445\u0456\u0432\u0443\u002e 
+harvestclients.viewEditDialog.archiveDescription.tip=\u0020\u041e\u043f\u0438\u0441\u0020\u0430\u0440\u0445\u0456\u0432\u043d\u043e\u0433\u043e\u0020\u0434\u0436\u0435\u0440\u0435\u043b\u0430\u0020\u0437\u0456\u0431\u0440\u0430\u043d\u043e\u0433\u043e\u0020\u043a\u043e\u043d\u0442\u0435\u043d\u0442\u0443\u002c\u0020\u0432\u0456\u0434\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u043e\u0433\u043e\u0020\u0432\u0020\u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u0442\u0430\u0445\u0020\u043f\u043e\u0448\u0443\u043a\u0443\u002e +harvestclients.viewEditDialog.archiveDescription.default.generic=\u0020\u0426\u0435\u0439\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0437\u0431\u0438\u0440\u0430\u0454\u0442\u044c\u0441\u044f\u0020\u0432\u0456\u0434\u0020\u043d\u0430\u0448\u0438\u0445\u0020\u043f\u0430\u0440\u0442\u043d\u0435\u0440\u0456\u0432\u002e\u0020\u041d\u0430\u0442\u0438\u0441\u043d\u0443\u0432\u0448\u0438\u0020\u043d\u0430\u0020\u043f\u043e\u0441\u0438\u043b\u0430\u043d\u043d\u044f\u002c\u0020\u0432\u0438\u0020\u043f\u0435\u0440\u0435\u0439\u0434\u0435\u0442\u0435\u0020\u0431\u0435\u0437\u043f\u043e\u0441\u0435\u0440\u0435\u0434\u043d\u044c\u043e\u0020\u0434\u043e\u0020\u0430\u0440\u0445\u0456\u0432\u043d\u043e\u0433\u043e\u0020\u0434\u0436\u0435\u0440\u0435\u043b\u0430\u0020\u0434\u0430\u043d\u0438\u0445\u002e +harvestclients.viewEditDialog.btn.save=\u0020\u0417\u0431\u0435\u0440\u0435\u0433\u0442\u0438\u0020\u0437\u043c\u0456\u043d\u0438 +harvestclients.newClientDialog.title.edit=\u0420\u0435\u0434\u0430\u0433\u0443\u0432\u0430\u0442\u0438\u0020\u0433\u0440\u0443\u043f\u0443\u0020\u007b\u0030\u007d + +\u0023\u0068\u0061\u0072\u0076\u0065\u0073\u0074\u0073\u0065\u0074\u002e\u0078\u0068\u0074\u006d\u006c +harvestserver.title=\u0020\u0423\u043f\u0440\u0430\u0432\u043b\u0456\u043d\u043d\u044f\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u043e\u043c\u0020\u0437\u0431\u043e\u0440\u0443\u002e 
+harvestserver.toptip=\u0020\u002d\u0020\u0412\u0438\u0437\u043d\u0430\u0447\u0438\u0442\u0438\u0020\u0433\u0440\u0443\u043f\u0438\u0020\u043b\u043e\u043a\u0430\u043b\u044c\u043d\u0438\u0445\u0020\u043d\u0430\u0431\u043e\u0440\u0456\u0432\u0020\u0434\u0430\u043d\u0438\u0445\u002c\u0020\u044f\u043a\u0456\u0020\u0431\u0443\u0434\u0443\u0442\u044c\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u043d\u0456\u0020\u0434\u043b\u044f\u0020\u0437\u0431\u043e\u0440\u0443\u0020\u0432\u0456\u0434\u0434\u0430\u043b\u0435\u043d\u0438\u043c\u0438\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0430\u043c\u0438\u002e +harvestserver.service.label=\u004f\u0041\u0049\u0020\u0441\u0435\u0440\u0432\u0435\u0440 +harvestserver.service.enabled=\u0020\u041f\u0456\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u043e\u002e +harvestserver.service.disabled=\u0020\u0412\u0456\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u043e\u002e +harvestserver.service.disabled.msg=\u0020\u0421\u0435\u0440\u0432\u0435\u0440\u0020\u0437\u0431\u043e\u0440\u0443\u0020\u0020\u043d\u0430\u0440\u0430\u0437\u0456\u0020\u0432\u0456\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u0439\u002e +harvestserver.service.empty=\u0020\u041d\u0430\u0431\u043e\u0440\u0438\u0020\u043d\u0435\u0020\u0441\u043a\u043e\u043c\u043f\u043e\u043d\u043e\u0432\u0430\u043d\u0456\u002e +harvestserver.service.enable.success=\u0020\u0421\u043b\u0443\u0436\u0431\u0430\u0020\u004f\u0041\u0049\u0020\u0443\u0441\u043f\u0456\u0448\u043d\u043e\u0020\u043f\u0456\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0430\u002e +harvestserver.noSets.why.header=\u0020\u0429\u043e\u0020\u0442\u0430\u043a\u0435\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u0020\u0437\u0431\u043e\u0440\u0443\u003f 
+harvestserver.noSets.why.reason1=\u0020\u0417\u0431\u0456\u0440\u0020\u002d\u0020\u0446\u0435\u0020\u043f\u0440\u043e\u0446\u0435\u0441\u0020\u043e\u0431\u043c\u0456\u043d\u0443\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u043c\u0438\u0020\u0437\u0020\u0456\u043d\u0448\u0438\u043c\u0438\u0020\u0440\u0435\u043f\u043e\u0437\u0438\u0442\u043e\u0440\u0456\u044f\u043c\u0438\u002e\u0020\u0423\u0020\u043f\u0440\u043e\u0446\u0435\u0441\u0456\u0020\u0437\u0431\u043e\u0440\u0443\u0020\u003c\u0062\u003e\u0020\u003c\u0069\u003e\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u0020\u003c\u002f\u0020\u0069\u003e\u0020\u003c\u002f\u0020\u0062\u003e\u002c\u0020\u0432\u0430\u0448\u0430\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u043c\u043e\u0436\u0435\u0020\u0437\u0440\u043e\u0431\u0438\u0442\u0438\u0020\u0434\u0435\u044f\u043a\u0456\u0020\u0437\u0020\u043b\u043e\u043a\u0430\u043b\u044c\u043d\u0438\u0445\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u0445\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u043d\u0438\u043c\u0438\u0020\u0434\u043b\u044f\u0020\u0432\u0456\u0434\u0434\u0430\u043b\u0435\u043d\u0438\u0445\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0456\u0432\u002e\u0020\u0426\u0435\u0020\u043c\u043e\u0436\u0443\u0442\u044c\u0020\u0431\u0443\u0442\u0438\u0020\u0456\u043d\u0448\u0456\u0020\u0437\u0440\u0430\u0437\u043a\u0438\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u0431\u043e\u0020\u0431\u0443\u0434\u044c\u002d\u044f\u043a\u0456\u0020\u0456\u043d\u0448\u0456\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0438\u002c\u0020\u044f\u043a\u0456\u0020\u043f\u0456\u0434\u0442\u0440\u0438\u043c\u0443\u044e\u0442\u044c\u0020\u043f\u0440\u043e\u0442\u043e\u043a\u043e\u043b\u0020\u0437\u0431\u043e\u0440\u0443\u0020\u004f\u0041\u0049\u002d\u0050\u004d\u0048\u002e 
+harvestserver.noSets.why.reason2=\u0020\u041c\u043e\u0436\u043d\u0430\u0020\u0437\u0431\u0438\u0440\u0430\u0442\u0438\u0020\u043b\u0438\u0448\u0435\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u043e\u0432\u0430\u043d\u0456\u002c\u0020\u043d\u0435\u043e\u0431\u043c\u0435\u0436\u0435\u043d\u0456\u0020\u043d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0443\u0020\u0432\u0430\u0448\u0456\u0439\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0073\u0065\u002e\u0020\u0412\u0456\u0434\u0434\u0430\u043b\u0435\u043d\u0456\u0020\u043a\u043b\u0456\u0454\u043d\u0442\u0438\u0020\u0437\u0430\u0437\u0432\u0438\u0447\u0430\u0439\u0020\u0437\u0431\u0435\u0440\u0456\u0433\u0430\u044e\u0442\u044c\u0020\u0441\u0432\u043e\u0457\u0020\u0437\u0430\u043f\u0438\u0441\u0438\u0020\u0432\u0020\u0441\u0438\u043d\u0445\u0440\u043e\u043d\u043d\u043e\u043c\u0443\u0020\u0440\u0435\u0436\u0438\u043c\u0456\u0020\u0437\u0430\u0020\u0434\u043e\u043f\u043e\u043c\u043e\u0433\u043e\u044e\u0020\u0437\u0430\u043f\u043b\u0430\u043d\u043e\u0432\u0430\u043d\u0438\u0445\u0020\u043f\u043e\u043a\u0440\u043e\u043a\u043e\u0432\u0438\u0445\u0020\u0449\u043e\u0434\u0435\u043d\u043d\u0438\u0445\u0020\u0430\u0431\u043e\u0020\u0449\u043e\u0442\u0438\u0436\u043d\u0435\u0432\u0438\u0445\u0020\u043e\u043d\u043e\u0432\u043b\u0435\u043d\u044c\u002c\u0020\u0449\u043e\u0020\u0434\u043e\u0437\u0432\u043e\u043b\u044f\u0454\u0020\u043c\u0456\u043d\u0456\u043c\u0456\u0437\u0443\u0432\u0430\u0442\u0438\u0020\u043d\u0430\u0432\u0430\u043d\u0442\u0430\u0436\u0435\u043d\u043d\u044f\u0020\u043d\u0430\u0020\u0432\u0430\u0448\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u002e\u0020\u0417\u0430\u0443\u0432\u0430\u0436\u0442\u0435\u002c\u0020\u0449\u043e\u0020\u0437\u0431\u0438\u0440\u0430\u044e\u0442\u044c\u0441\u044f\u0020\u043b\u0438\u0448\u0435\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0456\u002e\u0020\u0417\u0430\u0437\u0432\u0438\u0447\u0430\u0439\u002c\u0020\u0432\u0456\u0434\u0434\u0430\u043b\u0435\u04
3d\u0456\u0020\u0437\u0431\u0438\u0440\u0430\u0447\u0456\u0020\u043d\u0435\u0020\u043d\u0430\u043c\u0430\u0433\u0430\u044e\u0442\u044c\u0441\u044f\u0020\u0441\u0430\u043c\u043e\u0441\u0442\u0456\u0439\u043d\u043e\u0020\u0437\u0430\u0432\u0430\u043d\u0442\u0430\u0436\u0443\u0432\u0430\u0442\u0438\u0020\u0444\u0430\u0439\u043b\u0438\u0020\u0434\u0430\u043d\u0438\u0445\u002e +harvestserver.noSets.how.header=\u0020\u042f\u043a\u0020\u0437\u0430\u043f\u0443\u0441\u0442\u0438\u0442\u0438\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u0020\u0437\u0431\u043e\u0440\u0443\u003f +harvestserver.noSets.how.tip1=\u0020\u041c\u043e\u0436\u043d\u0430\u0020\u043f\u0456\u0434\u043a\u043b\u044e\u0447\u0438\u0442\u0438\u0020\u0447\u0438\u0020\u0432\u0456\u0434\u043a\u043b\u044e\u0447\u0438\u0442\u0438\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u0020\u0437\u0431\u043e\u0440\u0443\u0020\u043d\u0430\u0020\u0446\u0456\u0439\u0020\u0441\u0442\u043e\u0440\u0456\u043d\u0446\u0456\u002e +harvestserver.noSets.how.tip2=\u0020\u041f\u0456\u0441\u043b\u044f\u0020\u0430\u043a\u0442\u0438\u0432\u0430\u0446\u0456\u0457\u0020\u0441\u043b\u0443\u0436\u0431\u0438\u0020\u0432\u0438\u0020\u043c\u043e\u0436\u0435\u0442\u0435\u0020\u0432\u0438\u0437\u043d\u0430\u0447\u0438\u0442\u0438\u0020\u0441\u0443\u043a\u0443\u043f\u043d\u0456\u0441\u0442\u044c\u0020\u043b\u043e\u043a\u0430\u043b\u044c\u043d\u0438\u0445\u0020\u043d\u0430\u0431\u043e\u0440\u0456\u0432\u0020\u0434\u0430\u043d\u0438\u0445\u002c\u0020\u044f\u043a\u0456\u0020\u0431\u0443\u0434\u0443\u0442\u044c\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u043d\u0456\u0020\u0434\u043b\u044f\u0020\u0432\u0456\u0434\u0434\u0430\u043b\u0435\u043d\u0438\u0445\u0020\u0437\u0431\u0438\u0440\u0430\u0447\u0456\u0432\u0020\u003c\u0069\u003e\u0020\u004f\u0041\u0049\u0020\u0053\u0065\u0074\u0073\u0020\u003c\u002f\u0020\u0069\u003e\u002e\u0020\u041d\u0430\u0431\u043e\u0440\u0438\u0020\u0432\u0438\u0437\u043d\u0430\u0447\u0430\u044e\u0442\u044c\u0441\u044f\u0020\u043f\u043e\u04
48\u0443\u043a\u043e\u0432\u0438\u043c\u0438\u0020\u0437\u0430\u043f\u0438\u0442\u0430\u043c\u0438\u0020\u0028\u043d\u0430\u043f\u0440\u0438\u043a\u043b\u0430\u0434\u002c\u0020\u043d\u0430\u0437\u0432\u0430\u0020\u0430\u0432\u0442\u043e\u0440\u0430\u003a\u0020\u006b\u0069\u006e\u0067\u003b\u0020\u0430\u0431\u043e\u0020\u0070\u0061\u0072\u0065\u006e\u0074\u0049\u0064\u003a\u0020\u0031\u0032\u0033\u0034\u0020\u002d\u0020\u0449\u043e\u0431\u0020\u0432\u0438\u0431\u0440\u0430\u0442\u0438\u0020\u0432\u0441\u0456\u0020\u043d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445\u002c\u0020\u0449\u043e\u0020\u043d\u0430\u043b\u0435\u0436\u0430\u0442\u044c\u0020\u0432\u043a\u0430\u0437\u0430\u043d\u0456\u0439\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u003b\u0020\u0430\u0431\u043e\u0020\u0064\u0073\u0050\u0065\u0072\u0073\u0069\u0073\u0074\u0065\u006e\u0074\u0049\u0064\u003a\u0020\u0022\u0064\u006f\u0069\u003a\u0020\u0031\u0032\u0033\u0034\u0020\u002f\u0022\u0020\u0449\u043e\u0431\u0020\u0432\u0438\u0431\u0440\u0430\u0442\u0438\u0020\u0432\u0441\u0456\u0020\u043d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0437\u0430\u0020\u0434\u043e\u043f\u043e\u043c\u043e\u0433\u043e\u044e\u0020\u0432\u043a\u0430\u0437\u0430\u043d\u043e\u0433\u043e\u0020\u043f\u043e\u0441\u0442\u0456\u0439\u043d\u043e\u0433\u043e\u0020\u0456\u0434\u0435\u043d\u0442\u0438\u0444\u0456\u043a\u0430\u0442\u043e\u0440\u0430\u0029\u002e\u0020\u0414\u043b\u044f\u0020\u043e\u0442\u0440\u0438\u043c\u0430\u043d\u043d\u044f\u0020\u0434\u043e\u0434\u0430\u0442\u043a\u043e\u0432\u043e\u0457\u0020\u0456\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u0457\u0020\u043f\u0440\u043e\u0020\u043f\u043e\u0448\u0443\u043a\u043e\u0432\u0456\u0020\u0437\u0430\u043f\u0438\u0442\u0438\u0020\u0437\u0432\u0435\u0440\u043d\u0456\u0442\u044c\u0441\u044f\u0020\u0434\u043e\u0020\u0440\u043e\u0437\u0434\u0456\u043b\u0443\u0020\u0022\u041f\u043e\u0448\u0443\u043a\u0020\u0041\u0050
\u0049\u0022\u0020\u0443\u0020\u0414\u043e\u0432\u0456\u0434\u043d\u0438\u043a\u0443\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +harvestserver.noSets.getStarted=\u0020\u0429\u043e\u0431\u0020\u043f\u043e\u0447\u0430\u0442\u0438\u002c\u0020\u043f\u0456\u0434\u043a\u043b\u044e\u0447\u0456\u0442\u044c\u0020\u0441\u0435\u0440\u0432\u0435\u0440\u0020\u004f\u0041\u0049\u0020\u0442\u0430\u0020\u043d\u0430\u0442\u0438\u0441\u043d\u0456\u0442\u044c\u0020\u043a\u043d\u043e\u043f\u043a\u0443\u0020\u0041\u0064\u0064\u0020\u0053\u0065\u0074\u002e\u0020\u0429\u043e\u0431\u0020\u0434\u0456\u0437\u043d\u0430\u0442\u0438\u0441\u044c\u0020\u0431\u0456\u043b\u044c\u0448\u0435\u0020\u043f\u0440\u043e\u0020\u0437\u0431\u0456\u0440\u002c\u0020\u043f\u0435\u0440\u0435\u0439\u0434\u0456\u0442\u044c\u0020\u043d\u0430\u0020\u0441\u0442\u043e\u0440\u0456\u043d\u043a\u0443\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0069\u006e\u0064\u0065\u0078\u002e\u0068\u0074\u006d\u006c\u0023\u0069\u006e\u0064\u0065\u0078\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0417\u0431\u0456\u0440\u0020\u002d\u0020\u041f\u043e\u0441\u0456\u0431\u043d\u0438\u043a\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0048\u0061\u0072\u0076\u0065\u0073\u0074\u0069\u006e\u0067\u003c\u002f\u0061\u003e\u002e +harvestserver.btn.add=\u0020\u0414\u043e\u0434\u0430\u0442\u0438\u0020\u043d\u0430\u0431\u0456\u0440 +harvestserver.tab.header.spec=\u004f\u0041\u0049\u0020\u0073\u0065\u0074\u0053\u0070\u0065\u0063 +harvestserver.tab.header.description=\u0020\u041e\u043f\u0438\u0441 
+harvestserver.tab.header.definition=\u0020\u041a\u0435\u0440\u0443\u0432\u0430\u043b\u044c\u043d\u0438\u0439\u0020\u0437\u0430\u043f\u0438\u0442\u002e +harvestserver.tab.header.stats=\u0020\u043d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445\u002e +harvestserver.tab.col.stats.empty=\u0020\u041d\u0435\u043c\u0430\u0454\u0020\u0437\u0430\u043f\u0438\u0441\u0456\u0432\u0020\u0028\u043f\u043e\u0440\u043e\u0436\u043d\u0456\u0439\u0020\u043d\u0430\u0431\u0456\u0440\u0029\u002e +harvestserver.tab.col.stats.results=\u007b\u0030\u007d\u0020\u007b\u0030\u002c\u0020\u0432\u0438\u0431\u0456\u0440\u002c\u0020\u0030\u0023\u0020\u043d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u007c\u0031\u0023\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u007c\u0032\u0023\u0020\u043d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u007d\u0020\u0028\u007b\u0031\u007d\u0020\u007b\u0031\u002c\u0020\u0432\u0438\u0431\u0456\u0440\u002c\u0020\u0030\u0023\u0437\u0430\u043f\u0438\u0441\u0438\u007c\u0031\u0023\u0437\u0430\u043f\u0438\u0441\u007c\u0032\u0023\u0437\u0430\u043f\u0438\u0441\u0438\u007d\u0020\u0435\u043a\u0441\u043f\u043e\u0440\u0442\u043e\u0432\u0430\u043d\u043e\u002c\u0020\u007b\u0032\u007d\u0020\u043f\u043e\u0437\u043d\u0430\u0447\u0435\u043d\u043e\u0020\u044f\u043a\u0020\u0432\u0438\u0434\u0430\u043b\u0435\u043d\u0438\u0439\u0029 +harvestserver.tab.header.action=\u0020\u0414\u0456\u0457\u0020 +harvestserver.tab.header.action.btn.export=\u0417\u0430\u043f\u0443\u0441\u0442\u0438\u0442\u0438\u0020\u0435\u043a\u0441\u043f\u043e\u0440\u0442 
+harvestserver.actions.runreexport.success=\u0020\u0423\u0441\u043f\u0456\u0448\u043d\u043e\u0020\u0440\u043e\u0437\u043f\u043e\u0447\u0430\u0442\u043e\u0020\u0430\u0441\u0438\u043d\u0445\u0440\u043e\u043d\u043d\u0438\u0439\u0020\u0440\u0435\u0435\u043a\u0441\u043f\u043e\u0440\u0442\u0020\u0434\u043b\u044f\u0020\u004f\u0041\u0049\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u0020\u0022\u007b\u0030\u007d\u0022\u0020\u0028\u0431\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u043f\u0435\u0440\u0435\u0437\u0430\u0432\u0430\u043d\u0442\u0430\u0436\u0442\u0435\u0020\u0441\u0442\u043e\u0440\u0456\u043d\u043a\u0443\u002c\u0020\u0449\u043e\u0431\u0020\u043f\u0435\u0440\u0435\u0432\u0456\u0440\u0438\u0442\u0438\u0020\u043f\u0435\u0440\u0435\u0431\u0456\u0433\u0020\u0435\u043a\u0441\u043f\u043e\u0440\u0442\u0443\u0029\u002e +harvestserver.tab.header.action.btn.edit =\u0020\u0420\u0435\u0434\u0430\u0433\u0443\u0432\u0430\u0442\u0438 +harvestserver.tab.header.action.btn.delete =\u0020\u0412\u0438\u0434\u0430\u043b\u0438\u0442\u0438 + +harvestserver.tab.header.action.btn.delete.dialog.header=\u0020\u0412\u0438\u0434\u0430\u043b\u0438\u0442\u0438\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0437\u0431\u043e\u0440\u0443\u002e +harvestserver.tab.header.action.btn.delete.dialog.tip=\u0020\u0412\u0438\u0020\u0432\u043f\u0435\u0432\u043d\u0435\u043d\u0456\u002c\u0020\u0449\u043e\u0020\u0445\u043e\u0447\u0435\u0442\u0435\u0020\u0432\u0438\u0434\u0430\u043b\u0438\u0442\u0438\u0020\u004f\u0041\u0049\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0022\u007b\u0030\u007d\u0022\u003f\u0020\u0412\u0438\u0020\u043d\u0435\u0020\u0437\u043c\u043e\u0436\u0435\u0442\u0435\u0020\u0441\u043a\u0430\u0441\u0443\u0432\u0430\u0442\u0438\u0020\u0432\u0438\u0434\u0430\u043b\u0435\u043d\u043d\u044f\u0021 
+harvestserver.tab.header.action.delete.infomessage=\u0020\u0412\u0438\u0431\u0440\u0430\u043d\u0438\u0439\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0437\u0431\u043e\u0440\u0443\u0020\u0432\u0438\u0434\u0430\u043b\u044f\u0454\u0442\u044c\u0441\u044f\u002e\u0020\u0028\u0446\u0435\u0020\u043c\u043e\u0436\u0435\u0020\u0437\u0430\u0439\u043d\u044f\u0442\u0438\u0020\u043a\u0456\u043b\u044c\u043a\u0430\u0020\u0445\u0432\u0438\u043b\u0438\u043d\u0029\u002e +harvestserver.newSetDialog.title.new=\u0421\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0437\u0431\u043e\u0440\u0443\u002e +harvestserver.newSetDialog.help=\u0020\u0412\u0438\u0437\u043d\u0430\u0447\u0438\u0442\u0438\u0020\u043a\u043e\u043c\u043f\u043b\u0435\u043a\u0442\u0020\u043b\u043e\u043a\u0430\u043b\u044c\u043d\u0438\u0445\u0020\u043d\u0430\u0431\u043e\u0440\u0456\u0432\u0020\u0434\u0430\u043d\u0438\u0445\u002c\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u043d\u0438\u0445\u0020\u0434\u043b\u044f\u0020\u0432\u0456\u0434\u0434\u0430\u043b\u0435\u043d\u0438\u0445\u0020\u0437\u0431\u0438\u0440\u0430\u0447\u0456\u0432\u002e +harvestserver.newSetDialog.setspec =\u0020\u0406\u043c\u0027\u044f\u0020\u002f\u0020\u004f\u0041\u0049\u0020\u0073\u0065\u0074\u0053\u0070\u0065\u0063 +harvestserver.newSetDialog.setspec.tip =\u0020\u0423\u043d\u0456\u043a\u0430\u043b\u044c\u043d\u0435\u0020\u0456\u043c\u0027\u044f\u0020\u0028\u004f\u0041\u0049\u0020\u0073\u0065\u0074\u0053\u0070\u0065\u0063\u0029\u002c\u0020\u0449\u043e\u0020\u0456\u0434\u0435\u043d\u0442\u0438\u0444\u0456\u043a\u0443\u0454\u0020\u0446\u0435\u0439\u0020\u043d\u0430\u0431\u0456\u0440\u002e 
+harvestserver.newSetDialog.setspec.helptext=\u0020\u0421\u043a\u043b\u0430\u0434\u0430\u0454\u0442\u044c\u0441\u044f\u0020\u0437\u0020\u0431\u0443\u043a\u0432\u002c\u0020\u0446\u0438\u0444\u0440\u002c\u0020\u043f\u0456\u0434\u043a\u0440\u0435\u0441\u043b\u0435\u043d\u043d\u044f\u0020\u0028\u005f\u0029\u0020\u0456\u0020\u0440\u0438\u0441\u043e\u043a\u0020\u0028\u002d\u0029\u002e +harvestserver.editSetDialog.setspec.helptext=\u0020\u0406\u043c\u0027\u044f\u0020\u043d\u0435\u0020\u043c\u043e\u0436\u043d\u0430\u0020\u0437\u043c\u0456\u043d\u044e\u0432\u0430\u0442\u0438\u0020\u043f\u0456\u0441\u043b\u044f\u0020\u0441\u0442\u0432\u043e\u0440\u0435\u043d\u043d\u043d\u044f\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u002e +harvestserver.newSetDialog.setspec.required=\u0020\u0406\u043c\u0027\u044f\u0020\u0028\u004f\u0041\u0049\u0020\u0073\u0065\u0074\u0053\u0070\u0065\u0063\u0029\u0020\u043d\u0435\u0020\u043c\u043e\u0436\u0435\u0020\u0431\u0443\u0442\u0438\u0020\u043f\u043e\u0440\u043e\u0436\u043d\u0456\u043c\u0021 +harvestserver.newSetDialog.setspec.invalid=\u0020\u0406\u043c\u0027\u044f\u0020\u0028\u004f\u0041\u0049\u0020\u0073\u0065\u0074\u0053\u0070\u0065\u0063\u0029\u0020\u043c\u043e\u0436\u0435\u0020\u0441\u043a\u043b\u0430\u0434\u0430\u0442\u0438\u0441\u044f\u0020\u0442\u0456\u043b\u044c\u043a\u0438\u0020\u0437\u0020\u0431\u0443\u043a\u0432\u002c\u0020\u0446\u0438\u0444\u0440\u002c\u0020\u043f\u0456\u0434\u043a\u0440\u0435\u0441\u043b\u0435\u043d\u043d\u044f\u0020\u0028\u005f\u0029\u0020\u0456\u0020\u0440\u0438\u0441\u043e\u043a\u0020\u0028\u002d\u0029\u002e +harvestserver.newSetDialog.setspec.alreadyused =\u0020\u0426\u044f\u0020\u043d\u0430\u0437\u0432\u0430\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u0020\u0028\u004f\u0041\u0049\u0020\u0073\u0065\u0074\u0053\u0070\u0065\u0063\u0029\u0020\u0432\u0436\u0435\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0454\u0442\u044c\u0441\u044f\u002e +harvestserver.newSetDialog.setdescription 
=\u0020\u041e\u043f\u0438\u0441\u002e +harvestserver.newSetDialog.setdescription.tip=\u0020\u041d\u0430\u0434\u0430\u0439\u0442\u0435\u0020\u043a\u043e\u0440\u043e\u0442\u043a\u0438\u0439\u0020\u043e\u043f\u0438\u0441\u0020\u0446\u044c\u043e\u0433\u043e\u0020\u004f\u0041\u0049\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u002e +harvestserver.newSetDialog.setdescription.required=\u0020\u041f\u043e\u043b\u0435\u0020\u043e\u043f\u0438\u0441\u0443\u0020\u043d\u0435\u0020\u043c\u043e\u0436\u0435\u0020\u0431\u0443\u0442\u0438\u0020\u043f\u043e\u0440\u043e\u0436\u043d\u0456\u043c\u0021 +harvestserver.newSetDialog.setquery=\u0020\u041a\u0435\u0440\u0443\u0432\u0430\u043b\u044c\u043d\u0438\u0439\u0020\u0437\u0430\u043f\u0438\u0442\u002e +harvestserver.newSetDialog.setquery.tip=\u0020\u041f\u043e\u0448\u0443\u043a\u043e\u0432\u0438\u0439\u0020\u0437\u0430\u043f\u0438\u0442\u002c\u0020\u044f\u043a\u0438\u0439\u0020\u0432\u0438\u0437\u043d\u0430\u0447\u0430\u0454\u0020\u0437\u043c\u0456\u0441\u0442\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u0020\u0434\u0430\u043d\u0438\u0445\u002e +harvestserver.newSetDialog.setquery.helptext=\u041f\u0440\u0438\u043a\u043b\u0430\u0434\u0020\u0437\u0430\u043f\u0438\u0442\u0443\u003a\u0020\u0061\u0075\u0074\u0068\u006f\u0072\u004e\u0061\u006d\u0065\u003a\u006b\u0069\u006e\u0067 +harvestserver.newSetDialog.setquery.required=\u0020\u041d\u0435\u0020\u043c\u043e\u0436\u043d\u0430\u0020\u0437\u0430\u043b\u0438\u0448\u0430\u0442\u0438\u0020\u043f\u043e\u043b\u0435\u0020\u043f\u043e\u0448\u0443\u043a\u043e\u0432\u043e\u0433\u043e\u0020\u0437\u0430\u043f\u0438\u0442\u0443\u0020\u0020\u043f\u0443\u0441\u0442\u0438\u043c\u0021 +harvestserver.newSetDialog.setquery.results=\u0020\u041f\u043e\u0448\u0443\u043a\u043e\u0432\u0438\u0439\u0020\u0437\u0430\u043f\u0438\u0442\u0020\u0434\u0430\u0432\u0020\u007b\u0030\u007d\u0020\u043d\u0430\u0431\u043e\u0440\u0456\u0432\u0020\u0434\u0430\u043d\u0438\u0445\u0021 
+harvestserver.newSetDialog.setquery.empty=\u0020\u041f\u041e\u041f\u0415\u0420\u0415\u0414\u0416\u0415\u041d\u041d\u042f\u003a\u0020\u043f\u043e\u0448\u0443\u043a\u043e\u0432\u0438\u0439\u0020\u0437\u0430\u043f\u0438\u0442\u0020\u043d\u0435\u0020\u0434\u0430\u0432\u0020\u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u0442\u0456\u0432\u0021 +harvestserver.newSetDialog.btn.create=\u0020\u0421\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u043d\u0430\u0431\u0456\u0440 +harvestserver.newSetDialog.success=\u0020\u0423\u0441\u043f\u0456\u0448\u043d\u043e\u0020\u0441\u0442\u0432\u043e\u0440\u0435\u043d\u0438\u0439\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0437\u0431\u043e\u0440\u0443\u0022\u007b\u0030\u007d\u0022\u002e +harvestserver.viewEditDialog.title=\u0020\u0420\u0435\u0434\u0430\u0433\u0443\u0432\u0430\u0442\u0438\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0437\u0431\u043e\u0440\u0443 +harvestserver.viewEditDialog.btn.save =\u0020\u0417\u0431\u0435\u0440\u0435\u0433\u0442\u0438\u0020\u0437\u043c\u0456\u043d\u0438\u002e + + +\u0023\u0064\u0061\u0073\u0068\u0062\u006f\u0061\u0072\u0064\u002d\u0075\u0073\u0065\u0072\u0073\u002e\u0078\u0068\u0074\u006d\u006c +dashboard.card.users =\u0020\u041a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0456 +dashboard.card.users.header =\u0020\u0406\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u0439\u043d\u0430\u0020\u043f\u0430\u043d\u0435\u043b\u044c\u0020\u002d\u0020\u041f\u0435\u0440\u0435\u043b\u0456\u043a\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0456\u0432 +dashboard.card.users.super =\u0020\u0421\u043f\u0435\u0440\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0456 +dashboard.card.users.manage =\u0020\u041a\u0435\u0440\u0443\u0432\u0430\u0442\u0438\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u043c\u0438 +dashboard.card.users.message 
=\u0020\u041f\u0435\u0440\u0435\u043b\u0456\u043a\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0456\u0432\u0020\u0442\u0430\u0020\u043a\u0435\u0440\u0443\u0432\u0430\u043d\u043d\u044f\u0020\u043d\u0438\u043c\u0438\u002e +dashboard.list_users.searchTerm.watermark =\u0020\u041f\u043e\u0448\u0443\u043a\u0020\u0446\u0438\u0445\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0456\u0432\u0020\u002e\u002e\u002e +dashboard.list_users.tbl_header.userId =\u0020\u0456\u0434\u0435\u043d\u0442\u0438\u0444\u0456\u043a\u0430\u0442\u043e\u0440 +dashboard.list_users.tbl_header.userIdentifier =\u0020\u0406\u043c\u0027\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430 +dashboard.list_users.tbl_header.name =\u0020\u0406\u043c\u0027\u044f +dashboard.list_users.tbl_header.lastName =\u0020\u041f\u0440\u0456\u0437\u0432\u0438\u0449\u0435 +dashboard.list_users.tbl_header.firstName =\u0020\u0406\u043c\u0027\u044f +dashboard.list_users.tbl_header.email =\u0020\u0415\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u043f\u043e\u0448\u0442\u0430 +dashboard.list_users.tbl_header.affiliation=\u0020\u041f\u0440\u0438\u043d\u0430\u043b\u0435\u0436\u043d\u0456\u0441\u0442\u044c\u0020\u0434\u043e\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0438 +dashboard.list_users.tbl_header.roles =\u0020\u0420\u043e\u043b\u0456 +dashboard.list_users.tbl_header.position =\u0020\u041f\u043e\u0441\u0430\u0434\u0430 +dashboard.list_users.tbl_header.isSuperuser =\u0020\u0421\u0443\u043f\u0435\u0440\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447 +dashboard.list_users.tbl_header.authProviderFactoryAlias =\u0020\u0410\u0443\u0442\u0435\u043d\u0442\u0438\u0444\u0456\u043a\u0430\u0446\u0456\u044f +dashboard.list_users.tbl_header.createdTime =\u0020\u0421\u0442\u0432\u043e\u0440\u0435\u043d\u043e\u0020\u0447\u0430\u0441 +dashboard.list_users.tbl_header.lastLoginTime 
=\u0020\u0427\u0430\u0441\u0020\u043e\u0441\u0442\u0430\u043d\u043d\u044c\u043e\u0433\u043e\u0020\u0432\u0445\u043e\u0434\u0443 +dashboard.list_users.tbl_header.lastApiUseTime =\u0020\u0427\u0430\u0441\u0020\u043e\u0441\u0442\u0430\u043d\u043d\u044c\u043e\u0433\u043e\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u0430\u043d\u043d\u044f\u0020\u0041\u0050\u0049 +dashboard.list_users.tbl_header.roles.removeAll =\u0020\u0412\u0438\u0434\u0430\u043b\u0438\u0442\u0438\u0020\u0432\u0441\u0435 +dashboard.list_users.tbl_header.roles.removeAll.header =\u0020\u0412\u0438\u0434\u0430\u043b\u0438\u0442\u0438\u0020\u0432\u0441\u0456\u0020\u0440\u043e\u043b\u0456 +dashboard.list_users.tbl_header.roles.removeAll.confirmationText =\u0020\u0412\u0438\u0020\u0432\u043f\u0435\u0432\u043d\u0435\u043d\u0456\u002c\u0020\u0449\u043e\u0020\u0445\u043e\u0447\u0435\u0442\u0435\u0020\u0432\u0438\u0434\u0430\u043b\u0438\u0442\u0438\u0020\u0432\u0441\u0456\u0020\u0440\u043e\u043b\u0456\u0020\u0434\u043b\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u007b\u0030\u007d\u003f +dashboard.list_users.removeAll.message.success =\u0020\u0423\u0441\u0456\u0020\u0440\u043e\u043b\u0456\u0020\u0434\u043b\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u007b\u0030\u007d\u0020\u0432\u0438\u0434\u0430\u043b\u0435\u043d\u0456\u002e +dashboard.list_users.removeAll.message.failure =\u0020\u041d\u0435\u0020\u0432\u0434\u0430\u043b\u043e\u0441\u044f\u0020\u0432\u0438\u0434\u0430\u043b\u0438\u0442\u0438\u0020\u0440\u043e\u043b\u0456\u0020\u0434\u043b\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u007b\u0030\u007d\u002e +dashboard.list_users.toggleSuperuser=\u0420\u0435\u0434\u0430\u0433\u0443\u0432\u0430\u0442\u0438\u0020\u0441\u0442\u0430\u0442\u0443\u0441\u0020\u0441\u0443\u043f\u0435\u0440\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u002e 
+dashboard.list_users.toggleSuperuser.confirmationText.add =\u0020\u0412\u0438\u0020\u0432\u043f\u0435\u0432\u043d\u0435\u043d\u0456\u002c\u0020\u0449\u043e\u0020\u0445\u043e\u0447\u0435\u0442\u0435\u0020\u0430\u043a\u0442\u0438\u0432\u0443\u0432\u0430\u0442\u0438\u0020\u0441\u0442\u0430\u0442\u0443\u0441\u0020\u0441\u0443\u043f\u0435\u0440\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0434\u043b\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u007b\u0030\u007d\u003f +dashboard.list_users.toggleSuperuser.confirmationText.remove =\u0020\u0412\u0438\u0020\u0432\u043f\u0435\u0432\u043d\u0435\u043d\u0456\u002c\u0020\u0449\u043e\u0020\u0445\u043e\u0447\u0435\u0442\u0435\u0020\u0432\u0438\u043c\u043a\u043d\u0443\u0442\u0438\u0020\u0441\u0442\u0430\u0442\u0443\u0441\u0020\u0441\u0443\u043f\u0435\u0440\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0434\u043b\u044f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u007b\u0030\u007d\u003f +dashboard.list_users.toggleSuperuser.confirm =\u0020\u041f\u0440\u043e\u0434\u043e\u0432\u0436\u0438\u0442\u0438 +dashboard.list_users.api.auth.invalid_apikey =\u0020\u041a\u043b\u044e\u0447\u0020\u0041\u0050\u0049\u0020\u043d\u0435\u0434\u0456\u0439\u0441\u043d\u0438\u0439\u002e +dashboard.list_users.api.auth.not_superuser =\u0020\u0417\u0430\u0431\u043e\u0440\u043e\u043d\u0435\u043d\u0438\u0439\u002e\u0020\u0412\u0438\u0020\u043f\u043e\u0432\u0438\u043d\u043d\u0456\u0020\u0431\u0443\u0442\u0438\u0020\u0441\u0443\u043f\u0435\u0440\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0435\u043c\u002e + +\u0023\u004d\u0061\u0069\u006c\u0053\u0065\u0072\u0076\u0069\u0063\u0065\u0042\u0065\u0061\u006e\u002e\u006a\u0061\u0076\u0061 
+notification.email.create.dataverse.subject=\u007b\u0030\u007d\u003a\u0020\u0412\u0430\u0448\u0430\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0441\u0442\u0432\u043e\u0440\u0435\u043d\u0430\u002e +notification.email.create.dataset.subject=\u007b\u0030\u007d\u003a\u0020\u0412\u0430\u0448\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0441\u0442\u0432\u043e\u0440\u0435\u043d\u0438\u0439\u002e +notification.email.request.file.access.subject=\u007b\u0030\u007d\u003a\u0020\u0417\u0430\u043f\u0438\u0442\u0020\u043f\u0440\u043e\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u0020\u0434\u043e\u0020\u0444\u0430\u0439\u043b\u0443\u0020\u0437\u0020\u043e\u0431\u043c\u0435\u0436\u0435\u043d\u0438\u043c\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u043e\u043c\u002e +notification.email.grant.file.access.subject=\u007b\u0030\u007d\u003a\u0020\u0412\u0430\u043c\u0020\u043d\u0430\u0434\u0430\u043d\u043e\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u0020\u0434\u043e\u0020\u043e\u0431\u043c\u0435\u0436\u0435\u043d\u043e\u0433\u043e\u0020\u0444\u0430\u0439\u043b\u0443\u002e +notification.email.rejected.file.access.subject=\u007b\u0030\u007d\u003a\u0020\u0412\u0430\u0448\u0020\u0437\u0430\u043f\u0438\u0442\u0020\u043f\u0440\u043e\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u0020\u0434\u043e\u0020\u043e\u0431\u043c\u0435\u0436\u0435\u043d\u043e\u0433\u043e\u0020\u0444\u0430\u0439\u043b\u0443\u0020\u0432\u0456\u0434\u0445\u0438\u043b\u0435\u043d\u043e\u002e +notification.email.update.maplayer=\u007b\u0030\u007d\u003a\u0020\u0428\u0430\u0440\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u0020\u0434\u043e\u0434\u0430\u043d\u0438\u0439\u0020\u0434\u043e\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u0020\u0434\u0430\u043d\u0438\u0445\u002e 
+notification.email.maplayer.deletefailed.subject=\u007b\u0030\u007d\u003a\u0020\u043d\u0435\u0020\u0432\u0434\u0430\u043b\u043e\u0441\u044f\u0020\u0432\u0438\u0434\u0430\u043b\u0438\u0442\u0438\u0020\u0448\u0430\u0440\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u002e +notification.email.maplayer.deletefailed.text=\u0020\u041c\u0438\u0020\u043d\u0435\u0020\u0437\u043c\u043e\u0433\u043b\u0438\u0020\u0432\u0438\u0434\u0430\u043b\u0438\u0442\u0438\u0020\u0448\u0430\u0440\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u002c\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u043d\u0438\u0439\u0020\u0456\u0437\u0020\u043e\u0431\u043c\u0435\u0436\u0435\u043d\u0438\u043c\u0020\u0444\u0430\u0439\u043b\u043e\u043c\u0020\u007b\u0030\u007d\u002c\u0020\u0442\u0430\u0020\u0432\u0441\u0456\u0020\u0432\u0456\u0434\u043f\u043e\u0432\u0456\u0434\u043d\u0456\u0020\u0434\u0430\u043d\u0456\u002c\u0020\u044f\u043a\u0456\u0020\u0449\u0435\u0020\u043c\u043e\u0436\u0443\u0442\u044c\u0020\u0437\u0430\u043b\u0438\u0448\u0430\u0442\u0438\u0441\u044f\u0020\u0437\u0430\u0433\u0430\u043b\u044c\u043d\u043e\u0434\u043e\u0441\u0442\u0443\u043f\u043d\u0438\u043c\u0438\u0020\u043d\u0430\u0020\u0441\u0430\u0439\u0442\u0456\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u002e\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u043f\u043e\u0432\u0442\u043e\u0440\u0456\u0442\u044c\u0020\u0441\u043f\u0440\u043e\u0431\u0443\u002c\u0020\u0430\u0431\u043e\u0020\u0437\u0432\u0027\u044f\u0436\u0456\u0442\u044c\u0441\u044f\u0020\u0437\u0456\u0020\u0441\u043b\u0443\u0436\u0431\u043e\u044e\u0020\u043f\u0456\u0434\u0442\u0440\u0438\u043c\u043a\u0438\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u0020\u0442\u0430\u0020\u002f\u0020\u0430\u0431\u043e\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0020\u0028\u041d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u003a\u0020\u007b\u0031\u007d\u0029 
+notification.email.submit.dataset.subject=\u007b\u0030\u007d\u003a\u0020\u0412\u0430\u0448\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u043f\u043e\u0434\u0430\u043d\u043e\u0020\u043d\u0430\u0020\u0440\u043e\u0437\u0433\u043b\u044f\u0434\u002e +notification.email.publish.dataset.subject=\u007b\u0030\u007d\u003a\u0020\u0412\u0430\u0448\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u043e\u0432\u0430\u043d\u043e\u002e +notification.email.returned.dataset.subject=\u007b\u0030\u007d\u003a\u0020\u0412\u0430\u0448\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u043f\u043e\u0432\u0435\u0440\u043d\u0443\u0442\u043e\u002e +notification.email.create.account.subject=\u007b\u0030\u007d\u003a\u0020\u0412\u0430\u0448\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0020\u0441\u0442\u0432\u043e\u0440\u0435\u043d\u043e\u002e +notification.email.assign.role.subject =\u0020\u007b\u0030\u007d\u003a\u0020\u0432\u0430\u043c\u0020\u043f\u0440\u0438\u0437\u043d\u0430\u0447\u0435\u043d\u043e\u0020\u0440\u043e\u043b\u044c +notification.email.revoke.role.subject =\u0020\u007b\u0030\u007d\u003a\u0020\u0432\u0430\u0448\u0443\u0020\u0440\u043e\u043b\u044c\u0020\u0441\u043a\u0430\u0441\u043e\u0432\u0430\u043d\u043e\u002e +notification.email.verifyEmail.subject=\u007b\u0030\u007d\u003a\u0020\u043f\u0456\u0434\u0442\u0432\u0435\u0440\u0434\u0438\u0442\u0438\u0020\u0441\u0432\u043e\u044e\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0443\u0020\u0430\u0434\u0440\u0435\u0441\u0443\u002e +notification.email.greeting=\u041f\u0440\u0438\u0432\u0456\u0442\u002c\u0020\u005c\u006e 
+\u0023\u0020\u0042\u0075\u006e\u0064\u006c\u0065\u0020\u0066\u0069\u006c\u0065\u0020\u0065\u0064\u0069\u0074\u006f\u0072\u0073\u002c\u0020\u0070\u006c\u0065\u0061\u0073\u0065\u0020\u006e\u006f\u0074\u0065\u0020\u0074\u0068\u0061\u0074\u0020\u0022\u006e\u006f\u0074\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u002e\u0065\u006d\u0061\u0069\u006c\u002e\u0077\u0065\u006c\u0063\u006f\u006d\u0065\u0022\u0020\u0069\u0073\u0020\u0075\u0073\u0065\u0064\u0020\u0069\u006e\u0020\u0061\u0020\u0075\u006e\u0069\u0074\u0020\u0074\u0065\u0073\u0074 +notification.email.welcome=\u0020\u041b\u0430\u0441\u043a\u0430\u0432\u043e\u0020\u043f\u0440\u043e\u0441\u0438\u043c\u043e\u0020\u0434\u043e\u0020\u007b\u0030\u007d\u0021\u0020\u041f\u043e\u0447\u043d\u0456\u0442\u044c\u0020\u0437\u0020\u0434\u043e\u0434\u0430\u0432\u0430\u043d\u043d\u044f\u0020\u0447\u0438\u0020\u043f\u043e\u0448\u0443\u043a\u0443\u0020\u0434\u0430\u043d\u0438\u0445\u002e\u0020\u0404\u0020\u043f\u0438\u0442\u0430\u043d\u043d\u044f\u003f\u0020\u0417\u0432\u0435\u0440\u043d\u0456\u0442\u044c\u0441\u044f\u0020\u0434\u043e\u0020\u0414\u043e\u0432\u0456\u0434\u043d\u0438\u043a\u0430\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0020\u007b\u0031\u007d\u002f\u007b\u0032\u007d\u002f\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0020\u0430\u0431\u043e\u0020\u0437\u0432\u0027\u044f\u0436\u0456\u0442\u044c\u0441\u044f\u0020\u0437\u0020\u007b\u0033\u007d\u0020\u0432\u0020\u007b\u0034\u007d\u0020\u0449\u043e\u0431\u0020\u043e\u0442\u0440\u0438\u043c\u0430\u0442\u0438\u0020\u0434\u043e\u043f\u043e\u043c\u043e\u0433\u0443\u002e + 
+notification.email.welcomeConfirmEmailAddOn=\u005c\u006e\u005c\u006e\u0020\u041f\u0435\u0440\u0435\u0432\u0456\u0440\u0442\u0435\u0020\u0430\u0434\u0440\u0435\u0441\u0443\u0020\u0441\u0432\u043e\u0454\u0457\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u043e\u0457\u0020\u043f\u043e\u0448\u0442\u0438\u0020\u043d\u0430\u0020\u007b\u0030\u007d\u002e\u0020\u0417\u0432\u0435\u0440\u043d\u0456\u0442\u044c\u0020\u0443\u0432\u0430\u0433\u0443\u002c\u0020\u0449\u043e\u0020\u043f\u043e\u0441\u0438\u043b\u0430\u043d\u043d\u044f\u0020\u043d\u0430\u0020\u043f\u0456\u0434\u0442\u0432\u0435\u0440\u0434\u0436\u0435\u043d\u043d\u044f\u0020\u0437\u0430\u043a\u0456\u043d\u0447\u0438\u0442\u044c\u0441\u044f\u0020\u043f\u0456\u0441\u043b\u044f\u0020\u007b\u0031\u007d\u002e\u0020\u041d\u0430\u0434\u0456\u0448\u043b\u0456\u0442\u044c\u0020\u0449\u0435\u0020\u043e\u0434\u0438\u043d\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0438\u0439\u0020\u043b\u0438\u0441\u0442\u0020\u0434\u043b\u044f\u0020\u043f\u0435\u0440\u0435\u0432\u0456\u0440\u043a\u0438\u002c\u0020\u0432\u0456\u0434\u0432\u0456\u0434\u0430\u0432\u0448\u0438\u0020\u0432\u0430\u0448\u0443\u0020\u0441\u0442\u043e\u0440\u0456\u043d\u043a\u0443\u0020\u0437\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u043e\u043c\u002e +notification.email.requestFileAccess=\u0020\u0417\u0430\u043f\u0438\u0442\u0020\u043f\u0440\u043e\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u0020\u0434\u043e\u0020\u0444\u0430\u0439\u043b\u0443\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u0020\u0434\u0430\u043d\u0438\u0445\u003a\u0020\u007b\u0030\u007d\u002e\u0020\u041a\u0435\u0440\u0443\u0432\u0430\u043d\u043d\u044f\u0020\u0434\u043e\u0437\u0432\u043e\u043b\u0430\u043c\u0438\u0020\u0432\u0020\u007b\u0031\u007d\u002e 
+notification.email.grantFileAccess=\u0020\u0414\u043e\u0441\u0442\u0443\u043f\u0020\u043d\u0430\u0434\u0430\u043d\u043e\u0020\u0434\u043b\u044f\u0020\u0444\u0430\u0439\u043b\u0456\u0432\u0020\u0443\u0020\u043d\u0430\u0431\u043e\u0440\u0456\u0020\u0434\u0430\u043d\u0438\u0445\u003a\u0020\u007b\u0030\u007d\u0020\u0028\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u043d\u0443\u0442\u0438\u0020\u043d\u0430\u0020\u007b\u0031\u007d\u0029\u002e +notification.email.rejectFileAccess=\u0020\u0412\u0430\u0448\u0020\u0437\u0430\u043f\u0438\u0442\u0020\u043f\u0440\u043e\u0020\u0434\u043e\u0441\u0442\u0443\u043f\u0020\u0431\u0443\u043b\u043e\u0020\u0432\u0456\u0434\u0445\u0438\u043b\u0435\u043d\u043e\u0020\u0434\u043b\u044f\u0020\u0437\u0430\u043f\u0438\u0442\u0430\u043d\u0438\u0445\u0020\u0444\u0430\u0439\u043b\u0456\u0432\u0020\u0443\u0020\u043d\u0430\u0431\u043e\u0440\u0456\u0020\u0434\u0430\u043d\u0438\u0445\u003a\u0020\u007b\u0030\u007d\u0020\u0028\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u0434\u0020\u043d\u0430\u0020\u007b\u0031\u007d\u0029\u002e\u0020\u042f\u043a\u0449\u043e\u0020\u0443\u0020\u0432\u0430\u0441\u0020\u0454\u0020\u044f\u043a\u0456\u0441\u044c\u0020\u043f\u0438\u0442\u0430\u043d\u043d\u044f\u0020\u043f\u0440\u043e\u0020\u0442\u0435\u002c\u0020\u0447\u043e\u043c\u0443\u0020 
+\u0437\u0430\u043f\u0438\u0442\u0020\u0432\u0456\u0434\u0445\u0438\u043b\u0438\u043b\u0438\u002c\u0020\u0432\u0438\u0020\u043c\u043e\u0436\u0435\u0442\u0435\u0020\u0437\u0432\u0027\u044f\u0437\u0430\u0442\u0438\u0441\u044f\u0020\u0437\u0020\u0432\u043b\u0430\u0441\u043d\u0438\u043a\u043e\u043c\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u0020\u0434\u0430\u043d\u0438\u0445\u002c\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u044e\u0447\u0438\u0020\u043f\u043e\u0441\u0438\u043b\u0430\u043d\u043d\u044f\u0020\u0022\u041a\u043e\u043d\u0442\u0430\u043a\u0442\u0022\u0020\u0443\u0020\u0432\u0435\u0440\u0445\u043d\u044c\u043e\u043c\u0443\u0020\u043f\u0440\u0430\u0432\u043e\u043c\u0443\u0020\u043a\u0443\u0442\u0456\u0020\u0441\u0442\u043e\u0440\u0456\u043d\u043a\u0438\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u0020\u0434\u0430\u043d\u0438\u0445\u002e +\u0023\u0020\u0042\u0075\u006e\u0064\u006c\u0065\u0020\u0066\u0069\u006c\u0065\u0020\u0065\u0064\u0069\u0074\u006f\u0072\u0073\u002c\u0020\u0070\u006c\u0065\u0061\u0073\u0065\u0020\u006e\u006f\u0074\u0065\u0020\u0074\u0068\u0061\u0074\u0020\u0022\u006e\u006f\u0074\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u002e\u0065\u006d\u0061\u0069\u006c\u002e\u0063\u0072\u0065\u0061\u0074\u0065\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0022\u0020\u0069\u0073\u0020\u0075\u0073\u0065\u0064\u0020\u0069\u006e\u0020\u0061\u0020\u0075\u006e\u0069\u0074\u0020\u0074\u0065\u0073\u0074 
+notification.email.createDataverse=\u0020\u0412\u0430\u0448\u0430\u0020\u043d\u043e\u0432\u0430\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u043f\u0456\u0434\u0020\u043d\u0430\u0437\u0432\u043e\u044e\u0020\u007b\u0030\u007d\u0020\u0028\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u0434\u0020\u043d\u0430\u0020\u007b\u0031\u007d\u0029\u0020\u0431\u0443\u043b\u0430\u0020\u0441\u0442\u0432\u043e\u0440\u0435\u043d\u0430\u0020\u0432\u0020\u007b\u0032\u007d\u0020\u0028\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u0434\u0020\u007b\u0033\u007d\u0029\u002e\u0020\u0429\u043e\u0431\u0020\u0434\u0456\u0437\u043d\u0430\u0442\u0438\u0441\u044c\u0020\u0431\u0456\u043b\u044c\u0448\u0435\u0020\u043f\u0440\u043e\u0020\u0442\u0435\u002c\u0020\u0449\u043e\u0020\u0432\u0438\u0020\u043c\u043e\u0436\u0435\u0442\u0435\u0020\u0437\u0440\u043e\u0431\u0438\u0442\u0438\u0020\u0437\u0020\u0432\u0430\u0448\u043e\u044e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0435\u002c\u0020\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u043d\u044c\u0442\u0435\u0020\u0440\u043e\u0437\u0434\u0456\u043b\u0020\u0022\u0423\u043f\u0440\u0430\u0432\u043b\u0456\u043d\u043d\u044f\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u002d\u0020\u0414\u043e\u0432\u0456\u0434\u043d\u0438\u043a\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0022\u0020\u043d\u0430\u0020\u007b\u0034\u007d\u002f\u007b\u0035\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0020\u002e 
+\u0023\u0020\u0042\u0075\u006e\u0064\u006c\u0065\u0020\u0066\u0069\u006c\u0065\u0020\u0065\u0064\u0069\u0074\u006f\u0072\u0073\u002c\u0020\u0070\u006c\u0065\u0061\u0073\u0065\u0020\u006e\u006f\u0074\u0065\u0020\u0074\u0068\u0061\u0074\u0020\u0022\u006e\u006f\u0074\u0069\u0066\u0069\u0063\u0061\u0074\u0069\u006f\u006e\u002e\u0065\u006d\u0061\u0069\u006c\u002e\u0063\u0072\u0065\u0061\u0074\u0065\u0044\u0061\u0074\u0061\u0073\u0065\u0074\u0022\u0020\u0069\u0073\u0020\u0075\u0073\u0065\u0064\u0020\u0069\u006e\u0020\u0061\u0020\u0075\u006e\u0069\u0074\u0020\u0074\u0065\u0073\u0074 +notification.email.createDataset=\u0412\u0430\u0448\u0020\u043d\u043e\u0432\u0438\u0439\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u043f\u0456\u0434\u0020\u043d\u0430\u0437\u0432\u043e\u044e\u0020\u007b\u0030\u007d\u0020\u0028\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u0434\u0020\u043d\u0430\u0020\u007b\u0031\u007d\u0029\u0020\u0431\u0443\u0432\u0020\u0441\u0442\u0432\u043e\u0440\u0435\u043d\u0438\u0439\u0020\u0443\u0020\u007b\u0032\u007d\u0020\u0028\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u0434\u0020\u007b\u0033\u007d\u0029\u002e\u0020\u0429\u043e\u0431\u0020\u0434\u0456\u0437\u043d\u0430\u0442\u0438\u0441\u044f\u0020\u0431\u0456\u043b\u044c\u0448\u0435\u0020\u043f\u0440\u043e\u0020\u0442\u0435\u002c\u0020\u0449\u043e\u0020\u0432\u0438\u0020\u043c\u043e\u0436\u0435\u0442\u0435\u0020\u0437\u0440\u043e\u0431\u0438\u0442\u0438\u0020\u0437\u0020\u043d\u0430\u0431\u043e\u0440\u043e\u043c\u0020\u0434\u0430\u043d\u0438\u0445\u002c\u0020\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u043d\u044c\u0442\u0435\u0020\u0022\u0423\u043f\u0440\u0430\u0432\u043b\u0456\u043d\u043d\u044f\u0020\u043d\u0430\u0431\u043e\u0440\u043e\u043c\u0020\u0434\u0430\u043d\u0438\u0445\u0022\u0020\u002d\u0020\u0414\u043e\u0432\u0456\u0434\u043d\u0438\u043a\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u043d\u0430\u0020\u007b\u0034\u007d\u002f\u007b\u0035\u007d\u
0020\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0073\u0065\u0074\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u002e +notification.email.wasSubmittedForReview=\u007b\u0030\u007d\u0020\u0028\u0434\u0438\u0432\u002e\u0020\u043d\u0430\u0020\u007b\u0031\u007d\u0029\u0020\u0431\u0443\u043b\u043e\u0020\u043f\u043e\u0434\u0430\u043d\u043e\u0020\u043d\u0430\u0020\u0440\u043e\u0437\u0433\u043b\u044f\u0434\u0020\u0434\u043b\u044f\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u0443\u0432\u0430\u043d\u043d\u044f\u0020\u0432\u0020\u007b\u0032\u007d\u0020\u0028\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u0434\u0020\u043d\u0430\u0020\u007b\u0033\u007d\u0029\u002e\u0020\u041d\u0435\u0020\u0437\u0430\u0431\u0443\u0434\u044c\u0442\u0435\u0020\u0439\u043e\u0433\u043e\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u0443\u0432\u0430\u0442\u0438\u0020\u0430\u0431\u043e\u0020\u043d\u0430\u0434\u0456\u0441\u043b\u0430\u0442\u0438\u0020\u043d\u0430\u0437\u0430\u0434 +\u0430\u0432\u0442\u043e\u0440\u0443\u005c\u0021\u0020 +notification.email.wasReturnedByReviewer=\u007b\u0030\u007d\u0020\u0028\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u0434\u0020\u043d\u0430\u0020\u007b\u0031\u007d\u0029\u0020\u0431\u0443\u0432\u0020\u043f\u043e\u0432\u0435\u0440\u043d\u0435\u043d\u0438\u0439\u0020\u043a\u0443\u0440\u0430\u0442\u043e\u0440\u043e\u043c\u0020\u007b\u0032\u007d\u0020\u0028\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u0434\u0020\u0443\u0020\u007b\u0033\u007d\u0029\u002e +notification.email.wasPublished=\u007b\u0030\u007d\u0020\u0028\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u0434\u0020\u043d\u0430\u0020\u007b\u0031\u007d\u0029\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u043e\u0432\u0430\u043d\u043e\u0020\u0432\u0020\u007b\u0032\u007d\u0020\u0028\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u0434\u0020\u043d\u0430\u0020\u007b\u0033\u007d\u0029\u002e + 
notification.email.worldMap.added=\u007b\u0030\u007d\u0020\u0028\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u0434\u0020\u043d\u0430\u0020\u007b\u0031\u007d\u0029\u0020\u0434\u043e\u0020\u043d\u044c\u043e\u0433\u043e\u0020\u0434\u043e\u0434\u0430\u043d\u0456\u0020\u0434\u0430\u043d\u0456\u0020\u0020\u0448\u0430\u0440\u0456\u0432\u0020\u0057\u006f\u0072\u006c\u0064\u004d\u0061\u0070\u002e +notification.email.closing=\u005c\u006e\u005c\u006e\u0414\u044f\u043a\u0443\u0454\u043c\u043e\u0020\u0432\u0430\u043c\u002c\u005c\u006e\u007b\u0030\u007d +notification.email.assignRole=\u0020\u0412\u0438\u0020\u0437\u0430\u0440\u0430\u0437\u0020\u007b\u0030\u007d\u0020\u0434\u043b\u044f\u0020\u007b\u0031\u007d\u0020\u0022\u007b\u0032\u007d\u0022\u0020\u0028\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u0434\u0020\u0443\u0020\u007b\u0033\u007d\u0029\u002e +notification.email.revokeRole=\u0020\u041e\u0434\u043d\u0430\u0020\u0437\u0020\u0432\u0430\u0448\u0438\u0445\u0020\u0440\u043e\u043b\u0435\u0439\u0020\u0434\u043b\u044f\u0020\u007b\u0030\u007d\u0020\u0022\u007b\u0031\u007d\u0022\u0020\u0431\u0443\u043b\u0430\u0020\u0441\u043a\u0430\u0441\u043e\u0432\u0430\u043d\u0430\u0020\u0028\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u0434\u0020\u043d\u0430\u0020\u007b\u0032\u007d\u0029\u002e +notification.email.changeEmail=\u0020\u041f\u0440\u0438\u0432\u0456\u0442\u002c\u0020\u007b\u0030\u007d\u002e\u0020\u007b\u0031\u007d\u005c\u006e\u005c\u006e\u0417\u0432\u0027\u044f\u0436\u0456\u0442\u044c\u0441\u044f\u0020\u0437\u0020\u043d\u0430\u043c\u0438\u002c\u0020\u044f\u043a\u0449\u043e\u0020\u0432\u0438\u0020\u043d\u0435\u0020\u043f\u043b\u0430\u043d\u0443\u0432\u0430\u043b\u0438\u0020\u0446\u044e\u0020\u0437\u043c\u0456\u043d\u0443\u0020\u0430\u0431\u043e\u0020\u0432\u0430\u043c\u0020\u043f\u043e\u0442\u0440\u0456\u0431\u043d\u0430\u0020\u0434\u043e\u043f\u043e\u043c\u043e\u0433\u0430\u002e +hours=\u0433\u043e\u0434\u0438\u043d\u0438 
+hour=\u0020\u0433\u043e\u0434\u0438\u043d\u0430 +minutes=\u0020\u0445\u0432\u0438\u043b\u0438\u043d\u0438 +minute=\u0020\u0445\u0432\u0438\u043b\u0438\u043d\u0430 +notification.email.checksumfail.subject=\u007b\u0030\u007d\u003a\u0020\u041f\u043e\u043c\u0438\u043b\u043a\u0430\u0020\u043f\u0440\u0438\u0020\u043f\u0435\u0440\u0435\u0432\u0456\u0440\u0446\u0456\u0020\u043a\u043e\u043d\u0442\u0440\u043e\u043b\u044c\u043d\u043e\u0457\u0020\u0441\u0443\u043c\u0438\u0020\u0443\u0020\u0432\u0430\u0448\u043e\u043c\u0443\u0020\u0437\u0430\u0432\u0430\u043d\u0442\u0430\u0436\u0435\u043d\u043d\u0456\u002e\u0020 +notification.email.import.filesystem.subject=\u0020\u041d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u007b\u0030\u007d\u0020\u0443\u0441\u043f\u0456\u0448\u043d\u043e\u0020\u0437\u0430\u0432\u0430\u043d\u0442\u0430\u0436\u0435\u043d\u043e\u0020\u0442\u0430\u0020\u043f\u0456\u0434\u0442\u0432\u0435\u0440\u0434\u0436\u0435\u043d\u043e\u002e +notification.email.import.checksum.subject=\u007b\u0030\u007d\u003a\u0020\u041f\u0435\u0440\u0435\u0432\u0456\u0440\u043a\u0430\u0020\u043a\u043e\u043d\u0442\u0440\u043e\u043b\u044c\u043d\u043e\u0457\u0020\u0441\u0443\u043c\u0438\u0020\u0432\u0430\u0448\u043e\u0433\u043e\u0020\u0444\u0430\u0439\u043b\u0443\u0020\u0437\u0430\u0432\u0435\u0440\u0448\u0435\u043d\u0430\u002e + +\u0023\u0020\u0070\u0061\u0073\u0073\u0077\u006f\u0072\u0064\u0072\u0065\u0073\u0065\u0074\u002e\u0078\u0068\u0074\u006d\u006c +pageTitle.passwdReset.pre=\u0020\u0421\u043a\u0438\u0434\u0430\u043d\u043d\u044f\u0020\u043f\u0430\u0440\u043e\u043b\u044f\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0430\u002e +passwdReset.token=\u043c\u0430\u0440\u043a\u0435\u0440\u003a 
+passwdReset.userLookedUp=\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0020\u0448\u0443\u043a\u0430\u0432\u003a +passwdReset.emailSubmitted=\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u043f\u043e\u0448\u0442\u0430\u0020\u0434\u043e\u0441\u0442\u0430\u0432\u043b\u0435\u043d\u0430\u003a +passwdReset.details=\u007b\u0030\u007d\u0020\u0421\u043a\u0438\u0434\u0430\u043d\u043d\u044f\u0020\u043f\u0430\u0440\u043e\u043b\u044f\u0020\u007b\u0031\u007d\u0020\u002d\u0020\u0429\u043e\u0431\u0020\u0440\u043e\u0437\u043f\u043e\u0447\u0430\u0442\u0438\u0020\u043f\u0440\u043e\u0446\u0435\u0441\u0020\u0441\u043a\u0438\u0434\u0430\u043d\u043d\u044f\u0020\u043f\u0430\u0440\u043e\u043b\u044f\u002c\u0020\u0432\u0432\u0435\u0434\u0456\u0442\u044c\u0020\u0430\u0434\u0440\u0435\u0441\u0443\u0020\u0441\u0432\u043e\u0454\u0457\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u043e\u0457\u0020\u043f\u043e\u0448\u0442\u0438\u002e +passwdReset.submitRequest=\u0020\u041d\u0430\u0434\u0456\u0441\u043b\u0430\u0442\u0438\u0020\u0437\u0430\u043f\u0438\u0442\u0020\u043d\u0430\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u002e +passwdReset.successSubmit.tip=\u0020\u042f\u043a\u0449\u043e\u0020\u0446\u044f\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u043f\u043e\u0448\u0442\u0430\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u043d\u0430\u0020\u0437\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u043e\u043c\u002c\u0020\u0442\u043e\u0020\u043d\u0435\u044e\u0020\u0432\u0430\u043c\u0020\u043d\u0430\u0434\u0456\u0448\u043b\u044e\u0442\u044c\u0020\u0434\u043e\u0434\u0430\u0442\u043a\u043e\u0432\u0456\u0020\u0456\u043d\u0441\u0442\u0440\u0443\u043a\u0446\u0456\u0457\u0020\u0449\u043e\u0434\u043e\u0020\u007b\u0030\u007d\u002e +passwdReset.debug=\u0044\u0045\u0042\u0055\u0047\u0020\u043d\u0430\u043b\u0430\u0433\u043e\u0434\u0436\u0435\u043d\u043d\u044f 
+passwdReset.resetUrl=\u0020\u0055\u0052\u004c\u0020\u0441\u043a\u0438\u0434\u0430\u043d\u043d\u044f\u002e +passwdReset.noEmail.tip=\u0020\u041d\u0430\u0441\u043f\u0440\u0430\u0432\u0434\u0456\u0020\u0436\u043e\u0434\u043d\u043e\u0433\u043e\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u043e\u0433\u043e\u0020\u043b\u0438\u0441\u0442\u0430\u0020\u043d\u0435\u0020\u0431\u0443\u043b\u043e\u0020\u043d\u0430\u0434\u0456\u0441\u043b\u0430\u043d\u043e\u002c\u0020\u043e\u0441\u043a\u0456\u043b\u044c\u043a\u0438\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u043d\u0435\u0020\u0432\u0434\u0430\u043b\u043e\u0441\u044f\u0020\u0437\u043d\u0430\u0439\u0442\u0438\u0020\u0437\u0430\u0020\u0432\u043a\u0430\u0437\u0430\u043d\u043e\u044e\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u043e\u044e\u0020\u0430\u0434\u0440\u0435\u0441\u043e\u044e\u0020\u007b\u0030\u007d\u002c\u0020\u0430\u043b\u0435\u0020\u043c\u0438\u0020\u043f\u0440\u043e\u0020\u0446\u0435\u0020\u043d\u0435\u0020\u0432\u043a\u0430\u0437\u0443\u0454\u043c\u043e\u002c\u0020\u0442\u043e\u043c\u0443\u0020\u0449\u043e\u0020\u043c\u0438\u0020\u043d\u0435\u0020\u0437\u043b\u043e\u0432\u043c\u0438\u0441\u043d\u0456\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0456\u002c\u0020\u0449\u043e\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u044e\u0442\u044c\u0020\u0444\u043e\u0440\u043c\u0443\u002c\u0020\u0449\u043e\u0431\u0020\u0432\u0438\u0437\u043d\u0430\u0447\u0438\u0442\u0438\u002c\u0020\u0447\u0438\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u043d\u0438\u0439\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0020\u0437\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u043e\u044e\u0020\u0430\u0434\u0440\u0435\u0441\u043e\u044e\u002e 
+passwdReset.illegalLink.tip=\u0020\u041f\u043e\u0441\u0438\u043b\u0430\u043d\u043d\u044f\u0020\u043d\u0430\u0020\u0437\u043c\u0456\u043d\u0443\u0020\u0412\u0430\u0448\u043e\u0433\u043e\u0020\u043f\u0430\u0440\u043e\u043b\u044f\u0020\u043d\u0435\u0434\u0456\u0439\u0441\u043d\u0435\u002e\u0020\u042f\u043a\u0449\u043e\u0020\u0432\u0430\u043c\u0020\u043f\u043e\u0442\u0440\u0456\u0431\u043d\u043e\u0020\u0441\u043a\u0438\u043d\u0443\u0442\u0438\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u002c\u0020\u007b\u0030\u007d\u0020\u043d\u0430\u0442\u0438\u0441\u043d\u0456\u0442\u044c\u0020\u0442\u0443\u0442\u0020\u007b\u0031\u007d\u002c\u0020\u0449\u043e\u0431\u0020\u043f\u043e\u043f\u0440\u043e\u0441\u0438\u0442\u0438\u0020\u043f\u0440\u043e\u0020\u043f\u043e\u0432\u0442\u043e\u0440\u043d\u0435\u0020\u0441\u043a\u0438\u0434\u0430\u043d\u043d\u044f\u0020\u0432\u0430\u0448\u043e\u0433\u043e\u0020\u043f\u0430\u0440\u043e\u043b\u044e\u002e +passwdReset.newPasswd.details=\u007b\u0030\u007d\u0020\u0421\u043a\u0438\u0434\u0430\u043d\u043d\u044f\u0020\u043f\u0430\u0440\u043e\u043b\u044f\u0020\u007b\u0031\u007d\u0020\u2013\u0020\u041d\u0430\u0448\u0456\u0020\u0432\u0438\u043c\u043e\u0433\u0438\u0020\u0434\u043e\u0020\u043f\u0430\u0440\u043e\u043b\u044f\u0020\u0437\u043c\u0456\u043d\u0438\u043b\u0438\u0441\u044f\u002e\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u0432\u0438\u0431\u0435\u0440\u0456\u0442\u044c\u0020\u0441\u0438\u043b\u044c\u043d\u0438\u0439\u0020\u043f\u0430\u0440\u043e\u043b\u044c\u002c\u0020\u044f\u043a\u0438\u0439\u0020\u0432\u0456\u0434\u043f\u043e\u0432\u0456\u0434\u0430\u0454\u0020\u043a\u0440\u0438\u0442\u0435\u0440\u0456\u044f\u043c\u0020\u0432\u043a\u0430\u0437\u0430\u043d\u0438\u043c\u0020\u043d\u0438\u0436\u0447\u0435\u002e +passwdReset.newPasswd =\u0020\u041d\u043e\u0432\u0438\u0439\u0020\u043f\u0430\u0440\u043e\u043b\u044c +passwdReset.rePasswd 
=\u0020\u041f\u043e\u0432\u0442\u043e\u0440\u0456\u0442\u044c\u0020\u043f\u0430\u0440\u043e\u043b\u044c +passwdReset.resetBtn =\u0020\u0421\u043a\u0438\u0434\u0430\u043d\u043d\u044f\u0020\u043f\u0430\u0440\u043e\u043b\u044f + + +\u0023\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0078\u0068\u0074\u006d\u006c +dataverse.title=\u0020\u041f\u0440\u043e\u0435\u043a\u0442\u002c\u0020\u0432\u0456\u0434\u0434\u0456\u043b\u002c\u0020\u0443\u043d\u0456\u0432\u0435\u0440\u0441\u0438\u0442\u0435\u0442\u002c\u0020\u043f\u0440\u043e\u0444\u0435\u0441\u043e\u0440\u0020\u0430\u0431\u043e\u0020\u0436\u0443\u0440\u043d\u0430\u043b\u002c\u0020\u0434\u043b\u044f\u0020\u044f\u043a\u0438\u0445\u0020\u0446\u044f\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u043c\u0456\u0441\u0442\u0438\u0442\u0438\u043c\u0435\u0020\u0434\u0430\u043d\u0456\u002e +dataverse.enterName=\u0020\u0412\u0432\u0435\u0434\u0456\u0442\u044c\u0020\u0456\u043c\u0027\u044f\u0020\u002e\u002e\u002e +dataverse.host.title=\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u0449\u043e\u0020\u043c\u0456\u0441\u0442\u0438\u0442\u044c\u0020\u0446\u0456\u0020\u0434\u0430\u043d\u0456\u002e +dataverse.identifier.title=\u0020\u041a\u043e\u0440\u043e\u0442\u043a\u0435\u0020\u0456\u043c\u0027\u044f\u002c\u0020\u044f\u043a\u0435\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0454\u0442\u044c\u0441\u044f\u0020\u0434\u043b\u044f\u0020\u0055\u0052\u004c\u002d\u0430\u0434\u0440\u0435\u0441\u0438\u0020\u0446\u0456\u0454\u0457\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.affiliation.title=\u0020\u041e\u0440\u0433\u0430\u043d\u0456\u0437\u0430\u0446\u0456\u044f\u002c\u0020\u0437\u0020\u044f\u043a\u043e\u044e\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u043d\u0430\u0020\u0446\u044f\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e 
+dataverse.category=\u041a\u0430\u0442\u0435\u0433\u043e\u0440\u0456\u044f\u002e +dataverse.category.title=\u0020\u0422\u0438\u043f\u002c\u0020\u044f\u043a\u0438\u0439\u0020\u043d\u0430\u0439\u0431\u0456\u043b\u044c\u0448\u0435\u0020\u0432\u0456\u0434\u043e\u0431\u0440\u0430\u0436\u0430\u0454\u0020\u0446\u044e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.type.selectTab.top=\u0412\u0438\u0431\u0435\u0440\u0456\u0442\u044c\u0020\u043e\u0434\u043d\u0443\u002e\u002e\u002e +dataverse.type.selectTab.researchers=\u0020\u0414\u043e\u0441\u043b\u0456\u0434\u043d\u0438\u043a +dataverse.type.selectTab.researchProjects=\u0020\u0414\u043e\u0441\u043b\u0456\u0434\u043d\u0438\u0446\u044c\u043a\u0438\u0439\u0020\u043f\u0440\u043e\u0435\u043a\u0442\u002e +dataverse.type.selectTab.journals=\u0416\u0443\u0440\u043d\u0430\u043b\u002e +dataverse.type.selectTab.organizationsAndInsitutions=\u0020\u041e\u0440\u0433\u0430\u043d\u0456\u0437\u0430\u0446\u0456\u044f\u0020\u0430\u0431\u043e\u0020\u0443\u0441\u0442\u0430\u043d\u043e\u0432\u0430\u002e +dataverse.type.selectTab.teachingCourses=\u0020\u041d\u0430\u0432\u0447\u0430\u043b\u044c\u043d\u0438\u0439\u0020\u043a\u0443\u0440\u0441\u002e +dataverse.type.selectTab.uncategorized=\u0420\u0456\u0437\u043d\u0435\u002e +dataverse.type.selectTab.researchGroup=\u0414\u043e\u0441\u043b\u0456\u0434\u043d\u0438\u0446\u044c\u043a\u0430\u0020\u0433\u0440\u0443\u043f\u0430\u002e +dataverse.type.selectTab.laboratory=\u0020\u041b\u0430\u0431\u043e\u0440\u0430\u0442\u043e\u0440\u0456\u044f\u002e +dataverse.type.selectTab.department=\u0412\u0456\u0434\u0434\u0456\u043b\u002e 
+dataverse.description.title=\u0020\u0420\u0435\u0437\u044e\u043c\u0435\u002c\u0020\u0449\u043e\u0020\u043e\u043f\u0438\u0441\u0443\u0454\u0020\u043c\u0435\u0442\u0443\u002c\u0020\u0445\u0430\u0440\u0430\u043a\u0442\u0435\u0440\u0020\u0447\u0438\u0020\u043e\u0431\u0441\u044f\u0433\u0020\u0434\u0430\u043d\u043e\u0457\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.email=\u0045\u006d\u0061\u0069\u006c +dataverse.email.title=\u0020\u0410\u0434\u0440\u0435\u0441\u0430\u0020\u0028\u0438\u0029\u0020\u0435\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u043e\u0457\u0020\u043f\u043e\u0448\u0442\u0438\u0020\u043a\u043e\u043d\u0442\u0430\u043a\u0442\u0443\u0020\u0028\u0456\u0432\u0029\u0020\u0434\u043b\u044f\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.share.dataverseShare=\u0020\u041f\u043e\u0434\u0456\u043b\u0438\u0442\u0438\u0441\u044f\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.share.dataverseShare.tip=\u0020\u041f\u043e\u0434\u0456\u043b\u0456\u0442\u044c\u0441\u044f\u0020\u0446\u0456\u0454\u044e\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0443\u0020\u0441\u0432\u043e\u0457\u0445\u0020\u0443\u043b\u044e\u0431\u043b\u0435\u043d\u0438\u0445\u0020\u0441\u043e\u0446\u0456\u0430\u043b\u044c\u043d\u0438\u0445\u0020\u043c\u0435\u0440\u0435\u0436\u0430\u0445\u002e +dataverse.share.dataverseShare.shareText=\u0020\u041f\u0435\u0440\u0435\u0433\u043b\u044f\u043d\u044c\u0442\u0435\u0020\u0446\u044e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.subject.title=\u0020\u0422\u0435\u043c\u0430\u0020\u0028\u0438\u0029\u002c\u0020\u043f\u0440\u043e\u0020\u044f\u043a\u0456\u0020\u0439\u0434\u0435\u0442\u044c\u0441\u044f\u0020\u0443\u0020\u0446\u0456\u0439\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.metadataElements=\u0020\u043f\u043e\u043b\u044f\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u0445\u002e 
+dataverse.metadataElements.tip=\u0020\u0412\u0438\u0431\u0435\u0440\u0456\u0442\u044c\u0020\u043f\u043e\u043b\u044f\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u0445\u0020\u0434\u043b\u044f\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u0430\u043d\u043d\u044f\u0020\u0432\u0020\u0448\u0430\u0431\u043b\u043e\u043d\u0430\u0445\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0456\u0020\u043f\u0440\u0438\u0020\u0434\u043e\u0434\u0430\u0432\u0430\u043d\u043d\u0456\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0434\u043e\u0020\u0446\u0456\u0454\u0457\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.metadataElements.from.tip=\u0020\u0412\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0439\u0442\u0435\u0020\u043f\u043e\u043b\u044f\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u0445\u0020\u0437\u0020\u007b\u0030\u007d\u002e +dataverse.resetModifications=\u0020\u0421\u043a\u0438\u0434\u0430\u043d\u043d\u044f\u0020\u043c\u043e\u0434\u0438\u0444\u0456\u043a\u0430\u0446\u0456\u0439\u002e 
+dataverse.resetModifications.text=\u0020\u0412\u0438\u0020\u0432\u043f\u0435\u0432\u043d\u0435\u043d\u0456\u002c\u0020\u0449\u043e\u0020\u0445\u043e\u0447\u0435\u0442\u0435\u0020\u0441\u043a\u0438\u043d\u0443\u0442\u0438\u0020\u0432\u0438\u0431\u0440\u0430\u043d\u0456\u0020\u043f\u043e\u043b\u044f\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u0445\u003f\u0020\u042f\u043a\u0449\u043e\u0020\u0432\u0438\u0020\u0446\u0435\u0020\u0437\u0440\u043e\u0431\u0438\u0442\u0435\u002c\u0020\u0431\u0443\u0434\u044c\u002d\u044f\u043a\u0456\u0020\u0437\u0440\u043e\u0431\u043b\u0435\u043d\u0456\u0020\u0432\u0430\u043c\u0438\u0020\u043d\u0430\u043b\u0430\u0448\u0442\u0443\u0432\u0430\u043d\u043d\u044f\u0020\u0028\u043f\u0440\u0438\u0445\u043e\u0432\u0430\u043d\u0456\u002c\u0020\u043e\u0431\u043e\u0432\u0027\u044f\u0437\u043a\u043e\u0432\u0456\u002c\u0020\u043d\u0435\u043e\u0431\u043e\u0432\u0027\u044f\u0437\u043a\u043e\u0432\u0456\u0029\u0020\u0431\u0456\u043b\u044c\u0448\u0435\u0020\u043d\u0435\u0020\u0437\u0027\u044f\u0432\u043b\u044f\u0442\u044c\u0441\u044f\u002e +dataverse.field.required=\u0028\u043e\u0431\u043e\u0432\u0027\u044f\u0437\u043a\u043e\u0432\u0456\u0029 +dataverse.field.example1=\u0020\u0028\u041f\u0440\u0438\u043a\u043b\u0430\u0434\u0438\u003a +dataverse.field.example2=\u0029 +dataverse.field.set.tip=\u005b\u002b\u005d\u0020\u041f\u0435\u0440\u0435\u0433\u043b\u044f\u043d\u044c\u0442\u0435\u0020\u043f\u043e\u043b\u044f\u0020\u0456\u0020\u043d\u0430\u043b\u0430\u0448\u0442\u0443\u0439\u0442\u0435\u0020\u0457\u0445\u0020\u044f\u043a\u0020\u043f\u0440\u0438\u0445\u043e\u0432\u0430\u043d\u0456\u002c\u0020\u043e\u0431\u043e\u0432\u0027\u044f\u0437\u043a\u043e\u0432\u0456\u0020\u0430\u0431\u043e\u0020\u043d\u0435\u043e\u0431\u043e\u0432\u0027\u044f\u0437\u043a\u043e\u0432\u0456\u002e +dataverse.field.set.view=\u005b\u002b\u005d\u0020\u041f\u0435\u0440\u0435\u0433\u043b\u044f\u043d\u0443\u0442\u0438\u0020\u043f\u043e\u043b\u044f 
+dataverse.field.requiredByDataverse=\u041e\u0431\u043e\u0432\u0027\u044f\u0437\u043a\u043e\u0432\u0435\u0020\u0434\u043b\u044f\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.facetPickList.text=\u0020\u041e\u0433\u043b\u044f\u0434\u0020\u002f\u0020\u043f\u043e\u0448\u0443\u043a\u0020\u0430\u0441\u043f\u0435\u043a\u0442\u0456\u0432 +dataverse.facetPickList.tip=\u0020\u0412\u0438\u0431\u0435\u0440\u0456\u0442\u044c\u0020\u043f\u043e\u043b\u044f\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u0445\u002c\u0020\u044f\u043a\u0456\u0020\u0431\u0443\u0434\u0443\u0442\u044c\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0432\u0430\u0442\u0438\u0441\u044f\u0020\u044f\u043a\u0020\u0430\u0441\u043f\u0435\u043a\u0442\u0438\u0020\u0434\u043b\u044f\u0020\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u0434\u0443\u0020\u043d\u0430\u0431\u043e\u0440\u0456\u0432\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u0443\u0020\u0446\u0456\u0439\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.facetPickList.facetsFromHost.text=\u0020\u0412\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0439\u0442\u0435\u0020\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u0434\u0020\u002f\u0020\u043f\u043e\u0448\u0443\u043a\u0020\u0430\u0441\u043f\u0435\u043a\u0442\u0456\u0432\u0020\u0437\u0020\u007b\u0030\u007d +dataverse.facetPickList.metadataBlockList.all=\u0020\u0423\u0441\u0456\u0020\u043f\u043e\u043b\u044f\u0020\u043c\u0435\u0442\u0430\u0434\u0430\u043d\u0438\u0445 +dataverse.edit=\u0020\u0420\u0435\u0434\u0430\u0433\u0443\u0432\u0430\u0442\u0438 +dataverse.option.generalInfo=\u0020\u0417\u0430\u0433\u0430\u043b\u044c\u043d\u0430\u0020\u0456\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u044f +dataverse.option.themeAndWidgets=\u0020\u0422\u0435\u043c\u0430\u0020\u002b\u0020\u0412\u0456\u0434\u0436\u0435\u0442\u0438 
+dataverse.option.featuredDataverse=\u0020\u0425\u0430\u0440\u0430\u043a\u0442\u0435\u0440\u043d\u0456\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073 +dataverse.option.permissions=\u0020\u0414\u043e\u0437\u0432\u043e\u043b\u0438 +dataverse.option.dataverseGroups=\u0413\u0440\u0443\u043f\u0438 +dataverse.option.datasetTemplates=\u0428\u0430\u0431\u043b\u043e\u043d\u0438\u0020\u043d\u0430\u0431\u043e\u0440\u0443\u0020\u0434\u0430\u043d\u0438\u0445 +dataverse.option.datasetGuestbooks=\u0020\u041d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0433\u043e\u0441\u0442\u044c\u043e\u0432\u0438\u0445\u0020\u043a\u043d\u0438\u0433 +dataverse.option.deleteDataverse=\u0020\u0412\u0438\u0434\u0430\u043b\u0438\u0442\u0438\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.publish.btn=\u041e\u043f\u0443\u0431\u043b\u0456\u043a\u0443\u0432\u0430\u0442\u0438 +dataverse.publish.header=\u0020\u041e\u043f\u0443\u0431\u043b\u0456\u043a\u0443\u0432\u0430\u0442\u0438\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.nopublished=\u041d\u0435\u043c\u0430\u0454\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u043e\u0432\u0430\u043d\u0438\u0445\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073 +dataverse.nopublished.tip=\u0020\u0414\u043b\u044f\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u0430\u043d\u043d\u044f\u0020\u0446\u0456\u0454\u0457\u0020\u0444\u0443\u043d\u043a\u0446\u0456\u0457\u0020\u0432\u0438\u0020\u043f\u043e\u0432\u0438\u043d\u043d\u0456\u0020\u043c\u0430\u0442\u0438\u0020\u043f\u0440\u0438\u043d\u0430\u0439\u043c\u043d\u0456\u0020\u043e\u0434\u043d\u0443\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u043e\u0432\u0430\u043d\u0443\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e 
+dataverse.contact=\u0045\u043b\u0435\u043a\u0442\u0440\u043e\u043d\u043d\u0430\u0020\u0430\u0434\u0440\u0435\u0441\u0430\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020 +dataset.link=\u004c\u0069\u006e\u006b\u0020\u0044\u0061\u0074\u0061\u0073\u0065\u0074\u0020\u041f\u043e\u0441\u0438\u043b\u0430\u043d\u043d\u044f\u0020\u043d\u0430\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u002e +dataverse.link=\u0020\u041f\u043e\u0441\u0438\u043b\u0430\u043d\u043d\u044f\u0020\u043d\u0430\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.link.btn.tip=\u0020\u041f\u043e\u0441\u0438\u043b\u0430\u043d\u043d\u044f\u0020\u043d\u0430\u0020\u0412\u0430\u0448\u0443\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.link.yourDataverses=\u0412\u0430\u0448\u0430\u0020\u007b\u0030\u002c\u0020\u0432\u0438\u0431\u0456\u0440\u002c\u0020\u0031\u0023\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u007c\u0032\u0023\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u007d +dataverse.link.save=\u0020\u0417\u0431\u0435\u0440\u0435\u0433\u0442\u0438\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u043d\u0456\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020 +dataset.link.save=\u0020\u0417\u0431\u0435\u0440\u0435\u0433\u0442\u0438\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u043d\u0456\u0020\u043d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445 +dataverse.link.dataverse.choose=\u0020\u0412\u0438\u0431\u0435\u0440\u0456\u0442\u044c\u002c\u0020\u044f\u043a\u0438\u0439\u0020\u0456\u0437\u0020\u0432\u0430\u0448\u0438\u0445\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0020\u0432\u0438\u0020\u0445\u043e\u0442\u0456\u043b\u0438\u0020\u0431\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u0442\u0438\u0020\u0437\u0020\u0446\u0456\u0454\u044e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e 
+dataverse.link.dataset.choose=\u0020\u0412\u0438\u0431\u0435\u0440\u0456\u0442\u044c\u002c\u0020\u044f\u043a\u0438\u0439\u0020\u0456\u0437\u0020\u0432\u0430\u0448\u0438\u0445\u0020\u043d\u0430\u0431\u043e\u0440\u0456\u0432\u0020\u0434\u0430\u043d\u0438\u0445\u0020\u0432\u0438\u0020\u0445\u043e\u0442\u0456\u043b\u0438\u0020\u0431\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u0442\u0438\u0020\u0437\u0020\u0446\u0438\u043c\u0020\u043d\u0430\u0431\u043e\u0440\u043e\u043c\u0020\u0434\u0430\u043d\u0438\u0445\u002e +dataverse.link.no.choice=\u0020\u0423\u0020\u0432\u0430\u0441\u0020\u0454\u0020\u043e\u0434\u043d\u0430\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u0434\u043e\u0020\u044f\u043a\u043e\u0457\u0020\u0432\u0438\u0020\u043c\u043e\u0436\u0435\u0442\u0435\u0020\u0434\u043e\u0434\u0430\u0442\u0438\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u043d\u0456\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u0456\u0020\u043d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445\u002e 
+dataverse.link.no.linkable=\u0020\u0429\u043e\u0431\u0020\u043c\u0430\u0442\u0438\u0020\u0437\u043c\u043e\u0433\u0443\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u0442\u0438\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u0431\u043e\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u002c\u0020\u0432\u0430\u043c\u0020\u043f\u043e\u0442\u0440\u0456\u0431\u043d\u043e\u0020\u043c\u0430\u0442\u0438\u0020\u0432\u043b\u0430\u0441\u043d\u0443\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0020\u0429\u043e\u0431\u0020\u0440\u043e\u0437\u043f\u043e\u0447\u0430\u0442\u0438\u002c\u0020\u043d\u0430\u0442\u0438\u0441\u043d\u0456\u0442\u044c\u0020\u043a\u043d\u043e\u043f\u043a\u0443\u0020\u0414\u043e\u0434\u0430\u0442\u0438\u0020\u0434\u0430\u043d\u0456\u0020\u043d\u0430\u0020\u0434\u043e\u043c\u0430\u0448\u043d\u0456\u0439\u0020\u0441\u0442\u043e\u0440\u0456\u043d\u0446\u0456\u002e +dataverse.link.no.linkable.remaining=\u0020\u0412\u0438\u0020\u0432\u0436\u0435\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u043b\u0438\u0020\u0432\u0441\u0456\u0020\u0432\u0430\u0448\u0456\u0020\u043f\u0440\u0438\u0439\u043d\u044f\u0442\u043d\u0456\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u002e +dataverse.savedsearch.link=\u0020\u041f\u043e\u0448\u0443\u043a\u0020\u043f\u043e\u0441\u0438\u043b\u0430\u043d\u044c +dataverse.savedsearch.searchquery=\u0020\u041f\u043e\u0448\u0443\u043a\u0020 +dataverse.savedsearch.filterQueries=\u0410\u0441\u043f\u0435\u043a\u0442\u0438 +dataverse.savedsearch.save=\u0020\u0417\u0431\u0435\u0440\u0435\u0433\u0442\u0438\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u043d\u0438\u0439\u0020\u043f\u043e\u0448\u0443\u043a 
+dataverse.savedsearch.dataverse.choose=\u0020\u0412\u0438\u0431\u0435\u0440\u0456\u0442\u044c\u002c\u0020\u0434\u043e\u0020\u044f\u043a\u043e\u0457\u0020\u0437\u0020\u0432\u0430\u0448\u0438\u0445\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0432\u0438\u0020\u0445\u043e\u0442\u0456\u043b\u0438\u0020\u0431\u0020\u043f\u0440\u0438\u0432\u0027\u044f\u0437\u0430\u0442\u0438\u0020\u0446\u0435\u0439\u0020\u043f\u043e\u0448\u0443\u043a\u002e +dataverse.savedsearch.no.choice=\u0020\u0423\u0020\u0432\u0430\u0441\u0020\u0454\u0020\u043e\u0434\u043d\u0430\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u0434\u043e\u0020\u044f\u043a\u043e\u0457\u0020\u0432\u0438\u0020\u043c\u043e\u0436\u0435\u0442\u0435\u0020\u0434\u043e\u0434\u0430\u0442\u0438\u0020\u0437\u0431\u0435\u0440\u0435\u0436\u0435\u043d\u0438\u0439\u0020\u043f\u043e\u0448\u0443\u043a\u002e + +\u0023\u0020\u0042\u0075\u006e\u0064\u006c\u0065\u0020\u0066\u0069\u006c\u0065\u0020\u0065\u0064\u0069\u0074\u006f\u0072\u0073\u002c\u0020\u0070\u006c\u0065\u0061\u0073\u0065\u0020\u006e\u006f\u0074\u0065\u0020\u0074\u0068\u0061\u0074\u0020\u0022\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0073\u0061\u0076\u0065\u0064\u0073\u0065\u0061\u0072\u0063\u0068\u002e\u0073\u0061\u0076\u0065\u002e\u0073\u0075\u0063\u0063\u0065\u0073\u0073\u0022\u0020\u0069\u0073\u0020\u0075\u0073\u0065\u0064\u0020\u0069\u006e\u0020\u0061\u0020\u0075\u006e\u0069\u0074\u0020\u0074\u0065\u0073\u0074 +dataverse.saved.search.success=\u0020\u0417\u0431\u0435\u0440\u0435\u0436\u0435\u043d\u0438\u0439\u0020\u043f\u043e\u0448\u0443\u043a\u0020\u0431\u0443\u0432\u0020\u0443\u0441\u043f\u0456\u0448\u043d\u043e\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u043d\u0438\u0439\u0020\u0437\u0020\u007b\u0030\u007d\u002e 
+dataverse.saved.search.failure=\u0020\u0417\u0431\u0435\u0440\u0435\u0436\u0435\u043d\u0438\u0439\u0020\u043f\u043e\u0448\u0443\u043a\u0020\u043d\u0435\u0020\u0432\u0434\u0430\u043b\u043e\u0441\u044f\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u0442\u0438\u002e +dataverse.linked.success=\u0020\u007b\u0030\u007d\u0020\u0443\u0441\u043f\u0456\u0448\u043d\u043e\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u043d\u043e\u0020\u0437\u0020\u007b\u0031\u007d\u002e +dataverse.linked.success.wait=\u0020\u007b\u0030\u007d\u0020\u0443\u0441\u043f\u0456\u0448\u043d\u043e\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u043d\u043e\u0020\u0437\u0020\u007b\u0031\u007d\u002e\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u0437\u0430\u0447\u0435\u043a\u0430\u0439\u0442\u0435\u002c\u0020\u0449\u043e\u0431\u0020\u0437\u0027\u044f\u0432\u0438\u0432\u0441\u044f\u0020\u0439\u043e\u0433\u043e\u0020\u043a\u043e\u043d\u0442\u0435\u043d\u0442\u002e +dataverse.linked.internalerror=\u007b\u0030\u007d\u0020\u0443\u0441\u043f\u0456\u0448\u043d\u043e\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u043d\u043e\u0020\u0437\u0020\u007b\u0031\u007d\u0020\u0430\u043b\u0435\u0020\u043a\u043e\u043d\u0442\u0435\u043d\u0442\u0020\u043d\u0435\u0020\u0432\u0456\u0434\u043e\u0431\u0440\u0430\u0436\u0430\u0442\u0438\u043c\u0435\u0442\u044c\u0441\u044f\u002c\u0020\u0434\u043e\u043a\u0438\u0020\u0432\u043d\u0443\u0442\u0440\u0456\u0448\u043d\u044f\u0020\u043f\u043e\u043c\u0438\u043b\u043a\u0430\u0020\u043d\u0435\u0020\u0431\u0443\u0434\u0435\u0020\u0432\u0438\u043f\u0440\u0430\u0432\u043b\u0435\u043d\u0430\u002e +dataverse.page.pre=\u0020\u041f\u043e\u043f\u0435\u0440\u0435\u0434\u043d\u0456\u0439 +dataverse.page.next=\u043d\u0430\u0441\u0442\u0443\u043f\u043d\u0438\u0439 +dataverse.byCategory=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u0437\u0430\u0020\u043a\u0430\u0442\u0435\u0433\u043e\u0440\u0456\u0454\u044e\u002e 
+dataverse.displayFeatured=\u0044\u0069\u0073\u0070\u006c\u0061\u0079\u0020\u0412\u0456\u0434\u043e\u0431\u0440\u0430\u0436\u0430\u0442\u0438\u0020\u0432\u0438\u0431\u0440\u0430\u043d\u0456\u0020\u043d\u0438\u0436\u0447\u0435\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u0020\u043d\u0430\u0020\u0434\u043e\u043c\u0430\u0448\u043d\u0456\u0439\u0020\u0441\u0442\u043e\u0440\u0456\u043d\u0446\u0456\u0020\u0434\u043b\u044f\u0020\u0446\u0456\u0454\u0457\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.selectToFeature=\u0020\u0412\u0438\u0431\u0435\u0440\u0456\u0442\u044c\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u002c\u0020\u044f\u043a\u0456\u0020\u043f\u043e\u0442\u0440\u0456\u0431\u043d\u043e\u0020\u043f\u043e\u043a\u0430\u0437\u0430\u0442\u0438\u0020\u043d\u0430\u0020\u0434\u043e\u043c\u0430\u0448\u043d\u0456\u0439\u0020\u0441\u0442\u043e\u0440\u0456\u043d\u0446\u0456\u0020\u0446\u0456\u0454\u0457\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.publish.tip=\u0020\u0412\u0438\u0020\u0432\u043f\u0435\u0432\u043d\u0435\u043d\u0456\u002c\u0020\u0449\u043e\u0020\u0445\u043e\u0447\u0435\u0442\u0435\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u0443\u0432\u0430\u0442\u0438\u0020\u0441\u0432\u043e\u044e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0073\u0065\u003f\u0020\u042f\u043a\u0020\u0442\u0456\u043b\u044c\u043a\u0438\u0020\u0432\u0438\u0020\u0446\u0435\u0020\u0437\u0440\u043e\u0431\u0438\u0442\u0435\u002c\u0020\u0432\u043e\u043d\u0430\u0020\u043c\u0430\u0454\u0020\u0437\u0430\u043b\u0438\u0448\u0430\u0442\u0438\u0441\u044f\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u043e\u0432\u0430\u043d\u043e\u044e\u002e 
+dataverse.publish.failed.tip=\u002e\u0020\u0426\u044f\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u043d\u0435\u0020\u043c\u043e\u0436\u0435\u0020\u0431\u0443\u0442\u0438\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u043e\u0432\u0430\u043d\u043e\u044e\u002c\u0020\u0442\u043e\u043c\u0443\u0020\u0449\u043e\u0020\u0457\u0457\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u043d\u0435\u0020\u0431\u0443\u043b\u0430\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u043e\u0432\u0430\u043d\u0430\u002e +dataverse.publish.failed=\u0020\u041d\u0435\u0020\u043c\u043e\u0436\u0443\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u0443\u0432\u0430\u0442\u0438\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0073\u0065\u002e +dataverse.publish.success=\u0020\u0412\u0430\u0448\u0430\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0073\u0065\u0020\u0437\u0430\u0440\u0430\u0437\u0020\u0454\u0020\u0437\u0430\u0433\u0430\u043b\u044c\u043d\u043e\u0434\u043e\u0441\u0442\u0443\u043f\u043d\u043e\u044e\u002e +dataverse.publish.failure=\u0020\u0426\u044e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u043d\u0435\u043c\u043e\u0436\u043b\u0438\u0432\u043e\u0020\u043e\u043f\u0443\u0431\u043b\u0456\u043a\u0443\u0432\u0430\u0442\u0438\u002e +dataverse.delete.tip=\u0020\u0412\u0438\u0020\u0432\u043f\u0435\u0432\u043d\u0435\u043d\u0456\u002c\u0020\u0449\u043e\u0020\u0445\u043e\u0447\u0435\u0442\u0435\u0020\u0432\u0438\u0434\u0430\u043b\u0438\u0442\u0438\u0020\u0441\u0432\u043e\u044e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u003f\u0020\u0412\u0438\u0020\u043d\u0435\u0020\u043c\u043e\u0436\u0435\u0442\u0435\u0020\u0432\u0456\u0434\u043d\u043e\u0432\u0438\u0442\u0438\u0020\u0446\u044e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +dataverse.delete=\u0412\u0438\u0434\u0430\u043b\u0438\u0442\u0438\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 
+dataverse.delete.success=\u0020\u0412\u0430\u0448\u0430\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0432\u0438\u0434\u0430\u043b\u0435\u043d\u0430\u002e +dataverse.delete.failure=\u0020\u0426\u044e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u043d\u0435\u043c\u043e\u0436\u043b\u0438\u0432\u043e\u0020\u0432\u0438\u0434\u0430\u043b\u0438\u0442\u0438\u002e +\u0023\u0020\u0420\u0435\u0434\u0430\u043a\u0442\u043e\u0440\u0438\u0020\u0444\u0430\u0439\u043b\u0456\u0432\u0020\u043f\u0430\u043a\u0435\u0442\u0456\u0432\u002c\u0020\u0437\u0432\u0435\u0440\u043d\u0456\u0442\u044c\u0020\u0443\u0432\u0430\u0433\u0443\u0020\u043d\u0430\u0020\u0442\u0435\u002c\u0020\u0449\u043e\u0020\u0022\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0063\u0072\u0065\u0061\u0074\u0065\u002e\u0073\u0075\u0063\u0063\u0065\u0073\u0073\u0022\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u0454\u0442\u044c\u0441\u044f\u0020\u0432\u0020\u0442\u0435\u0441\u0442\u043e\u0432\u043e\u043c\u0443\u0020\u043c\u043e\u0434\u0443\u043b\u0456\u002c\u0020\u043e\u0441\u043a\u0456\u043b\u044c\u043a\u0438\u0020\u0432\u043e\u043d\u043e\u0020\u043d\u0435\u043f\u0435\u0440\u0435\u0432\u0435\u0440\u0448\u0435\u043d\u0435\u0020\u0437\u0020\u0434\u0432\u043e\u043c\u0430\u0020\u043f\u0430\u0440\u0430\u043c\u0435\u0442\u0440\u0430\u043c\u0438 
+dataverse.create.success=\u0020\u0412\u0438\u0020\u0443\u0441\u043f\u0456\u0448\u043d\u043e\u0020\u0441\u0442\u0432\u043e\u0440\u0438\u043b\u0438\u0020\u0441\u0432\u043e\u044e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0021\u0020\u0429\u043e\u0431\u0020\u0434\u0456\u0437\u043d\u0430\u0442\u0438\u0441\u044c\u0020\u0431\u0456\u043b\u044c\u0448\u0435\u0020\u043f\u0440\u043e\u0020\u0442\u0435\u002c\u0020\u0449\u043e\u0020\u0432\u0438\u0020\u043c\u043e\u0436\u0435\u0442\u0435\u0020\u0437\u0440\u043e\u0431\u0438\u0442\u0438\u0020\u0437\u0020\u0432\u0430\u0448\u043e\u044e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002c\u0020\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u043d\u044c\u0442\u0435\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u007b\u0030\u007d\u002f\u007b\u0031\u007d\u002f\u0075\u0073\u0065\u0072\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002d\u006d\u0061\u006e\u0061\u0067\u0065\u006d\u0065\u006e\u0074\u002e\u0068\u0074\u006d\u006c\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0020\u0423\u043f\u0440\u0430\u0432\u043b\u0456\u043d\u043d\u044f\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u002d\u0020\u0414\u043e\u0432\u0456\u0434\u043d\u0438\u043a\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0430\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0022\u0020\u0074\u0061\u0072\u0067\u0065\u0074\u003d\u0022\u005f\u0062\u006c\u0061\u006e\u006b\u0022\u003e\u0055\u0073\u0065\u0072\u0020\u0047\u0075\u0069\u0064\u0065\u003c\u002f\u0061\u003e\u002e +dataverse.create.failure=\u0020\u0426\u044e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u043d\u0435\u043c\u043e\u0436\u043b\u0438\u0432\u043e\u0020\u0441\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u002e 
+dataverse.create.authenticatedUsersOnly=\u0020\u0422\u0456\u043b\u044c\u043a\u0438\u0020\u0430\u0432\u0442\u0435\u043d\u0442\u0438\u0444\u0456\u043a\u043e\u0432\u0430\u043d\u0456\u0020\u043a\u043e\u0440\u0438\u0441\u0442\u0443\u0432\u0430\u0447\u0456\u0020\u043c\u043e\u0436\u0443\u0442\u044c\u0020\u0441\u0442\u0432\u043e\u0440\u044e\u0432\u0430\u0442\u0438\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u002e +dataverse.update.success=\u0020\u0412\u0438\u0020\u0443\u0441\u043f\u0456\u0448\u043d\u043e\u0020\u043e\u043d\u043e\u0432\u0438\u043b\u0438\u0020\u0441\u0432\u043e\u044e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0021 +dataverse.update.failure=\u0020\u0426\u044e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u043d\u0435\u043c\u043e\u0436\u043b\u0438\u0432\u043e\u0020\u043e\u043d\u043e\u0432\u0438\u0442\u0438\u002e + +\u0023\u0020\u0072\u006f\u006c\u0065\u0073\u0041\u006e\u0064\u0050\u0065\u0072\u006d\u0069\u0073\u0073\u0069\u006f\u006e\u0073\u0046\u0072\u0061\u0067\u006d\u0065\u006e\u0074\u002e\u0078\u0068\u0074\u006d\u006c + +\u0023\u0020\u0061\u0064\u0076\u0061\u006e\u0063\u0065\u0064\u002e\u0078\u0068\u0074\u006d\u006c +advanced.search.header.dataverses=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073 +advanced.search.dataverses.name.tip=\u0020\u041f\u0440\u043e\u0435\u043a\u0442\u002c\u0020\u0432\u0456\u0434\u0434\u0456\u043b\u002c\u0020\u0443\u043d\u0456\u0432\u0435\u0440\u0441\u0438\u0442\u0435\u0442\u002c\u0020\u043f\u0440\u043e\u0444\u0435\u0441\u043e\u0440\u0020\u0430\u0431\u043e\u0020\u0436\u0443\u0440\u043d\u0430\u043b\u002c\u0020\u0434\u043b\u044f\u0020\u044f\u043a\u0438\u0445\u0020\u0446\u044f\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u043c\u0456\u0441\u0442\u0438\u0442\u0438\u043c\u0435\u0020\u0434\u0430\u043d\u0456\u002e 
+advanced.search.dataverses.affiliation.tip=\u0020\u041e\u0440\u0433\u0430\u043d\u0456\u0437\u0430\u0446\u0456\u044f\u002c\u0020\u0437\u0020\u044f\u043a\u043e\u044e\u0020\u043f\u043e\u0432\u0027\u044f\u0437\u0430\u043d\u0430\u0020\u0446\u044f\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0073\u0065\u002e\u0020\u0061\u0064\u0076\u0061\u006e\u0063\u0065\u0064\u002e\u0073\u0065\u0061\u0072\u0063\u0068\u002e\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u002e\u0064\u0065\u0073\u0063\u0072\u0069\u0070\u0074\u0069\u006f\u006e\u002e\u0074\u0069\u0070\u003d\u0420\u0435\u0437\u044e\u043c\u0435\u002c\u0020\u0449\u043e\u0020\u043e\u043f\u0438\u0441\u0443\u0454\u0020\u043c\u0435\u0442\u0443\u002c\u0020\u0445\u0430\u0440\u0430\u043a\u0442\u0435\u0440\u0020\u0447\u0438\u0020\u0441\u0444\u0435\u0440\u0443\u0020\u0437\u0430\u0441\u0442\u043e\u0441\u0443\u0432\u0430\u043d\u043d\u044f\u0020\u0446\u0456\u0454\u0457\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e +advanced.search.dataverses.subject.tip=\u0020\u041a\u0430\u0442\u0435\u0433\u043e\u0440\u0456\u0457\u0020\u0442\u0435\u043c\u0020\u0441\u043f\u0435\u0446\u0438\u0444\u0456\u0447\u043d\u0456\u0020\u0434\u043b\u044f\u0020\u0434\u043e\u043c\u0435\u043d\u0443\u002c\u0020\u044f\u043a\u0456\u0020\u0430\u043a\u0442\u0443\u0430\u043b\u044c\u043d\u0456\u0020\u0434\u043b\u044f\u0020\u0446\u0456\u0454\u0457\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0073\u0065\u002e +advanced.search.header.datasets=\u043d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445\u002e +advanced.search.header.files=\u0424\u0430\u0439\u043b\u0438\u002e +advanced.search.files.name.tip=\u0020\u0406\u043c\u0027\u044f\u002c\u0020\u044f\u043a\u0435\u0020\u043f\u0440\u0438\u0437\u043d\u0430\u0447\u0435\u043d\u043e\u0020\u0434\u043b\u044f\u0020\u0456\u0434\u0435\u043d\u0442\u0438\u0444\u0456\u043a\u0430\u0446\u0456\u0457\u0020\u0444\u0430\u0439\u043b\u0443\u002e 
+advanced.search.files.description.tip=\u0020\u0420\u0435\u0437\u044e\u043c\u0435\u002c\u0020\u0449\u043e\u0020\u043e\u043f\u0438\u0441\u0443\u0454\u0020\u0444\u0430\u0439\u043b\u0020\u0442\u0430\u0020\u0439\u043e\u0433\u043e\u0020\u0437\u043c\u0456\u043d\u043d\u0456\u002e +advanced.search.files.fileType=\u0422\u0438\u043f\u0020\u0444\u0430\u0439\u043b\u0443\u002e +advanced.search.files.fileType.tip=\u0020\u0420\u043e\u0437\u0448\u0438\u0440\u0435\u043d\u043d\u044f\u0020\u0434\u043b\u044f\u0020\u0444\u0430\u0439\u043b\u0443\u002c\u0020\u043d\u0430\u043f\u0440\u0438\u043a\u043b\u0430\u0434\u0020\u0043\u0053\u0056\u002c\u0020\u007a\u0069\u0070\u002c\u0020\u0053\u0074\u0061\u0074\u0061\u002c\u0020\u0052\u002c\u0020\u0050\u0044\u0046\u002c\u0020\u004a\u0050\u0045\u0047\u0020\u0442\u043e\u0449\u043e\u002e +advanced.search.files.variableName=\u0020\u041d\u0430\u0437\u0432\u0430\u0020\u0437\u043c\u0456\u043d\u043d\u043e\u0457\u002e +advanced.search.files.variableName.tip=\u0020\u041d\u0430\u0437\u0432\u0430\u0020\u0433\u0440\u0430\u0444\u0438\u0020\u0437\u043c\u0456\u043d\u043d\u043e\u0457\u0020\u0432\u0020\u0441\u0438\u0441\u0442\u0435\u043c\u0456\u0020\u0434\u0430\u043d\u0438\u0445\u002e +advanced.search.files.variableLabel=\u0020\u041f\u043e\u0437\u043d\u0430\u0447\u043a\u0430\u0020\u0437\u043c\u0456\u043d\u043d\u043e\u0457 +advanced.search.files.variableLabel.tip=\u0020\u041a\u043e\u0440\u043e\u0442\u043a\u0438\u0439\u0020\u043e\u043f\u0438\u0441\u0020\u0437\u043c\u0456\u043d\u043d\u043e\u0457\u002e + +\u0023\u0020\u0073\u0065\u0061\u0072\u0063\u0068\u002d\u0069\u006e\u0063\u006c\u0075\u0064\u0065\u002d\u0066\u0072\u0061\u0067\u006d\u0065\u006e\u0074\u002e\u0078\u0068\u0074\u006d\u006c +dataverse.search.advancedSearch=\u0020\u0420\u043e\u0437\u0448\u0438\u0440\u0435\u043d\u0438\u0439\u0020\u043f\u043e\u0448\u0443\u043a 
+dataverse.search.input.watermark=\u0020\u0428\u0443\u043a\u0430\u0442\u0438\u0020\u0446\u044e\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u002e\u002e +account.search.input.watermark=\u0020\u0428\u0443\u043a\u0430\u0442\u0438\u0020\u0446\u0456\u0020\u0434\u0430\u043d\u0456\u002e\u002e\u002e +dataverse.search.btn.find=\u0020\u0417\u043d\u0430\u0439\u0442\u0438\u002e +dataverse.results.btn.addData=\u0414\u043e\u0434\u0430\u0442\u0438\u0020\u0434\u0430\u043d\u0456\u002e +dataverse.results.btn.addData.newDataverse=\u041d\u043e\u0432\u0430\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065 +dataverse.results.btn.addData.newDataset=\u041d\u043e\u0432\u0438\u0439\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445 +dataverse.results.dialog.addDataGuest.header=\u0020\u0414\u043e\u0434\u0430\u0442\u0438\u0020\u0434\u0430\u043d\u0456\u002e +dataverse.results.dialog.addDataGuest.msg=\u0020\u0412\u0430\u043c\u0020\u043f\u043e\u0442\u0440\u0456\u0431\u043d\u043e\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u006c\u006f\u0067\u0069\u006e\u0070\u0061\u0067\u0065\u002e\u0078\u0068\u0074\u006d\u006c\u007b\u0030\u007d\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0020\u0443\u0432\u0456\u0439\u0442\u0438\u0020\u0443\u0020\u0441\u0432\u0456\u0439\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0022\u003e\u0443\u0432\u0456\u0439\u0442\u0438\u003c\u002f\u0061\u003e\u0020\u0449\u043e\u0431\u0020\u0441\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u0431\u043e\u0020\u0434\u043e\u0434\u0430\u0442\u0438\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u002e 
+dataverse.results.dialog.addDataGuest.msg.signup=\u0020\u0412\u0430\u043c\u0020\u043f\u043e\u0442\u0440\u0456\u0431\u043d\u043e\u0020\u003c\u0061\u0020\u0068\u0072\u0065\u0066\u003d\u0022\u002f\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0075\u0073\u0065\u0072\u002e\u0078\u0068\u0074\u006d\u006c\u007b\u0030\u007d\u0026\u0061\u006d\u0070\u003b\u0065\u0064\u0069\u0074\u004d\u006f\u0064\u0065\u003d\u0421\u0422\u0412\u041e\u0420\u0418\u0422\u0418\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u003d\u0022\u0417\u0430\u0440\u0435\u0454\u0441\u0442\u0440\u0443\u0432\u0430\u0442\u0438\u0441\u044f\u0020\u0432\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0456\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0022\u003e\u0020\u0417\u0430\u0440\u0435\u0454\u0441\u0442\u0440\u0443\u0432\u0430\u0442\u0438\u0441\u044f\u0020\u003c\u002f\u0061\u003e\u0020\u0430\u0431\u043e\u0020\u003c\u0061\u0020\u0020\u0068\u0072\u0065\u0066\u0020\u003d\u0020\u0022\u002f\u0020\u006c\u006f\u0067\u0069\u006e\u0070\u0061\u0067\u0065\u002e\u0078\u0068\u0074\u006d\u006c\u0020\u0028\u0030\u0029\u0022\u0020\u0074\u0069\u0074\u006c\u0065\u0020\u003d\u0020\u0022\u0423\u0432\u0456\u0439\u0442\u0438\u0020\u0443\u0020\u0441\u0432\u0456\u0439\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u043a\u0430\u0443\u043d\u0442\u0022\u003e\u0020\u0423\u0432\u0456\u0439\u0434\u0456\u0442\u044c\u0020\u003c\u002f\u0020\u0061\u003e\u002c\u0020\u0449\u043e\u0431\u0020\u0441\u0442\u0432\u043e\u0440\u0438\u0442\u0438\u0020\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0020\u0430\u0431\u043e\u0020\u0434\u043e\u0434\u0430\u0442\u0438\u0020\u043d\u0430\u0431\u0456\u0440\u0020\u0434\u0430\u043d\u0438\u0445\u002e +dataverse.results.types.dataverses=\u0044\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073 +dataverse.results.types.datasets=\u0020\u043d\u0430\u0431\u043e\u0440\u0438\u0020\u0434\u0430\u043d\u0438\u0445 
+dataverse.results.types.files=\u0424\u0430\u0439\u043b\u0438\u002e +\u0023\u0020\u0042\u0075\u006e\u0064\u006c\u0065\u0020\u0066\u0069\u006c\u0065\u0020\u0065\u0064\u0069\u0074\u006f\u0072\u0073\u002c\u0020\u0070\u006c\u0065\u0061\u0073\u0065\u0020\u006e\u006f\u0074\u0065\u0020\u0074\u0068\u0061\u0074\u0020\u0022\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u002e\u0072\u0065\u0073\u0075\u006c\u0074\u0073\u002e\u0065\u006d\u0070\u0074\u0079\u002e\u007a\u0065\u0072\u006f\u0022\u0020\u0069\u0073\u0020\u0075\u0073\u0065\u0064\u0020\u0069\u006e\u0020\u0061\u0020\u0075\u006e\u0069\u0074\u0020\u0074\u0065\u0073\u0074 +dataverse.results.empty.zero=\u0020\u041d\u0435\u043c\u0430\u0454\u0020\u0064\u0061\u0074\u0061\u0076\u0065\u0072\u0073\u0065\u0073\u002c\u0020\u043d\u0430\u0431\u043e\u0440\u0456\u0432\u0020\u0434\u0430\u043d\u0438\u0445\u002c\u0020\u0447\u0438\u0020\u0444\u0430\u0439\u043b\u0456\u0432\u002c\u0020\u044f\u043a\u0456\u0020\u0432\u0456\u0434\u043f\u043e\u0432\u0456\u0434\u0430\u044e\u0442\u044c\u0020\u0432\u0430\u0448\u043e\u043c\u0443\u0020\u043f\u043e\u0448\u0443\u043a\u0443\u002e\u0020\u0411\u0443\u0434\u044c\u0020\u043b\u0430\u0441\u043a\u0430\u002c\u0020\u0441\u043f\u0440\u043e\u0431\u0443\u0439\u0442\u0435\u0020\u043d\u043e\u0432\u0438\u0439\u0020\u043f\u043e\u0448\u0443\u043a\u002c\u0020\u0432\u0438\u043a\u043e\u0440\u0438\u0441\u0442\u043e\u0432\u0443\u044e\u0447\u0438\u0020\u0456\u043d\u0448\u0456\u0020\u0430\u0431\u043e\u0020\u0431\u0456\u043b\u044c\u0448\u0020\u0448\u0438\u0440\u043e\u043a\u0456\u0020\u0442\u0435\u0440\u043c\u0456\u043d\u0438\u002e\u0020\u0412\u0438\u0020\u0442\u0430\u043a\u043e\u0436\u0020\u043c\u043e\u0436\u0435\u0442\u0435\u0020\u043f\u0435\u0440\u0435\u0433\u043b\u044f\u043d\u0443\u0442\u0438\u0020\u0434\u043b\u044f\u0020\u0434\u043e\u0432\u0456\u0434\u043a\u0438\u003a +the ) + { + my $s = $_; + $s=~s/\r|\n//g; + if ($s=~/^(.+?)\=(.+)$/) + { + $k{$1}++; + } +# print "$s\n"; + } + close(f); + + return %k; +} + +%k1 = 
readprop($lang1); +%k2 = readprop($lang2); + +foreach $item (sort keys %k1) +{ +# print "$item\n"; + if (!$k2{$item}) + { + print "Missing $item\n"; + } +} diff --git a/dataversedock/lang.properties/dvnconvert.py b/dataversedock/lang.properties/dvnconvert.py new file mode 100755 index 0000000..2c0beb8 --- /dev/null +++ b/dataversedock/lang.properties/dvnconvert.py @@ -0,0 +1,21 @@ +#!/usr/bin/python + +import codecs +import re + +def display_unicode(data): + return "".join(["\\u%s" % hex(ord(l))[2:].zfill(4) for l in data]) + +filename = "Bundle_ua.properties" +#filename = "test" +with codecs.open(filename,'r',encoding='utf8') as f: + text = f.read() + +if text: + for uni in text.split('\n'): + data = uni.split("=",1) + + try: + print "%s=%s" % (data[0], display_unicode(data[1])) + except: + print "%s" % display_unicode(uni) diff --git a/dataversedock/lang.properties/propconvertor.py b/dataversedock/lang.properties/propconvertor.py new file mode 100755 index 0000000..4d2f833 --- /dev/null +++ b/dataversedock/lang.properties/propconvertor.py @@ -0,0 +1,67 @@ +#!/usr/bin/python + +import codecs +import re + +def display_unicode(data): + return "".join(["\\u%s" % hex(ord(l))[2:].zfill(4) for l in data]) + +filename = "Bundle_ua.properties" +#filename = "test" + +def propreader(filename): + #filename = "Bundle_fr.properties" + vocab = {} + order = [] + try: + with codecs.open(filename,'r',encoding='utf8') as f: + text = f.read() + except: + with codecs.open(filename,'r') as f: + text = f.read() + + if text: + for uni in text.split('\n'): + data = uni.split("=",1) + + try: +# print "%s=%s" % (data[0], display_unicode(data[1])) + vocab[data[0]] = display_unicode(data[1]) + order.append(data[0]) + except: +# print "%s" % #display_unicode(uni) + vocab[data[0]] = display_unicode(uni) + order.append(data[0]) + + return (order, vocab) + +def mainreader(filename, vocab): + try: + with codecs.open(filename,'r',encoding='utf8') as f: + text = f.read() + except: + with 
codecs.open(filename,'r') as f: + text = f.read() + + if text: + for uni in text.split('\n'): + if uni: + try: + data = uni.split("=",1) + if data[0] in vocab: + print "%s=%s" % (data[0], vocab[data[0]]) + else: + print uni + except: + #print "%s" % uni + skip = 1 + + return ('', '') + +vocfile = "Bundle_fr.properties" +vocfile = "Bundle_es_ES.properties" +(o1, p1) = propreader(vocfile) +(o2, p2) = mainreader('Bundle_de.properties', p1) + + +print p1['requiredField'] diff --git a/dataversedock/lang.properties/test b/dataversedock/lang.properties/test new file mode 100644 index 0000000..1e12ee1 --- /dev/null +++ b/dataversedock/lang.properties/test @@ -0,0 +1 @@ +data=укр ответ р diff --git a/dataversedock/lang.properties/x b/dataversedock/lang.properties/x new file mode 100644 index 0000000..552d904 Binary files /dev/null and b/dataversedock/lang.properties/x differ diff --git a/dataversedock/persistence.patch b/dataversedock/persistence.patch new file mode 100644 index 0000000..d30948c --- /dev/null +++ b/dataversedock/persistence.patch @@ -0,0 +1,4 @@ +14c14 +< +--- +> diff --git a/dataversedock/readme.txt b/dataversedock/readme.txt new file mode 100644 index 0000000..c763e75 --- /dev/null +++ b/dataversedock/readme.txt @@ -0,0 +1,23 @@ +first pass docker all-in-one image, intended for running integration tests against. + +Could be potentially usable for normal development as well. + + +Initial setup (aka - do once): +- Do surgery on glassfish4 and solr4.6.0 following guides, place results in `conf/docker-aio/dv/deps` as `glassfish4dv.tgz` and `solr-4.6.0dv.tgz` respectively. Running `conf/docker-aio/0prep_deps.sh` attempts to automate this. 
+ +Per-build: +- `cd conf/docker-aio`, and run `1prep.sh` to copy files for integration test data into docker build context; `1prep.sh` will also build the war file and installation zip file +- build the docker image: `docker build -t dv0 -f c7.dockerfile .` + +- Run image: `docker run -d -p 8083:8080 --name dv dv0` (aka - forward port 8083 locally to 8080 in the container) +- Installation (integration test): `docker exec -it dv /opt/dv/setupIT.bash` +- Installation (non-interactive, uses `conf/docker-aio/default.config`): `docker exec -it dv /opt/dv/install.bash` + +- update `dataverse.siteUrl` (appears only necessary for `DatasetsIT.testPrivateUrl`): `docker exec -it dv /usr/local/glassfish4/bin/asadmin create-jvm-options "-Ddataverse.siteUrl=http\://localhost\:8083"` + +Run integration tests: +`mvn test -Dtest=DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT -Ddataverse.test.baseurl='http://localhost:8083'` + +There isn't any strict requirement on the local port (8083 in this doc), the name of the image (dv0) or container (dv), these can be changed as desired as long as they are consistent. + diff --git a/dataversedock/setupIT.bash b/dataversedock/setupIT.bash new file mode 100755 index 0000000..304805b --- /dev/null +++ b/dataversedock/setupIT.bash @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +# do integration-test install and test data setup + +if [ ! -e /opt/dv/status ]; then + cd /opt/dv + rm -rf dvinstall + unzip dvinstall.zip + patch -t /opt/dv/dvinstall/install < docker.patch + cd /opt/dv/dvinstall + /usr/local/glassfish4/glassfish/bin/asadmin start-domain + ./install -admin_email=pameyer+dvinstall@crystal.harvard.edu -y -f +#> install.out 2> install.err + + cd /opt/dv/deps + echo "Applying language properties..." 
+ /usr/local/glassfish4/glassfish/bin/asadmin stop-domain + sleep 10s + cp -rf /opt/dv/$BUNDLEPROPERTIES /opt/glassfish4/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/Bundle.properties + /usr/local/glassfish4/glassfish/bin/asadmin start-domain + echo "Cleaning up installation files" + rm -rf /opt/dv/* + echo "Dataverse installed" > /opt/dv/status +fi diff --git a/dataversedock/step1.sh b/dataversedock/step1.sh new file mode 100755 index 0000000..c3ea7da --- /dev/null +++ b/dataversedock/step1.sh @@ -0,0 +1,30 @@ +#!/bin/sh + +wdir=$1"/dataversedock" +echo "Getting all dependencies in "$wdir + +if [ ! -d $wdir/dv/deps ]; then + mkdir -p $wdir/dv + mkdir -p $wdir/dv/deps +fi + +if [ ! -e $wdir/dv/deps/glassfish4dv.tgz ]; then + echo "glassfish dependency prep" + mkdir -p /tmp/dv-prep/gf + cd $wdir/dv/deps + wget http://download.java.net/glassfish/4.1/release/glassfish-4.1.zip + unzip glassfish-4.1.zip + rm glassfish4/glassfish/modules/weld-osgi-bundle.jar + wget http://search.maven.org/remotecontent?filepath=org/jboss/weld/weld-osgi-bundle/2.2.10.Final/weld-osgi-bundle-2.2.10.Final-glassfish4.jar -O weld-osgi-bundle-2.2.10.Final-glassfish4.jar + mv weld-osgi-bundle-2.2.10.Final-glassfish4.jar glassfish4/glassfish/modules + # assuming that folks usually have /tmp auto-clean as needed +fi + +if [ ! -e $wdir/dv/deps/solr-4.6.0dv.tgz ]; then + echo "solr dependency prep" + # schema changes *should* be the only ones... + cd $wdir/dv/deps/ + wget https://archive.apache.org/dist/lucene/solr/4.6.0/solr-4.6.0.tgz -O solr-4.6.0dv.tgz + cd ../../ +fi + diff --git a/dataversedock/step2.sh b/dataversedock/step2.sh new file mode 100755 index 0000000..829a938 --- /dev/null +++ b/dataversedock/step2.sh @@ -0,0 +1,23 @@ +#!/bin/sh + +# move things necessary for integration tests into build context. 
+# this was based off the phoenix deployment; and is likely uglier and bulkier than necessary in a perfect world + +wdir=$1"/dataversedock" +echo "Getting all dependencies in "$wdir + +mkdir -p testdata/doc/sphinx-guides/source/_static/util/ +#cp ../solr/4.6.0/schema.xml testdata/ +#cp ../../jhove/jhove.conf testdata/ + +# not using dvinstall.zip for setupIT.bash; but still used in install.bash for normal ops +echo $wdir +cd $wdir +if [ ! -e $wdir/dv/deps/dvinstall.zip ]; then + wget https://github.com/IQSS/dataverse/releases/download/v4.8.5/dvinstall.zip -O $wdir/dv/deps/dvinstall.zip +fi + +if [ ! -e $wdir/dv/deps/dataverse.war ]; then + wget https://github.com/IQSS/dataverse/releases/download/v4.8.5/dataverse-4.8.5.war -O $wdir/dv/deps/dataverse.war +fi +cd ../ diff --git a/dataversedock/testdata/doc/sphinx-guides/source/_static/util/createsequence.sql b/dataversedock/testdata/doc/sphinx-guides/source/_static/util/createsequence.sql new file mode 100644 index 0000000..2677832 --- /dev/null +++ b/dataversedock/testdata/doc/sphinx-guides/source/_static/util/createsequence.sql @@ -0,0 +1,33 @@ +-- A script for creating a numeric identifier sequence, and an external +-- stored procedure, for accessing the sequence from inside the application, +-- in a non-hacky, JPA way. + +-- NOTE: + +-- 1. The database user name "dvnapp" is hard-coded here - it may +-- need to be changed to match your database user name; + +-- 2. In the code below, the sequence starts with 1, but it can be adjusted by +-- changing the MINVALUE as needed. 
+ +CREATE SEQUENCE datasetidentifier_seq + INCREMENT 1 + MINVALUE 1 + MAXVALUE 9223372036854775807 + START 1 +CACHE 1; + +ALTER TABLE datasetidentifier_seq OWNER TO "dvnapp"; + +-- And now create a PostgreSQL FUNCTION, for JPA to +-- access as a NamedStoredProcedure: + +CREATE OR REPLACE FUNCTION generateIdentifierAsSequentialNumber( + OUT identifier int) + RETURNS int AS +$BODY$ +BEGIN + select nextval('datasetidentifier_seq') into identifier; +END; +$BODY$ + LANGUAGE plpgsql; diff --git a/dataversedock/testdata/doc/sphinx-guides/source/_static/util/pg8-createsequence-prep.sql b/dataversedock/testdata/doc/sphinx-guides/source/_static/util/pg8-createsequence-prep.sql new file mode 100644 index 0000000..740ba6c --- /dev/null +++ b/dataversedock/testdata/doc/sphinx-guides/source/_static/util/pg8-createsequence-prep.sql @@ -0,0 +1,21 @@ +-- handle absence of CREATE OR REPLACE LANGUAGE for postgresql 8.4 or older +-- courtesy of the postgres wiki: https://wiki.postgresql.org/wiki/CREATE_OR_REPLACE_LANGUAGE +CREATE OR REPLACE FUNCTION make_plpgsql() +RETURNS VOID +LANGUAGE SQL +AS $$ +CREATE LANGUAGE plpgsql; +$$; + +SELECT + CASE + WHEN EXISTS( + SELECT 1 + FROM pg_catalog.pg_language + WHERE lanname='plpgsql' + ) + THEN NULL + ELSE make_plpgsql() END; + +DROP FUNCTION make_plpgsql(); + diff --git a/dataversedock/testdata/jhove.conf b/dataversedock/testdata/jhove.conf new file mode 100644 index 0000000..261a2e1 --- /dev/null +++ b/dataversedock/testdata/jhove.conf @@ -0,0 +1,43 @@ + + + /usr/local/src/jhove + utf-8 + /tmp + 131072 + 1.0 + 1024 + + edu.harvard.hul.ois.jhove.module.AiffModule + + + edu.harvard.hul.ois.jhove.module.WaveModule + + + edu.harvard.hul.ois.jhove.module.PdfModule + + + edu.harvard.hul.ois.jhove.module.Jpeg2000Module + + + edu.harvard.hul.ois.jhove.module.JpegModule + + + edu.harvard.hul.ois.jhove.module.GifModule + + + edu.harvard.hul.ois.jhove.module.TiffModule + + + edu.harvard.hul.ois.jhove.module.HtmlModule + + + 
edu.harvard.hul.ois.jhove.module.AsciiModule + + + edu.harvard.hul.ois.jhove.module.Utf8Module + + diff --git a/dataversedock/testdata/schema.xml b/dataversedock/testdata/schema.xml new file mode 100644 index 0000000..323429b --- /dev/null +++ b/dataversedock/testdata/schema.xml @@ -0,0 +1,1692 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dataversedock/testdata/scripts/api/bin/list-dvs b/dataversedock/testdata/scripts/api/bin/list-dvs new file mode 100755 index 0000000..6daa07b --- /dev/null +++ b/dataversedock/testdata/scripts/api/bin/list-dvs @@ -0,0 +1,2 @@ +curl http://localhost:8080/api/dvs +echo diff --git a/dataversedock/testdata/scripts/api/data-deposit/create-dataset b/dataversedock/testdata/scripts/api/data-deposit/create-dataset new file mode 100755 index 0000000..437f05f --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/create-dataset @@ -0,0 +1,6 @@ +#!/bin/bash +. 
scripts/search/export-keys +SERVER=localhost:8181 +DATAVERSE_ALIAS=trees +curl -s --insecure --data-binary "@doc/sphinx-guides/source/api/sword-atom-entry.xml" -H "Content-Type: application/atom+xml" -u $SPRUCEKEY: https://$SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ +| xmllint -format - diff --git a/dataversedock/testdata/scripts/api/data-deposit/create-dataset-805-rights-license b/dataversedock/testdata/scripts/api/data-deposit/create-dataset-805-rights-license new file mode 100755 index 0000000..0ac7462 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/create-dataset-805-rights-license @@ -0,0 +1,7 @@ +#!/bin/bash -x +USERNAME=spruce +PASSWORD=spruce +SERVER=localhost:8181 +DATAVERSE_ALIAS=spruce +curl -s --insecure --data-binary "@scripts/search/tests/data/dataset-trees1.xml" -H "Content-Type: application/atom+xml" -u $USERNAME:$PASSWORD https://$SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ +| xmllint -format - diff --git a/dataversedock/testdata/scripts/api/data-deposit/create-dataset-894-invisible-character b/dataversedock/testdata/scripts/api/data-deposit/create-dataset-894-invisible-character new file mode 100755 index 0000000..72d9d6f --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/create-dataset-894-invisible-character @@ -0,0 +1,7 @@ +#!/bin/bash +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +DATAVERSE_ALIAS=peteTop +curl -s --insecure --data-binary "@scripts/api/data-deposit/data/atom-entry-study-894-invisible-character.xml" -H "Content-Type: application/atom+xml" -u $USERNAME:$PASSWORD https://$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ +| xmllint -format - diff --git a/dataversedock/testdata/scripts/api/data-deposit/create-dataset-899-expansion b/dataversedock/testdata/scripts/api/data-deposit/create-dataset-899-expansion new file mode 100755 index 0000000..513f6e6 --- /dev/null +++ 
b/dataversedock/testdata/scripts/api/data-deposit/create-dataset-899-expansion @@ -0,0 +1,7 @@ +#!/bin/bash +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +DATAVERSE_ALIAS=peteTop +curl -s --insecure --data-binary "@scripts/api/data-deposit/data/atom-entry-study-899-expansion.xml" -H "Content-Type: application/atom+xml" -u $USERNAME:$PASSWORD https://$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ +| xmllint -format - diff --git a/dataversedock/testdata/scripts/api/data-deposit/data/atom-entry-study-894-invisible-character.xml b/dataversedock/testdata/scripts/api/data-deposit/data/atom-entry-study-894-invisible-character.xml new file mode 100644 index 0000000..1264f9f --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/data/atom-entry-study-894-invisible-character.xml @@ -0,0 +1,37 @@ + + + + + smoke36_study + last, first + Sotiri, elda + +  + + Peets, J., & Stumptown, J. (2013). Roasting at Home. New England Journal of Coffee, 3(1), 22-34. + + 2013-07-11 + + + Considerations before you start roasting your own coffee at home. + + drink + beverage + caffeine + + United States + World + + aggregate data + + . something something something something + + Creative Commons CC-BY 3.0 (unported) http://creativecommons.org/licenses/by/3.0/ + + Peets, John. 2010. Roasting Coffee at the Coffee Shop. 
Coffeemill Press + diff --git a/dataversedock/testdata/scripts/api/data-deposit/data/atom-entry-study-899-expansion.xml b/dataversedock/testdata/scripts/api/data-deposit/data/atom-entry-study-899-expansion.xml new file mode 100644 index 0000000..c06f5ad --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/data/atom-entry-study-899-expansion.xml @@ -0,0 +1,26 @@ + + + “Changes in test-taking patterns over time” concerning the Flynn Effect in Estonia + + The dataset from our previous Intelligence paper consists of data collected with the National Intelligence Tests (NIT, Estonian adaptation) in two historical time points: in 1934/36 (N=890) and 2006 (N=913) for students with an average age of 13. The data-file consists of information about cohort, age, and gender and test results at the item level for nine of the ten NIT subtests and subtest scores for the 10th subtest. Three answer types are separated: right answer, wrong answer and missing answer. Data can be used for psychometric research of cohort and sex differences at the scale and item level. + + Must, Olev + Must, Aasa + Estonian Scientific Foundation: grant no 2387 and 5856. European Social Fund: a Primus grant (#3-8.2/60) to Anu Realo. Baylor University financial support for data quality control in archive. + + Insert Dataset publisher + Journal copyright, license or terms of use notice + + + 2014-09-22 + + Must, O., & Must, A. (2014). Sample submission. Journal Of Plugin Testing, 1(2). 
+ + Academic discipline + Subject classification + Article keywords + Geographic coverage + + Keyword 1, keyword 2, keyword 3 + Data Set + diff --git a/dataversedock/testdata/scripts/api/data-deposit/data/example.zip b/dataversedock/testdata/scripts/api/data-deposit/data/example.zip new file mode 100644 index 0000000..8870dd7 Binary files /dev/null and b/dataversedock/testdata/scripts/api/data-deposit/data/example.zip differ diff --git a/dataversedock/testdata/scripts/api/data-deposit/dataset-field b/dataversedock/testdata/scripts/api/data-deposit/dataset-field new file mode 100755 index 0000000..5d01d43 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/dataset-field @@ -0,0 +1,6 @@ +#!/bin/sh +if [ -z "$1" ]; then + curl -s http://localhost:8080/api/datasetfield +else + curl -s http://localhost:8080/api/datasetfield/$1 +fi diff --git a/dataversedock/testdata/scripts/api/data-deposit/delete-dataset b/dataversedock/testdata/scripts/api/data-deposit/delete-dataset new file mode 100755 index 0000000..68f35c2 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/delete-dataset @@ -0,0 +1,13 @@ +#!/bin/bash -x +USERNAME=spruce +PASSWORD=spruce +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/17 +fi +#curl --insecure -X DELETE https://$DVN_SERVER/api/datasets/$DATABASE_ID?key=$USERNAME +curl --insecure -i -X DELETE -u $USERNAME:$PASSWORD https://$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID +#| xmllint -format - diff --git a/dataversedock/testdata/scripts/api/data-deposit/delete-file b/dataversedock/testdata/scripts/api/data-deposit/delete-file new file mode 100755 index 0000000..1e2f50d --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/delete-file @@ -0,0 +1,14 @@ +#!/bin/bash -x +USERNAME=spruce +PASSWORD=spruce +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; 
then + FILE_ID=`scripts/api/data-deposit/show-files | cut -d'/' -f1` + #echo $FILE_ID + #exit +else + FILE_ID=$1 +fi +#curl --insecure -X DELETE https://$DVN_SERVER/api/datasets/$DATABASE_ID?key=$USERNAME +curl --insecure -i -X DELETE https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/file/$FILE_ID +#| xmllint -format - diff --git a/dataversedock/testdata/scripts/api/data-deposit/edit-dataset-1430-edit-subject b/dataversedock/testdata/scripts/api/data-deposit/edit-dataset-1430-edit-subject new file mode 100755 index 0000000..b7fb606 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/edit-dataset-1430-edit-subject @@ -0,0 +1,13 @@ +#!/bin/sh +# not working right now: SWORD: "Replacing metadata for a dataset" broken, throws exception - https://github.com/IQSS/dataverse/issues/1554 +USERNAME=spruce +PASSWORD=spruce +SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi +curl --insecure --upload-file "scripts/search/tests/data/dataset-trees1-edit.xml" -H "Content-Type: application/atom+xml" https://$USERNAME:$PASSWORD@$SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ +| xmllint -format - \ diff --git a/dataversedock/testdata/scripts/api/data-deposit/edit-dataset-805-rights-license b/dataversedock/testdata/scripts/api/data-deposit/edit-dataset-805-rights-license new file mode 100755 index 0000000..cbc7fbb --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/edit-dataset-805-rights-license @@ -0,0 +1,12 @@ +#!/bin/sh +USERNAME=spruce +PASSWORD=spruce +SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi +curl --insecure --upload-file "scripts/search/tests/data/dataset-trees1-edit.xml" -H "Content-Type: 
application/atom+xml" https://$USERNAME:$PASSWORD@$SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ +| xmllint -format - \ diff --git a/dataversedock/testdata/scripts/api/data-deposit/get b/dataversedock/testdata/scripts/api/data-deposit/get new file mode 100755 index 0000000..c7361a2 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/get @@ -0,0 +1,9 @@ +#!/bin/sh +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + echo "Please provide a URL to GET" + exit 1 +fi +curl --insecure -s -u $USERNAME:$PASSWORD $1 | xmllint -format - diff --git a/dataversedock/testdata/scripts/api/data-deposit/list-datasets b/dataversedock/testdata/scripts/api/data-deposit/list-datasets new file mode 100755 index 0000000..20a4681 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/list-datasets @@ -0,0 +1,12 @@ +#!/bin/bash -x +USERNAME=spruce +PASSWORD=spruce +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + DATAVERSE_ALIAS=spruce + #DATAVERSE_ALIAS=root +else + DATAVERSE_ALIAS=$1 +fi +curl --insecure -s -u $USERNAME:$PASSWORD https://$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ +| xmllint -format - diff --git a/dataversedock/testdata/scripts/api/data-deposit/pipeline b/dataversedock/testdata/scripts/api/data-deposit/pipeline new file mode 100755 index 0000000..52e110d --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/pipeline @@ -0,0 +1,55 @@ +#!/usr/bin/env ruby +require "rexml/document" +include REXML + +def pp (ugly) + formatter = REXML::Formatters::Pretty.new(2) + formatter.compact = true + formatter.write(ugly, $stdout) + puts +end + +service_document = Document.new `scripts/api/data-deposit/service-document` +regex = 'peteTop' +col1 = nil +XPath.each(service_document, "//collection/@href") { |href| + #if href.to_s.match(/col1/) + if href.to_s.match(/#{regex}/) + col1 = href + end +} + +if (!col1) + puts "Hmm. 
We expected to find #{regex}" + exit 1 +end + +puts "GET of #{col1}" +feed_of_studies = Document.new `scripts/api/data-deposit/get #{col1}` +#pp(feed_of_studies) + +first = XPath.first(feed_of_studies, "//entry") +if (!first) + puts "Have you created any datasets in #{col1} ?" + exit 1 +end +#pp(first) +id = XPath.first(first, "//id").text +puts "GET of #{id}" +entry = Document.new `scripts/api/data-deposit/get #{id}` +permalink = XPath.first(entry, "//link[@rel='alternate']/@href").to_s +permalink_last = permalink.split('/')[-1] +id_last = id.split('/')[-1] +if (id_last.to_s != permalink_last.to_s) + puts "WARNING: mismatch between dataset id (#{id_last}) and permalink (#{permalink_last}): https://github.com/IQSS/dataverse/issues/758" +end +# +#pp(entry) +#edit = XPath.first(entry, "//[@rel='edit']") +#puts edit +statement_link = XPath.first(entry, "//link[@rel='http://purl.org/net/sword/terms/statement']/@href") +puts "GET of #{statement_link}" +statement = Document.new `scripts/api/data-deposit/get #{statement_link}` +#pp(statement) +state = XPath.first(statement, "//category[@term='latestVersionState']").text +puts state diff --git a/dataversedock/testdata/scripts/api/data-deposit/publish-dataset b/dataversedock/testdata/scripts/api/data-deposit/publish-dataset new file mode 100755 index 0000000..5325a5c --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/publish-dataset @@ -0,0 +1,14 @@ +#!/bin/sh +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi +# We cat /dev/null so that contentLength is zero. 
This makes headersOnly true:: https://github.com/swordapp/JavaServer2.0/blob/sword2-server-1.0/src/main/java/org/swordapp/server/ContainerAPI.java#L338 +# 'to tell curl to read the format from stdin you write "@-"' -- http://curl.haxx.se/docs/manpage.html +cat /dev/null | curl -s --insecure -X POST -H "In-Progress: false" --data-binary @- https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ +| xmllint --format - diff --git a/dataversedock/testdata/scripts/api/data-deposit/publish-dataverse b/dataversedock/testdata/scripts/api/data-deposit/publish-dataverse new file mode 100755 index 0000000..fa22c4b --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/publish-dataverse @@ -0,0 +1,13 @@ +#!/bin/sh +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + echo "Please supply a dataverse alias" + exit 1 +else + DATAVERSE_ALIAS=$1 + #DATAVERSE_ALIAS=peteTop +fi +cat /dev/null | curl -s --insecure -X POST -H "In-Progress: false" --data-binary @- https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/dataverse/$DATAVERSE_ALIAS \ +| xmllint --format - diff --git a/dataversedock/testdata/scripts/api/data-deposit/replace-dataset-metadata b/dataversedock/testdata/scripts/api/data-deposit/replace-dataset-metadata new file mode 100755 index 0000000..35cdf09 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/replace-dataset-metadata @@ -0,0 +1,12 @@ +#!/bin/sh +USERNAME=pete +PASSWORD=pete +SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi +curl --insecure --upload-file "scripts/search/tests/data/dataset-versioning03-setup.xml" -H "Content-Type: application/atom+xml" https://$USERNAME:$PASSWORD@$SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ +| xmllint -format - \ diff --git 
a/dataversedock/testdata/scripts/api/data-deposit/service-document b/dataversedock/testdata/scripts/api/data-deposit/service-document new file mode 100755 index 0000000..f59b5dc --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/service-document @@ -0,0 +1,12 @@ +#!/bin/bash +. scripts/search/export-keys +if [ -z "$1" ]; then + HOSTNAME=localhost:8181 +else + HOSTNAME=$1 +fi +URL=https://$HOSTNAME/dvn/api/data-deposit/v1/swordv2/service-document +echo Retrieving service document from $URL >&2 +OUTPUT=`curl -s --insecure -u $ADMINKEY: $URL` +echo $OUTPUT +echo $OUTPUT | xmllint -format - diff --git a/dataversedock/testdata/scripts/api/data-deposit/show-atom-entry b/dataversedock/testdata/scripts/api/data-deposit/show-atom-entry new file mode 100755 index 0000000..fee29cf --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/show-atom-entry @@ -0,0 +1,12 @@ +#!/bin/bash +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi +curl --insecure -s -u $USERNAME:$PASSWORD https://$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ +| xmllint -format - diff --git a/dataversedock/testdata/scripts/api/data-deposit/show-files b/dataversedock/testdata/scripts/api/data-deposit/show-files new file mode 100755 index 0000000..9cf93fe --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/show-files @@ -0,0 +1,3 @@ +#!/bin/sh +#scripts/api/data-deposit/show-statement | xpath "//entry/content/@*[name()='type' or name()='src']" +scripts/api/data-deposit/show-statement | xpath '//entry/id/text()' | cut -d'/' -f11,12,13 diff --git a/dataversedock/testdata/scripts/api/data-deposit/show-statement b/dataversedock/testdata/scripts/api/data-deposit/show-statement new file mode 100755 index 0000000..7170ab7 --- /dev/null +++ 
b/dataversedock/testdata/scripts/api/data-deposit/show-statement @@ -0,0 +1,13 @@ +#!/bin/sh +USERNAME=spruce +PASSWORD=spruce +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi +curl --insecure -s https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/statement/study/$GLOBAL_ID \ +| xmllint -format - \ +#| xpath '//entry/title' diff --git a/dataversedock/testdata/scripts/api/data-deposit/unsupported-download-files b/dataversedock/testdata/scripts/api/data-deposit/unsupported-download-files new file mode 100755 index 0000000..cefe963 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/unsupported-download-files @@ -0,0 +1,12 @@ +#!/bin/sh +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi +curl -s --insecure https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/study/$GLOBAL_ID \ +| xmllint -format - diff --git a/dataversedock/testdata/scripts/api/data-deposit/upload-file b/dataversedock/testdata/scripts/api/data-deposit/upload-file new file mode 100755 index 0000000..576603d --- /dev/null +++ b/dataversedock/testdata/scripts/api/data-deposit/upload-file @@ -0,0 +1,13 @@ +#!/bin/bash -x +USERNAME=spruce +PASSWORD=spruce +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + EDIT_MEDIA_URL=`scripts/api/data-deposit/list-datasets | xpath 'string(//link/@href)' 2>/dev/null` +else + EDIT_MEDIA_URL=$1 +fi +curl -s --insecure --data-binary @scripts/search/data/binary/trees.zip -H "Content-Disposition: filename=trees.zip" -H "Content-Type: application/zip" -H "Packaging: http://purl.org/net/sword/package/SimpleZip" -u $USERNAME:$PASSWORD $EDIT_MEDIA_URL \ +| xmllint -format - 
+#curl -s --insecure --data-binary @scripts/search/data/binary/trees.zip -H "Content-Disposition: filename=trees.zip" -H "Content-Type: application/zip" -H "Packaging: http://purl.org/net/sword/package/SimpleZip" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/study/doi:10.5072/FK2/19 \ +#| xmllint -format - diff --git a/dataversedock/testdata/scripts/api/data/authentication-providers/base-oauth.json b/dataversedock/testdata/scripts/api/data/authentication-providers/base-oauth.json new file mode 100644 index 0000000..3d01cac --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/authentication-providers/base-oauth.json @@ -0,0 +1,8 @@ +{ + "id":"base-oauth", + "factoryAlias":"oauth2", + "title":"sample base definition file for oauth2 providers.", + "subtitle":"A base file, though - do not run this.", + "factoryData":"type:idOfOAuthService | name1: value1|name2: value2 value2.1 value 2.1.1 | name: value42", + "enabled":true +} diff --git a/dataversedock/testdata/scripts/api/data/authentication-providers/builtin.json b/dataversedock/testdata/scripts/api/data/authentication-providers/builtin.json new file mode 100644 index 0000000..1c98e6c --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/authentication-providers/builtin.json @@ -0,0 +1,8 @@ +{ + "id":"builtin", + "factoryAlias":"BuiltinAuthenticationProvider", + "title":"Dataverse Local", + "subtitle":"Datavers' Internal Authentication provider", + "factoryData":"", + "enabled":true +} diff --git a/dataversedock/testdata/scripts/api/data/authentication-providers/echo-dignified.json b/dataversedock/testdata/scripts/api/data/authentication-providers/echo-dignified.json new file mode 100644 index 0000000..177fd12 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/authentication-providers/echo-dignified.json @@ -0,0 +1,8 @@ +{ + "id":"echo-dignified", + "factoryAlias":"Echo", + "title":"Dignified Echo provider", + "subtitle":"Approves everyone, based on their 
credentials, and adds some flair", + "factoryData":"Sir,Esq.", + "enabled":true +} diff --git a/dataversedock/testdata/scripts/api/data/authentication-providers/orcid-sandbox.json b/dataversedock/testdata/scripts/api/data/authentication-providers/orcid-sandbox.json new file mode 100644 index 0000000..3a1c311 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/authentication-providers/orcid-sandbox.json @@ -0,0 +1,8 @@ +{ + "id":"orcid-v2-sandbox", + "factoryAlias":"oauth2", + "title":"ORCID Sandbox", + "subtitle":"ORCiD - sandbox (v2)", + "factoryData":"type: orcid | userEndpoint: https://api.sandbox.orcid.org/v2.0/{ORCID}/person | clientId: APP-HIV99BRM37FSWPH6 | clientSecret: ee844b70-f223-4f15-9b6f-4991bf8ed7f0", + "enabled":true +} diff --git a/dataversedock/testdata/scripts/api/data/dataset-bad-missingInitialVersion.json b/dataversedock/testdata/scripts/api/data/dataset-bad-missingInitialVersion.json new file mode 100644 index 0000000..8557020 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/dataset-bad-missingInitialVersion.json @@ -0,0 +1,6 @@ +{ + "authority": "anAuthority", + "identifier": "dataset-one", + "persistentUrl": "http://dx.doi.org/10.5072/FK2/9", + "protocol": "chadham-house-rule" +} \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/dataset-create-new.json b/dataversedock/testdata/scripts/api/data/dataset-create-new.json new file mode 100644 index 0000000..0017da1 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/dataset-create-new.json @@ -0,0 +1,124 @@ +{ + "authority": "anAuthority", + "identifier": "dataset-one", + "persistentUrl": "http://dx.doi.org/10.5072/FK2/9", + "protocol": "chadham-house-rule", + "datasetVersion": { + "metadataBlocks": { + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "typeName": "title", + "multiple": false, + "typeClass": "primitive", + "value": "Dataset One" + }, + { + "typeName": "author", + "multiple": true, + "typeClass": 
"compound", + "value": [ + { + "authorName": { + "typeName": "authorName", + "multiple": false, + "typeClass": "primitive", + "value": "Smith, Robert" + }, + "authorAffiliation": { + "typeName": "authorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "The Smiths" + } + }, + { + "authorName": { + "typeName": "authorName", + "multiple": false, + "typeClass": "primitive", + "value": "Kew, Susie" + }, + "authorAffiliation": { + "typeName": "authorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Creedence Clearwater Revival" + } + } + ] + }, + { + "typeName": "datasetContact", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "datasetContactName": { + "typeName": "datasetContactName", + "multiple": false, + "typeClass": "primitive", + "value": "Dataverse, Admin" + }, + "datasetContactAffiliation": { + "typeName": "datasetContactAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Sample Datasets, inc." + }, + "datasetContactEmail": { + "typeName": "datasetContactEmail", + "multiple": false, + "typeClass": "primitive", + "value": "sammi@sample.com" + } + } + ] + }, + { + "typeName": "dsDescription", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "multiple": false, + "typeClass": "primitive", + "value": "This is a short text blurb describing the dataset. It is very informative and somewhat self-describing." 
+ }, + "dsDescriptionDate": { + "typeName": "dsDescriptionDate", + "multiple": false, + "typeClass": "primitive", + "value": "2015-01-14" + } + } + ] + }, + { + "typeName": "subject", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "Chemistry" + ] + }, + { + "typeName": "depositor", + "multiple": false, + "typeClass": "primitive", + "value": "Admin Dataverse" + }, + { + "typeName": "dateOfDeposit", + "multiple": false, + "typeClass": "primitive", + "value": "2015-01-14" + } + ] + } + } + } +} \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/dataset-create-new2.json b/dataversedock/testdata/scripts/api/data/dataset-create-new2.json new file mode 100644 index 0000000..14fabee --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/dataset-create-new2.json @@ -0,0 +1,110 @@ +{ + "authority": "anAuthority", + "identifier": "dataset-two", + "persistentUrl": "http://dx.doi.org/10.5072/FK2/9", + "protocol": "chadham-house-rule", + "datasetVersion": { + "metadataBlocks": { + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "typeName": "title", + "multiple": false, + "typeClass": "primitive", + "value": "Dataset Two" + }, + { + "typeName": "author", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "authorName": { + "typeName": "authorName", + "multiple": false, + "typeClass": "primitive", + "value": "Gironi, Moe" + }, + "authorAffiliation": { + "typeName": "authorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Ciao Bella Ristorante" + } + } + ] + }, + { + "typeName": "datasetContact", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "datasetContactName": { + "typeName": "datasetContactName", + "multiple": false, + "typeClass": "primitive", + "value": "Dataverse, Admin" + }, + "datasetContactAffiliation": { + "typeName": "datasetContactAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Sample Datasets, ltd." 
+ }, + "datasetContactEmail": { + "typeName": "datasetContactEmail", + "multiple": false, + "typeClass": "primitive", + "value": "susie@sample.com" + } + } + ] + }, + { + "typeName": "dsDescription", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "multiple": false, + "typeClass": "primitive", + "value": "Description field filled with descriptions. " + }, + "dsDescriptionDate": { + "typeName": "dsDescriptionDate", + "multiple": false, + "typeClass": "primitive", + "value": "2015-01-19" + } + } + ] + }, + { + "typeName": "subject", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "Chemistry" + ] + }, + { + "typeName": "depositor", + "multiple": false, + "typeClass": "primitive", + "value": "D. Positor" + }, + { + "typeName": "dateOfDeposit", + "multiple": false, + "typeClass": "primitive", + "value": "2015-01-19" + } + ] + } + } + } +} \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/dataset-create-new3.json b/dataversedock/testdata/scripts/api/data/dataset-create-new3.json new file mode 100644 index 0000000..d643ead --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/dataset-create-new3.json @@ -0,0 +1,258 @@ +{ + "authority": "anAuthority", + "identifier": "dataset-three", + "persistentUrl": "http://dx.doi.org/10.5072/FK2/9", + "protocol": "chadham-house-rule", + "datasetVersion": { + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "SampleTitle", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorAffiliation": { + "value": "Top", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "Privileged, Pete", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + "authorAffiliation": { + "value": "UMASS, Amherst", + "typeClass": "primitive", + "multiple": 
false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "Borrator, Colla", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "typeName": "datasetContact", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "datasetContactName": { + "typeName": "datasetContactName", + "multiple": false, + "typeClass": "primitive", + "value": "Dataverse, Admin" + }, + "datasetContactAffiliation": { + "typeName": "datasetContactAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Sample Datasets, ltd." + }, + "datasetContactEmail": { + "typeName": "datasetContactEmail", + "multiple": false, + "typeClass": "primitive", + "value": "susie@sample.com" + } + } + ] + }, + { + "typeName": "dsDescription", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "multiple": false, + "typeClass": "primitive", + "value": "Description field filled with descriptions. 
" + }, + "dsDescriptionDate": { + "typeName": "dsDescriptionDate", + "multiple": false, + "typeClass": "primitive", + "value": "2015-01-19" + } + } + ] + }, + { + "value": [ + "Arts and Humanities", + "Astronomy and Astrophysics" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + }, + { + "value": "Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note.\r\nNOTANOTANOTANOTANnot.e\r\n", + "typeClass": "primitive", + "multiple": false, + "typeName": "notesText" + }, + { + "typeName": "distributor", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "distributorName": { + "typeName": "distributorName", + "multiple": false, + "typeClass": "primitive", + "value": "Ibutor, Dist r." 
+ }, + "distributorAffiliation": { + "typeName": "distributorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Science Mag" + }, + "distributorAbbreviation": { + "typeName": "distributorAbbreviation", + "multiple": false, + "typeClass": "primitive", + "value": "dst" + } + } + ] + }, + { + "value": [ + { + "otherIdAgency": { + "value": "NSF", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdAgency" + }, + "otherIdValue": { + "value": "NSF1234", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "otherId" + }, + { + "value": "2014-02-03", + "typeClass": "primitive", + "multiple": false, + "typeName": "productionDate" + }, + { + "value": "Cambridge, MA", + "typeClass": "primitive", + "multiple": false, + "typeName": "productionPlace" + }, + { + "value": [ + { + "grantNumberAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberAgency" + }, + "grantNumberValue": { + "value": "NIH1231245154", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberValue" + } + }, + { + "grantNumberAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberAgency" + }, + "grantNumberValue": { + "value": "NIH99999999", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "grantNumber" + }, + { + "value": "Privileged, Pete", + "typeClass": "primitive", + "multiple": false, + "typeName": "depositor" + }, + { + "value": "2014-05-20", + "typeClass": "primitive", + "multiple": false, + "typeName": "dateOfDeposit" + }, + { + "value": [ + "Bananas" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "relatedMaterial" + }, + { + "value": [ + "Data about bananas" + ], + "typeClass": "primitive", + "multiple": true, + 
"typeName": "relatedDatasets" + }, + { + "value": [ + "other ref other ref other ref ef ef" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "otherReferences" + } + ], + "displayName": "Citation Metadata" + } + }, + "createTime": "2014-05-20 11:52:55 -04", + "UNF": "UNF", + "id": 1, + "versionNumber": 1, + "versionMinorNumber": 0, + "versionState": "DRAFT", + "distributionDate": "Distribution Date", + "productionDate": "Production Date" + } +} \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/dataset-updated-version.json b/dataversedock/testdata/scripts/api/data/dataset-updated-version.json new file mode 100644 index 0000000..b39956a --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/dataset-updated-version.json @@ -0,0 +1,241 @@ +{ + "createTime": "2014-05-20 11:52:55 -04", + "UNF": "UNF", + "versionNumber": 1, + "versionMinorNumber": 0, + "versionState": "DRAFT", + "distributionDate": "Distribution Date", + "productionDate": "Production Date", + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "UpdatedTitle", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorAffiliation": { + "value": "Tippie Top", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "McPrivileged, Pete", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + "authorAffiliation": { + "value": "UNC", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "Borrator, Colla", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + "authorAffiliation": { + "value": "NASA", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "Naut, Astro", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } 
+ } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": "Lorem ipsum dolor sit amet, consectetur adipisicing elit. Quos, eos, natus soluta porro harum beatae voluptatem unde rerum eius quaerat officiis maxime autem asperiores facere.", + "typeClass": "primitive", + "multiple": false, + "typeName": "dsDescription" + }, + { + "value": [ + "kw10", + "kw20", + "kw30" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "keyword" + }, + { + "value": [ + "Arts and Humanities", + "Astronomy and Astrophysics" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + }, + { + "value": "Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note.\r\nNOTANOTANOTANOTANnot.e\r\n", + "typeClass": "primitive", + "multiple": false, + "typeName": "notesText" + }, + { + "value": [ + { + "otherIdAgency": { + "value": "NSF", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdAgency" + }, + "otherIdValue": { + "value": "NSF1234", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdValue" + } + }, + { + "otherIdAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdAgency" + }, + "otherIdValue": { + "value": "NIH98765", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "otherId" + }, + { + "value": [ + { + "contributorName": { + "value": "Dennis", + "typeClass": "primitive", + "multiple": false, + "typeName": "contributorName" + }, + "contributorType": { + "value": "Funder", + "typeClass": "controlledVocabulary", + 
"multiple": false, + "typeName": "contributorType" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "contributor" + }, + { + "value": "2014-02-03", + "typeClass": "primitive", + "multiple": false, + "typeName": "productionDate" + }, + { + "value": "Cambridge, UK", + "typeClass": "primitive", + "multiple": false, + "typeName": "productionPlace" + }, + { + "value": [ + { + "grantNumberAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberAgency" + }, + "grantNumberValue": { + "value": "NIH1231245154", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberValue" + } + }, + { + "grantNumberAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberAgency" + }, + "grantNumberValue": { + "value": "NIH99999999", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "grantNumber" + }, + { + "value": "Privileged, Pete", + "typeClass": "primitive", + "multiple": false, + "typeName": "depositor" + }, + { + "value": "2014-05-20", + "typeClass": "primitive", + "multiple": false, + "typeName": "dateOfDeposit" + }, + { + "value": [ + "Bananas" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "relatedMaterial" + }, + { + "value": [ + "Data about bananas" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "relatedDatasets" + }, + { + "value": [ + "other ref other ref other ref ef ef" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "otherReferences" + } + ], + "displayName": "Citation Metadata" + } + } +} \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/dataset-updated-version2.json b/dataversedock/testdata/scripts/api/data/dataset-updated-version2.json new file mode 100644 index 0000000..f173eef --- /dev/null +++ 
b/dataversedock/testdata/scripts/api/data/dataset-updated-version2.json @@ -0,0 +1,218 @@ +{ + "UNF": "UNF", + "versionNumber": 1, + "versionMinorNumber": 0, + "versionState": "DRAFT", + "distributionDate": "Distribution Date", + "productionDate": "Production Date", + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "This is another title", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorAffiliation": { + "value": "Tippie Top", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "McPrivileged, Pete", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + "authorAffiliation": { + "value": "Uber Under", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "McNew, Oldie", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + "authorAffiliation": { + "value": "UNC", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "Borrator, Colla", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + "authorAffiliation": { + "value": "NASA", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "Naut, Astro", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + + { + "value": [ + "Arts and Humanities", + "Astronomy and Astrophysics" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + }, + { + "value": "Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note 
Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note.\r\nNOTANOTANOTANOTANnot.e\r\n", + "typeClass": "primitive", + "multiple": false, + "typeName": "notesText" + }, + { + "value": [ + { + "otherIdAgency": { + "value": "NSF", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdAgency" + }, + "otherIdValue": { + "value": "NSF1234", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdValue" + } + }, + { + "otherIdAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdAgency" + }, + "otherIdValue": { + "value": "NIH98765", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "otherId" + }, + { + "value": "2014-02-03", + "typeClass": "primitive", + "multiple": false, + "typeName": "productionDate" + }, + { + "value": "Cambridge, UK", + "typeClass": "primitive", + "multiple": false, + "typeName": "productionPlace" + }, + { + "value": [ + { + "grantNumberAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberAgency" + }, + "grantNumberValue": { + "value": "NIH1231245154", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberValue" + } + }, + { + "grantNumberAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberAgency" + }, + "grantNumberValue": { + "value": "NIH99999999", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "grantNumber" + }, + { + "value": "Privileged, Pete", + "typeClass": "primitive", + "multiple": false, + "typeName": "depositor" + }, + { + "value": "2014-05-20", + "typeClass": "primitive", + "multiple": false, + "typeName": "dateOfDeposit" + }, + 
{ + "value": [ + "Bananas" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "relatedMaterial" + }, + { + "value": [ + "Data about bananas" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "relatedDatasets" + }, + { + "value": [ + "other ref other ref other ref ef ef" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "otherReferences" + } + ], + "displayName": "Citation Metadata" + } + } +} \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/dataset-version.json b/dataversedock/testdata/scripts/api/data/dataset-version.json new file mode 100644 index 0000000..7037082 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/dataset-version.json @@ -0,0 +1,110 @@ +{ + "id": 2, + "versionState": "DRAFT", + "distributionDate": "Distribution Date", + "productionDate": "Production Date", + "lastUpdateTime": "2015-01-14 05:58:27 +02", + "createTime": "2015-01-14 05:48:30 +02", + "metadataBlocks": { + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "typeName": "title", + "multiple": false, + "typeClass": "primitive", + "value": "Sample-published-dataset (updated)" + }, + { + "typeName": "author", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "authorName": { + "typeName": "authorName", + "multiple": false, + "typeClass": "primitive", + "value": "Kew, Susie" + }, + "authorAffiliation": { + "typeName": "authorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Creedence Clearwater Revival" + } + } + ] + }, + { + "typeName": "datasetContact", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "datasetContactName": { + "typeName": "datasetContactName", + "multiple": false, + "typeClass": "primitive", + "value": "Dataverse, Admin" + }, + "datasetContactAffiliation": { + "typeName": "datasetContactAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Dataverse" + }, + "datasetContactEmail": { + 
"typeName": "datasetContactEmail", + "multiple": false, + "typeClass": "primitive", + "value": "admin@malinator.com" + } + } + ] + }, + { + "typeName": "dsDescription", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "multiple": false, + "typeClass": "primitive", + "value": "This is a public dataset" + }, + "dsDescriptionDate": { + "typeName": "dsDescriptionDate", + "multiple": false, + "typeClass": "primitive", + "value": "2015-01-14" + } + } + ] + }, + { + "typeName": "subject", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "Chemistry" + ] + }, + { + "typeName": "depositor", + "multiple": false, + "typeClass": "primitive", + "value": "Admin Dataverse" + }, + { + "typeName": "dateOfDeposit", + "multiple": false, + "typeClass": "primitive", + "value": "2015-01-14" + } + ] + } + } +} diff --git a/dataversedock/testdata/scripts/api/data/dv-pete-sub-normal.json b/dataversedock/testdata/scripts/api/data/dv-pete-sub-normal.json new file mode 100644 index 0000000..769eb66 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/dv-pete-sub-normal.json @@ -0,0 +1,13 @@ +{ + "alias": "peteSubNormal", + "name": "Pete's public place", + "affiliation": "Affiliation value", + "permissionRoot": false, + "description": "Where Pete stores normal data", + "dataverseContacts": [ + { + "contactEmail": "pete@mailinator.com" + } + ], + "dataverseSubjects": ["Law"] +} diff --git a/dataversedock/testdata/scripts/api/data/dv-pete-sub-restricted.json b/dataversedock/testdata/scripts/api/data/dv-pete-sub-restricted.json new file mode 100644 index 0000000..b76686a --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/dv-pete-sub-restricted.json @@ -0,0 +1,13 @@ +{ + "alias": "peteSubRestricted", + "name": "Pete's restricted data", + "affiliation": "Affiliation value", + "permissionRoot": false, + "description": "Where Pete stores restricted data, to be shared in moderation", 
+ "dataverseContacts": [ + { + "contactEmail": "pete@mailinator.com" + } + ], + "dataverseSubjects": ["Chemistry"] +} diff --git a/dataversedock/testdata/scripts/api/data/dv-pete-sub-secret.json b/dataversedock/testdata/scripts/api/data/dv-pete-sub-secret.json new file mode 100644 index 0000000..6a25d45 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/dv-pete-sub-secret.json @@ -0,0 +1,13 @@ +{ + "alias": "peteSubSecret", + "name": "Pete's secrets", + "affiliation": "Affiliation value", + "permissionRoot": true, + "description": "Where Pete stores secret data", + "dataverseContacts": [ + { + "contactEmail": "pete@mailinator.com" + } + ], + "dataverseSubjects": ["Astronomy and Astrophysics"] +} diff --git a/dataversedock/testdata/scripts/api/data/dv-pete-top.json b/dataversedock/testdata/scripts/api/data/dv-pete-top.json new file mode 100644 index 0000000..dfb949f --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/dv-pete-top.json @@ -0,0 +1,13 @@ +{ + "alias": "peteTop", + "name": "Top dataverse of Pete", + "affiliation": "Affiliation value", + "permissionRoot": false, + "description": "Pete's top level dataverse", + "dataverseContacts": [ + { + "contactEmail": "pete@mailinator.com" + } + ], + "dataverseSubjects": ["Arts and Humanities"] +} diff --git a/dataversedock/testdata/scripts/api/data/dv-root.json b/dataversedock/testdata/scripts/api/data/dv-root.json new file mode 100644 index 0000000..bfbfedd --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/dv-root.json @@ -0,0 +1,13 @@ +{ + "alias": "root", + "name": "Root", + "permissionRoot": false, + "facetRoot": true, + "description": "The root dataverse.", + "dataverseContacts": [ + { + "contactEmail": "root@mailinator.com" + } + ], + "dataverseSubjects": ["ALL"] +} diff --git a/dataversedock/testdata/scripts/api/data/dv-uma-deletable.json b/dataversedock/testdata/scripts/api/data/dv-uma-deletable.json new file mode 100644 index 0000000..03381dd --- /dev/null +++ 
b/dataversedock/testdata/scripts/api/data/dv-uma-deletable.json @@ -0,0 +1,13 @@ +{ + "alias": "umaDeletable", + "name": "Uma's deletable", + "affiliation": "Affiliation value", + "permissionRoot": true, + "description": "Forgettable, deletable, temporary.", + "dataverseContacts": [ + { + "contactEmail": "Uma@mailinator.com" + } + ], + "dataverseSubjects": ["Business and Management"] +} diff --git a/dataversedock/testdata/scripts/api/data/dv-uma-sub1.json b/dataversedock/testdata/scripts/api/data/dv-uma-sub1.json new file mode 100644 index 0000000..1f9ece2 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/dv-uma-sub1.json @@ -0,0 +1,13 @@ +{ + "alias": "umaSub1", + "name": "Uma's first", + "affiliation": "Affiliation value", + "permissionRoot": false, + "description": "Some data of Uma", + "dataverseContacts": [ + { + "contactEmail": "Uma@mailinator.com" + } + ], + "dataverseSubjects": ["Medicine, Health & Life Sciences"] +} diff --git a/dataversedock/testdata/scripts/api/data/dv-uma-sub2.json b/dataversedock/testdata/scripts/api/data/dv-uma-sub2.json new file mode 100644 index 0000000..590d144 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/dv-uma-sub2.json @@ -0,0 +1,13 @@ +{ + "alias": "umaSub2", + "name": "Uma's restricted", + "affiliation": "Affiliation value", + "permissionRoot": true, + "description": "Pete can't get here", + "dataverseContacts": [ + { + "contactEmail": "Uma@mailinator.com" + } + ], + "dataverseSubjects": ["Engineering"] +} diff --git a/dataversedock/testdata/scripts/api/data/dv-uma-top.json b/dataversedock/testdata/scripts/api/data/dv-uma-top.json new file mode 100644 index 0000000..d138619 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/dv-uma-top.json @@ -0,0 +1,13 @@ +{ + "alias": "umaTop", + "name": "Top dataverse of Uma", + "affiliation": "Affiliation value", + "permissionRoot": false, + "description": "Uma's top level dataverse", + "dataverseContacts": [ + { + "contactEmail": "Uma@mailinator.com" + } 
+ ], + "dataverseSubjects": ["Mathematical Sciences"] +} diff --git a/dataversedock/testdata/scripts/api/data/explicit-group-2nd.json b/dataversedock/testdata/scripts/api/data/explicit-group-2nd.json new file mode 100644 index 0000000..9f3fac5 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/explicit-group-2nd.json @@ -0,0 +1,5 @@ +{ + "description":"The second explicit group", + "displayName":"Explicit Group number two", + "aliasInOwner":"EG:II" +} diff --git a/dataversedock/testdata/scripts/api/data/explicit-group-first-edit.json b/dataversedock/testdata/scripts/api/data/explicit-group-first-edit.json new file mode 100644 index 0000000..e1c9339 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/explicit-group-first-edit.json @@ -0,0 +1,5 @@ +{ + "description":"This is the description field", + "displayName":"Explicit Group number one (edited)", + "aliasInOwner":"EG-1" +} diff --git a/dataversedock/testdata/scripts/api/data/explicit-group-first.json b/dataversedock/testdata/scripts/api/data/explicit-group-first.json new file mode 100644 index 0000000..85b74ae --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/explicit-group-first.json @@ -0,0 +1,5 @@ +{ + "description":"This is the description field", + "displayName":"Explicit Group number one", + "aliasInOwner":"EG-1" +} diff --git a/dataversedock/testdata/scripts/api/data/ipGroup-all-ipv4.json b/dataversedock/testdata/scripts/api/data/ipGroup-all-ipv4.json new file mode 100644 index 0000000..c5ff32d --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/ipGroup-all-ipv4.json @@ -0,0 +1,5 @@ +{ + "alias":"all-ipv4", + "name":"IP group to match all IPv4 addresses", + "ranges" : [["0.0.0.0", "255.255.255.255"]] +} diff --git a/dataversedock/testdata/scripts/api/data/ipGroup-all.json b/dataversedock/testdata/scripts/api/data/ipGroup-all.json new file mode 100644 index 0000000..b9fc163 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/ipGroup-all.json @@ -0,0 +1,6 @@ +{ + 
"alias":"ipGroup3", + "name":"IP group to match all IPv4 and IPv6 addresses", + "ranges" : [["0.0.0.0", "255.255.255.255"], + ["::", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"]] +} diff --git a/dataversedock/testdata/scripts/api/data/ipGroup-localhost.json b/dataversedock/testdata/scripts/api/data/ipGroup-localhost.json new file mode 100644 index 0000000..4f8d2f7 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/ipGroup-localhost.json @@ -0,0 +1,5 @@ +{ + "alias":"localhost", + "name":"Localhost connections", + "addresses": [ "::1", "127.0.0.1" ] +} diff --git a/dataversedock/testdata/scripts/api/data/ipGroup-single-IPv4.json b/dataversedock/testdata/scripts/api/data/ipGroup-single-IPv4.json new file mode 100644 index 0000000..515c512 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/ipGroup-single-IPv4.json @@ -0,0 +1,5 @@ +{ + "alias":"singleIPv4", + "name":"Single IPv4", + "addresses" : ["128.0.0.7"] +} diff --git a/dataversedock/testdata/scripts/api/data/ipGroup-single-IPv6.json b/dataversedock/testdata/scripts/api/data/ipGroup-single-IPv6.json new file mode 100644 index 0000000..73eaa8e --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/ipGroup-single-IPv6.json @@ -0,0 +1,5 @@ +{ + "alias":"singleIPv6", + "name":"Single IPv6", + "addresses" : ["aa:bb:cc:dd:ee:ff::1"] +} diff --git a/dataversedock/testdata/scripts/api/data/ipGroup1.json b/dataversedock/testdata/scripts/api/data/ipGroup1.json new file mode 100644 index 0000000..bf4b114 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/ipGroup1.json @@ -0,0 +1,7 @@ +{ + "alias":"ipGroup1", + "name":"The first IP Group", + "ranges" : [["60.0.0.0", "60.0.0.255"], + ["128.0.0.0", "129.0.255.255"], + ["ff:abcd:eff::ffff", "ff:abcd:eff::0"]] +} \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/ipGroup2.json b/dataversedock/testdata/scripts/api/data/ipGroup2.json new file mode 100644 index 0000000..52c5e8c --- /dev/null +++ 
b/dataversedock/testdata/scripts/api/data/ipGroup2.json @@ -0,0 +1,8 @@ +{ + "alias":"ipGroup2", + "name":"The second IP Group", + "ranges" : [["207.0.0.0", "207.0.0.255"], + ["128.0.0.0", "129.0.255.255"], + ["dd:2:2:2:2:2:2:2","dd:a:a:a:a:a:a:a"] + ] +} diff --git a/dataversedock/testdata/scripts/api/data/ipGroupDuplicate-v1.json b/dataversedock/testdata/scripts/api/data/ipGroupDuplicate-v1.json new file mode 100644 index 0000000..eda0c8e --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/ipGroupDuplicate-v1.json @@ -0,0 +1,7 @@ +{ + "alias":"ipGroup-dup", + "name":"IP Group with duplicate files (1)", + "description":"This is the FIRST version of the group", + "ranges" : [["60.0.0.0", "60.0.0.255"], + ["60::1", "60::ffff"]] +} diff --git a/dataversedock/testdata/scripts/api/data/ipGroupDuplicate-v2.json b/dataversedock/testdata/scripts/api/data/ipGroupDuplicate-v2.json new file mode 100644 index 0000000..8db88e9 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/ipGroupDuplicate-v2.json @@ -0,0 +1,7 @@ +{ + "alias":"ipGroup-dup", + "name":"IP Group with duplicate files-v2", + "description":"This is the second version of the group", + "ranges" : [["70.0.0.0", "70.0.0.255"], + ["70::1", "70::ffff"]] +} diff --git a/dataversedock/testdata/scripts/api/data/metadatablocks/astrophysics.tsv b/dataversedock/testdata/scripts/api/data/metadatablocks/astrophysics.tsv new file mode 100644 index 0000000..d6266d2 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/metadatablocks/astrophysics.tsv @@ -0,0 +1,54 @@ +#metadataBlock name dataverseAlias displayName + astrophysics Astronomy and Astrophysics Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + astroType Type The nature or genre of the content of the files in the dataset. 
text 0 TRUE TRUE TRUE TRUE FALSE FALSE astrophysics + astroFacility Facility The observatory or facility where the data was obtained. text 1 TRUE TRUE TRUE TRUE FALSE FALSE astrophysics + astroInstrument Instrument The instrument used to collect the data. text 2 TRUE TRUE TRUE TRUE FALSE FALSE astrophysics + astroObject Object Astronomical Objects represented in the data (Given as SIMBAD recognizable names preferred). text 3 TRUE FALSE TRUE TRUE FALSE FALSE astrophysics + resolution.Spatial Spatial Resolution The spatial (angular) resolution that is typical of the observations, in decimal degrees. text 4 TRUE FALSE FALSE TRUE FALSE FALSE astrophysics + resolution.Spectral Spectral Resolution The spectral resolution that is typical of the observations, given as the ratio λ/Δλ. text 5 TRUE FALSE FALSE TRUE FALSE FALSE astrophysics + resolution.Temporal Time Resolution The temporal resolution that is typical of the observations, given in seconds. text 6 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics + coverage.Spectral.Bandpass Bandpass Conventional bandpass name text 7 TRUE TRUE TRUE TRUE FALSE FALSE astrophysics + coverage.Spectral.CentralWavelength Central Wavelength (m) The central wavelength of the spectral bandpass, in meters. Enter a floating-point number. float 8 TRUE FALSE TRUE TRUE FALSE FALSE astrophysics + coverage.Spectral.Wavelength Wavelength Range The minimum and maximum wavelength of the spectral bandpass. Enter a floating-point number. none 9 FALSE FALSE TRUE FALSE FALSE FALSE astrophysics + coverage.Spectral.MinimumWavelength Minimum (m) The minimum wavelength of the spectral bandpass, in meters. Enter a floating-point number. float 10 TRUE FALSE FALSE TRUE FALSE FALSE coverage.Spectral.Wavelength astrophysics + coverage.Spectral.MaximumWavelength Maximum (m) The maximum wavelength of the spectral bandpass, in meters. Enter a floating-point number. 
float 11 TRUE FALSE FALSE TRUE FALSE FALSE coverage.Spectral.Wavelength astrophysics + coverage.Temporal Dataset Date Range Time period covered by the data. none 12 FALSE FALSE TRUE FALSE FALSE FALSE astrophysics + coverage.Temporal.StartTime Start Dataset Start Date YYYY-MM-DD date 13 TRUE FALSE FALSE TRUE FALSE FALSE coverage.Temporal astrophysics + coverage.Temporal.StopTime End Dataset End Date YYYY-MM-DD date 14 TRUE FALSE FALSE TRUE FALSE FALSE coverage.Temporal astrophysics + coverage.Spatial Sky Coverage The sky coverage of the data object. text 15 FALSE FALSE TRUE FALSE FALSE FALSE astrophysics + coverage.Depth Depth Coverage The (typical) depth coverage, or sensitivity, of the data object in Jy. Enter a floating-point number. float 16 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics + coverage.ObjectDensity Object Density The (typical) density of objects, catalog entries, telescope pointings, etc., on the sky, in number per square degree. Enter a floating-point number. float 17 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics + coverage.ObjectCount Object Count The total number of objects, catalog entries, etc., in the data object. Enter an integer. int 18 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics + coverage.SkyFraction Fraction of Sky The fraction of the sky represented in the observations, ranging from 0 to 1. Enter a floating-point number. float 19 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics + coverage.Polarization Polarization The polarization coverage text 20 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics + redshiftType RedshiftType RedshiftType string C "Redshift"; or "Optical" or "Radio" definitions of Doppler velocity used in the data object. text 21 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics + resolution.Redshift Redshift Resolution The resolution in redshift (unitless) or Doppler velocity (km/s) in the data object. Enter a floating-point number. 
float 22 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics + coverage.RedshiftValue Redshift Value The value of the redshift (unitless) or Doppler velocity (km/s in the data object. Enter a floating-point number. float 23 FALSE FALSE TRUE FALSE FALSE FALSE astrophysics + coverage.Redshift.MinimumValue Minimum The minimum value of the redshift (unitless) or Doppler velocity (km/s in the data object. Enter a floating-point number. float 24 FALSE FALSE FALSE FALSE FALSE FALSE coverage.RedshiftValue astrophysics + coverage.Redshift.MaximumValue Maximum The maximum value of the redshift (unitless) or Doppler velocity (km/s in the data object. Enter a floating-point number. float 25 FALSE FALSE FALSE FALSE FALSE FALSE coverage.RedshiftValue astrophysics +#controlledVocabulary DatasetField Value identifier displayOrder + astroType Image 0 + astroType Mosaic 1 + astroType EventList 2 + astroType Spectrum 3 + astroType Cube 4 + astroType Table 5 + astroType Catalog 6 + astroType LightCurve 7 + astroType Simulation 8 + astroType Figure 9 + astroType Artwork 10 + astroType Animation 11 + astroType PrettyPicture 12 + astroType Documentation 13 + astroType Other 14 + astroType Library 15 + astroType Press Release 16 + astroType Facsimile 17 + astroType Historical 18 + astroType Observation 19 + astroType Object 20 + astroType Value 21 + astroType ValuePair 22 + astroType Survey 23 \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/metadatablocks/biomedical.tsv b/dataversedock/testdata/scripts/api/data/metadatablocks/biomedical.tsv new file mode 100644 index 0000000..f45c584 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/metadatablocks/biomedical.tsv @@ -0,0 +1,295 @@ +#metadataBlock name dataverseAlias displayName + biomedical Life Sciences Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent 
metadatablock_id + studyDesignType Design Type Design types that are based on the overall experimental design. text 0 TRUE TRUE TRUE TRUE FALSE FALSE biomedical + studyFactorType Factor Type Factors used in the Dataset. text 1 TRUE TRUE TRUE TRUE FALSE FALSE biomedical + studyAssayOrganism Organism The taxonomic name of the organism used in the Dataset or from which the starting biological material derives. text 2 TRUE TRUE TRUE TRUE FALSE FALSE biomedical + studyAssayOtherOrganism Other Organism If Other was selected in Organism, list any other organisms that were used in this Dataset. Terms from the NCBI Taxonomy are recommended. text 3 TRUE FALSE TRUE TRUE FALSE FALSE biomedical + studyAssayMeasurementType Measurement Type A term to qualify the endpoint, or what is being measured (e.g. gene expression profiling; protein identification). text 4 TRUE TRUE TRUE TRUE FALSE FALSE biomedical + studyAssayOtherMeasurmentType Other Measurement Type If Other was selected in Measurement Type, list any other measurement types that were used. Terms from NCBO Bioportal are recommended. text 5 TRUE FALSE TRUE TRUE FALSE FALSE biomedical + studyAssayTechnologyType Technology Type A term to identify the technology used to perform the measurement (e.g. DNA microarray; mass spectrometry). text 6 TRUE TRUE TRUE TRUE FALSE FALSE biomedical + studyAssayPlatform Technology Platform The manufacturer and name of the technology platform used in the assay (e.g. Bruker AVANCE). text 7 TRUE TRUE TRUE TRUE FALSE FALSE biomedical + studyAssayCellType Cell Type The name of the cell line from which the source or sample derives. 
text 8 TRUE TRUE TRUE TRUE FALSE FALSE biomedical +#controlledVocabulary DatasetField Value identifier displayOrder + studyDesignType Case Control EFO_0001427 0 + studyDesignType Cross Sectional EFO_0001428 1 + studyDesignType Cohort Study OCRE100078 2 + studyDesignType Nested Case Control Design NCI_C48202 3 + studyDesignType Not Specified OTHER_DESIGN 4 + studyDesignType Parallel Group Design OBI_0500006 5 + studyDesignType Perturbation Design OBI_0001033 6 + studyDesignType Randomized Controlled Trial MESH_D016449 7 + studyDesignType Technological Design TECH_DESIGN 8 + studyFactorType Age EFO_0000246 0 + studyFactorType Biomarkers BIOMARKERS 1 + studyFactorType Cell Surface Markers CELL_SURFACE_M 2 + studyFactorType Cell Type/Cell Line EFO_0000324;EFO_0000322 3 + studyFactorType Developmental Stage EFO_0000399 4 + studyFactorType Disease State OBI_0001293 5 + studyFactorType Drug Susceptibility IDO_0000469 6 + studyFactorType Extract Molecule FBcv_0010001 7 + studyFactorType Genetic Characteristics OBI_0001404 8 + studyFactorType Immunoprecipitation Antibody OBI_0000690 9 + studyFactorType Organism OBI_0100026 10 + studyFactorType Other OTHER_FACTOR 11 + studyFactorType Passages PASSAGES_FACTOR 12 + studyFactorType Platform OBI_0000050 13 + studyFactorType Sex EFO_0000695 14 + studyFactorType Strain EFO_0005135 15 + studyFactorType Time Point EFO_0000724 16 + studyFactorType Tissue Type BTO_0001384 17 + studyFactorType Treatment Compound EFO_0000369 18 + studyFactorType Treatment Type EFO_0000727 19 + studyAssayMeasurementType cell counting ERO_0001899 0 + studyAssayMeasurementType cell sorting CHMO_0001085 1 + studyAssayMeasurementType clinical chemistry analysis OBI_0000520 2 + studyAssayMeasurementType copy number variation profiling OBI_0000537 3 + studyAssayMeasurementType DNA methylation profiling OBI_0000634 4 + studyAssayMeasurementType DNA methylation profiling (Bisulfite-Seq) OBI_0000748 5 + studyAssayMeasurementType DNA methylation profiling 
(MeDIP-Seq) _OBI_0000634 6 + studyAssayMeasurementType drug susceptibility _IDO_0000469 7 + studyAssayMeasurementType environmental gene survey ENV_GENE_SURVEY 8 + studyAssayMeasurementType genome sequencing ERO_0001183 9 + studyAssayMeasurementType hematology OBI_0000630 10 + studyAssayMeasurementType histology OBI_0600020 11 + studyAssayMeasurementType Histone Modification (ChIP-Seq) OBI_0002017 12 + studyAssayMeasurementType loss of heterozygosity profiling SO_0001786 13 + studyAssayMeasurementType metabolite profiling OBI_0000366 14 + studyAssayMeasurementType metagenome sequencing METAGENOME_SEQ 15 + studyAssayMeasurementType protein expression profiling OBI_0000615 16 + studyAssayMeasurementType protein identification ERO_0000346 17 + studyAssayMeasurementType protein-DNA binding site identification PROTEIN_DNA_BINDING 18 + studyAssayMeasurementType protein-protein interaction detection OBI_0000288 19 + studyAssayMeasurementType protein-RNA binding (RIP-Seq) PROTEIN_RNA_BINDING 20 + studyAssayMeasurementType SNP analysis OBI_0000435 21 + studyAssayMeasurementType targeted sequencing TARGETED_SEQ 22 + studyAssayMeasurementType transcription factor binding (ChIP-Seq) OBI_0002018 23 + studyAssayMeasurementType transcription factor binding site identification OBI_0000291 24 + studyAssayMeasurementType transcription profiling OBI_0000424 25 + studyAssayMeasurementType transcription profiling EFO_0001032 26 + studyAssayMeasurementType transcription profiling (Microarray) TRANSCRIPTION_PROF 27 + studyAssayMeasurementType transcription profiling (RNA-Seq) OBI_0001271 28 + studyAssayMeasurementType TRAP translational profiling TRAP_TRANS_PROF 29 + studyAssayMeasurementType Other OTHER_MEASUREMENT 30 + studyAssayOrganism Arabidopsis thaliana NCBITaxon_3702 0 + studyAssayOrganism Bos taurus NCBITaxon_9913 1 + studyAssayOrganism Caenorhabditis elegans NCBITaxon_6239 2 + studyAssayOrganism Chlamydomonas reinhardtii NCBITaxon_3055 3 + studyAssayOrganism Danio rerio 
(zebrafish) NCBITaxon_7955 4 + studyAssayOrganism Dictyostelium discoideum NCBITaxon_44689 5 + studyAssayOrganism Drosophila melanogaster NCBITaxon_7227 6 + studyAssayOrganism Escherichia coli NCBITaxon_562 7 + studyAssayOrganism Hepatitis C virus NCBITaxon_11103 8 + studyAssayOrganism Homo sapiens NCBITaxon_9606 9 + studyAssayOrganism Mus musculus NCBITaxon_10090 10 + studyAssayOrganism Mycobacterium africanum NCBITaxon_33894 11 + studyAssayOrganism Mycobacterium canetti NCBITaxon_78331 12 + studyAssayOrganism Mycobacterium tuberculosis NCBITaxon_1773 13 + studyAssayOrganism Mycoplasma pneumoniae NCBITaxon_2104 14 + studyAssayOrganism Oryza sativa NCBITaxon_4530 15 + studyAssayOrganism Plasmodium falciparum NCBITaxon_5833 16 + studyAssayOrganism Pneumocystis carinii NCBITaxon_4754 17 + studyAssayOrganism Rattus norvegicus NCBITaxon_10116 18 + studyAssayOrganism Saccharomyces cerevisiae (brewer's yeast) NCBITaxon_4932 19 + studyAssayOrganism Schizosaccharomyces pombe NCBITaxon_4896 20 + studyAssayOrganism Takifugu rubripes NCBITaxon_31033 21 + studyAssayOrganism Xenopus laevis NCBITaxon_8355 22 + studyAssayOrganism Zea mays NCBITaxon_4577 23 + studyAssayOrganism Other OTHER_TAXONOMY 24 + studyAssayTechnologyType culture based drug susceptibility testing, single concentration CULTURE_DRUG_TEST_SINGLE 0 + studyAssayTechnologyType culture based drug susceptibility testing, two concentrations CULTURE_DRUG_TEST_TWO 1 + studyAssayTechnologyType culture based drug susceptibility testing, three or more concentrations (minimium inhibitory concentration measurement) CULTURE_DRUG_TEST_THREE 2 + studyAssayTechnologyType DNA microarray OBI_0400148 3 + studyAssayTechnologyType flow cytometry OBI_0000916 4 + studyAssayTechnologyType gel electrophoresis OBI_0600053 5 + studyAssayTechnologyType mass spectrometry OBI_0000470 6 + studyAssayTechnologyType NMR spectroscopy OBI_0000623 7 + studyAssayTechnologyType nucleotide sequencing OBI_0000626 8 + studyAssayTechnologyType protein 
microarray OBI_0400149 9 + studyAssayTechnologyType real time PCR OBI_0000893 10 + studyAssayTechnologyType no technology required NO_TECHNOLOGY 11 + studyAssayTechnologyType Other OTHER_TECHNOLOGY 12 + studyAssayPlatform 210-MS GC Ion Trap (Varian) 210_MS_GC 0 + studyAssayPlatform 220-MS GC Ion Trap (Varian) 220_MS_GC 1 + studyAssayPlatform 225-MS GC Ion Trap (Varian) 225_MS_GC 2 + studyAssayPlatform 240-MS GC Ion Trap (Varian) 240_MS_GC 3 + studyAssayPlatform 300-MS quadrupole GC/MS (Varian) 300_MS_GCMS 4 + studyAssayPlatform 320-MS LC/MS (Varian) 320_MS_LCMS 5 + studyAssayPlatform 325-MS LC/MS (Varian) 325_MS_LCMS 6 + studyAssayPlatform 320-MS GC/MS (Varian) 500_MS_GCMS 7 + studyAssayPlatform 500-MS LC/MS (Varian) 500_MS_LCMS 8 + studyAssayPlatform 800D (Jeol) 800D 9 + studyAssayPlatform 910-MS TQ-FT (Varian) 910_MS_TQFT 10 + studyAssayPlatform 920-MS TQ-FT (Varian) 920_MS_TQFT 11 + studyAssayPlatform 3100 Mass Detector (Waters) 3100_MASS_D 12 + studyAssayPlatform 6110 Quadrupole LC/MS (Agilent) 6110_QUAD_LCMS 13 + studyAssayPlatform 6120 Quadrupole LC/MS (Agilent) 6120_QUAD_LCMS 14 + studyAssayPlatform 6130 Quadrupole LC/MS (Agilent) 6130_QUAD_LCMS 15 + studyAssayPlatform 6140 Quadrupole LC/MS (Agilent) 6140_QUAD_LCMS 16 + studyAssayPlatform 6310 Ion Trap LC/MS (Agilent) 6310_ION_LCMS 17 + studyAssayPlatform 6320 Ion Trap LC/MS (Agilent) 6320_ION_LCMS 18 + studyAssayPlatform 6330 Ion Trap LC/MS (Agilent) 6330_ION_LCMS 19 + studyAssayPlatform 6340 Ion Trap LC/MS (Agilent) 6340_ION_LCMS 20 + studyAssayPlatform 6410 Triple Quadrupole LC/MS (Agilent) 6410_TRIPLE_LCMS 21 + studyAssayPlatform 6430 Triple Quadrupole LC/MS (Agilent) 6430_TRIPLE_LCMS 22 + studyAssayPlatform 6460 Triple Quadrupole LC/MS (Agilent) 6460_TRIPLE_LCMS 23 + studyAssayPlatform 6490 Triple Quadrupole LC/MS (Agilent) 6490_TRIPLE_LCMS 24 + studyAssayPlatform 6530 Q-TOF LC/MS (Agilent) 6530_Q_TOF_LCMS 25 + studyAssayPlatform 6540 Q-TOF LC/MS (Agilent) 6540_Q_TOF_LCMS 26 + studyAssayPlatform 6210 
TOF LC/MS (Agilent) 6210_Q_TOF_LCMS 27 + studyAssayPlatform 6220 TOF LC/MS (Agilent) 6220_Q_TOF_LCMS 28 + studyAssayPlatform 6230 TOF LC/MS (Agilent) 6230_Q_TOF_LCMS 29 + studyAssayPlatform 7000B Triple Quadrupole GC/MS (Agilent) 700B_TRIPLE_GCMS 30 + studyAssayPlatform AccuTO DART (Jeol) ACCUTO_DART 31 + studyAssayPlatform AccuTOF GC (Jeol) ACCUTOF_GC 32 + studyAssayPlatform AccuTOF LC (Jeol) ACCUTOF_LC 33 + studyAssayPlatform ACQUITY SQD (Waters) ACQUITY_SQD 34 + studyAssayPlatform ACQUITY TQD (Waters) ACQUITY_TQD 35 + studyAssayPlatform Agilent AGILENT 36 + studyAssayPlatform Agilent 5975E GC/MSD (Agilent) AGILENT_ 5975E_GCMSD 37 + studyAssayPlatform Agilent 5975T LTM GC/MSD (Agilent) AGILENT_5975T_LTM_GCMSD 38 + studyAssayPlatform 5975C Series GC/MSD (Agilent) 5975C_GCMSD 39 + studyAssayPlatform Affymetrix AFFYMETRIX 40 + studyAssayPlatform amaZon ETD ESI Ion Trap (Bruker) AMAZON_ETD_ESI 41 + studyAssayPlatform amaZon X ESI Ion Trap (Bruker) AMAZON_X_ESI 42 + studyAssayPlatform apex-ultra hybrid Qq-FTMS (Bruker) APEX_ULTRA_QQ_FTMS 43 + studyAssayPlatform API 2000 (AB Sciex) API_2000 44 + studyAssayPlatform API 3200 (AB Sciex) API_3200 45 + studyAssayPlatform API 3200 QTRAP (AB Sciex) API_3200_QTRAP 46 + studyAssayPlatform API 4000 (AB Sciex) API_4000 47 + studyAssayPlatform API 4000 QTRAP (AB Sciex) API_4000_QTRAP 48 + studyAssayPlatform API 5000 (AB Sciex) API_5000 49 + studyAssayPlatform API 5500 (AB Sciex) API_5500 50 + studyAssayPlatform API 5500 QTRAP (AB Sciex) API_5500_QTRAP 51 + studyAssayPlatform Applied Biosystems Group (ABI) APPLIED_BIOSYSTEMS 52 + studyAssayPlatform AQI Biosciences AQI_BIOSCIENCES 53 + studyAssayPlatform Atmospheric Pressure GC (Waters) ATMOS_GC 54 + studyAssayPlatform autoflex III MALDI-TOF MS (Bruker) AUTOFLEX_III_MALDI_TOF_MS 55 + studyAssayPlatform autoflex speed(Bruker) AUTOFLEX_SPEED 56 + studyAssayPlatform AutoSpec Premier (Waters) AUTOSPEC_PREMIER 57 + studyAssayPlatform AXIMA Mega TOF (Shimadzu) AXIMA_MEGA_TOF 58 + 
studyAssayPlatform AXIMA Performance MALDI TOF/TOF (Shimadzu) AXIMA_PERF_MALDI_TOF 59 + studyAssayPlatform A-10 Analyzer (Apogee) A_10_ANALYZER 60 + studyAssayPlatform A-40-MiniFCM (Apogee) A_40_MINIFCM 61 + studyAssayPlatform Bactiflow (Chemunex SA) BACTIFLOW 62 + studyAssayPlatform Base4innovation BASE4INNOVATION 63 + studyAssayPlatform BD BACTEC MGIT 320 BD_BACTEC_MGIT_320 64 + studyAssayPlatform BD BACTEC MGIT 960 BD_BACTEC_MGIT_960 65 + studyAssayPlatform BD Radiometric BACTEC 460TB BD_RADIO_BACTEC_460TB 66 + studyAssayPlatform BioNanomatrix BIONANOMATRIX 67 + studyAssayPlatform Cell Lab Quanta SC (Becman Coulter) CELL_LAB_QUANTA_SC 68 + studyAssayPlatform Clarus 560 D GC/MS (PerkinElmer) CLARUS_560_D_GCMS 69 + studyAssayPlatform Clarus 560 S GC/MS (PerkinElmer) CLARUS_560_S_GCMS 70 + studyAssayPlatform Clarus 600 GC/MS (PerkinElmer) CLARUS_600_GCMS 71 + studyAssayPlatform Complete Genomics COMPLETE_GENOMICS 72 + studyAssayPlatform Cyan (Dako Cytomation) CYAN 73 + studyAssayPlatform CyFlow ML (Partec) CYFLOW_ML 74 + studyAssayPlatform Cyow SL (Partec) CYFLOW_SL 75 + studyAssayPlatform CyFlow SL3 (Partec) CYFLOW_SL3 76 + studyAssayPlatform CytoBuoy (Cyto Buoy Inc) CYTOBUOY 77 + studyAssayPlatform CytoSence (Cyto Buoy Inc) CYTOSENCE 78 + studyAssayPlatform CytoSub (Cyto Buoy Inc) CYTOSUB 79 + studyAssayPlatform Danaher DANAHER 80 + studyAssayPlatform DFS (Thermo Scientific) DFS 81 + studyAssayPlatform Exactive(Thermo Scientific) EXACTIVE 82 + studyAssayPlatform FACS Canto (Becton Dickinson) FACS_CANTO 83 + studyAssayPlatform FACS Canto2 (Becton Dickinson) FACS_CANTO2 84 + studyAssayPlatform FACS Scan (Becton Dickinson) FACS_SCAN 85 + studyAssayPlatform FC 500 (Becman Coulter) FC_500 86 + studyAssayPlatform GCmate II GC/MS (Jeol) GCMATE_II 87 + studyAssayPlatform GCMS-QP2010 Plus (Shimadzu) GCMS_QP2010_PLUS 88 + studyAssayPlatform GCMS-QP2010S Plus (Shimadzu) GCMS_QP2010S_PLUS 89 + studyAssayPlatform GCT Premier (Waters) GCT_PREMIER 90 + studyAssayPlatform GENEQ 
GENEQ 91 + studyAssayPlatform Genome Corp. GENOME_CORP 92 + studyAssayPlatform GenoVoxx GENOVOXX 93 + studyAssayPlatform GnuBio GNUBIO 94 + studyAssayPlatform Guava EasyCyte Mini (Millipore) GUAVA_EASYCYTE_MINI 95 + studyAssayPlatform Guava EasyCyte Plus (Millipore) GUAVA_EASYCYTE_PLUS 96 + studyAssayPlatform Guava Personal Cell Analysis (Millipore) GUAVA_PERSONAL_CELL 97 + studyAssayPlatform Guava Personal Cell Analysis-96 (Millipore) GUAVA_PERSONAL_CELL_96 98 + studyAssayPlatform Helicos BioSciences HELICOS_BIO 99 + studyAssayPlatform Illumina ILLUMINA 100 + studyAssayPlatform Indirect proportion method on LJ medium INDIRECT_LJ_MEDIUM 101 + studyAssayPlatform Indirect proportion method on Middlebrook Agar 7H9 INDIRECT_AGAR_7H9 102 + studyAssayPlatform Indirect proportion method on Middlebrook Agar 7H10 INDIRECT_AGAR_7H10 103 + studyAssayPlatform Indirect proportion method on Middlebrook Agar 7H11 INDIRECT_AGAR_7H11 104 + studyAssayPlatform inFlux Analyzer (Cytopeia) INFLUX_ANALYZER 105 + studyAssayPlatform Intelligent Bio-Systems INTELLIGENT_BIOSYSTEMS 106 + studyAssayPlatform ITQ 700 (Thermo Scientific) ITQ_700 107 + studyAssayPlatform ITQ 900 (Thermo Scientific) ITQ_900 108 + studyAssayPlatform ITQ 1100 (Thermo Scientific) ITQ_1100 109 + studyAssayPlatform JMS-53000 SpiralTOF (Jeol) JMS_53000_SPIRAL 110 + studyAssayPlatform LaserGen LASERGEN 111 + studyAssayPlatform LCMS-2020 (Shimadzu) LCMS_2020 112 + studyAssayPlatform LCMS-2010EV (Shimadzu) LCMS_2010EV 113 + studyAssayPlatform LCMS-IT-TOF (Shimadzu) LCMS_IT_TOF 114 + studyAssayPlatform Li-Cor LI_COR 115 + studyAssayPlatform Life Tech LIFE_TECH 116 + studyAssayPlatform LightSpeed Genomics LIGHTSPEED_GENOMICS 117 + studyAssayPlatform LCT Premier XE (Waters) LCT_PREMIER_XE 118 + studyAssayPlatform LCQ Deca XP MAX (Thermo Scientific) LCQ_DECA_XP_MAX 119 + studyAssayPlatform LCQ Fleet (Thermo Scientific) LCQ_FLEET 120 + studyAssayPlatform LXQ (Thermo Scientific) LXQ_THERMO 121 + studyAssayPlatform LTQ Classic 
(Thermo Scientific) LTQ_CLASSIC 122 + studyAssayPlatform LTQ XL (Thermo Scientific) LTQ_XL 123 + studyAssayPlatform LTQ Velos (Thermo Scientific) LTQ_VELOS 124 + studyAssayPlatform LTQ Orbitrap Classic (Thermo Scientific) LTQ_ORBITRAP_CLASSIC 125 + studyAssayPlatform LTQ Orbitrap XL (Thermo Scientific) LTQ_ORBITRAP_XL 126 + studyAssayPlatform LTQ Orbitrap Discovery (Thermo Scientific) LTQ_ORBITRAP_DISCOVERY 127 + studyAssayPlatform LTQ Orbitrap Velos (Thermo Scientific) LTQ_ORBITRAP_VELOS 128 + studyAssayPlatform Luminex 100 (Luminex) LUMINEX_100 129 + studyAssayPlatform Luminex 200 (Luminex) LUMINEX_200 130 + studyAssayPlatform MACS Quant (Miltenyi) MACS_QUANT 131 + studyAssayPlatform MALDI SYNAPT G2 HDMS (Waters) MALDI_SYNAPT_G2_HDMS 132 + studyAssayPlatform MALDI SYNAPT G2 MS (Waters) MALDI_SYNAPT_G2_MS 133 + studyAssayPlatform MALDI SYNAPT HDMS (Waters) MALDI_SYNAPT_HDMS 134 + studyAssayPlatform MALDI SYNAPT MS (Waters) MALDI_SYNAPT_MS 135 + studyAssayPlatform MALDI micro MX (Waters) MALDI_MICROMX 136 + studyAssayPlatform maXis (Bruker) MAXIS 137 + studyAssayPlatform maXis G4 (Bruker) MAXISG4 138 + studyAssayPlatform microflex LT MALDI-TOF MS (Bruker) MICROFLEX_LT_MALDI_TOF_MS 139 + studyAssayPlatform microflex LRF MALDI-TOF MS (Bruker) MICROFLEX_LRF_MALDI_TOF_MS 140 + studyAssayPlatform microflex III MALDI-TOF MS (Bruker) MICROFLEX_III_TOF_MS 141 + studyAssayPlatform micrOTOF II ESI TOF (Bruker) MICROTOF_II_ESI_TOF 142 + studyAssayPlatform micrOTOF-Q II ESI-Qq-TOF (Bruker) MICROTOF_Q_II_ESI_QQ_TOF 143 + studyAssayPlatform microplate Alamar Blue (resazurin) colorimetric method MICROPLATE_ALAMAR_BLUE_COLORIMETRIC 144 + studyAssayPlatform Mstation (Jeol) MSTATION 145 + studyAssayPlatform MSQ Plus (Thermo Scientific) MSQ_PLUS 146 + studyAssayPlatform NABsys NABSYS 147 + studyAssayPlatform Nanophotonics Biosciences NANOPHOTONICS_BIOSCIENCES 148 + studyAssayPlatform Network Biosystems NETWORK_BIOSYSTEMS 149 + studyAssayPlatform Nimblegen NIMBLEGEN 150 + 
studyAssayPlatform Oxford Nanopore Technologies OXFORD_NANOPORE_TECHNOLOGIES 151 + studyAssayPlatform Pacific Biosciences PACIFIC_BIOSCIENCES 152 + studyAssayPlatform Population Genetics Technologies POPULATION_GENETICS_TECHNOLOGIES 153 + studyAssayPlatform Q1000GC UltraQuad (Jeol) Q1000GC_ULTRAQUAD 154 + studyAssayPlatform Quattro micro API (Waters) QUATTRO_MICRO_API 155 + studyAssayPlatform Quattro micro GC (Waters) QUATTRO_MICRO_GC 156 + studyAssayPlatform Quattro Premier XE (Waters) QUATTRO_PREMIER_XE 157 + studyAssayPlatform QSTAR (AB Sciex) QSTAR 158 + studyAssayPlatform Reveo REVEO 159 + studyAssayPlatform Roche ROCHE 160 + studyAssayPlatform Seirad SEIRAD 161 + studyAssayPlatform solariX hybrid Qq-FTMS (Bruker) SOLARIX_HYBRID_QQ_FTMS 162 + studyAssayPlatform Somacount (Bently Instruments) SOMACOUNT 163 + studyAssayPlatform SomaScope (Bently Instruments) SOMASCOPE 164 + studyAssayPlatform SYNAPT G2 HDMS (Waters) SYNAPT_G2_HDMS 165 + studyAssayPlatform SYNAPT G2 MS (Waters) SYNAPT_G2_MS 166 + studyAssayPlatform SYNAPT HDMS (Waters) SYNAPT_HDMS 167 + studyAssayPlatform SYNAPT MS (Waters) SYNAPT_MS 168 + studyAssayPlatform TripleTOF 5600 (AB Sciex) TRIPLETOF_5600 169 + studyAssayPlatform TSQ Quantum Ultra (Thermo Scientific) TSQ_QUANTUM_ULTRA 170 + studyAssayPlatform TSQ Quantum Access (Thermo Scientific) TSQ_QUANTUM_ACCESS 171 + studyAssayPlatform TSQ Quantum Access MAX (Thermo Scientific) TSQ_QUANTUM_ACCESS_MAX 172 + studyAssayPlatform TSQ Quantum Discovery MAX (Thermo Scientific) TSQ_QUANTUM_DISCOVERY_MAX 173 + studyAssayPlatform TSQ Quantum GC (Thermo Scientific) TSQ_QUANTUM_GC 174 + studyAssayPlatform TSQ Quantum XLS (Thermo Scientific) TSQ_QUANTUM_XLS 175 + studyAssayPlatform TSQ Vantage (Thermo Scientific) TSQ_VANTAGE 176 + studyAssayPlatform ultrafleXtreme MALDI-TOF MS (Bruker) ULTRAFLEXTREME_MALDI_TOF_MS 177 + studyAssayPlatform VisiGen Biotechnologies VISIGEN_BIO 178 + studyAssayPlatform Xevo G2 QTOF (Waters) XEVO_G2_QTOF 179 + studyAssayPlatform Xevo 
QTof MS (Waters) XEVO_QTOF_MS 180 + studyAssayPlatform Xevo TQ MS (Waters) XEVO_TQ_MS 181 + studyAssayPlatform Xevo TQ-S (Waters) XEVO_TQ_S 182 + studyAssayPlatform Other OTHER_PLATFORM 183 \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/metadatablocks/citation.tsv b/dataversedock/testdata/scripts/api/data/metadatablocks/citation.tsv new file mode 100644 index 0000000..f7c4447 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/metadatablocks/citation.tsv @@ -0,0 +1,318 @@ +#metadataBlock name dataverseAlias displayName + citation Citation Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + title Title Full title by which the Dataset is known. Enter title... text 0 TRUE FALSE FALSE FALSE TRUE TRUE citation + subtitle Subtitle A secondary title used to amplify or state certain limitations on the main title. text 1 FALSE FALSE FALSE FALSE FALSE FALSE citation + alternativeTitle Alternative Title A title by which the work is commonly referred, or an abbreviation of the title. text 2 FALSE FALSE FALSE FALSE FALSE FALSE citation + alternativeURL Alternative URL A URL where the dataset can be viewed, such as a personal or project website. Enter full URL, starting with http:// url 3 FALSE FALSE FALSE FALSE FALSE FALSE citation + otherId Other ID Another unique identifier that identifies this Dataset (e.g., producer's or another repository's number). none 4 : FALSE FALSE TRUE FALSE FALSE FALSE citation + otherIdAgency Agency Name of agency which generated this identifier. text 5 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation + otherIdValue Identifier Other identifier that corresponds to this Dataset. 
text 6 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation + author Author The person(s), corporate body(ies), or agency(ies) responsible for creating the work. none 7 FALSE FALSE TRUE FALSE TRUE FALSE citation + authorName Name The author's Family Name, Given Name or the name of the organization responsible for this Dataset. FamilyName, GivenName or Organization text 8 #VALUE TRUE FALSE FALSE TRUE TRUE TRUE author citation + authorAffiliation Affiliation The organization with which the author is affiliated. text 9 (#VALUE) TRUE FALSE FALSE TRUE TRUE FALSE author citation + authorIdentifierScheme Identifier Scheme Name of the identifier scheme (ORCID, ISNI). text 10 - #VALUE: FALSE TRUE FALSE FALSE TRUE FALSE author citation + authorIdentifier Identifier Uniquely identifies an individual author or organization, according to various schemes. text 11 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE author citation + datasetContact Contact The contact(s) for this Dataset. none 12 FALSE FALSE TRUE FALSE TRUE FALSE citation + datasetContactName Name The contact's Family Name, Given Name or the name of the organization. FamilyName, GivenName or Organization text 13 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE datasetContact citation + datasetContactAffiliation Affiliation The organization with which the contact is affiliated. text 14 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE datasetContact citation + datasetContactEmail E-mail The e-mail address(es) of the contact(s) for the Dataset. This will not be displayed. email 15 #EMAIL FALSE FALSE FALSE FALSE TRUE TRUE datasetContact citation + dsDescription Description A summary describing the purpose, nature, and scope of the Dataset. none 16 FALSE FALSE TRUE FALSE TRUE FALSE citation + dsDescriptionValue Text A summary describing the purpose, nature, and scope of the Dataset. 
textbox 17 #VALUE TRUE FALSE FALSE FALSE TRUE TRUE dsDescription citation + dsDescriptionDate Date In cases where a Dataset contains more than one description (for example, one might be supplied by the data producer and another prepared by the data repository where the data are deposited), the date attribute is used to distinguish between the two descriptions. The date attribute follows the ISO convention of YYYY-MM-DD. YYYY-MM-DD date 18 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE dsDescription citation + subject Subject Domain-specific Subject Categories that are topically relevant to the Dataset. text 19 TRUE TRUE TRUE TRUE TRUE TRUE citation + keyword Keyword Key terms that describe important aspects of the Dataset. none 20 FALSE FALSE TRUE FALSE TRUE FALSE citation + keywordValue Term Key terms that describe important aspects of the Dataset. Can be used for building keyword indexes and for classification and retrieval purposes. A controlled vocabulary can be employed. The vocab attribute is provided for specification of the controlled vocabulary in use, such as LCSH, MeSH, or others. The vocabURI attribute specifies the location for the full controlled vocabulary. text 21 #VALUE TRUE FALSE FALSE TRUE TRUE FALSE keyword citation + keywordVocabulary Vocabulary For the specification of the keyword controlled vocabulary in use, such as LCSH, MeSH, or others. text 22 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE keyword citation + keywordVocabularyURI Vocabulary URL Keyword vocabulary URL points to the web presence that describes the keyword vocabulary, if appropriate. Enter an absolute URL where the keyword vocabulary web site is found, such as http://www.my.org. Enter full URL, starting with http:// url 23 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE keyword citation + topicClassification Topic Classification The classification field indicates the broad important topic(s) and subjects that the data cover. Library of Congress subject terms may be used here. 
none 24 FALSE FALSE TRUE FALSE FALSE FALSE citation + topicClassValue Term Topic or Subject term that is relevant to this Dataset. text 25 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE topicClassification citation + topicClassVocab Vocabulary Provided for specification of the controlled vocabulary in use, e.g., LCSH, MeSH, etc. text 26 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation + topicClassVocabURI Vocabulary URL Specifies the URL location for the full controlled vocabulary. Enter full URL, starting with http:// url 27 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation + publication Related Publication Publications that use the data from this Dataset. none 28 FALSE FALSE TRUE FALSE TRUE FALSE citation + publicationCitation Citation The full bibliographic citation for this related publication. textbox 29 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation + publicationIDType ID Type The type of digital identifier used for this publication (e.g., Digital Object Identifier (DOI)). text 30 #VALUE: TRUE TRUE FALSE FALSE TRUE FALSE publication citation + publicationIDNumber ID Number The identifier for the selected ID type. text 31 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation + publicationURL URL Link to the publication web page (e.g., journal article page, archive record page, or other). Enter full URL, starting with http:// url 32 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE publication citation + notesText Notes Additional important information about the Dataset. 
textbox 33 FALSE FALSE FALSE FALSE TRUE FALSE citation + language Language Language of the Dataset text 34 TRUE TRUE TRUE TRUE FALSE FALSE citation + producer Producer Person or organization with the financial or administrative responsibility over this Dataset none 35 FALSE FALSE TRUE FALSE FALSE FALSE citation + producerName Name Producer name FamilyName, GivenName or Organization text 36 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE producer citation + producerAffiliation Affiliation The organization with which the producer is affiliated. text 37 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerAbbreviation Abbreviation The abbreviation by which the producer is commonly known. (ex. IQSS, ICPSR) text 38 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerURL URL Producer URL points to the producer's web presence, if appropriate. Enter an absolute URL where the producer's web site is found, such as http://www.my.org. Enter full URL, starting with http:// url 39 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerLogoURL Logo URL URL for the producer's logo, which points to this producer's web-accessible logo image. Enter an absolute URL where the producer's logo image is found, such as http://www.my.org/images/logo.gif. Enter full URL for image, starting with http:// url 40
                            FALSE FALSE FALSE FALSE FALSE FALSE producer citation + productionDate Production Date Date when the data collection or other materials were produced (not distributed, published or archived). YYYY-MM-DD date 41 TRUE FALSE FALSE TRUE FALSE FALSE citation + productionPlace Production Place The location where the data collection and any other related materials were produced. text 42 FALSE FALSE FALSE FALSE FALSE FALSE citation + contributor Contributor The organization or person responsible for either collecting, managing, or otherwise contributing in some form to the development of the resource. none 43 : FALSE FALSE TRUE FALSE FALSE FALSE citation + contributorType Type The type of contributor of the resource. text 44 #VALUE TRUE TRUE FALSE TRUE FALSE FALSE contributor citation + contributorName Name The Family Name, Given Name or organization name of the contributor. FamilyName, GivenName or Organization text 45 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE contributor citation + grantNumber Grant Information Grant Information none 46 : FALSE FALSE TRUE FALSE FALSE FALSE citation + grantNumberAgency Grant Agency Grant Number Agency text 47 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE grantNumber citation + grantNumberValue Grant Number The grant or contract number of the project that sponsored the effort. text 48 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE grantNumber citation + distributor Distributor The organization designated by the author or producer to generate copies of the particular work including any necessary editions or revisions. none 49 FALSE FALSE TRUE FALSE FALSE FALSE citation + distributorName Name Distributor name FamilyName, GivenName or Organization text 50 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE distributor citation + distributorAffiliation Affiliation The organization with which the distributor contact is affiliated. 
text 51 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorAbbreviation Abbreviation The abbreviation by which this distributor is commonly known (e.g., IQSS, ICPSR). text 52 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorURL URL Distributor URL points to the distributor's web presence, if appropriate. Enter an absolute URL where the distributor's web site is found, such as http://www.my.org. Enter full URL, starting with http:// url 53 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorLogoURL Logo URL URL of the distributor's logo, which points to this distributor's web-accessible logo image. Enter an absolute URL where the distributor's logo image is found, such as http://www.my.org/images/logo.gif. Enter full URL for image, starting with http:// url 54
                            FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributionDate Distribution Date Date that the work was made available for distribution/presentation. YYYY-MM-DD date 55 TRUE FALSE FALSE TRUE FALSE FALSE citation + depositor Depositor The person (Family Name, Given Name) or the name of the organization that deposited this Dataset to the repository. text 56 FALSE FALSE FALSE FALSE FALSE FALSE citation + dateOfDeposit Deposit Date Date that the Dataset was deposited into the repository. YYYY-MM-DD date 57 FALSE FALSE FALSE TRUE FALSE FALSE citation + timePeriodCovered Time Period Covered Time period to which the data refer. This item reflects the time period covered by the data, not the dates of coding or making documents machine-readable or the dates the data were collected. Also known as span. none 58 ; FALSE FALSE TRUE FALSE FALSE FALSE citation + timePeriodCoveredStart Start Start date which reflects the time period covered by the data, not the dates of coding or making documents machine-readable or the dates the data were collected. YYYY-MM-DD date 59 #NAME: #VALUE TRUE FALSE FALSE TRUE FALSE FALSE timePeriodCovered citation + timePeriodCoveredEnd End End date which reflects the time period covered by the data, not the dates of coding or making documents machine-readable or the dates the data were collected. YYYY-MM-DD date 60 #NAME: #VALUE TRUE FALSE FALSE TRUE FALSE FALSE timePeriodCovered citation + dateOfCollection Date of Collection Contains the date(s) when the data were collected. none 61 ; FALSE FALSE TRUE FALSE FALSE FALSE citation + dateOfCollectionStart Start Date when the data collection started. YYYY-MM-DD date 62 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE dateOfCollection citation + dateOfCollectionEnd End Date when the data collection ended. 
YYYY-MM-DD date 63 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE dateOfCollection citation + kindOfData Kind of Data Type of data included in the file: survey data, census/enumeration data, aggregate data, clinical data, event/transaction data, program source code, machine-readable text, administrative records data, experimental data, psychological test, textual data, coded textual, coded documents, time budget diaries, observation data/ratings, process-produced data, or other. text 64 TRUE FALSE TRUE TRUE FALSE FALSE citation + series Series Information about the Dataset series. none 65 : FALSE FALSE FALSE FALSE FALSE FALSE citation + seriesName Name Name of the dataset series to which the Dataset belongs. text 66 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE series citation + seriesInformation Information History of the series and summary of those features that apply to the series as a whole. textbox 67 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE series citation + software Software Information about the software used to generate the Dataset. none 68 , FALSE FALSE TRUE FALSE FALSE FALSE citation + softwareName Name Name of software used to generate the Dataset. text 69 #VALUE FALSE TRUE FALSE FALSE FALSE FALSE software citation + softwareVersion Version Version of the software used to generate the Dataset. text 70 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE software citation + relatedMaterial Related Material Any material related to this Dataset. textbox 71 FALSE FALSE TRUE FALSE FALSE FALSE citation + relatedDatasets Related Datasets Any Datasets that are related to this Dataset, such as previous research on this subject. textbox 72 FALSE FALSE TRUE FALSE FALSE FALSE citation + otherReferences Other References Any references that would serve as background or supporting material to this Dataset. 
text 73 FALSE FALSE TRUE FALSE FALSE FALSE citation + dataSources Data Sources List of books, articles, serials, or machine-readable data files that served as the sources of the data collection. textbox 74 FALSE FALSE TRUE FALSE FALSE FALSE citation + originOfSources Origin of Sources For historical materials, information about the origin of the sources and the rules followed in establishing the sources should be specified. textbox 75 FALSE FALSE FALSE FALSE FALSE FALSE citation + characteristicOfSources Characteristic of Sources Noted Assessment of characteristics and source material. textbox 76 FALSE FALSE FALSE FALSE FALSE FALSE citation + accessToSources Documentation and Access to Sources Level of documentation of the original sources. textbox 77 FALSE FALSE FALSE FALSE FALSE FALSE citation +#controlledVocabulary DatasetField Value identifier displayOrder + subject Agricultural Sciences D01 0 + subject Arts and Humanities D0 1 + subject Astronomy and Astrophysics D1 2 + subject Business and Management D2 3 + subject Chemistry D3 4 + subject Computer and Information Science D7 5 + subject Earth and Environmental Sciences D4 6 + subject Engineering D5 7 + subject Law D8 8 + subject Mathematical Sciences D9 9 + subject Medicine, Health and Life Sciences D6 10 + subject Physics D10 11 + subject Social Sciences D11 12 + subject Other D12 13 + publicationIDType ark 0 + publicationIDType arXiv 1 arxiv + publicationIDType bibcode 2 + publicationIDType doi 3 + publicationIDType ean13 4 + publicationIDType eissn 5 + publicationIDType handle 6 + publicationIDType isbn 7 + publicationIDType issn 8 + publicationIDType istc 9 + publicationIDType lissn 10 + publicationIDType lsid 11 + publicationIDType pmid 12 + publicationIDType purl 13 + publicationIDType upc 14 + publicationIDType url 15 + publicationIDType urn 16 + contributorType Data Collector 0 + contributorType Data Curator 1 + contributorType Data Manager 2 + contributorType Editor 3 + contributorType Funder 4 + 
contributorType Hosting Institution 5 + contributorType Project Leader 6 + contributorType Project Manager 7 + contributorType Project Member 8 + contributorType Related Person 9 + contributorType Researcher 10 + contributorType Research Group 11 + contributorType Rights Holder 12 + contributorType Sponsor 13 + contributorType Supervisor 14 + contributorType Work Package Leader 15 + contributorType Other 16 + authorIdentifierScheme ORCID 0 + authorIdentifierScheme ISNI 1 + authorIdentifierScheme LCNA 2 + language Abkhaz 0 + language Afar 1 + language Afrikaans 2 + language Akan 3 + language Albanian 4 + language Amharic 5 + language Arabic 6 + language Aragonese 7 + language Armenian 8 + language Assamese 9 + language Avaric 10 + language Avestan 11 + language Aymara 12 + language Azerbaijani 13 + language Bambara 14 + language Bashkir 15 + language Basque 16 + language Belarusian 17 + language Bengali, Bangla 18 + language Bihari 19 + language Bislama 20 + language Bosnian 21 + language Breton 22 + language Bulgarian 23 + language Burmese 24 + language Catalan,Valencian 25 + language Chamorro 26 + language Chechen 27 + language Chichewa, Chewa, Nyanja 28 + language Chinese 29 + language Chuvash 30 + language Cornish 31 + language Corsican 32 + language Cree 33 + language Croatian 34 + language Czech 35 + language Danish 36 + language Divehi, Dhivehi, Maldivian 37 + language Dutch 38 + language Dzongkha 39 + language English 40 + language Esperanto 41 + language Estonian 42 + language Ewe 43 + language Faroese 44 + language Fijian 45 + language Finnish 46 + language French 47 + language Fula, Fulah, Pulaar, Pular 48 + language Galician 49 + language Georgian 50 + language German 51 + language Greek (modern) 52 + language Guaraní 53 + language Gujarati 54 + language Haitian, Haitian Creole 55 + language Hausa 56 + language Hebrew (modern) 57 + language Herero 58 + language Hindi 59 + language Hiri Motu 60 + language Hungarian 61 + language Interlingua 62 + language 
Indonesian 63 + language Interlingue 64 + language Irish 65 + language Igbo 66 + language Inupiaq 67 + language Ido 68 + language Icelandic 69 + language Italian 70 + language Inuktitut 71 + language Japanese 72 + language Javanese 73 + language Kalaallisut, Greenlandic 74 + language Kannada 75 + language Kanuri 76 + language Kashmiri 77 + language Kazakh 78 + language Khmer 79 + language Kikuyu, Gikuyu 80 + language Kinyarwanda 81 + language Kyrgyz 82 + language Komi 83 + language Kongo 84 + language Korean 85 + language Kurdish 86 + language Kwanyama, Kuanyama 87 + language Latin 88 + language Luxembourgish, Letzeburgesch 89 + language Ganda 90 + language Limburgish, Limburgan, Limburger 91 + language Lingala 92 + language Lao 93 + language Lithuanian 94 + language Luba-Katanga 95 + language Latvian 96 + language Manx 97 + language Macedonian 98 + language Malagasy 99 + language Malay 100 + language Malayalam 101 + language Maltese 102 + language Māori 103 + language Marathi (Marāṭhī) 104 + language Marshallese 105 + language Mongolian 106 + language Nauru 107 + language Navajo, Navaho 108 + language Northern Ndebele 109 + language Nepali 110 + language Ndonga 111 + language Norwegian Bokmål 112 + language Norwegian Nynorsk 113 + language Norwegian 114 + language Nuosu 115 + language Southern Ndebele 116 + language Occitan 117 + language Ojibwe, Ojibwa 118 + language Old Church Slavonic,Church Slavonic,Old Bulgarian 119 + language Oromo 120 + language Oriya 121 + language Ossetian, Ossetic 122 + language Panjabi, Punjabi 123 + language Pāli 124 + language Persian (Farsi) 125 + language Polish 126 + language Pashto, Pushto 127 + language Portuguese 128 + language Quechua 129 + language Romansh 130 + language Kirundi 131 + language Romanian 132 + language Russian 133 + language Sanskrit (Saṁskṛta) 134 + language Sardinian 135 + language Sindhi 136 + language Northern Sami 137 + language Samoan 138 + language Sango 139 + language Serbian 140 + language Scottish 
Gaelic, Gaelic 141 + language Shona 142 + language Sinhala, Sinhalese 143 + language Slovak 144 + language Slovene 145 + language Somali 146 + language Southern Sotho 147 + language Spanish, Castilian 148 + language Sundanese 149 + language Swahili 150 + language Swati 151 + language Swedish 152 + language Tamil 153 + language Telugu 154 + language Tajik 155 + language Thai 156 + language Tigrinya 157 + language Tibetan Standard, Tibetan, Central 158 + language Turkmen 159 + language Tagalog 160 + language Tswana 161 + language Tonga (Tonga Islands) 162 + language Turkish 163 + language Tsonga 164 + language Tatar 165 + language Twi 166 + language Tahitian 167 + language Uyghur, Uighur 168 + language Ukrainian 169 + language Urdu 170 + language Uzbek 171 + language Venda 172 + language Vietnamese 173 + language Volapük 174 + language Walloon 175 + language Welsh 176 + language Wolof 177 + language Western Frisian 178 + language Xhosa 179 + language Yiddish 180 + language Yoruba 181 + language Zhuang, Chuang 182 + language Zulu 183 + language Not applicable 184 diff --git a/dataversedock/testdata/scripts/api/data/metadatablocks/customARCS.tsv b/dataversedock/testdata/scripts/api/data/metadatablocks/customARCS.tsv new file mode 100644 index 0000000..e287349 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/metadatablocks/customARCS.tsv @@ -0,0 +1,21 @@ +#metadataBlock name dataverseAlias displayName + customARCS Alliance for Research on Corporate Sustainability Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + ARCS1 1) Were any of these data sets a) purchased, b) obtained through licensed databases, or c) provided by an organization under a nondisclosure or other agreement? Licensed agreement of deposited data. 
text 0 FALSE TRUE FALSE FALSE FALSE FALSE customARCS + ARCS2 2) If you responded Yes to Q1, have you ensured that sharing the data does not violate terms of the agreement? If you responded No to Q1, please enter N/A here. Data sharing does not violate terms. text 1 FALSE TRUE FALSE FALSE FALSE FALSE customARCS + ARCS3 3) Do any of these data sets include individual-level data (either collected or pre-existing in the dataset) that might make them subject to U.S. or international human subjects considerations? Human subjects consideration. text 2 FALSE TRUE FALSE FALSE FALSE FALSE customARCS + ARCS4 4) If you responded Yes to Q3, are these data sets totally de-identified or was sharing approved by your institutional review board ( IRB)? If you responded No to Q3 please enter N/A here. Deidentified data/sharing approved by IRB. text 3 FALSE TRUE FALSE FALSE FALSE FALSE customARCS + ARCS5 5) Do these datasets contain sensitive or personally identifiable private information? (Harvard Research Data Security Policy {www.security.harvard.edu/research-data-security-policy} may apply because this Dataverse is hosted by Harvard University.) Data contain sensitive/identifiable private information. 
text 4 FALSE TRUE FALSE FALSE FALSE FALSE customARCS +#controlledVocabulary DatasetField Value identifier displayOrder + ARCS1 No 0 + ARCS1 Yes 1 + ARCS2 NA 0 + ARCS2 No 1 + ARCS2 Yes 2 + ARCS3 No 0 + ARCS3 Yes 1 + ARCS4 NA 0 + ARCS4 No 1 + ARCS4 Yes 2 + ARCS5 No 0 + ARCS5 Yes 1 \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/metadatablocks/customCHIA.tsv b/dataversedock/testdata/scripts/api/data/metadatablocks/customCHIA.tsv new file mode 100644 index 0000000..255981c --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/metadatablocks/customCHIA.tsv @@ -0,0 +1,10 @@ +#metadataBlock name dataverseAlias displayName + customCHIA CHIA Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + sourceCHIA Source Source - This describes the source of the data. Is it from the Bureau of Labor and Statistics? Is it data from the United Nations? text 0 TRUE FALSE FALSE TRUE FALSE FALSE customCHIA + datesAdditionalInformationCHIA Dates - Additional Information Dates - Additional Information - Note any additional information about dates or time periods in the dataset including intervals (annual, decennial, centennial, etc.) Also note the column(s) in the dataset where dates and other temporal information can be found. text 1 TRUE FALSE FALSE FALSE FALSE FALSE customCHIA + variablesCHIA Variables Variables - Define the variables in this dataset. Please note the column in the dataset where variable information can be found. textbox 2 TRUE FALSE FALSE FALSE FALSE FALSE customCHIA + classificationSchemaCHIA Classification Schema Classification Schema - If there is a classification scheme in this dataset, please describe it. For example, M_20_24 should be read as Males, aged 20-24. 
textbox 3 TRUE FALSE FALSE TRUE FALSE FALSE customCHIA + provenanceCHIA Provenance Provenance - The provenance of the datasets is the record of ownership and will be used as a guide to the authenticity or quality of the data. For example, the Provenance statement might be, "This dataset was created from data collected by David Ruvolo during a data collection trip to Spain in 1992. Since that time, the data has not been altered other than to migrate it to more current formats." text 4 TRUE FALSE FALSE FALSE FALSE FALSE customCHIA + rightsAvailabilityCHIA Rights/Availability Rights/Availability - Do you have the rights to share this data? text 5 TRUE FALSE FALSE FALSE FALSE FALSE customCHIA +#controlledVocabulary DatasetField Value identifier displayOrder \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/metadatablocks/customDigaai.tsv b/dataversedock/testdata/scripts/api/data/metadatablocks/customDigaai.tsv new file mode 100644 index 0000000..8345d52 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/metadatablocks/customDigaai.tsv @@ -0,0 +1,47 @@ +#metadataBlock name dataverseAlias displayName + customDigaai Digaai Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + titulo Título Título do jornal ou revista. text 0 TRUE TRUE TRUE TRUE FALSE FALSE customDigaai + numero Número Número do jornal ou revista. text 1 TRUE FALSE FALSE TRUE FALSE FALSE customDigaai + datadePublicao Data de Publicação Entrar dia/mes/ano. dia/mes/ano text 2 TRUE FALSE FALSE TRUE FALSE FALSE customDigaai + localdePublicao Local de Publicação Local de Publicação. 
text 3 TRUE FALSE FALSE TRUE FALSE FALSE customDigaai + proprietrio Proprietário Proprietário text 4 TRUE FALSE FALSE TRUE FALSE FALSE customDigaai +#controlledVocabulary DatasetField Value identifier displayOrder + titulo Achei USA 0 + titulo Acontece Magazine 1 + titulo A Notícia 2 + titulo Brasil Best 3 + titulo Brasileiros & Brasileiras 4 + titulo Brasil USA 5 + titulo Brazil Explore 6 + titulo Brazilian Press 7 + titulo Brazilian Voice 8 + titulo Brazil News 9 + titulo Brazuca 10 + titulo Cia Brasil 11 + titulo Comunidade News 12 + titulo Diário do Brasil 13 + titulo FaceBrasil 14 + titulo Green and Yellow News 15 + titulo Jornal dos Sports 16 + titulo Jornal Moderno 17 + titulo Metropolitan 18 + titulo National 19 + titulo Negócio Fechado 20 + titulo Nossa Gente 21 + titulo Nossa Terra 22 + titulo O Brasileirinho 23 + titulo O Imigrante Cristão 24 + titulo O Jornal Brasileiro 25 + titulo O Novo Mundo 26 + titulo O Popular 27 + titulo Revista Linha Aberta 28 + titulo Revista MASSA 29 + titulo Revista Tititi 30 + titulo Sucesso USA 31 + titulo Ta na Mão 32 + titulo TC Brazil 33 + titulo Texas Magazine 34 + titulo The Brazilian Journal 35 + titulo Today Magazine 36 + titulo Viver Magazine 37 \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/metadatablocks/customGSD.tsv b/dataversedock/testdata/scripts/api/data/metadatablocks/customGSD.tsv new file mode 100644 index 0000000..d15a4e8 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/metadatablocks/customGSD.tsv @@ -0,0 +1,528 @@ +#metadataBlock name dataverseAlias displayName + customGSD Graduate School of Design Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + gsdStudentName Student Name Full name of the student: Last Name, First Name (example: Smith, Jane). 
Use the name that the GSD Administrator has on file. LastName, FirstName text 0 TRUE FALSE TRUE FALSE FALSE FALSE customGSD + gsdStudentProgram Student's Program of Study Student's program of study. text 1 TRUE TRUE TRUE TRUE FALSE FALSE customGSD + gsdCourseName Course Name Name of the course. text 2 TRUE TRUE FALSE TRUE FALSE FALSE customGSD + gsdFacultyName Faculty Name Name of the studio instructor. text 3 TRUE TRUE TRUE TRUE FALSE FALSE customGSD + gsdCoordinator Core Studio Coordinator Name of the studio coordinator(s). text 4 FALSE TRUE TRUE FALSE FALSE FALSE customGSD + gsdSemester Semester / Year Select the semester / year. text 5 TRUE TRUE FALSE TRUE FALSE FALSE customGSD + gsdRecommendation Faculty Recommendation Indicate the recommendation(s) from the faculty for this project. text 6 TRUE TRUE TRUE TRUE FALSE FALSE customGSD + gsdAccreditation Accreditation Selection made by faculty. text 7 TRUE TRUE FALSE TRUE FALSE FALSE customGSD + gsdSiteType Site Type Describe the type of building or site, based on function / purpose. Example: Military base. text 8 TRUE FALSE TRUE TRUE FALSE FALSE customGSD + gsdProgramBrief Program / Brief Example: redevelopment, restoration. textbox 9 TRUE FALSE TRUE TRUE FALSE FALSE customGSD + gsdTypes Types of Representation/ Medium/ Format Choose from the list. text 10 FALSE TRUE TRUE TRUE FALSE FALSE customGSD + gsdPrizes Prizes Choose from the list. text 11 TRUE TRUE FALSE TRUE FALSE FALSE customGSD + gsdTags GSD Tags Use tags to describe the project. Write one keyword per field. To add more tags, click on the plus sign on the right. 
text 12 TRUE FALSE TRUE TRUE FALSE FALSE customGSD +#controlledVocabulary DatasetField Value identifier displayOrder + gsdFacultyName Abalos, Inaki Abalos_Inaki 0 + gsdFacultyName Adjaye, David Adjaye_David 1 + gsdFacultyName Adofo-Wilson, Baye Adofo-Wilson_Baye 2 + gsdFacultyName Agre, Claire Agre_Claire 3 + gsdFacultyName Altringer, Beth Altringer_Beth 4 + gsdFacultyName Apfelbaum, Steven Apfelbaum_Steven 5 + gsdFacultyName Aquino, Gerdo Aquino_Gerdo 6 + gsdFacultyName Asensio Villoria, Leire Asensio_Villoria_Leire 7 + gsdFacultyName Baines, Bridget Baines_Bridget 8 + gsdFacultyName Bandy, Vincent Bandy_Vincent 9 + gsdFacultyName Barkan, Katy Barkan_Katy 10 + gsdFacultyName Barkow, Frank Barkow_Frank 11 + gsdFacultyName Beard, Peter Beard_Peter 12 + gsdFacultyName Belanger, Pierre Belanger_Pierre 13 + gsdFacultyName Benedito, Silvia Benedito_Silvia 14 + gsdFacultyName Berrizbeitia, Ann Berrizbeitia_Ann 15 + gsdFacultyName Bewtra, Manisha Bewtra_Manisha 16 + gsdFacultyName Blau, Eve Blau_Eve 17 + gsdFacultyName Bozdogan, Sibel Bozdogan_Sibel 18 + gsdFacultyName Brandlhuber, Arno Brandlhuber_Arno 19 + gsdFacultyName Brenner, Neil Brenner_Neil 20 + gsdFacultyName Buchard, Jeffry Buchard_Jeffry 21 + gsdFacultyName Buckler, Julie Buckler_Julie 22 + gsdFacultyName Burchard, Jeffry Burchard_Jeffry 23 + gsdFacultyName Busquets, Joan Busquets_Joan 24 + gsdFacultyName Callejas Mujica, Luis Rodrigo Callejas_Mujica_Luis_Rodrigo 25 + gsdFacultyName Calvillo, Nerea Calvillo_Nerea 26 + gsdFacultyName Cantrell, Bradley Cantrell_Bradley 27 + gsdFacultyName Carras, James Carras_James 28 + gsdFacultyName Castillo, Jose Castillo_Jose 29 + gsdFacultyName Cephas, Jana Cephas_Jana 30 + gsdFacultyName Cheng, Christine Cheng_Christine 31 + gsdFacultyName Cohen, Preston Scott Cohen_Preston_Scott 32 + gsdFacultyName Coignet, Philippe Coignet_Philippe 33 + gsdFacultyName Cook, Peter Cook_Peter 34 + gsdFacultyName Corneil, Janne Corneil_Janne 35 + gsdFacultyName Correa, Felipe Correa_Felipe 
36 + gsdFacultyName Craig, Salmaan Craig_Salmaan 37 + gsdFacultyName Curtis, Lawrence Curtis_Lawrence 38 + gsdFacultyName Daoust, Renee Daoust_Renee 39 + gsdFacultyName Davis, Diane Davis_Diane 40 + gsdFacultyName de Broche des Combes, Eric de_Broche_des_Combes_Eric 41 + gsdFacultyName de Castro Mazarro, Alejandro de_Castro_Mazarro_Alejandro 42 + gsdFacultyName de Meuron, Pierre de_Meuron_Pierre 43 + gsdFacultyName Del Tredici, Peter Del_Tredici_Peter 44 + gsdFacultyName Desimini, Jill Desimini_Jill 45 + gsdFacultyName Desvigne, Michel Desvigne_Michel 46 + gsdFacultyName D'Oca, Daniel D_Oca_Daniel 47 + gsdFacultyName Doherty, Gareth Doherty_Gareth 48 + gsdFacultyName Doran, Kelly Doran_Kelly 49 + gsdFacultyName Duempelmann, Sonja Duempelmann_Sonja 50 + gsdFacultyName Echeverria, Inaki Echeverria_Inaki 51 + gsdFacultyName Eigen, Ed Eigen_Ed 52 + gsdFacultyName Elkin, Rosetta Elkin_Rosetta 53 + gsdFacultyName Ellis, Erle Ellis_Erle 54 + gsdFacultyName Etzler, Danielle Etzler_Danielle 55 + gsdFacultyName Evans, Teman Evans_Teman 56 + gsdFacultyName Flores Dewey, Onesimo Flores_Dewey_Onesimo 57 + gsdFacultyName Forsyth, Ann Forsyth_Ann 58 + gsdFacultyName Frederickson, Kristin Frederickson_Kristin 59 + gsdFacultyName Gamble, David Gamble_David 60 + gsdFacultyName Garcia Grinda, Efren Garcia_Grinda_Efren 61 + gsdFacultyName Garciavelez Alfaro, Carlos Garciavelez_Alfaro_Carlos 62 + gsdFacultyName Geers, Kersten Geers_Kersten 63 + gsdFacultyName Gelabert-Sanchez, Ana Gelabert-Sanchez_Ana 64 + gsdFacultyName Georgoulias, Andreas Georgoulias_Andreas 65 + gsdFacultyName Geuze, Adriaan Geuze_Adriaan 66 + gsdFacultyName Gillies-Smith, Shauna Gillies-Smith_Shauna 67 + gsdFacultyName Ham, Derek Ham_Derek 68 + gsdFacultyName Hansch, Inessa Hansch_Inessa 69 + gsdFacultyName Hansen, Andrea Hansen_Andrea 70 + gsdFacultyName Harabasz, Ewa Harabasz_Ewa 71 + gsdFacultyName Hays, K. 
Michael Hays_K._Michael 72 + gsdFacultyName Herzog, Jacques Herzog_Jacques 73 + gsdFacultyName Hilderbrand, Gary Hilderbrand_Gary 74 + gsdFacultyName Hoberman, Chuck Hoberman_Chuck 75 + gsdFacultyName Hong, Zaneta Hong_Zaneta 76 + gsdFacultyName Hooftman, Eelco Hooftman_Eelco 77 + gsdFacultyName Hooper, Michael Hooper_Michael 78 + gsdFacultyName Howeler, Eric Howeler_Eric 79 + gsdFacultyName Hoxie, Christopher Hoxie_Christopher 80 + gsdFacultyName Hung, Ying-Yu Hung_Ying-Yu 81 + gsdFacultyName Hunt, John Hunt_John 82 + gsdFacultyName Hutton, Jane Hutton_Jane 83 + gsdFacultyName Hyde, Timothy Hyde_Timothy 84 + gsdFacultyName Ibanez, Mariana Ibanez_Mariana 85 + gsdFacultyName Idenburg, Florian Idenburg_Florian 86 + gsdFacultyName Johnston, Sharon Johnston_Sharon 87 + gsdFacultyName Kayden, Jerold Kayden_Jerold 88 + gsdFacultyName Khamsi, James Khamsi_James 89 + gsdFacultyName Kiefer, Matthew Kiefer_Matthew 90 + gsdFacultyName Kirkwood, Niall Kirkwood_Niall 91 + gsdFacultyName Koolhaas, Remment Koolhaas_Remment 92 + gsdFacultyName Krieger, Alex Krieger_Alex 93 + gsdFacultyName Kuo, Max Kuo_Max 94 + gsdFacultyName La, Grace La_Grace 95 + gsdFacultyName Lacaton, Anne Lacaton_Anne 96 + gsdFacultyName Laszlo Tait, Rachel Laszlo_Tait_Rachel 97 + gsdFacultyName Leach, Neil Leach_Neil 98 + gsdFacultyName Lee, Chris Lee_Chris 99 + gsdFacultyName Lee, Christopher Lee_Christopher 100 + gsdFacultyName Lee, Mark Lee_Mark 101 + gsdFacultyName Legendre, George L. Legendre_George_L. 
102 + gsdFacultyName Lehrer, Mia Lehrer_Mia 103 + gsdFacultyName Liaropoulos-Legendre, George Liaropoulos-Legendre_George 104 + gsdFacultyName Long, Judith Long_Judith 105 + gsdFacultyName Lopez-Pineiro, Sergio Lopez-Pineiro_Sergio 106 + gsdFacultyName Lott, Jonathan Lott_Jonathan 107 + gsdFacultyName Madden, Kathryn Madden_Kathryn 108 + gsdFacultyName Mah, David Mah_David 109 + gsdFacultyName Malkawi, Ali Malkawi_Ali 110 + gsdFacultyName Maltzan, Michael Maltzan_Michael 111 + gsdFacultyName Manfredi, Michael Manfredi_Michael 112 + gsdFacultyName Marchant, Edward Marchant_Edward 113 + gsdFacultyName Mateo, Josep Lluis Mateo_Josep_Lluis 114 + gsdFacultyName McCafferty, Patrick McCafferty_Patrick 115 + gsdFacultyName McIntosh, Alistair McIntosh_Alistair 116 + gsdFacultyName MCloskey, Karen MCloskey_Karen 117 + gsdFacultyName Mehrotra, Rahul Mehrotra_Rahul 118 + gsdFacultyName Menchaca, Alejandra Menchaca_Alejandra 119 + gsdFacultyName Menges, Achim Menges_Achim 120 + gsdFacultyName Menges, Achim Menges_Achim 121 + gsdFacultyName Michalatos, Panagiotis Michalatos_Panagiotis 122 + gsdFacultyName Moe, Kiel Moe_Kiel 123 + gsdFacultyName Molinsky, Jennifer Molinsky_Jennifer 124 + gsdFacultyName Moreno, Cristina Diaz Moreno_Cristina_Diaz 125 + gsdFacultyName Mori, Toshiko Mori_Toshiko 126 + gsdFacultyName Moussavi, Farshid Moussavi_Farshid 127 + gsdFacultyName Mulligan, Mark Mulligan_Mark 128 + gsdFacultyName Muro, Carles Muro_Carles 129 + gsdFacultyName Naginski, Erika Naginski_Erika 130 + gsdFacultyName Najle, Ciro Najle_Ciro 131 + gsdFacultyName Nakazawa, Paul Nakazawa_Paul 132 + gsdFacultyName Navarro Rios, Victor Navarro_Rios_Victor 133 + gsdFacultyName Nichols, Albert Nichols_Albert 134 + gsdFacultyName O'Carroll, Aisling O_Carroll_Aisling 135 + gsdFacultyName O'Donnell, Sheila O_Donnell_Sheila 136 + gsdFacultyName Oman, Rok Oman_Rok 137 + gsdFacultyName O'Neill-Uzgiris, Kelly Ann O_Neill-Uzgiris_Kelly_Ann 138 + gsdFacultyName Oppenheim, Chad Oppenheim_Chad 139 + 
gsdFacultyName Other Other 140 + gsdFacultyName Ozay, Erkin Ozay_Erkin 141 + gsdFacultyName Panzano, Megan Panzano_Megan 142 + gsdFacultyName Park, Peter Park_Peter 143 + gsdFacultyName Parsons, Katharine Parsons_Katharine 144 + gsdFacultyName Peiser, Richard Peiser_Richard 145 + gsdFacultyName Petcu, Constantin Petcu_Constantin 146 + gsdFacultyName Petrescu, Doina Petrescu_Doina 147 + gsdFacultyName Pietrusko, Robert Pietrusko_Robert 148 + gsdFacultyName Rahm, Philippe Rahm_Philippe 149 + gsdFacultyName Raspall Galli, Carlos Felix Raspall_Galli_Carlos_Felix 150 + gsdFacultyName Reed, Chris Reed_Chris 151 + gsdFacultyName Rein-Cano, Martin Rein-Cano_Martin 152 + gsdFacultyName Restrepo Ochoa, Camilo Restrepo_Ochoa_Camilo 153 + gsdFacultyName Rich, Damon Rich_Damon 154 + gsdFacultyName Rocker, Ingeborg Rocker_Ingeborg 155 + gsdFacultyName Rojo, Marcos Rojo_Marcos 156 + gsdFacultyName Rosenthal, Joyce Klein Rosenthal_Joyce_Klein 157 + gsdFacultyName Rowe, Peter Rowe_Peter 158 + gsdFacultyName Ryan, Thomas Ryan_Thomas 159 + gsdFacultyName Samuelson, Holly Samuelson_Holly 160 + gsdFacultyName Sarkis, A. 
Hashim Sarkis_A._Hashim 161 + gsdFacultyName Schumacher, Patrik Schumacher_Patrik 162 + gsdFacultyName Schwartz, Martha Schwartz_Martha 163 + gsdFacultyName Scogin, Buford Scogin_Buford 164 + gsdFacultyName Scogin, Mack Scogin_Mack 165 + gsdFacultyName Sennett, Richard Sennett_Richard 166 + gsdFacultyName Sentkiewicz, Renata Sentkiewicz_Renata 167 + gsdFacultyName Shigematsu, Shohei Shigematsu_Shohei 168 + gsdFacultyName Silman, Robert Silman_Robert 169 + gsdFacultyName Silver, Mitchell Silver_Mitchell 170 + gsdFacultyName Silvetti, Jorge Silvetti_Jorge 171 + gsdFacultyName Smith, Christine Smith_Christine 172 + gsdFacultyName Snyder, Susan Snyder_Susan 173 + gsdFacultyName Solano, Laura Solano_Laura 174 + gsdFacultyName Sorkin, Michael Sorkin_Michael 175 + gsdFacultyName Spiegelman, Kathy Spiegelman_Kathy 176 + gsdFacultyName Stilgoe, John Stilgoe_John 177 + gsdFacultyName Stockard, James Stockard_James 178 + gsdFacultyName Tato, Belinda Tato_Belinda 179 + gsdFacultyName Thomas, George Thomas_George 180 + gsdFacultyName Thompson, Maryann Thompson_Maryann 181 + gsdFacultyName Torto, Raymond Torto_Raymond 182 + gsdFacultyName Tuomey, John Tuomey_John 183 + gsdFacultyName Urbanski, Matthew Urbanski_Matthew 184 + gsdFacultyName Valenzuela, Luis Valenzuela_Luis 185 + gsdFacultyName Vallejo, Jose Luis Vallejo_Jose_Luis 186 + gsdFacultyName Van Valkenburgh, Michael Van_Valkenburgh_Michael 187 + gsdFacultyName VanDerSys, Keith VanDerSys_Keith 188 + gsdFacultyName Vecitis, Chad Vecitis_Chad 189 + gsdFacultyName Videcnik, Spela Videcnik_Spela 190 + gsdFacultyName Waldheim, Charles Waldheim_Charles 191 + gsdFacultyName Wang, Bing Wang_Bing 192 + gsdFacultyName Weitz, David Weitz_David 193 + gsdFacultyName Wendel, Delia Wendel_Delia 194 + gsdFacultyName Whittaker, Elizabeth Whittaker_Elizabeth 195 + gsdFacultyName Wickersham, Jay Wickersham_Jay 196 + gsdFacultyName Witt, Andrew Witt_Andrew 197 + gsdFacultyName Wodiczko, Krzysztof Wodiczko_Krzysztof 198 + gsdFacultyName Wood, 
Robert Wood_Robert 199 + gsdFacultyName Wu, Cameron Wu_Cameron 200 + gsdFacultyName Zickler, Todd Zickler_Todd 201 + gsdCoordinator Abalos, Inaki Abalos_Inaki 0 + gsdCoordinator Belanger, Pierre Belanger_Pierre 1 + gsdCoordinator Correa, Felipe Correa_Felipe 2 + gsdCoordinator Desimini, Jill Desimini_Jill 3 + gsdCoordinator Forsyth, Ann Forsyth_Ann 4 + gsdCoordinator Etzler, Danielle Etzler_Danielle 5 + gsdCoordinator Gelabert-Sanchez, Ana Gelabert-Sanchez_Ana 6 + gsdCoordinator Hilderbrand, Gary Hilderbrand_Gary 7 + gsdCoordinator Howeler, Eric Howeler_Eric 8 + gsdCoordinator Howler, Eric Howler_Eric 9 + gsdCoordinator Hutton, Jane Hutton_Jane 10 + gsdCoordinator Ibanez, Mariana Ibanez_Mariana 11 + gsdCoordinator Idenburg, Florian Idenburg_Florian 12 + gsdCoordinator La, Grace La_Grace 13 + gsdCoordinator Long, Judith Grant Long_Judith_Grant 14 + gsdCoordinator Moe, Kiel Moe_Kiel 15 + gsdCoordinator Muro, Carles Muro_Carles 16 + gsdCoordinator Wu, Cameron Wu_Cameron 17 + gsdCoordinator Other Other 18 + gsdStudentProgram DDes DDes 0 + gsdStudentProgram MArch I MArch_I 1 + gsdStudentProgram MArch II MArch_II 2 + gsdStudentProgram MAUD or MLAUD MAUD_or_MLAUD 3 + gsdStudentProgram MDes MDes 4 + gsdStudentProgram MLA I MLA_I 5 + gsdStudentProgram MLA I AP MLA_I_AP 6 + gsdStudentProgram MLA II MLA_II 7 + gsdStudentProgram MUD MUD 8 + gsdStudentProgram MUP MUP 9 + gsdStudentProgram MUP/MArch MUP_MArch 10 + gsdStudentProgram MUP/MAUD or MLAUD MUP_MAUD_or_MLAUD 11 + gsdStudentProgram MUP/MDes MUP_MDes 12 + gsdStudentProgram MUP/MLA MUP_MLA 13 + gsdStudentProgram Other Other 14 + gsdStudentProgram PhD PhD 15 + gsdSemester Fall 2013 Fall_2013 0 + gsdSemester Spring 2014 Spring_2014 1 + gsdSemester Fall 2014 Fall_2014 2 + gsdSemester Spring 2015 Spring_2015 3 + gsdSemester Fall 2015 Fall_2015 4 + gsdSemester Spring 2016 Spring_2016 5 + gsdSemester Fall 2016 Fall_2016 6 + gsdRecommendation Accreditation Accreditation 0 + gsdRecommendation Open House Open_House 1 + 
gsdRecommendation Platform Platform 2 + gsdRecommendation Website Website 3 + gsdTypes Animations Animations 0 + gsdTypes Axonometric drawings Axonometric_drawings 1 + gsdTypes Axonometric projections Axonometric_projections 2 + gsdTypes Diagrams Diagrams 3 + gsdTypes Drawings Drawings 4 + gsdTypes Elevations (drawings) Elevations_drawings 5 + gsdTypes Floor plans Floor_plans 6 + gsdTypes Isometric drawings Isometric_drawings 7 + gsdTypes Isometric projections Isometric_projections 8 + gsdTypes Maps Maps 9 + gsdTypes Master plans Master_plans 10 + gsdTypes Models (representations) Models_representations 11 + gsdTypes Other Other 12 + gsdTypes Perspective drawings Perspective_drawings 13 + gsdTypes Photographs Photographs 14 + gsdTypes Plans (drawings) Plans_drawings 15 + gsdTypes Plans (maps) Plans_maps 16 + gsdTypes Renderings Renderings 17 + gsdTypes Sectional elevations Sectional_elevations 18 + gsdTypes Sectional perspectives Sectional_perspectives 19 + gsdTypes Sections Sections 20 + gsdTypes Sections (orthographic projections) Sections_orthographic_projections 21 + gsdTypes Site plans Site_plans 22 + gsdTypes Sketches Sketches 23 + gsdTypes Videos Videos 24 + gsdPrizes Araldo Cossutta Annual Prize for Design Excellence Araldo_Cossutta_Annual_Prize_for_Design_Excellence 0 + gsdPrizes Award for Academic Excellence in Urban Design Award_for_Academic_Excellence_in_Urban_Design 1 + gsdPrizes Award for Academic Excellence in Urban Planning Award_for_Academic_Excellence_in_Urban_Planning 2 + gsdPrizes Award for Outstanding Leadership in Urban Design Award_for_Outstanding_Leadership_in_Urban_Design 3 + gsdPrizes Award for Outstanding Leadership in Urban Planning Award_for_Outstanding_Leadership_in_Urban_Planning 4 + gsdPrizes Charles Eliot Traveling Fellowship in Landscape Architecture Charles_Eliot_Traveling_Fellowship_in_Landscape_Architecture 5 + gsdPrizes Clifford Wong Prize in Housing Design Clifford_Wong_Prize_in_Housing_Design 6 + gsdPrizes Digital Design 
Prize Digital_Design_Prize 7 + gsdPrizes Dimitris Pikionis Award Dimitris_Pikionis_Award 8 + gsdPrizes Druker Traveling Fellowship Druker_Traveling_Fellowship 9 + gsdPrizes Ferdinand Colloredo-Mansfeld Prize for Superior Achievement in Real Estate Studies Ferdinand_Colloredo-Mansfeld_Prize_for_Superior_Achievement_in_Real_Estate_Studies 10 + gsdPrizes Frederick Sheldon Traveling Fellowship Frederick_Sheldon_Traveling_Fellowship 11 + gsdPrizes Howard T. Fisher Prize for Excellence in Geographic Information Science Howard_T_Fisher_Prize_for_Excellence_in_Geographic_Information_Science 12 + gsdPrizes Jacob Weidenmann Prize Jacob_Weidenmann_Prize 13 + gsdPrizes Julia Amory Appleton Traveling Fellowship in Architecture Julia_Amory_Appleton_Traveling_Fellowship_in_Architecture 14 + gsdPrizes Kevin V. Kieran Prize (Kevin Kieran Memorial Scholarship) Kevin_V_Kieran_Prize_(Kevin_Kieran_Memorial_Scholarship) 15 + gsdPrizes Norman T. Newton Prize Norman_T_Newton_Prize 16 + gsdPrizes Peter Rice Prize for Innovation in Architecture and Structural Design Peter_Rice_Prize_for_Innovation_in_Architecture_and_Structural_Design 17 + gsdPrizes Peter Walker & Partners Fellowship for Landscape Architecture Peter_Walker_&_Partners_Fellowship_for_Landscape_Architecture 18 + gsdPrizes Sinclair Kennedy Traveling Fellowship Sinclair_Kennedy_Traveling_Fellowship 19 + gsdPrizes The Daniel L. 
Schodek Award for Technology and Sustainability The_Daniel_L_Schodek_Award_for_Technology_and_Sustainability 20 + gsdAccreditation High High 0 + gsdAccreditation Medium Medium 1 + gsdAccreditation Low Low 2 + gsdCourseName 01101: First Semester Core: PROJECT 01101 0 + gsdCourseName 01102: Second Semester Core: SITUATE 01102 1 + gsdCourseName 01111: LA I: First Semester Core Studio 01111 2 + gsdCourseName 01112: Landscape Architecture II 01112 3 + gsdCourseName 01121: First Semester Core Urban Planning Studio 01121 4 + gsdCourseName 01122: Second Semester Core Urban Planning Studio 01122 5 + gsdCourseName 01201: Third Semester Core: INTEGRATE 01201 6 + gsdCourseName 01202: Fourth Semester Core: RELATE 01202 7 + gsdCourseName 01211: LA III: Third Semester Core Studio 01211 8 + gsdCourseName 01212: Landscape Architecture IV 01212 9 + gsdCourseName 01221: Elements of Urban Design 01221 10 + gsdCourseName 01301: Kyoto Studio II: Seasons and Architecture 01301 11 + gsdCourseName 01301: The Function of Time 01301 12 + gsdCourseName 01302: Architecture Club London 01302 13 + gsdCourseName 01302: Unfinished Work III 01302 14 + gsdCourseName 01303: Alimentary Design 01303 15 + gsdCourseName 01303: Workplan 01303 16 + gsdCourseName 01304: Alimentary Design 01304 17 + gsdCourseName 01304: Socio-Environmental Responsive Design 01304 18 + gsdCourseName 01305: Built Climates 01305 19 + gsdCourseName 01305: Parametric Semiology - High Performance Architecture for Apple, Google and Facebook 01305 20 + gsdCourseName 01306: 21st Cent. 
Arch.of Africa and the Diaspora 01306 21 + gsdCourseName 01306: Material Performance - Fibrous Tectonics 01306 22 + gsdCourseName 01307: La Strada Novissima 01307 23 + gsdCourseName 01307: Material Performance 01307 24 + gsdCourseName 01308: City of Artificial Extrusions 01308 25 + gsdCourseName 01308: Green Card Conversations 01308 26 + gsdCourseName 01309: Studio Alaska 01309 27 + gsdCourseName 01309: Theatre and the City 01309 28 + gsdCourseName 01310: Architecture of Cultural Prosthetics 01310 29 + gsdCourseName 01310: Rotterdam Study Abroad Studio Option: Elements of Architecture 01310 30 + gsdCourseName 01311: Apres Ski: Eco Village Les Diablerets 01311 31 + gsdCourseName 01311: The Forms of Transition 01311 32 + gsdCourseName 01312: "You Can't Die in Disney World" A ZOO 01312 33 + gsdCourseName 01312: Basel Study Abroad Studio Option 01312 34 + gsdCourseName 01313: Indebted Architecture 01313 35 + gsdCourseName 01314: IN THE LAND OF NANDUTi: following the lines, threads, and figures of the river 01314 36 + gsdCourseName 01315: Real and Imaginary Variables (Final): Global Arenas 01315 37 + gsdCourseName 01316: High-rise / High-density 01316 38 + gsdCourseName 01317: Another nature 01317 39 + gsdCourseName 01318: Borrominations, or the Auratic Dome 01318 40 + gsdCourseName 01319: Thermodynamic Materialism Applied to Dense Urban Conglomerates, Two Chinese Case Studies 01319 41 + gsdCourseName 01401: A New [Landscape] Infrastructure for Los Angeles 01401 42 + gsdCourseName 01401: Liminal Space 01401 43 + gsdCourseName 01402: Parallel Motion: Walden Pond, Concord / Central Park, New York 01402 44 + gsdCourseName 01402: Parallel Motion: Walden Pond, Concord/ Central Park , NY 01402 45 + gsdCourseName 01402: The Endless Landscape - River Hudson 01402 46 + gsdCourseName 01403: After La Villette 01403 47 + gsdCourseName 01403: After La Vilette (Paris) 01403 48 + gsdCourseName 01403: LIFE-STYLED - CHINA-TOWN 01403 49 + gsdCourseName 01404: California Limnolarium 01404 
50 + gsdCourseName 01404: California Limnolarium (experiments in projective processes) 01404 51 + gsdCourseName 01404: Post-suburb - Nashua NH 01404 52 + gsdCourseName 01405: Airport Park Zurich 01405 53 + gsdCourseName 01405: Envisioning Miami: Simulated Natures 01405 54 + gsdCourseName 01406: The Ocean State 01406 55 + gsdCourseName 01407: From the City to the Object: Terre des Hommes 2017 01407 56 + gsdCourseName 01408: Caen Island: Public Space 01408 57 + gsdCourseName 01409: Negative Planning in Nanshahe, Haidian District, Beijing 01409 58 + gsdCourseName 01501: Haters Make Me Famous: The Newark Riverfront and the Post-Great Migration City 01501 59 + gsdCourseName 01501: RURBAN 01501 60 + gsdCourseName 01502: Networked Urbanism: Urban Waste - Urban Design 01502 61 + gsdCourseName 01502: The Storm, the Strife, and Everyday Life 01502 62 + gsdCourseName 01503: Planning and Development on the East Boston Waterfront 01503 63 + gsdCourseName 01503: The Countryside as a City 01503 64 + gsdCourseName 01504: Retrofitting the (post?) 
Industrial Metropolis 01504 65 + gsdCourseName 01505: Medellin: Urban Porosity as Social Infrastructure 01505 66 + gsdCourseName 01506: Obsolescence and Pathways to Redevelopment: 01506 67 + gsdCourseName 01507: Design and Politics - Managing Risks and Vulnerabilities 01507 68 + gsdCourseName 01601: Macau: Cross-border Cities 01601 69 + gsdCourseName 01602: Territorialism II 01602 70 + gsdCourseName 01603: Meydan: Designing the Surfaces of Public Space around Beyazit Square, Istanbul 01603 71 + gsdCourseName 01606: Los Angeles Study Abroad Studio: The Possibilities of the Wrong Scale 01606 72 + gsdCourseName 02121: Visual Studies 02121 73 + gsdCourseName 02122: Projective Representation in Architecture 02122 74 + gsdCourseName 02129: Spatial Analysis and Representation 02129 75 + gsdCourseName 02141: Landscape Representation I 02141 76 + gsdCourseName 02142: Landscape Representation I 02142 77 + gsdCourseName 02223: Digital Media I 02223 78 + gsdCourseName 02224: Digital Media II 02224 79 + gsdCourseName 02241: Landscape Representation II 02241 80 + gsdCourseName 02241: Landscape Representation III 02241 81 + gsdCourseName 02322: Digital Media for Design 02322 82 + gsdCourseName 02341: Communication for Designer 02341 83 + gsdCourseName 02415: Paper or Plastic 02415 84 + gsdCourseName 02444: Landscape Material Design Practice and Digital Media 02444 85 + gsdCourseName 02446: Drawing for Designers 02446 86 + gsdCourseName 02448: Landscape as Painting 02448 87 + gsdCourseName 02449: Immersive Landscape 02449 88 + gsdCourseName 02449: Landscape as Video Game 02449 89 + gsdCourseName 02450: Landscape as Weather/Atmosphere 02450 90 + gsdCourseName 02482: Art, Design and the Public Domain 02482 91 + gsdCourseName 02602: Basel Study Abroad Seminar 02602 92 + gsdCourseName 03241: Theories of Landscape as Urbanism 03241 93 + gsdCourseName 03241: Theories of Landscape as Urbanism, Landscape as Infrastructure 03241 94 + gsdCourseName 03242: Theories of Landscape Architecture 
03242 95 + gsdCourseName 03330: Conservation of Older Buildings 03330 96 + gsdCourseName 03333: Culture, Conservation and Design 03333 97 + gsdCourseName 03338: carbonurbanism 03338 98 + gsdCourseName 03345: Emergence in Landscape Architecture 03345 99 + gsdCourseName 03375: Planning for Conservation: 03375 100 + gsdCourseName 03453: Light Structure I 03453 101 + gsdCourseName 03494: Design for Learning 03494 102 + gsdCourseName 03499: The Aperture Analyzed 03499 103 + gsdCourseName 03602: Study Abroad Seminar: Islands 03602 104 + gsdCourseName 03603: The Hitchhikers Guide to Hyperreality 03603 105 + gsdCourseName 04105: Studies of the Built North American Environment 04105 106 + gsdCourseName 04105: Studies of the Built North American Environment 1580 - Present 04105 107 + gsdCourseName 04115: History and Theory of Urban Interventions 04115 108 + gsdCourseName 04121: Buildings, Texts, and Contexts I 04121 109 + gsdCourseName 04141: Histories of Landscape Architecture 04141 110 + gsdCourseName 04142: Histories of Landscape Architecture II 04142 111 + gsdCourseName 04223: Buildings, Texts, and Contexts III 04223 112 + gsdCourseName 04303: Modernization in the Visual U.S. 
Environment 04303 113 + gsdCourseName 04304: North American Seacoasts + Landscapes Discovery Period to the Present 04304 114 + gsdCourseName 04304: North American Seacoasts and Landscape 04304 115 + gsdCourseName 04305: Adventure + Fantasy Simulation 1871-2036 04305 116 + gsdCourseName 04329: Urbanization in the East Asian Region 04329 117 + gsdCourseName 04358: Authority and Invention: Medieval Art and Architecture 04358 118 + gsdCourseName 04362: Structuring Urban Experience 04362 119 + gsdCourseName 04363: Walking 04363 120 + gsdCourseName 04405: Istanbul 04405 121 + gsdCourseName 04408: Situating the Modern 04408 122 + gsdCourseName 04439: "In the Manner of a Picture" 04439 123 + gsdCourseName 04444: Historical Ground 04444 124 + gsdCourseName 04445: Envisioning Landscape: Cultures of Vision in the Air and on the Ground 04445 125 + gsdCourseName 04446: A History of Nature Conservation and Cultural Landscape Preservation 04446 126 + gsdCourseName 04447: Forest, Grove, Tree 04447 127 + gsdCourseName 04477: Slums in Architectural History 04477 128 + gsdCourseName 05204: Real Estate Finance and Development 05204 129 + gsdCourseName 05206: Land Use and Environmental Law 05206 130 + gsdCourseName 05210: Cities by Design I 05210 131 + gsdCourseName 05212: Field Studies in Real Estate, Planning, and Urban Design 05212 132 + gsdCourseName 05213: Policy Making in Urban Settings 05213 133 + gsdCourseName 05222: Markets and Market Failures with Cases 05222 134 + gsdCourseName 05304: Transportation Planning and Development 05304 135 + gsdCourseName 05326: Housing and Urbanization in the United States 05326 136 + gsdCourseName 05330: Healthy Places 05330 137 + gsdCourseName 05338: Planning for the 21st Century 05338 138 + gsdCourseName 05342: Creating Resilient Cities 05342 139 + gsdCourseName 05343: Critical and Social Cartography 05343 140 + gsdCourseName 05360: Territorial Intelligence 05360 141 + gsdCourseName 05433: Modern Housing and Urban Districts 05433 142 + 
gsdCourseName 05492: Real Estate Finance and Development Fundamentals 05492 143 + gsdCourseName 05495: Market Analysis and Urban Economics 05495 144 + gsdCourseName 05502: Urban Governance and the Politics of Planning in the Developing World 05502 145 + gsdCourseName 06121 Construction Lab 06121 146 + gsdCourseName 06122 Energy in Architecture 06122 147 + gsdCourseName 06141: Ecologies, Techniques, Technologies I 06141 148 + gsdCourseName 06141: Ecologies, Techniques, Techs. I 06141 149 + gsdCourseName 06142: Ecologies, Techniques, Techs. II 06142 150 + gsdCourseName 06227: Structural Design 1 06227 151 + gsdCourseName 06230: Cases in Contemporary Construction 06230 152 + gsdCourseName 06241: Ecologies, Techniques, Technologies III 06241 153 + gsdCourseName 06241: Ecologies, Techniques, Techs. III 06241 154 + gsdCourseName 06242: Ecologies, Techniques, Techs. IV 06242 155 + gsdCourseName 06243: Ecologies, Techniques, Techs. V 06243 156 + gsdCourseName 06251: Research Seminar on Urban Ecology 06251 157 + gsdCourseName 06271: The Innovative Practice 06271 158 + gsdCourseName 06272: Innovation in Science and Engineering 06272 159 + gsdCourseName 06273: Water Engineering 06273 160 + gsdCourseName 06274: Advanced Introduction to Robotics 06274 161 + gsdCourseName 06275: Computer Vision 06275 162 + gsdCourseName 06317: Material Practice as Research 06317 163 + gsdCourseName 06318: Urban and Suburban Ecology 06318 164 + gsdCourseName 06322: Mapping: Geographic Representation 06322 165 + gsdCourseName 06323: Brownfields Practicum 06323 166 + gsdCourseName 06333: Aquatic Ecology 06333 167 + gsdCourseName 06335: Phytotechnologies 06335 168 + gsdCourseName 06337: Changing Natural and Built Coastal Environments 06337 169 + gsdCourseName 06337: Changing Natural and Built Coastal Environments 06337 170 + gsdCourseName 06338: Introduction to Computational Design 06338 171 + gsdCourseName 06436: Expanded Mechanisms / Empirical Materialisms 06436 172 + gsdCourseName 06450: High 
Performance Buildings and Systems Integration 06450 173 + gsdCourseName 06451: Research Seminar on Urban Ecology 06451 174 + gsdCourseName 06454: Poetics of Construction: Detail Design 06454 175 + gsdCourseName 06468: Design By Committee 06468 176 + gsdCourseName 06470: Energy Simulation for Design 06470 177 + gsdCourseName 06474: Natural Ventilation 06474 178 + gsdCourseName 06478: Informal Robotics 06478 179 + gsdCourseName 06479: Daylighting 06479 180 + gsdCourseName 07241: Practices of LA 07241 181 + gsdCourseName 07241: Practices of Landscape Architecture 07241 182 + gsdCourseName 07408: Frameworks of Contemporary Practice 07408 183 + gsdCourseName 07410: The Architect in History 07410 184 + gsdCourseName 09123: The Fourth Typology 09123 185 + gsdCourseName 09123: The Fourth Typology: Dominant Type + the Idea of the City 09123 186 + gsdCourseName 09127: Real Estate and City Making in China 09127 187 + gsdCourseName 09131: Cultivating Scale: Territorial Planting Strategies 09131 188 + gsdCourseName 09136: Teaching Creativity 09136 189 + gsdCourseName 09137: Mapping Cultural Space 09137 190 + gsdCourseName 09201: Independent Study Masters Degrees 09201 191 + gsdCourseName 09204: Preparation for Independent Thesis Proposal for MUP, MAUD, or MLAUD 09204 192 + gsdCourseName 09204: Thesis Prep for MUP, MAUD, or MLAUD 09204 193 + gsdCourseName 09301: Independent Thesis in Satisfaction of Degree MArch 09301 194 + gsdCourseName 09302: Independent Thesis in Satisfaction of the Degree MAUD, MLAUD, or MUP 09302 195 + gsdCourseName 09304: Independent Thesis for Mdes 09304 196 + gsdCourseName 09304: Independent Thesis for the Degree Master in Design Studies 09304 197 + gsdCourseName 09305: Master of Design Studies Final Project 09305 198 + gsdCourseName 09341: Preparation of Design Thesis Proposal for MLA 09341 199 + gsdCourseName 09341: Thesis Prep for MLA 09341 200 + gsdCourseName 09342: Independent Thesis 09342 201 + gsdCourseName 09342: Independent Thesis in 
Satisfaction of the Degree MLA 09342 202 + gsdCourseName 09503: Preparation of Doctoral Thesis Proposal 09503 203 + gsdCourseName 09504: Thesis in Satisfaction of the Degree Doctor of Design 09504 204 + gsdCourseName 09506: Thesis Extension in Satisfaction of Degree Doctor of Design 09506 205 + gsdCourseName 09601: MArch II Proseminar 09601 206 + gsdCourseName 09630: Urban Design Proseminar 09630 207 + gsdCourseName 09641: MLA Proseminar 09641 208 + gsdCourseName 09641: Proseminar in Landscape Architecture 09641 209 + gsdCourseName 09661: Proseminar in Urbanism, Landscape, Ecology 09661 210 + gsdCourseName 09663: Risk and Resilience Proseminar 09663 211 + gsdCourseName 09691: Doctoral Program Proseminar 09691 212 + gsdCourseName Other Other 213 \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/metadatablocks/customMRA.tsv b/dataversedock/testdata/scripts/api/data/metadatablocks/customMRA.tsv new file mode 100644 index 0000000..ea91557 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/metadatablocks/customMRA.tsv @@ -0,0 +1,16 @@ +#metadataBlock name dataverseAlias displayName + customMRA MRA Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + mraCollection Murray Research Archive Collection Browse the Murray Research Archive collection with the following terms. text 0 FALSE TRUE TRUE TRUE FALSE FALSE customMRA +#controlledVocabulary DatasetField Value identifier displayOrder + mraCollection Diversity samples: Race, Ethnicity, Sexual Orientation, Religion MRA0 0 + mraCollection Early Head Start Research and Evaluation Project, 1996 - 2001 MRA1 1 + mraCollection Economic Theory and Demography MRA2 2 + mraCollection Education MRA3 3 + mraCollection Family. Marriage. 
Women MRA4 4 + mraCollection Health MRA5 5 + mraCollection Politics and Government MRA6 6 + mraCollection Replications, Extensions and Followups MRA7 7 + mraCollection Studies with Audio Data MRA8 8 + mraCollection Studies with Video Data MRA9 9 + mraCollection Work MRA10 10 \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/metadatablocks/customPSI.tsv b/dataversedock/testdata/scripts/api/data/metadatablocks/customPSI.tsv new file mode 100644 index 0000000..b5103df --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/metadatablocks/customPSI.tsv @@ -0,0 +1,106 @@ +#metadataBlock name dataverseAlias displayName + customPSI PSI Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + psiBehavior Behavior Behavior text 0 TRUE TRUE TRUE TRUE FALSE FALSE customPSI + psiDonor Donor Donor text 1 TRUE TRUE TRUE TRUE FALSE FALSE customPSI + psiHealthArea Health Area Health Area text 2 TRUE TRUE TRUE TRUE FALSE FALSE customPSI + psiIntervention Intervention Intervention text 3 TRUE TRUE TRUE TRUE FALSE FALSE customPSI + psiPopulation Population Population text 4 TRUE TRUE TRUE TRUE FALSE FALSE customPSI + psiProductsServices Products/Services Products/Services text 5 TRUE TRUE TRUE TRUE FALSE FALSE customPSI + psiStudyDesignElement Study Design Element Study Design Element text 6 TRUE TRUE TRUE TRUE FALSE FALSE customPSI + psiStudyType Study Type Study Type text 7 TRUE TRUE TRUE TRUE FALSE FALSE customPSI +#controlledVocabulary DatasetField Value identifier displayOrder + psiBehavior Abstinence 0 + psiBehavior Birth spacing 1 + psiBehavior Cervical cancer screening 2 + psiBehavior Condom use 3 + psiBehavior FGM 4 + psiBehavior HIV risk behaviors 5 + psiBehavior HIV/STI testing 6 + psiBehavior LLIN use 7 + psiBehavior Male circumcision 8 + psiBehavior Modern contraceptive use 9 + 
psiBehavior ORS use 10 + psiBehavior Partner reduction 11 + psiBehavior Referral uptake 12 + psiBehavior Treatment adherence 13 + psiBehavior Water treatment 14 + psiDonor CDC 0 + psiDonor DFID 1 + psiDonor Dutch 2 + psiDonor Gates Foundation 3 + psiDonor Global Fund 4 + psiDonor KfW 5 + psiDonor LAD 6 + psiDonor Other 7 + psiDonor PEPFAR 8 + psiDonor UNFPA 9 + psiDonor USAID 10 + psiHealthArea Diarrhea 0 + psiHealthArea GBV 1 + psiHealthArea HIV 2 + psiHealthArea ICM 3 + psiHealthArea Malaria 4 + psiHealthArea NCDs 5 + psiHealthArea Nutrition 6 + psiHealthArea Pneumonia 7 + psiHealthArea Reproductive health 8 + psiHealthArea TB 9 + psiIntervention BCC 0 + psiIntervention IPC 1 + psiIntervention Medical detailing 2 + psiIntervention mHealth 3 + psiIntervention Provider training 4 + psiIntervention Social franchising 5 + psiPopulation Caregivers 0 + psiPopulation Couples 1 + psiPopulation FSW 2 + psiPopulation General population 3 + psiPopulation IDUs 4 + psiPopulation MARPs 5 + psiPopulation Men 6 + psiPopulation MSM 7 + psiPopulation PLHIV 8 + psiPopulation Providers 9 + psiPopulation Truck drivers 10 + psiPopulation Women 11 + psiPopulation WRA 12 + psiPopulation Youth 13 + psiProductsServices ACT 0 + psiProductsServices ANC 1 + psiProductsServices Antibiotics 2 + psiProductsServices ART 3 + psiProductsServices Clean delivery kit 4 + psiProductsServices Condoms 5 + psiProductsServices Household water treatment 6 + psiProductsServices HTC 7 + psiProductsServices LLIN 8 + psiProductsServices Long-term Methods 9 + psiProductsServices Medicated Abortion 10 + psiProductsServices Misoprostol 11 + psiProductsServices Multivitamin 12 + psiProductsServices Needle and syringe 13 + psiProductsServices Nevirapine 14 + psiProductsServices ORS 15 + psiProductsServices PMTCT 16 + psiProductsServices Short-term methods 17 + psiProductsServices STI kit 18 + psiProductsServices TB DOTS 19 + psiProductsServices VMC 20 + psiProductsServices Zinc 21 + psiStudyDesignElement CEM 0 + 
psiStudyDesignElement Client exit interview 1 + psiStudyDesignElement Control group 2 + psiStudyDesignElement Cross-sectional 3 + psiStudyDesignElement Focus group 4 + psiStudyDesignElement In-depth interview 5 + psiStudyDesignElement Longitudinal 6 + psiStudyDesignElement LQAS 7 + psiStudyDesignElement Mystery client 8 + psiStudyDesignElement Pretesting 9 + psiStudyDesignElement TRaC 10 + psiStudyType MAP 0 + psiStudyType Mixed Methods 1 + psiStudyType Qualitative 2 + psiStudyType Quantitative 3 + psiStudyType Retail audit 4 \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/metadatablocks/customPSRI.tsv b/dataversedock/testdata/scripts/api/data/metadatablocks/customPSRI.tsv new file mode 100644 index 0000000..9493687 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/metadatablocks/customPSRI.tsv @@ -0,0 +1,38 @@ +#metadataBlock name dataverseAlias displayName + customPSRI Political Science Replication Initiative Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + PSRI1 Are the original data publicly available? Select from the list of options. text 0 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI + PSRI2 Is the original code available? Select from the list of options. text 1 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI + PSRI3 Where are the original data archived (name and url)? Answer if the data are publicly available. text 2 FALSE FALSE FALSE FALSE FALSE FALSE customPSRI + PSRI4 Where is the original code publicly archived (name and url)? Answer if the code is publicly available. text 3 FALSE FALSE FALSE FALSE FALSE FALSE customPSRI + PSRI5 Will you submit your replication code to this Dataverse (This is a PSRI requirement)? Select from the list of options. 
text 4 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI + PSRI6 Will you submit your replication write-up to this Dataverse (This is a PSRI requirement)? Select from the list of options. text 5 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI + PSRI7 Did you send the replication materials to the original author(s) and notify them that you'd be posting your replication on PSRI? Select from the list of options. text 6 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI + PSRI8 Was the replication done in a course? (If so, please continue to answer the subsequent questions, and if not, select N/A) Select from the list of options. text 7 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI + PSRI9 Did another student attempt to replicate the replication in the class? Select from the list of options. text 8 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI + PSRI10 Did another student replicate this replication successfully? Select from the list of options. text 9 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI + PSRI11 Did a professor read/review a draft before the final version? Select from the list of options. 
text 10 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI +#controlledVocabulary DatasetField Value identifier displayOrder + PSRI1 No 0 + PSRI1 Yes 1 + PSRI2 No 0 + PSRI2 Yes 1 + PSRI2 NA 2 + PSRI5 No 0 + PSRI5 Yes 1 + PSRI6 No 0 + PSRI6 Yes 1 + PSRI7 No 0 + PSRI7 Yes 1 + PSRI8 No 0 + PSRI8 Yes 1 + PSRI8 NA 2 + PSRI9 NA 0 + PSRI9 No 1 + PSRI9 Yes 2 + PSRI10 NA 0 + PSRI10 No 1 + PSRI10 Yes 2 + PSRI11 NA 0 + PSRI11 No 1 + PSRI11 Yes 2 \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/metadatablocks/custom_hbgdki.tsv b/dataversedock/testdata/scripts/api/data/metadatablocks/custom_hbgdki.tsv new file mode 100644 index 0000000..bbb098d --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/metadatablocks/custom_hbgdki.tsv @@ -0,0 +1,72 @@ +#metadataBlock name dataverseAlias displayName + custom_hbgdki HBGDki HBGDki Custom Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + hbgdkiStudyName Name of Study Name of the study. Limit to 20 characters. text 0 TRUE FALSE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiStudyRegistry Study Registry Which study registry was used? none 1 FALSE FALSE TRUE FALSE TRUE FALSE custom_hbgdki + hbgdkiStudyRegistryType ID Type Which study registry was used? text 2 TRUE TRUE FALSE FALSE TRUE FALSE hbgdkiStudyRegistry custom_hbgdki + hbgdkiStudyRegistryNumber ID Number ID number for the study per the registry. text 3 TRUE FALSE FALSE FALSE TRUE FALSE hbgdkiStudyRegistry custom_hbgdki + hbgdkiStudyType Type of study Type of study. text 4 TRUE TRUE TRUE TRUE TRUE FALSE custom_hbgdki + hbgdkiIntervention Intervention If an interventional study, describe the interventions. textbox 5 TRUE FALSE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiLowerLimitAge Lower limit of age at enrollment Lower limit of age at enrollment. 
int 6 TRUE FALSE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiUnitsLowerLimitAge Units for lower age limit Units for lower age limit. text 7 TRUE FALSE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiUpperLimitAge Upper limit of age at enrollment Upper limit of age at enrollment. int 8 TRUE FALSE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiUnitsUpperLimitAge Units for upper age limit Units for upper age limit. text 9 TRUE FALSE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiOther Other entry criteria Other entry criteria. textbox 10 TRUE FALSE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiBiosampleType Types of biosamples collected, if any Types of biosamples used (e.g., Blood, Stool,...). text 11 TRUE FALSE TRUE TRUE TRUE FALSE custom_hbgdki + hbgdkiGestationalAge Gestational age Gestational age text 12 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiAnthropometry Anthropometry Anthropometry text 13 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiBirthWeight Birth weight Birth weight text 14 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiNeurocognitiveDev Neurocognitive development Neurocognitive development text 15 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiMaternalChar Maternal characteristics Dataset parameters can include: age, height, weight, obstetric history. text 16 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiPregnancyBirth Pregnancy and birth Dataset parameters can include: Morbidity, nutrition, ANC, delivery method and setting. 
text 17 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiSocioeconomicChar Socioeconomic characteristics Socioeconomic characteristics text 18 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiFeedingCare Feeding care & practice Feeding care & practice text 19 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiImmunizations Immunizations Immunizations text 20 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiInfantChildhoodMorbidity Morbidity in infancy and childhood Morbidity in infancy and childhood text 21 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiWaterSanHygiene Water, sanitation and hygiene standards Water, sanitation and hygiene standards text 22 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki +#controlledVocabulary DatasetField Value identifier displayOrder + hbgdkiStudyRegistryType International Clinical Trials Registry Platform (ICTRP) hbgdki_study_registry_type_ICTRP 0 + hbgdkiStudyRegistryType Australian New Zealand Clinical Trials Registry (ANZCTR) hbgdki_study_registry_type_ANZCTR 1 + hbgdkiStudyRegistryType Brazilian Clinical Trials Registry (ReBec) hbgdki_study_registry_type_ReBec 2 + hbgdkiStudyRegistryType Chinese Clinical Trial Registry (ChiCTR) hbgdki_study_registry_type_ChiCTR 3 + hbgdkiStudyRegistryType Clinical Research Information Service (CRiS), Republic of Korea hbgdki_study_registry_type_CRiS 4 + hbgdkiStudyRegistryType Clinical Trials Registry - India (CTRI) hbgdki_study_registry_type_CTRI 5 + hbgdkiStudyRegistryType Cuban Public Registry of Clinical Trials (RPCEC) hbgdki_study_registry_type_RPCEC 6 + hbgdkiStudyRegistryType EU Clinical Trials Register (EU-CTR) hbgdki_study_registry_type_EU-CTR 7 + hbgdkiStudyRegistryType German Clinical Trials Register (DRKS) hbgdki_study_registry_type_DRKS 8 + hbgdkiStudyRegistryType Iranian Registry of Clinical Trials (IRCT) hbgdki_study_registry_type_IRCT 9 + hbgdkiStudyRegistryType ISRCTN hbgdki_study_registry_type_ISRCTN 10 + hbgdkiStudyRegistryType Japan Primary 
Registries Network (JPRN) hbgdki_study_registry_type_JPRN 11 + hbgdkiStudyRegistryType Pan African Clinical Trial Registry (PACTR) hbgdki_study_registry_type_PACTR 12 + hbgdkiStudyRegistryType Sri Lanka Clinical Trials Registry (SLCTR) hbgdki_study_registry_type_SLCTR 13 + hbgdkiStudyRegistryType Thai Clinical Trials Registry (TCTR) hbgdki_study_registry_type_TCTR 14 + hbgdkiStudyRegistryType The Netherlands National Trial Register (NTR) hbgdki_study_registry_type_NTR 15 + hbgdkiStudyRegistryType US Clinical Trials Registry (clinicaltrials.gov) hbgdki_study_registry_type_USCTR 16 + hbgdkiStudyType Interventional hbgdki_interventional 0 + hbgdkiStudyType Observational hbgdki_observational 1 + hbgdkiStudyType Case Control hbgdki_case_control 2 + hbgdkiStudyType Meta-analysis hbgdki_meta_analysis 3 + hbgdkiStudyType Demographic & Health Survey hbgdki_demographic_health_survey 4 + hbgdkiStudyType Other Survey hbgdki_other_survey 5 + hbgdkiGestationalAge Yes hbgdki_gestation_y 0 + hbgdkiGestationalAge No hbgdki_gestation_n 1 + hbgdkiAnthropometry Yes hbgdki_anthropometry_y 0 + hbgdkiAnthropometry No hbgdki_anthropometry_n 1 + hbgdkiBirthWeight Yes hbgdki_birth_weight_y 0 + hbgdkiBirthWeight No hbgdki_birth_weight_n 1 + hbgdkiNeurocognitiveDev Yes hbgdki_neurocognitive_dev_y 0 + hbgdkiNeurocognitiveDev No hbgdki_neurocognitive_dev_n 1 + hbgdkiMaternalChar Yes hbgdki_maternal_char_y 0 + hbgdkiMaternalChar No hbgdki_maternal_char_ n 1 + hbgdkiPregnancyBirth Yes hbgdki_pregnancy_birth_y 0 + hbgdkiPregnancyBirth No hbgdki_pregnancy_birth_ n 1 + hbgdkiSocioeconomicChar Yes hbgdki_socioeconomic_char_y 0 + hbgdkiSocioeconomicChar No hbgdki_socioeconomic_char_n 1 + hbgdkiFeedingCare Yes hbgdki_feeding_care_y 0 + hbgdkiFeedingCare No hbgdki_feeding_care_n 1 + hbgdkiImmunizations Yes hbgdki_immunizations_y 0 + hbgdkiImmunizations No hbgdki_immunizations_n 1 + hbgdkiInfantChildhoodMorbidity Yes hbgdki_infant_childhood_morbidity_y 0 + hbgdkiInfantChildhoodMorbidity No 
hbgdki_infant_childhood_morbidity_n 1 + hbgdkiWaterSanHygiene Yes hbgdki_water_san_hygiene_y 0 + hbgdkiWaterSanHygiene No hbgdki_water_san_hygiene_n 1 \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/metadatablocks/geospatial.tsv b/dataversedock/testdata/scripts/api/data/metadatablocks/geospatial.tsv new file mode 100644 index 0000000..7464d51 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/metadatablocks/geospatial.tsv @@ -0,0 +1,264 @@ +#metadataBlock name dataverseAlias displayName + geospatial Geospatial Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + geographicCoverage Geographic Coverage Information on the geographic coverage of the data. Includes the total geographic scope of the data. none 0 FALSE FALSE TRUE FALSE FALSE FALSE geospatial + country Country / Nation The country or nation that the Dataset is about. text 1 TRUE TRUE FALSE TRUE FALSE FALSE geographicCoverage geospatial + state State / Province The state or province that the Dataset is about. Use GeoNames for correct spelling and avoid abbreviations. text 2 TRUE FALSE FALSE TRUE FALSE FALSE geographicCoverage geospatial + city City The name of the city that the Dataset is about. Use GeoNames for correct spelling and avoid abbreviations. text 3 TRUE FALSE FALSE TRUE FALSE FALSE geographicCoverage geospatial + otherGeographicCoverage Other Other information on the geographic coverage of the data. text 4 FALSE FALSE FALSE FALSE FALSE FALSE geographicCoverage geospatial + geographicUnit Geographic Unit Lowest level of geographic aggregation covered by the Dataset, e.g., village, county, region. 
text 5 TRUE FALSE TRUE TRUE FALSE FALSE geospatial + geographicBoundingBox Geographic Bounding Box The fundamental geometric description for any Dataset that models geography is the geographic bounding box. It describes the minimum box, defined by west and east longitudes and north and south latitudes, which includes the largest geographic extent of the Dataset's geographic coverage. This element is used in the first pass of a coordinate-based search. Inclusion of this element in the codebook is recommended, but is required if the bound polygon box is included. none 6 FALSE FALSE TRUE FALSE FALSE FALSE geospatial + westLongitude West Longitude Westernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180,0 <= West Bounding Longitude Value <= 180,0. text 7 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial + eastLongitude East Longitude Easternmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180,0 <= East Bounding Longitude Value <= 180,0. text 8 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial + northLongitude North Latitude Northernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90,0 <= North Bounding Latitude Value <= 90,0. text 9 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial + southLongitude South Latitude Southernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90,0 <= South Bounding Latitude Value <= 90,0. 
text 10 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial +#controlledVocabulary DatasetField Value identifier displayOrder + country Afghanistan 0 + country Albania 1 + country Algeria 2 + country American Samoa 3 + country Andorra 4 + country Angola 5 + country Anguilla 6 + country Antarctica 7 + country Antigua and Barbuda 8 + country Argentina 9 + country Armenia 10 + country Aruba 11 + country Australia 12 + country Austria 13 + country Azerbaijan 14 + country Bahamas 15 + country Bahrain 16 + country Bangladesh 17 + country Barbados 18 + country Belarus 19 + country Belgium 20 + country Belize 21 + country Benin 22 + country Bermuda 23 + country Bhutan 24 + country Bolivia, Plurinational State of 25 + country Bonaire, Sint Eustatius and Saba 26 + country Bosnia and Herzegovina 27 + country Botswana 28 BOTSWANA + country Bouvet Island 29 + country Brazil 30 Brasil + country British Indian Ocean Territory 31 + country Brunei Darussalam 32 + country Bulgaria 33 + country Burkina Faso 34 + country Burundi 35 + country Cambodia 36 + country Cameroon 37 + country Canada 38 + country Cape Verde 39 + country Cayman Islands 40 + country Central African Republic 41 + country Chad 42 + country Chile 43 + country China 44 + country Christmas Island 45 + country Cocos (Keeling) Islands 46 + country Colombia 47 + country Comoros 48 + country Congo 49 + country Congo, the Democratic Republic of the 50 + country Cook Islands 51 + country Costa Rica 52 + country Croatia 53 + country Cuba 54 + country Curaçao 55 + country Cyprus 56 + country Czech Republic 57 + country Côte d'Ivoire 58 + country Denmark 59 + country Djibouti 60 + country Dominica 61 + country Dominican Republic 62 + country Ecuador 63 + country Egypt 64 + country El Salvador 65 + country Equatorial Guinea 66 + country Eritrea 67 + country Estonia 68 + country Ethiopia 69 + country Falkland Islands (Malvinas) 70 + country Faroe Islands 71 + country Fiji 72 + country Finland 73 + country 
France 74 + country French Guiana 75 + country French Polynesia 76 + country French Southern Territories 77 + country Gabon 78 + country Gambia 79 Gambia, The + country Georgia 80 + country Germany 81 Germany (Federal Republic of) + country Ghana 82 GHANA + country Gibraltar 83 + country Greece 84 + country Greenland 85 + country Grenada 86 + country Guadeloupe 87 + country Guam 88 + country Guatemala 89 + country Guernsey 90 + country Guinea 91 + country Guinea-Bissau 92 + country Guyana 93 + country Haiti 94 + country Heard Island and Mcdonald Islands 95 + country Holy See (Vatican City State) 96 + country Honduras 97 + country Hong Kong 98 + country Hungary 99 + country Iceland 100 + country India 101 INDIA + country Indonesia 102 Sumatra + country Iran, Islamic Republic of 103 Iran Iran (Islamic Republic of) + country Iraq 104 IRAQ + country Ireland 105 + country Isle of Man 106 + country Israel 107 + country Italy 108 + country Jamaica 109 + country Japan 110 + country Jersey 111 + country Jordan 112 + country Kazakhstan 113 + country Kenya 114 + country Kiribati 115 + country Korea, Democratic People's Republic of 116 + country Korea, Republic of 117 + country Kuwait 118 + country Kyrgyzstan 119 + country Lao People's Democratic Republic 120 Laos + country Latvia 121 + country Lebanon 122 + country Lesotho 123 LESOTHO + country Liberia 124 + country Libya 125 + country Liechtenstein 126 + country Lithuania 127 + country Luxembourg 128 + country Macao 129 + country Macedonia, the Former Yugoslav Republic of 130 + country Madagascar 131 + country Malawi 132 + country Malaysia 133 + country Maldives 134 + country Mali 135 + country Malta 136 + country Marshall Islands 137 + country Martinique 138 + country Mauritania 139 + country Mauritius 140 + country Mayotte 141 + country Mexico 142 + country Micronesia, Federated States of 143 + country Moldova, Republic of 144 + country Monaco 145 + country Mongolia 146 + country Montenegro 147 + country Montserrat 148 + 
country Morocco 149 + country Mozambique 150 MOZAMBIQUE + country Myanmar 151 + country Namibia 152 NAMIBIA + country Nauru 153 + country Nepal 154 + country Netherlands 155 + country New Caledonia 156 + country New Zealand 157 + country Nicaragua 158 + country Niger 159 + country Nigeria 160 + country Niue 161 + country Norfolk Island 162 + country Northern Mariana Islands 163 + country Norway 164 + country Oman 165 + country Pakistan 166 + country Palau 167 + country Palestine, State of 168 + country Panama 169 + country Papua New Guinea 170 + country Paraguay 171 + country Peru 172 + country Philippines 173 + country Pitcairn 174 + country Poland 175 + country Portugal 176 + country Puerto Rico 177 + country Qatar 178 + country Romania 179 + country Russian Federation 180 + country Rwanda 181 + country Réunion 182 + country Saint Barthélemy 183 + country Saint Helena, Ascension and Tristan da Cunha 184 + country Saint Kitts and Nevis 185 + country Saint Lucia 186 + country Saint Martin (French part) 187 + country Saint Pierre and Miquelon 188 + country Saint Vincent and the Grenadines 189 + country Samoa 190 + country San Marino 191 + country Sao Tome and Principe 192 + country Saudi Arabia 193 + country Senegal 194 + country Serbia 195 + country Seychelles 196 + country Sierra Leone 197 + country Singapore 198 + country Sint Maarten (Dutch part) 199 + country Slovakia 200 + country Slovenia 201 + country Solomon Islands 202 + country Somalia 203 + country South Africa 204 + country South Georgia and the South Sandwich Islands 205 + country South Sudan 206 + country Spain 207 + country Sri Lanka 208 + country Sudan 209 + country Suriname 210 + country Svalbard and Jan Mayen 211 + country Swaziland 212 SWAZILAND + country Sweden 213 + country Switzerland 214 + country Syrian Arab Republic 215 + country Taiwan, Province of China 216 Taiwan + country Tajikistan 217 + country Tanzania, United Republic of 218 Tanzania + country Thailand 219 + country Timor-Leste 220 
+ country Togo 221 + country Tokelau 222 + country Tonga 223 + country Trinidad and Tobago 224 + country Tunisia 225 + country Turkey 226 + country Turkmenistan 227 + country Turks and Caicos Islands 228 + country Tuvalu 229 + country Uganda 230 + country Ukraine 231 + country United Arab Emirates 232 UAE + country United Kingdom 233 + country United States 234 U.S.A USA United States of America U.S.A. + country United States Minor Outlying Islands 235 + country Uruguay 236 + country Uzbekistan 237 + country Vanuatu 238 + country Venezuela, Bolivarian Republic of 239 + country Viet Nam 240 + country Virgin Islands, British 241 + country Virgin Islands, U.S. 242 + country Wallis and Futuna 243 + country Western Sahara 244 + country Yemen 245 YEMEN + country Zambia 246 + country Zimbabwe 247 + country Åland Islands 248 \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/metadatablocks/journals.tsv b/dataversedock/testdata/scripts/api/data/metadatablocks/journals.tsv new file mode 100644 index 0000000..097f029 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/metadatablocks/journals.tsv @@ -0,0 +1,41 @@ +#metadataBlock name dataverseAlias displayName + journal Journal Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + journalVolumeIssue Journal Indicates the volume, issue and date of a journal, which this Dataset is associated with. none 0 FALSE FALSE TRUE FALSE FALSE FALSE journal + journalVolume Volume The journal volume which this Dataset is associated with (e.g., Volume 4). text 1 TRUE FALSE FALSE TRUE FALSE FALSE journalVolumeIssue journal + journalIssue Issue The journal issue number which this Dataset is associated with (e.g., Number 2, Autumn). 
text 2 TRUE FALSE FALSE TRUE FALSE FALSE journalVolumeIssue journal + journalPubDate Publication Date The publication date for this journal volume/issue, which this Dataset is associated with (e.g., 1999). YYYY or YYYY-MM or YYYY-MM-DD date 3 TRUE FALSE FALSE TRUE FALSE FALSE journalVolumeIssue journal + journalArticleType Type of Article Indicates what kind of article this is, for example, a research article, a commentary, a book or product review, a case report, a calendar, etc (based on JATS). text 4 TRUE TRUE FALSE TRUE FALSE FALSE journal +#controlledVocabulary DatasetField Value identifier displayOrder + journalArticleType abstract 0 + journalArticleType addendum 1 + journalArticleType announcement 2 + journalArticleType article-commentary 3 + journalArticleType book review 4 + journalArticleType books received 5 + journalArticleType brief report 6 + journalArticleType calendar 7 + journalArticleType case report 8 + journalArticleType collection 9 + journalArticleType correction 10 + journalArticleType data paper 11 + journalArticleType discussion 12 + journalArticleType dissertation 13 + journalArticleType editorial 14 + journalArticleType in brief 15 + journalArticleType introduction 16 + journalArticleType letter 17 + journalArticleType meeting report 18 + journalArticleType news 19 + journalArticleType obituary 20 + journalArticleType oration 21 + journalArticleType partial retraction 22 + journalArticleType product review 23 + journalArticleType rapid communication 24 + journalArticleType reply 25 + journalArticleType reprint 26 + journalArticleType research article 27 + journalArticleType retraction 28 + journalArticleType review article 29 + journalArticleType translation 30 + journalArticleType other 31 \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/metadatablocks/social_science.tsv b/dataversedock/testdata/scripts/api/data/metadatablocks/social_science.tsv new file mode 100644 index 0000000..7ef714c --- /dev/null +++ 
b/dataversedock/testdata/scripts/api/data/metadatablocks/social_science.tsv @@ -0,0 +1,29 @@ +#metadataBlock name dataverseAlias displayName + socialscience Social Science and Humanities Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + unitOfAnalysis Unit of Analysis Basic unit of analysis or observation that this Dataset describes, such as individuals, families/households, groups, institutions/organizations, administrative units, and more. For information about the DDI's controlled vocabulary for this element, please refer to the DDI web page at http://www.ddialliance.org/controlled-vocabularies. textbox 0 TRUE FALSE TRUE TRUE FALSE FALSE socialscience + universe Universe Description of the population covered by the data in the file; the group of people or other elements that are the object of the study and to which the study results refer. Age, nationality, and residence commonly help to delineate a given universe, but any number of other factors may be used, such as age limits, sex, marital status, race, ethnic group, nationality, income, veteran status, criminal convictions, and more. The universe may consist of elements other than persons, such as housing units, court cases, deaths, countries, and so on. In general, it should be possible to tell from the description of the universe whether a given individual or element is a member of the population under study. Also known as the universe of interest, population of interest, and target population. textbox 1 TRUE FALSE TRUE TRUE FALSE FALSE socialscience + timeMethod Time Method The time method or time dimension of the data collection, such as panel, cross-sectional, trend, time- series, or other. 
text 2 TRUE FALSE FALSE TRUE FALSE FALSE socialscience + dataCollector Data Collector Individual, agency or organization responsible for administering the questionnaire or interview or compiling the data. FamilyName, GivenName or Organization text 3 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + collectorTraining Collector Training Type of training provided to the data collector text 4 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + frequencyOfDataCollection Frequency If the data collected includes more than one point in time, indicate the frequency with which the data was collected; that is, monthly, quarterly, or other. text 5 TRUE FALSE FALSE TRUE FALSE FALSE socialscience + samplingProcedure Sampling Procedure Type of sample and sample design used to select the survey respondents to represent the population. May include reference to the target sample size and the sampling fraction. textbox 6 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + targetSampleSize Target Sample Size Specific information regarding the target sample size, actual sample size, and the formula used to determine this. none 7 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + targetSampleActualSize Actual Actual sample size. Enter an integer... int 8 FALSE FALSE FALSE FALSE FALSE FALSE targetSampleSize socialscience + targetSampleSizeFormula Formula Formula used to determine target sample size. text 9 FALSE FALSE FALSE FALSE FALSE FALSE targetSampleSize socialscience + deviationsFromSampleDesign Major Deviations for Sample Design Show correspondence as well as discrepancies between the sampled units (obtained) and available statistics for the population (age, sex-ratio, marital status, etc.) as a whole. text 10 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + collectionMode Collection Mode Method used to collect the data; instrumentation characteristics (e.g., telephone interview, mail questionnaire, or other). 
textbox 11 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + researchInstrument Type of Research Instrument Type of data collection instrument used. Structured indicates an instrument in which all respondents are asked the same questions/tests, possibly with precoded answers. If a small portion of such a questionnaire includes open-ended questions, provide appropriate comments. Semi-structured indicates that the research instrument contains mainly open-ended questions. Unstructured indicates that in-depth interviews were conducted. text 12 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + dataCollectionSituation Characteristics of Data Collection Situation Description of noteworthy aspects of the data collection situation. Includes information on factors such as cooperativeness of respondents, duration of interviews, number of call backs, or similar. textbox 13 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + actionsToMinimizeLoss Actions to Minimize Losses Summary of actions taken to minimize data loss. Include information on actions such as follow-up visits, supervisory checks, historical matching, estimation, and so on. text 14 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + controlOperations Control Operations Control OperationsMethods to facilitate data control performed by the primary investigator or by the data archive. text 15 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + weighting Weighting The use of sampling procedures might make it necessary to apply weights to produce accurate statistical results. Describes the criteria for using weights in analysis of a collection. If a weighting formula or coefficient was developed, the formula is provided, its elements are defined, and it is indicated how the formula was applied to the data. textbox 16 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + cleaningOperations Cleaning Operations Methods used to clean the data collection, such as consistency checking, wildcode checking, or other. 
text 17 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + datasetLevelErrorNotes Study Level Error Notes Note element used for any information annotating or clarifying the methodology and processing of the study. text 18 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + responseRate Response Rate Percentage of sample members who provided information. textbox 19 TRUE FALSE FALSE TRUE FALSE FALSE socialscience + samplingErrorEstimates Estimates of Sampling Error Measure of how precisely one can estimate a population value from a given sample. text 20 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + otherDataAppraisal Other Forms of Data Appraisal Other issues pertaining to the data appraisal. Describe issues such as response variance, nonresponse rate and testing for bias, interviewer and response bias, confidence levels, question bias, or similar. text 21 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + socialScienceNotes Notes General notes about this Dataset. none 22 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + socialScienceNotesType Type Type of note. text 23 FALSE FALSE FALSE FALSE FALSE FALSE socialScienceNotes socialscience + socialScienceNotesSubject Subject Note subject. text 24 FALSE FALSE FALSE FALSE FALSE FALSE socialScienceNotes socialscience + socialScienceNotesText Text Text for this note. 
textbox 25 FALSE FALSE FALSE FALSE FALSE FALSE socialScienceNotes socialscience diff --git a/dataversedock/testdata/scripts/api/data/role-admin.json b/dataversedock/testdata/scripts/api/data/role-admin.json new file mode 100644 index 0000000..6d13474 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/role-admin.json @@ -0,0 +1,8 @@ +{ + "alias":"admin", + "name":"Admin", + "description":"A person who has all permissions for dataverses, datasets, and files.", + "permissions":[ + "ALL" + ] +} diff --git a/dataversedock/testdata/scripts/api/data/role-assign-eg1-curator.json b/dataversedock/testdata/scripts/api/data/role-assign-eg1-curator.json new file mode 100644 index 0000000..c375704 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/role-assign-eg1-curator.json @@ -0,0 +1,4 @@ +{ + "assignee": "&explicit/1-EG-1", + "role": "curator" +} diff --git a/dataversedock/testdata/scripts/api/data/role-assign-localhost-curator.json b/dataversedock/testdata/scripts/api/data/role-assign-localhost-curator.json new file mode 100644 index 0000000..55e2642 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/role-assign-localhost-curator.json @@ -0,0 +1,4 @@ +{ + "assignee": "&ip/localhost", + "role": "curator" +} diff --git a/dataversedock/testdata/scripts/api/data/role-assign.json b/dataversedock/testdata/scripts/api/data/role-assign.json new file mode 100644 index 0000000..b1a08bb --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/role-assign.json @@ -0,0 +1,4 @@ +{ + "assignee": "@gabbi", + "role": "curator" +} diff --git a/dataversedock/testdata/scripts/api/data/role-assignee-list.json b/dataversedock/testdata/scripts/api/data/role-assignee-list.json new file mode 100644 index 0000000..71c58af --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/role-assignee-list.json @@ -0,0 +1 @@ +["@admin",":guest","&ip/ipGroup1",":authenticated-users"] diff --git a/dataversedock/testdata/scripts/api/data/role-contrib.json 
b/dataversedock/testdata/scripts/api/data/role-contrib.json new file mode 100644 index 0000000..40cde38 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/role-contrib.json @@ -0,0 +1,11 @@ +{ + "alias": "contrib", + "name": "Dataverse Contributor", + "description": "Someone that can add data to a dataverse, but not remove it.", + "permissions": [ + "Access", + "AccessRestrictedMetadata", + "UndoableEdit", + "EditMetadata" + ] +} diff --git a/dataversedock/testdata/scripts/api/data/role-creator.json b/dataversedock/testdata/scripts/api/data/role-creator.json new file mode 100644 index 0000000..947291d --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/role-creator.json @@ -0,0 +1,9 @@ +{ + "alias": "creator", + "name": "Creator", + "description": "Allows creation of DataSet/Verse", + "permissions": [ + "CreateDataverse", + "CreateDataset" + ] +} diff --git a/dataversedock/testdata/scripts/api/data/role-curator.json b/dataversedock/testdata/scripts/api/data/role-curator.json new file mode 100644 index 0000000..2de5b2a --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/role-curator.json @@ -0,0 +1,16 @@ +{ + "alias":"curator", + "name":"Curator", + "description":"For datasets, a person who can edit License + Terms, edit Permissions, and publish datasets.", + "permissions":[ + "ViewUnpublishedDataset", + "EditDataset", + "DownloadFile", + "DeleteDatasetDraft", + "PublishDataset", + "ManageDatasetPermissions", + "AddDataverse", + "AddDataset", + "ViewUnpublishedDataverse" + ] +} diff --git a/dataversedock/testdata/scripts/api/data/role-dsContributor.json b/dataversedock/testdata/scripts/api/data/role-dsContributor.json new file mode 100644 index 0000000..3cd854c --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/role-dsContributor.json @@ -0,0 +1,8 @@ +{ + "alias": "dsContributor", + "name": "Dataset Creator", + "description": "A person who can add datasets within a dataverse.", + "permissions": [ + "AddDataset" + ] +} diff --git 
a/dataversedock/testdata/scripts/api/data/role-dvContributor.json b/dataversedock/testdata/scripts/api/data/role-dvContributor.json new file mode 100644 index 0000000..b0e264f --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/role-dvContributor.json @@ -0,0 +1,8 @@ +{ + "alias": "dvContributor", + "name": "Dataverse Creator", + "description": "A person who can add subdataverses within a dataverse.", + "permissions": [ + "AddDataverse" + ] +} diff --git a/dataversedock/testdata/scripts/api/data/role-editor.json b/dataversedock/testdata/scripts/api/data/role-editor.json new file mode 100644 index 0000000..98c08ac --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/role-editor.json @@ -0,0 +1,11 @@ +{ + "alias":"editor", + "name":"Contributor", + "description":"For datasets, a person who can edit License + Terms, and then submit them for review.", + "permissions":[ + "ViewUnpublishedDataset", + "EditDataset", + "DownloadFile", + "DeleteDatasetDraft" + ] +} diff --git a/dataversedock/testdata/scripts/api/data/role-filedownloader.json b/dataversedock/testdata/scripts/api/data/role-filedownloader.json new file mode 100644 index 0000000..3fb046f --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/role-filedownloader.json @@ -0,0 +1,8 @@ +{ + "alias":"fileDownloader", + "name":"File Downloader", + "description":"A person who can download a published file.", + "permissions":[ + "DownloadFile" + ] +} diff --git a/dataversedock/testdata/scripts/api/data/role-fullContributor.json b/dataversedock/testdata/scripts/api/data/role-fullContributor.json new file mode 100644 index 0000000..c18fcc7 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/role-fullContributor.json @@ -0,0 +1,9 @@ +{ + "alias": "fullContributor", + "name": "Dataverse + Dataset Creator", + "description": "A person who can add subdataverses and datasets within a dataverse.", + "permissions": [ + "AddDataverse", + "AddDataset" + ] +} diff --git 
a/dataversedock/testdata/scripts/api/data/role-guest.json b/dataversedock/testdata/scripts/api/data/role-guest.json new file mode 100644 index 0000000..ef5d236 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/role-guest.json @@ -0,0 +1,8 @@ +{ + "alias": "guest-role", + "name": "What guests can do", + "description": "Guests can browse", + "permissions": [ + "Discover" + ] +} diff --git a/dataversedock/testdata/scripts/api/data/role-manager.json b/dataversedock/testdata/scripts/api/data/role-manager.json new file mode 100644 index 0000000..1105cbf --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/role-manager.json @@ -0,0 +1,11 @@ +{ + "alias":"manager", + "name":"Curator", + "description":"For datasets, a person who can add a dataset, edit License + Terms, and submit datasets for review.", + "permissions":[ + "ViewUnpublishedDataset", + "EditDataset", + "DownloadFile", + "DeleteDatasetDraft" + ] +} diff --git a/dataversedock/testdata/scripts/api/data/role-member.json b/dataversedock/testdata/scripts/api/data/role-member.json new file mode 100644 index 0000000..d1e1456 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/role-member.json @@ -0,0 +1,10 @@ +{ + "alias":"member", + "name":"Member", + "description":"A person who can view both unpublished dataverses and datasets.", + "permissions":[ + "ViewUnpublishedDataset", + "ViewUnpublishedDataverse", + "DownloadFile" + ] +} diff --git a/dataversedock/testdata/scripts/api/data/shibGroupHarvard.json b/dataversedock/testdata/scripts/api/data/shibGroupHarvard.json new file mode 100644 index 0000000..d90cde6 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/shibGroupHarvard.json @@ -0,0 +1,5 @@ +{ + "name": "All Harvard PIN/Shibboleth Users", + "attribute": "Shib-Identity-Provider", + "pattern": "https://fed.huit.harvard.edu/idp/shibboleth" +} diff --git a/dataversedock/testdata/scripts/api/data/shibGroupMit.json b/dataversedock/testdata/scripts/api/data/shibGroupMit.json new file 
mode 100644 index 0000000..e61e81e --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/shibGroupMit.json @@ -0,0 +1,5 @@ +{ + "name": "All MIT Shibboleth Users", + "attribute": "Shib-Identity-Provider", + "pattern": "urn:mace:incommon:mit.edu" +} diff --git a/dataversedock/testdata/scripts/api/data/shibGroupTestShib.json b/dataversedock/testdata/scripts/api/data/shibGroupTestShib.json new file mode 100644 index 0000000..01b2bd5 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/shibGroupTestShib.json @@ -0,0 +1,5 @@ +{ + "name": "All testshib.org Shibboleth Users", + "attribute": "Shib-Identity-Provider", + "pattern": "https://idp.testshib.org/idp/shibboleth" +} diff --git a/dataversedock/testdata/scripts/api/data/tsv/tsv2json b/dataversedock/testdata/scripts/api/data/tsv/tsv2json new file mode 100755 index 0000000..38a6ded --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/tsv/tsv2json @@ -0,0 +1,38 @@ +#!/usr/bin/env python +import sys +from optparse import OptionParser +import csv +try: + import json +except ImportError: + import simplejson as json + +parser = OptionParser() +options, args = parser.parse_args() + +if args: + csv_file = open(args[0]) +else: + csv_file = sys.stdin + +reader = csv.DictReader(csv_file, delimiter="\t") +rows = [row for row in reader] +for row in rows: + row["permissionRoot"] = "false" + parent = row["parent"] + parts = parent.split("/") + if parts[1]: + target = parts[-1] + else: + target = "root" + del row["parent"] + creator = row["creator"] + del row["creator"] + # FIXME: don't simply strip out single quotes + row["description"] = row["description"].replace("'", "") + jsondata = "%s%s%s" % ("'",json.dumps(row),"'") + start = 'curl -H "Content-type:application/json" -X POST "http://localhost:8080/api/dataverses/' + print "echo creating dataverse ", row["alias"] + print "%s%s%s%s%s%s" % (start, target, "?key=",creator, "\" -d ", jsondata) + print +csv_file.close() diff --git 
a/dataversedock/testdata/scripts/api/data/user-admin.json b/dataversedock/testdata/scripts/api/data/user-admin.json new file mode 100644 index 0000000..938f34d --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/user-admin.json @@ -0,0 +1,8 @@ +{ + "firstName":"Dataverse", + "lastName":"Admin", + "userName":"dataverseAdmin", + "affiliation":"Dataverse.org", + "position":"Admin", + "email":"dataverse@mailinator.com" +} diff --git a/dataversedock/testdata/scripts/api/data/userCathy.json b/dataversedock/testdata/scripts/api/data/userCathy.json new file mode 100644 index 0000000..b75fa8b --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/userCathy.json @@ -0,0 +1,9 @@ +{ + "firstName":"Cathy", + "lastName":"Collaborator", + "userName":"cathy", + "affiliation":"mid", + "position":"Data Scientist", + "email":"cathy@malinator.com", + "phone":"(888) 888-8888" +} diff --git a/dataversedock/testdata/scripts/api/data/userGabbi.json b/dataversedock/testdata/scripts/api/data/userGabbi.json new file mode 100644 index 0000000..2fc8b73 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/userGabbi.json @@ -0,0 +1,9 @@ +{ + "firstName":"Gabbi", + "lastName":"Guest", + "userName":"gabbi", + "affiliation":"low", + "position":"A Guest", + "email":"gabbi@malinator.com", + "phone":"(888) 888-8888" +} \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/userNick.json b/dataversedock/testdata/scripts/api/data/userNick.json new file mode 100644 index 0000000..b4b796d --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/userNick.json @@ -0,0 +1,9 @@ +{ + "firstName":"Nick", + "lastName":"NSA", + "userName":"nick", + "affiliation":"gov", + "position":"Signals Intelligence", + "email":"nick@malinator.com", + "phone":"(888) 888-8888" +} diff --git a/dataversedock/testdata/scripts/api/data/userPete.json b/dataversedock/testdata/scripts/api/data/userPete.json new file mode 100644 index 0000000..4958c99 --- /dev/null +++ 
b/dataversedock/testdata/scripts/api/data/userPete.json @@ -0,0 +1,9 @@ +{ + "firstName":"Pete", + "lastName":"Privileged", + "userName":"pete", + "affiliation":"Top", + "position":"The Boss", + "email":"pete@malinator.com", + "phone":"(888) 888-8888" +} \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/userUma.json b/dataversedock/testdata/scripts/api/data/userUma.json new file mode 100644 index 0000000..fb67daf --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/userUma.json @@ -0,0 +1,9 @@ +{ + "firstName":"Uma", + "lastName":"Underprivileged", + "userName":"uma", + "affiliation":"mid", + "position":"The Intern", + "email":"Uma@malinator.com", + "phone":"(888) 888-8888" +} \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/data/workflows/internal-httpSR-workflow.json b/dataversedock/testdata/scripts/api/data/workflows/internal-httpSR-workflow.json new file mode 100644 index 0000000..8c233d7 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/workflows/internal-httpSR-workflow.json @@ -0,0 +1,32 @@ +{ + "name": "dump to local RSAL", + "steps": [ + { + "provider":":internal", + "stepType":"log", + "parameters": { + "message": "Pre-http request" + } + }, + { + "provider":":internal", + "stepType":"http/sr", + "parameters": { + "url":"http://localhost:5050/dump/${invocationId}", + "method":"POST", + "contentType":"text/plain", + "body":"${invocationId}\ndataset.id=${dataset.id} /\ndataset.identifier=${dataset.identifier} /dataset.globalId=${dataset.globalId} /\ndataset.displayName=${dataset.displayName} /\ndataset.citation=${dataset.citation} /\nminorVersion=${minorVersion} /\nmajorVersion=${majorVersion} /\nreleaseCompleted=${releaseStatus} /", + "expectedResponse":"OK.*", + "rollbackUrl":"http://localhost:5050/dump/${invocationId}", + "rollbackMethod":"DELETE" + } + }, + { + "provider":":internal", + "stepType":"log", + "parameters": { + "message": "Post-http request" + } + } + ] +} diff --git 
a/dataversedock/testdata/scripts/api/data/workflows/internal-no-pause-long-workflow.json b/dataversedock/testdata/scripts/api/data/workflows/internal-no-pause-long-workflow.json new file mode 100644 index 0000000..6675d14 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/workflows/internal-no-pause-long-workflow.json @@ -0,0 +1,58 @@ +{ + "name": "no pauses, long", + "steps": [ + { + "provider":":internal", + "stepType":"log", + "parameters": { + "memo":"first step" + } + }, + { + "provider":":internal", + "stepType":"log" + }, + + { + "provider":":internal", + "stepType":"log" + }, + + { + "provider":":internal", + "stepType":"log" + }, + + { + "provider":":internal", + "stepType":"log" + }, + + { + "provider":":internal", + "stepType":"log" + }, + + { + "provider":":internal", + "stepType":"log" + }, + + { + "provider":":internal", + "stepType":"log" + }, + + { + "provider":":internal", + "stepType":"log" + }, + { + "provider":":internal", + "stepType":"log", + "parameters" : { + "memo":"Last step" + } + } + ] +} diff --git a/dataversedock/testdata/scripts/api/data/workflows/internal-no-pause-workflow.json b/dataversedock/testdata/scripts/api/data/workflows/internal-no-pause-workflow.json new file mode 100644 index 0000000..df44527 --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/workflows/internal-no-pause-workflow.json @@ -0,0 +1,21 @@ +{ + "name": "Internal steps only, no with pause", + "steps": [ + { + "provider":":internal", + "stepType":"log", + "parameters": { + "step":1, + "stepName":"first step" + } + }, + { + "provider":":internal", + "stepType":"log", + "parameters": { + "number":42, + "anotherMessage": "This is the last step before releasing." 
+ } + } + ] +} diff --git a/dataversedock/testdata/scripts/api/data/workflows/internal-pause-workflow.json b/dataversedock/testdata/scripts/api/data/workflows/internal-pause-workflow.json new file mode 100644 index 0000000..c8a5d7b --- /dev/null +++ b/dataversedock/testdata/scripts/api/data/workflows/internal-pause-workflow.json @@ -0,0 +1,28 @@ +{ + "name": "Internal steps with pause", + "steps": [ + { + "provider":":internal", + "stepType":"log", + "parameters": { + "step":1, + "stepName":"first step" + } + }, + { + "provider":":internal", + "stepType":"pause", + "parameters": { + "paramName":"parameter value with a longer name." + } + }, + { + "provider":":internal", + "stepType":"log", + "parameters": { + "number":42, + "anotherMessage": "This is the last step before releasing." + } + } + ] +} diff --git a/dataversedock/testdata/scripts/api/download/.gitignore b/dataversedock/testdata/scripts/api/download/.gitignore new file mode 100644 index 0000000..9b815cf --- /dev/null +++ b/dataversedock/testdata/scripts/api/download/.gitignore @@ -0,0 +1,3 @@ +files.tsv +files +downloaded-files diff --git a/dataversedock/testdata/scripts/api/download/dbquery b/dataversedock/testdata/scripts/api/download/dbquery new file mode 100755 index 0000000..64229bc --- /dev/null +++ b/dataversedock/testdata/scripts/api/download/dbquery @@ -0,0 +1,13 @@ +#!/bin/sh +if [ -z "$1" ]; then + echo "No start date in YYYY-MM-DD format provided." + exit 1 +else + if [ -z "$2" ]; then + echo "No end date in YYYY-MM-DD format provided." 
+ exit 1 + fi + START_DATE=$1 + END_DATE=$2 +fi +psql -h $DB_SERVER -U $DB_USER -p $DB_PORT $DB_NAME -F $'\t' --no-align --pset footer -c "select dvobject.id, dvobject.createdate, dvobject.owner_id, datafile.* from dvobject, datafile where dvobject.id=datafile.id and dvobject.dtype='DataFile' and dvobject.createdate>to_date('$START_DATE','YYYY-MM-DD') and dvobject.createdate files.tsv diff --git a/dataversedock/testdata/scripts/api/download/download b/dataversedock/testdata/scripts/api/download/download new file mode 100755 index 0000000..cb5ca24 --- /dev/null +++ b/dataversedock/testdata/scripts/api/download/download @@ -0,0 +1,10 @@ +#!/bin/sh +DOWNLOAD_DIR=downloaded-files +rm -rf $DOWNLOAD_DIR +mkdir -p $DOWNLOAD_DIR && \ +cat files | while read i; do + echo "Downloding file id $i..." + cd $DOWNLOAD_DIR && mkdir $i && cd $i && \ + curl -s -k -O -J https://$DATAVERSE_SERVER/api/access/datafile/$i?key=$API_TOKEN && \ + cd ../.. +done diff --git a/dataversedock/testdata/scripts/api/download/tsv2files b/dataversedock/testdata/scripts/api/download/tsv2files new file mode 100755 index 0000000..dde1bf6 --- /dev/null +++ b/dataversedock/testdata/scripts/api/download/tsv2files @@ -0,0 +1,2 @@ +#!/bin/sh +cut -f1 files.tsv | tail -n +2 > files diff --git a/dataversedock/testdata/scripts/api/post-install-api-block.sh b/dataversedock/testdata/scripts/api/post-install-api-block.sh new file mode 100755 index 0000000..4cc0ac7 --- /dev/null +++ b/dataversedock/testdata/scripts/api/post-install-api-block.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +# This script can be run on a system that was set up with unrestricted access to +# the sensitive API endpoints, in order to block it for the general public. 
+ +# First, revoke the authentication token from the built-in user: +curl -X DELETE $SERVER/admin/settings/BuiltinUsers.KEY + +# Block the sensitive endpoints: +# Relevant settings: +# - :BlockedApiPolicy - one of allow, drop, localhost-only, unblock-key +# - :BlockedApiKey - when using the unblock-key policy, pass this key in the unblock-key query param to allow the call to a blocked endpoint +# - :BlockedApiEndpoints - comma separated list of blocked api endpoints + +# This leaves /api/admin and /api/test blocked to all connections except from those +# coming from localhost: +curl -X PUT -d localhost-only http://localhost:8080/api/admin/settings/:BlockedApiPolicy +curl -X PUT -d admin,test http://localhost:8080/api/admin/settings/:BlockedApiEndpoints + +# In some situations, you may prefer an alternative solution - to block ALL connections to +# these endpoints completely; but allow connections authenticated with the defined +# "unblock key" (password): + +#curl -X PUT -d YOURSUPERSECRETUNBLOCKKEY http://localhost:8080/api/admin/settings/:BlockedApiKey +#curl -X PUT -d unblock-key http://localhost:8080/api/admin/settings/:BlockedApiPolicy + + diff --git a/dataversedock/testdata/scripts/api/py_api_wrapper/api_fun.py b/dataversedock/testdata/scripts/api/py_api_wrapper/api_fun.py new file mode 100644 index 0000000..e0ee07f --- /dev/null +++ b/dataversedock/testdata/scripts/api/py_api_wrapper/api_fun.py @@ -0,0 +1,153 @@ +import os, sys +import time +import json +from dataverse_api_link import DataverseAPILink + +def msg(s): print s +def dashes(char='-'): msg(40*char) +def msgt(s): dashes(); msg(s); dashes() +def msgx(s): dashes('\/'); msg(s); dashes('\/'); sys.exit(0) + +def get_dataverse_link_object(apikey='pete'): + server_with_api = 'http://localhost:8080/' + #server_with_api = 'https://dvn-build.hmdc.harvard.edu' + return DataverseAPILink(server_with_api, use_https=False, apikey=apikey) + +def check_dv(): + dat = get_dataverse_link_object() + 
dat.save_current_metadata('local-data') + + #add_and_publish_dataverses('local-data/dataverses_2014-0612_11.json','pete') + add_and_publish_dataverses('demo-data/dataverses_2014-0609_16.json','pete') + +def load_users_from_api_file(fname): + """ + Given the JSON results of the list users command (/api/builtin-users): + (a) Iterate through the list + (b) Check if a user exists (by id) + (c) If the user is not found, create the user + + :param fname: full path to a file with user info in JSON format + """ + if not os.path.isfile(fname): + msgx('File not found: %s' % fname) + + # Load the JSON file + user_dict = json.loads(open(fname,'r').read()) + + # Get a the DataverseAPILink object + dv_lnk_obj = get_dataverse_link_object('pete') + dv_lnk_obj.set_return_mode_python() + + # Iterate through json + for user_info in user_dict.get('data', []): + # check if user exists via api + current_user_info = dv_lnk_obj.get_user_data(user_info.get('id', None)) + if current_user_info and current_user_info.get('status') == 'OK': + continue # The user exist, loop to the next user + + user_info.pop('id') # Use the param, except for the 'id' + + # Create the user, passing user params and a password + # + + new_password = user_info.get('userName') + dv_lnk_obj.create_user(user_info, new_password) + +def add_and_publish_dataverses(fname, apikey): + if not os.path.isfile(fname): + msgx('File not found: %s' % fname) + + # Load the JSON file + dv_dict = json.loads(open(fname,'r').read()) + + # Get a the DataverseAPILink object + dv_lnk_obj = get_dataverse_link_object(apikey) + dv_lnk_obj.set_return_mode_python() + + # Iterate through json + previous_alias = "root" + for dv_info in dv_dict.get('data', []): + # check if user exists via api + current_dv_info = dv_lnk_obj.get_dataverse_by_id_or_alias(dv_info.get('id', None)) + + # DV exists, continue loop + if current_dv_info and current_dv_info.get('status') == 'OK': + msg('>>> FOUND IT') + previous_alias = current_dv_info['data']['alias'] + 
continue # The user exist, loop to the next user + + # No DV, create it + keys_not_needed = ['id', 'ownerID', 'creationDate', 'creator'] + for key in keys_not_needed: + if dv_info.has_key(key): + dv_info.pop(key) + + msg('params to send: %s' % dv_info) + # If created, publish it + json_resp = dv_lnk_obj.create_dataverse(previous_alias, dv_info) + if json_resp.get('status') == 'OK': + new_dv_data = json_resp.get('data', {}) + new_id = new_dv_data.get('id', None) + if new_id is not None: + dv_lnk_obj.publish_dataverse(new_id) + previous_alias = current_dv_info.get("alias", "root") + #break + +def add_dataverses(name, cnt=1, parent_dv_name_or_id=1, apikey='snoopy'): + # get the DataverseAPILink + dat = get_dataverse_link_object(apikey=apikey) + dat.set_return_mode_python() + + for x in range(249, 260): + dat.publish_dataverse(x) + return + for x in range(0, cnt): + num = x+1 + alias_str = "new_dv_%d" % num + dv_params_str = """{ "alias":"%s", + "name":"%s %s", + "affiliation":"Affiliation value", + "contactEmail":"pete@malinator.com", + "permissionRoot":true, + "description":"More API testing" + }""" % (alias_str, name, num) + + dv_params = json.loads(dv_params_str) + dat.create_dataverse(parent_dv_name_or_id, dv_params, ) + if x % 20 == 0: time.sleep(1) + + +def delete_dataverses_id_greather_than(id_num, apikey): + if not type(id_num) == int: + raise('id_num needs be an int--not a %s' % type(id_num)) + + # get the DataverseAPILink + dat = get_dataverse_link_object(apikey=apikey) + dat.set_return_mode_python() + + # List the dataverses + dv_json = dat.list_dataverses() + print dv_json + # Pull dataverse ids > 30 + dv_ids = [dv['id'] for dv in dv_json.get("data") if dv['id'] > id_num] + + # reverse order ids + dv_ids.sort() + dv_ids.reverse() + + # delete them + for dv_id in dv_ids: + print dat.delete_dataverse_by_id(dv_id) + #print dat.list_datasets() + +if __name__ == '__main__': + check_dv() + #load_users_from_api_file('demo-data/users_2014-0609_14.json') + 
#load_users_from_api_file('demo-data/rp_users.json') + #add_and_publish_dataverses('demo-data/dataverses_2014-0609_14.json', 'gromit') + #add_and_publish_dataverses('demo-data/rp_dataverses.json', 'gromit') + + #add_dataverses('Other DV #', 17, 23, 'snoopy') + #add_dataverses('Uma\'s Other Retricted DVs #', 7, 8, 'pete') + #delete_dataverses_id_greather_than(177, 'pete') diff --git a/dataversedock/testdata/scripts/api/py_api_wrapper/dataverse_api_link.py b/dataversedock/testdata/scripts/api/py_api_wrapper/dataverse_api_link.py new file mode 100644 index 0000000..4c6df59 --- /dev/null +++ b/dataversedock/testdata/scripts/api/py_api_wrapper/dataverse_api_link.py @@ -0,0 +1,430 @@ +""" +Use Dataverse native APIs described here: https://github.com/IQSS/dataverse/tree/master/scripts/api + +5/8/2013 - scratch work, examining API +6/5/2013 - Back to implementing some API work +6/6/2013 - Move function parameters into API_SPECS, create functions on init + +Requires the python requests library: http://docs.python-requests.org + +""" +import os +import sys +import json +import requests +from msg_util import * +import types # MethodType, FunctionType +from datetime import datetime +from single_api_spec import SingleAPISpec + +def msg(s): print s +def dashes(char='-'): msg(40*char) +def msgt(s): dashes(); msg(s); dashes() +def msgx(s): dashes('\/'); msg(s); dashes('\/'); sys.exit(0) + + +class DataverseAPILink: + """ + Convenience class to access the Dataverse API described in github: + + https://github.com/IQSS/dataverse/tree/master/scripts/api + + Example: + from dataverse_api_link import DataverseAPILink + server_with_api = 'https://dvn-build.hmdc.harvard.edu' + + dat = DataverseAPILink(server_with_api, use_https=False, apikey='pete') + dat.set_return_mode_python() + print dat.list_users() + print dat.list_roles() + print dat.list_dataverses() + print dat.list_datasets() + print dat.get_dataverse_by_id_or_alias(5) + print dat.view_dataset_metadata_by_id_version(123, 57) + 
print dat.view_root_dataverse() + print dat.get_user_data(1) + """ + RETURN_MODE_STR = 'RETURN_MODE_STR' + RETURN_MODE_PYTHON = 'RETURN_MODE_PYTHON' + HTTP_GET = 'GET' + HTTP_POST = 'POST' + HTTP_DELETE = 'DELETE' + HTTP_METHODS = [HTTP_GET, HTTP_POST, HTTP_DELETE] + + # Each List corresponds to 'new_function_name', 'name', 'url_path', 'use_api_key', 'num_id_vals', 'use_params_dict' + # + API_READ_SPECS = ( + # USERS + [ 'list_users', 'List Users', '/api/builtin-users', False, 0]\ + , ['get_user_data', 'Get metadata for a specific user', '/api/builtin-users/%s' % SingleAPISpec.URL_PLACEHOLDER, False, 1]\ + + # ROLES + , ['list_roles', 'List Roles', '/api/roles', False, 0]\ + + # Datasets + , ['list_datasets', 'List Datasets', '/api/datasets', True, 0]\ + , ['view_dataset_by_id', 'View Dataset By ID' \ + , '/api/datasets/%s' % (SingleAPISpec.URL_PLACEHOLDER,), True, 1]\ + #, ['view_dataset_versions_by_id', 'View Dataset By ID', '/api/datasets/%s/versions' % SingleAPISpec.URL_PLACEHOLDER, True, True]\ + # Dataverses + , ['list_dataverses', 'List Dataverses', '/api/dataverses', False, 0]\ + , ['get_dataverse_by_id_or_alias', 'View Dataverse by ID or Alias', '/api/dataverses/%s' % (SingleAPISpec.URL_PLACEHOLDER,), False, 1]\ + , ['view_root_dataverse', 'View Root Dataverse', '/api/dataverses/:root', False, 0]\ + + # Metadata + , ['list_metadata_blocks', 'List metadata blocks', '/api/metadatablocks', False, 0] + , ['view_dataset_metadata_by_id_version', 'View Dataset By ID'\ + , '/api/datasets/%s/versions/%s/metadata' % (SingleAPISpec.URL_PLACEHOLDER, SingleAPISpec.URL_PLACEHOLDER), True, 2]\ + + ) + + + API_WRITE_SPECS = ( + + # Create a Dataverse + # curl -H "Content-type:application/json" -X POST -d @data/dv-pete-top.json "http://localhost:8080/api/dataverses/root?key=pete" + # + #[ 'create_dataverse', 'Create Dataverse', '/api/dataverses/%s' % SingleAPISpec.URL_PLACEHOLDER, True, 1, True]\ + + # Create a User + # curl -H "Content-type:application/json" -X POST -d 
@data/userPete.json "http://localhost:8080/api/builtin-users?password=pete" + # + #[ 'create_user', 'Create User', '/api/builtin-users?password=%s' % SingleAPISpec.URL_PLACEHOLDER, False, 1, True]\ + #, + ) + + API_DELETE_SPECS = ( + # Dataset + [ 'delete_dataset', 'Delete Dataset', '/api/builtin-users/%s' % SingleAPISpec.URL_PLACEHOLDER, True, True]\ + #DELETE http://{{SERVER}}/api/datasets/{{id}}?key={{apikey}} + ) + + def __init__(self, server_name, use_https, apikey=None): + """ + :param server_name: e.g. dataverse.org, dvn-build.hmdc.harvard.edu, etc. + :type server_name: str + :param use_https: Use https for api calls? + :type use_https: boolean + """ + self.server_name = server_name + if len(self.server_name.split('//')) > 1: # remove accidental additional of http:// or https:// + self.server_name = self.server_name.split('//')[-1] + if self.server_name.endswith('/'): + self.server_name = self.server_name[:-1] + self.use_https = use_https + self.apikey = apikey + self.update_server_name() + self.return_mode = self.RETURN_MODE_STR + self.bind_basic_functions() + + def set_return_mode_python(self): + """API calls return JSON text response as a Python object + Uses json.loads(json_str) + """ + self.return_mode = self.RETURN_MODE_PYTHON + + def set_return_mode_string(self): + """API calls return JSON responses as a string""" + self.return_mode = self.RETURN_MODE_STR + + + def update_server_name(self): + if self.server_name is None: + raise Exception('Server name is None!') + + if self.server_name.endswith('/'): # cut trailing slash + self.server_name = self.server_name[-1] + + server_name_pieces = self.server_name.split('//') + if len(server_name_pieces) > 1: + self.server_name = server_name_pieces[1] + + def get_server_name(self): + + if self.use_https: + return 'https://' + self.server_name + return 'http://' + self.server_name + + def make_api_call(self, url_str, method, params={}, headers=None): + """ + Use the requests library to make the actual API call + 
+ :param url_str: str, url to call + :param method: str indicating http method: GET, POST, DELETE, etc. Must be in self.HTTP_METHODS: GET, POST, DELETE, + :param params: dict containing python parameters + :param headers: optional dict containing headers. e.g. {'content-type': 'application/json'} + + :returns: response from the request + :rtype: depends on self.RETURN_MODE_PYTHON; either text or JSON converted to python dict + """ + + msg('url_str: [%s]\nmethod:[%s]\nparams:[%s]\nheaders:[%s]' % (url_str, method, params, headers)) + if url_str is None: + return None + if not method in self.HTTP_METHODS: + msgt('Error: Method not found: %s' % method) + if not type(params) == dict: + msgt('Params must be a python dict, {}') + + params = json.dumps(params) + + if method == self.HTTP_GET: + r = requests.get(url_str, data=params) + elif method == self.HTTP_POST: + if headers is not None: + r = requests.post(url_str, data=params, headers=headers) + else: + r = requests.post(url_str, data=params) + elif method == self.HTTP_DELETE: + r = requests.delete(url_str, data=params) + + msg('Status Code: %s' % r.status_code) + msg('Encoding: %s' % r.encoding) + msg('Text: %s' % r.text) + + if self.return_mode == self.RETURN_MODE_PYTHON: + return r.json() + + #print json.dumps(json.loads(s), indent=4) + try: + return json.dumps(json.loads(r.text), indent=4) + except: + pass + return r.text + + + def create_user(self, dv_params, new_password): + """ + Create a user + + :param dv_params: dict containing the parameters for the new user + :param new_password: str for the user's password + """ + msgt('create_user') + if not type(dv_params) is dict: + msgx('dv_params is None') + + # [ 'create_user', 'Create User', '/api/builtin-users?password=%s' % SingleAPISpec.URL_PLACEHOLDER, False, 1, True]\ + + url_str = self.get_server_name() + '/api/builtin-users?password=%s' % (new_password) + headers = {'content-type': 'application/json'} + return self.make_api_call(url_str, self.HTTP_POST, 
params=dv_params, headers=headers) + + + def create_dataverse(self, parent_dv_alias_or_id, dv_params): + """Create a dataverse + POST http://{{SERVER}}/api/dataverses/{{ parent_dv_name }}?key={{username}} + + :param parent_dv_alias_or_id: str or integer, the alias or id of an existing datavese + :param dv_params: dict containing the parameters for the new dataverse + + Sample: Create Dataverse + + from dataverse_api import DataverseAPILink + server_with_api = 'dvn-build.hmdc.harvard.edu' + dat = DataverseAPILink(server_with_api, use_https=False, apikey='pete') + dv_params = { + "alias":"hm_dv", + "name":"Home, Home on the Dataverse", + "affiliation":"Affiliation value", + "contactEmail":"pete@mailinator.com", + "permissionRoot":False, + "description":"API testing" + } + parent_dv_alias_or_id = 'root' + print dat.create_dataverse(parent_dv_alias_or_id, dv_params) + """ + msgt('create_dataverse') + if not type(dv_params) is dict: + msgx('dv_params is None') + + url_str = self.get_server_name() + '/api/dataverses/%s?key=%s' % (parent_dv_alias_or_id, self.apikey) + headers = {'content-type': 'application/json'} + return self.make_api_call(url_str, self.HTTP_POST, params=dv_params, headers=headers) + + def publish_dataverse(self, dv_id_or_name): + """ + Publish a dataverse based on its id or alias + #POST http://{{SERVER}}/api/dataverses/{{identifier}}/actions/:publish?key={{apikey}} + + :param dv_id_or_name: Dataverse id (str or int) or alias (str) + """ + msgt('publish_dataverse') + print 'dv_id_or_name', dv_id_or_name + if dv_id_or_name is None: + msgx('dv_id_or_name is None') + + url_str = self.get_server_name() + '/api/dataverses/%s/actions/:publish?key=%s' % (dv_id_or_name, self.apikey) + headers = {'content-type': 'application/json'} + return self.make_api_call(url_str, self.HTTP_POST) + + + def show_api_info(self): + for spec in self.API_READ_SPECS: + print spec[0] + + + def bind_single_function(self, spec_list, function_name_for_api_call): + """ + :param 
spec_list: list or tuple defining function sepcs + :param function_name_for_api_call: str naming coded function in the DataverseAPILink + """ + # Load the function specs + single_api_spec = SingleAPISpec(spec_list) + + # Pull the code to generate the function. e.g. def function_name(params): etc, etc + code_str = single_api_spec.get_code_str(function_name_for_api_call) # ---- GET ---- + + # Create the function + exec(code_str) + + # Bind the function to this instance of DataverseAPILink + self.__dict__[single_api_spec.new_function_name] = types.MethodType(eval(single_api_spec.new_function_name), self) + + + def bind_basic_functions(self): + """ + Go through API specs and add the functions to DataverseAPILink + """ + + # Add read functions + for spec in self.API_READ_SPECS: + self.bind_single_function(spec, 'make_api_get_call') + + # Decided to explicitly write add functions for clarity + # Add write functions + #for spec in self.API_WRITE_SPECS: + # self.bind_single_function(spec, 'make_api_write_call') + + + + + def make_api_write_call(self, call_name, url_path, use_api_key=False, id_val=None, params_dict={}): + msgt(call_name) + print 'params_dict', params_dict + if not type(params_dict) is dict: + msgx('params_dict is not a dict. 
Found: %s' % type(params_dict)) + + if use_api_key: + url_str = '%s%s?key=%s' % (self.get_server_name(), url_path, self.apikey) + else: + url_str = '%s%s' % (self.get_server_name(), url_path) + + headers = {'content-type': 'application/json'} + return self.make_api_call(url_str, self.HTTP_POST, params=params_dict, headers=headers) + + + + def make_api_get_call(self, call_name, url_path, use_api_key=False, id_val=None): + msgt(call_name) + if use_api_key: + url_str = '%s%s?key=%s' % (self.get_server_name(), url_path, self.apikey) + else: + url_str = '%s%s' % (self.get_server_name(), url_path) + + return self.make_api_call(url_str, self.HTTP_GET) + + + def make_api_delete_call(self, call_name, url_path, use_api_key=False, id_val=None): + msgt(call_name) + if use_api_key: + url_str = '%s%s?key=%s' % (self.get_server_name(), url_path, self.apikey) + else: + url_str = '%s%s' % (self.get_server_name(), url_path) + + return self.make_api_call(url_str, self.HTTP_DELETE)#, kwargs) + + + def save_to_file(self, fname, content): + dirname = os.path.dirname(fname) + if not os.path.isdir(dirname): + msgx('This directory does not exist: %s' % dirname) + fh = open(fname, 'w') + fh.write(content) + fh.close() + msg('File written: %s' % fname) + + + def save_current_metadata(self, output_dir): + """ + For the current server, save JSON with information on: + - Users + - Dataverses + - Datasets + """ + msgt('run_dataverse_backup') + if not os.path.isdir(output_dir): + msgx('This directory does not exist: %s' % output_dir) + + #date_str = datetime.now().strftime('%Y-%m%d_%H%M') + date_str = datetime.now().strftime('%Y-%m%d_%H') + + self.set_return_mode_string() + + #--------------------------- + # Retrieve the users + #--------------------------- + user_json = self.list_users() + self.save_to_file(os.path.join(output_dir, 'users_%s.json' % date_str), user_json) + + #--------------------------- + # Retrieve the roles + #--------------------------- + #roles_json = self.list_roles() + 
#self.save_to_file(os.path.join(output_dir, 'roles_%s.json' % date_str), roles_json) + + #--------------------------- + # Retrieve the dataverses + #--------------------------- + dv_json = self.list_dataverses() + self.save_to_file(os.path.join(output_dir, 'dataverses_%s.json' % date_str), dv_json) + + #--------------------------- + # Retrieve the datasets + #--------------------------- + dset_json = self.list_datasets() + self.save_to_file(os.path.join(output_dir, 'datasets_%s.json' % date_str), dset_json) + + + def delete_dataverse_by_id(self, id_val): + msgt('delete_dataverse_by_id: %s' % id_val) + url_str = self.get_server_name() + '/api/dataverses/%s?key=%s' % (id_val, self.apikey) + return self.make_api_call(url_str, self.HTTP_DELETE) + + + + +if __name__=='__main__': + import time + + #POST http://{{SERVER}}/api/dataverses/{{identifier}}/actions/:publish?key={{apikey}} + + server_with_api = 'https://dvn-build.hmdc.harvard.edu' + dat = DataverseAPILink(server_with_api, use_https=False, apikey='pete') + #dat.save_current_metadata('demo-data') + #sys.exit(0) + #dat.set_return_mode_string() + + """ """ + dv_params = { + "alias":"hm_dv", + "name":"Home, Home on the Dataverse", + "affiliation":"Affiliation value", + "contactEmail":"pete@mailinator.com", + "permissionRoot":False, + "description":"API testing" + } + print dat.create_dataverse('root', dv_params) + #print dat.create_user('some_pw', dv_params) + """ + print dat.get_dataverse_by_id_or_alias(5) + print dat.view_dataset_metadata_by_id_version(123, 57) + print dat.list_users() + print dat.list_roles() + print dat.list_datasets() + print dat.list_dataverses() + print dat.view_root_dataverse() + print dat.get_user_data(1) + print dat.list_metadata_blocks() + """ + \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/py_api_wrapper/msg_util.py b/dataversedock/testdata/scripts/api/py_api_wrapper/msg_util.py new file mode 100644 index 0000000..bd16af1 --- /dev/null +++ 
b/dataversedock/testdata/scripts/api/py_api_wrapper/msg_util.py @@ -0,0 +1,10 @@ +import sys +def msg(s): print s +def dashes(char='-'): msg(40*char) +def msgt(s): dashes(); msg(s); dashes() +def msgx(s): dashes('\/'); msg(s); dashes('\/'); sys.exit(0) + +""" + +curl -H "Content-type:application/json" -X POST -d user_params.json "http://dvn-build.hmdc.harvard.edu/api/builtin-users?password=linus" +""" diff --git a/dataversedock/testdata/scripts/api/py_api_wrapper/readme.md b/dataversedock/testdata/scripts/api/py_api_wrapper/readme.md new file mode 100644 index 0000000..f062567 --- /dev/null +++ b/dataversedock/testdata/scripts/api/py_api_wrapper/readme.md @@ -0,0 +1,92 @@ +## note: not yet updated to work with new permissions +---- + +# Python API Wrapper Guide + +(6/5/2014 - work in progress) + +This a python class "DataverseAPILink" which may be used to make the API calls described in the Dataverse [API Guide](https://github.com/IQSS/dataverse/tree/master/scripts/api/readme.md) + +Results of API calls may by returned as JSON (string format) or as python dictionaries. 
+ + +## Dependency + +[python requests module](http://docs.python-requests.org/) + +## Quick example + +List the dataverses + + +```python +from dataverse_api_link import DataverseAPILink + +server_with_api = 'demo.dataverse.org' +dal = DataverseAPILink(server_with_api, use_https=False, apikey='admin') +json_text = dal.list_dataverses() +print json_text +``` + +Output: +```javascript +{ + "status":"OK", + "data":[ + { + "id":93, + "alias":"b", + "name":"b", + "affiliation":"b", + "contactEmail":"b@b", + "permissionRoot":false, + "creator":{ + "id":13, + "firstName":"b", + "lastName":"b", + "userName":"b", + "affiliation":"b", + "position":"b", + "email":"b@b" + }, + "description":"b", + "ownerId":1, + "creationDate":"2014-05-12 02:38:36 -04" + }, + + (etc, etc) +``` + +Return the same list as a python object + +```python + +dat.set_return_mode_python() # Return python dict instead of a string +d = dat.list_dataverses() # python dictionary {} +print d.keys() +dv_names = [dv_info.get('name', 'no name?') for dv_info in d['data']] +print dv_names +``` + +Output: +```python +[u'status', u'data'] +[u'b', u'Beta Candidate', u'kc58', u'Kevin Smoke Test 5/8', u'Penultimate Smoke Test', u"Pete's public place", u"Pete's restricted data", u"Pete's secrets", u'Root', u'smoke 5/7', u'testadd', u'testauthor', u'Test Cliosed', u'Test Open', u'testpete', u'Top dataverse of Pete', u'Top dataverse of Uma', u"Uma's first", u"Uma's restricted"] +``` +### Users + +List Users: + +```python +dat.set_return_mode_python() +user_info = dat.list_users() +print user_info +``` + +Iterate through each user and pull the same data by 'id' + +```python +user_ids = [info['id'] for info in user_info['data'] if info['id'] is not None] +for uid in user_ids: + print dat.get_user_data(uid) +``` diff --git a/dataversedock/testdata/scripts/api/py_api_wrapper/single_api_spec.py b/dataversedock/testdata/scripts/api/py_api_wrapper/single_api_spec.py new file mode 100644 index 0000000..0dafaaa --- /dev/null 
+++ b/dataversedock/testdata/scripts/api/py_api_wrapper/single_api_spec.py @@ -0,0 +1,73 @@ + +class SingleAPISpec: + """ + Convenience class used to help DataverseAPILink when making API functions + """ + + ATTR_NAMES = ['new_function_name', 'name', 'url_path', 'use_api_key', 'num_id_vals', 'use_param_dict'] + URL_PLACEHOLDER = '{{ID_VAL}}' + + def __init__(self, spec_list): + if not type(spec_list) in (list,tuple): + raise Exception('Bad spec. Expected list or tuple.\nReceived: %s' % type(spec_list)) + + num_params = len(spec_list) + if not num_params in (5,6): + raise Exception('Expected 5 or 6 values.\nReceived: %s' % spec_list) + + # Lazy way to add attributes + for idx, attr in enumerate(self.ATTR_NAMES): + if (idx) == num_params: + self.__dict__[attr] = None # only 5 params given, param_dict not needed + else: + self.__dict__[attr] = spec_list[idx] + # e.g., 1st iteration is equivalent of "self.new_function_name = spec_list[0]" + + + def get_code_str(self, dv_link_function_to_call='make_api_get_call'): + """ + Used to create functions within the DataverseAPILink class + """ + if self.use_param_dict is True: + # call_name, url_path, use_api_key=False, id_val=None, params_dict={} + code_str = """ +def %s(self, param_dict, *args): + url_path = '%s' + if args: + for val in args: + if not type(val) in (str, unicode): + val = `val` + url_path = url_path.replace('%s', val, 1) + #url_path += '/' + str(id_val) + print 'OK!' 
+ print 'param_dict', param_dict + return self.%s('%s', url_path, %s, None, param_dict)""" \ + % (self.new_function_name\ + , self.url_path + , SingleAPISpec.URL_PLACEHOLDER + , dv_link_function_to_call + , self.name + , self.use_api_key) + + else: + code_str = """ +def %s(self, *args): + url_path = '%s' + if args: + for val in args: + if not type(val) in (str, unicode): + val = `val` + url_path = url_path.replace('%s', val, 1) + #url_path += '/' + str(id_val) + + return self.%s('%s', url_path, %s)""" \ + % (self.new_function_name\ + , self.url_path + , SingleAPISpec.URL_PLACEHOLDER + , dv_link_function_to_call + , self.name + , self.use_api_key) + print code_str + return code_str + + \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/setup-all.sh b/dataversedock/testdata/scripts/api/setup-all.sh new file mode 100755 index 0000000..bde54c1 --- /dev/null +++ b/dataversedock/testdata/scripts/api/setup-all.sh @@ -0,0 +1,99 @@ +#!/bin/bash + +SECURESETUP=1 + +for opt in $* +do + case $opt in + "--insecure") + SECURESETUP=0 + ;; + "-insecure") + SECURESETUP=0; + ;; + *) + echo "invalid option: $opt" + exit 1 >&2 + ;; + esac +done + +command -v jq >/dev/null 2>&1 || { echo >&2 '`jq` ("sed for JSON") is required, but not installed. Download the binary for your platform from http://stedolan.github.io/jq/ and make sure it is in your $PATH (/usr/bin/jq is fine) and executable with `sudo chmod +x /usr/bin/jq`. On Mac, you can install it with `brew install jq` if you use homebrew: http://brew.sh . 
Aborting.'; exit 1; } + +echo "deleting all data from Solr" +curl http://localhost:8983/solr/update/json?commit=true -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"*:*\"}}" + +SERVER=http://localhost:8080/api + +# Everything + the kitchen sink, in a single script +# - Setup the metadata blocks and controlled vocabulary +# - Setup the builtin roles +# - Setup the authentication providers +# - setup the settings (local sign-in) +# - Create admin user and root dataverse +# - (optional) Setup optional users and dataverses + + +echo "Setup the metadata blocks" +./setup-datasetfields.sh + +echo "Setup the builtin roles" +./setup-builtin-roles.sh + +echo "Setup the authentication providers" +./setup-identity-providers.sh + +echo "Setting up the settings" +echo "- Allow internal signup" +curl -X PUT -d yes "$SERVER/admin/settings/:AllowSignUp" +curl -X PUT -d /dataverseuser.xhtml?editMode=CREATE "$SERVER/admin/settings/:SignUpUrl" + +curl -X PUT -d doi "$SERVER/admin/settings/:Protocol" +curl -X PUT -d 10.5072/FK2 "$SERVER/admin/settings/:Authority" +curl -X PUT -d EZID "$SERVER/admin/settings/:DoiProvider" +curl -X PUT -d / "$SERVER/admin/settings/:DoiSeparator" +curl -X PUT -d burrito $SERVER/admin/settings/BuiltinUsers.KEY +curl -X PUT -d localhost-only $SERVER/admin/settings/:BlockedApiPolicy +echo + +echo "Setting up the admin user (and as superuser)" +adminResp=$(curl -s -H "Content-type:application/json" -X POST -d @data/user-admin.json "$SERVER/builtin-users?password=admin&key=burrito") +echo $adminResp +curl -X POST "$SERVER/admin/superuser/dataverseAdmin" +echo + +echo "Setting up the root dataverse" +adminKey=$(echo $adminResp | jq .data.apiToken | tr -d \") +curl -s -H "Content-type:application/json" -X POST -d @data/dv-root.json "$SERVER/dataverses/?key=$adminKey" +echo +echo "Set the metadata block for Root" +curl -s -X POST -H "Content-type:application/json" -d "[\"citation\"]" 
$SERVER/dataverses/:root/metadatablocks/?key=$adminKey +echo +echo "Set the default facets for Root" +curl -s -X POST -H "Content-type:application/json" -d "[\"authorName\",\"subject\",\"keywordValue\",\"dateOfDeposit\"]" $SERVER/dataverses/:root/facets/?key=$adminKey +echo + +# OPTIONAL USERS AND DATAVERSES +#./setup-optional.sh + +if [ $SECURESETUP = 1 ] +then + # Revoke the "burrito" super-key; + # Block the sensitive API endpoints; + curl -X DELETE $SERVER/admin/settings/BuiltinUsers.KEY + curl -X PUT -d admin,test $SERVER/admin/settings/:BlockedApiEndpoints + echo "Access to the /api/admin and /api/test is now disabled, except for connections from localhost." +else + echo "IMPORTANT!!!" + echo "You have run the setup script in the INSECURE mode!" + echo "Do keep in mind, that access to your admin API is now WIDE-OPEN!" + echo "Also, your built-in user is still set up with the default authentication token" + echo "(that is distributed as part of this script, hence EVERYBODY KNOWS WHAT IT IS!)" + echo "Please consider the consequences of this choice. You can block access to the" + echo "/api/admin and /api/test endpoints, for all connections except from localhost," + echo "and revoke the authentication token from the built-in user by executing the" + echo "script post-install-api-block.sh." +fi + +echo +echo "Setup done." 
diff --git a/dataversedock/testdata/scripts/api/setup-builtin-roles.sh b/dataversedock/testdata/scripts/api/setup-builtin-roles.sh new file mode 100755 index 0000000..0f3c1c1 --- /dev/null +++ b/dataversedock/testdata/scripts/api/setup-builtin-roles.sh @@ -0,0 +1,34 @@ +SERVER=http://localhost:8080/api + +# Setup the builtin roles +echo "Setting up admin role" +curl -H "Content-type:application/json" -d @data/role-admin.json http://localhost:8080/api/admin/roles/ +echo + +echo "Setting up file downloader role" +curl -H "Content-type:application/json" -d @data/role-filedownloader.json http://localhost:8080/api/admin/roles/ +echo + +echo "Setting up full contributor role" +curl -H "Content-type:application/json" -d @data/role-fullContributor.json http://localhost:8080/api/admin/roles/ +echo + +echo "Setting up dv contributor role" +curl -H "Content-type:application/json" -d @data/role-dvContributor.json http://localhost:8080/api/admin/roles/ +echo + +echo "Setting up ds contributor role" +curl -H "Content-type:application/json" -d @data/role-dsContributor.json http://localhost:8080/api/admin/roles/ +echo + +echo "Setting up editor role" +curl -H "Content-type:application/json" -d @data/role-editor.json http://localhost:8080/api/admin/roles/ +echo + +echo "Setting up curator role" +curl -H "Content-type:application/json" -d @data/role-curator.json http://localhost:8080/api/admin/roles/ +echo + +echo "Setting up member role" +curl -H "Content-type:application/json" -d @data/role-member.json http://localhost:8080/api/admin/roles/ +echo diff --git a/dataversedock/testdata/scripts/api/setup-datasetfields.sh b/dataversedock/testdata/scripts/api/setup-datasetfields.sh new file mode 100755 index 0000000..4ce27bc --- /dev/null +++ b/dataversedock/testdata/scripts/api/setup-datasetfields.sh @@ -0,0 +1,8 @@ +#!/bin/sh +curl http://localhost:8080/api/admin/datasetfield/loadNAControlledVocabularyValue +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary 
@data/metadatablocks/citation.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/geospatial.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/social_science.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/astrophysics.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/biomedical.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/journals.tsv -H "Content-type: text/tab-separated-values" diff --git a/dataversedock/testdata/scripts/api/setup-dvs.sh b/dataversedock/testdata/scripts/api/setup-dvs.sh new file mode 100755 index 0000000..9110935 --- /dev/null +++ b/dataversedock/testdata/scripts/api/setup-dvs.sh @@ -0,0 +1,34 @@ +#!/bin/bash -f +SERVER=http://localhost:8080/api +echo Setting up dataverses on $SERVER +echo ============================================== +if [ $# -eq 0 ] + then + echo "Please supply Pete and Uma's API keys like so:" + echo "$0 [pete's key] [uma's key]" + echo "The keys are printed at the end of the setup-users.sh script" + echo "Or, just get them from the database" + exit 1 +fi + +echo Pete +curl -s -H "Content-type:application/json" -X POST -d @data/dv-pete-top.json "$SERVER/dataverses/root?key=$1" +echo +curl -s -H "Content-type:application/json" -X POST -d @data/dv-pete-sub-normal.json "$SERVER/dataverses/peteTop?key=$1" +echo +curl -s -H "Content-type:application/json" -X POST -d @data/dv-pete-sub-restricted.json "$SERVER/dataverses/peteTop?key=$1" +echo +curl -s -H "Content-type:application/json" -X POST -d @data/dv-pete-sub-secret.json 
"$SERVER/dataverses/peteTop?key=$1" +echo + +echo Uma +echo Pete creates top-level for Uma +curl -s -H "Content-type:application/json" -H "X-Dataverse-key:$1" -X POST -d @data/dv-uma-top.json "$SERVER/dataverses/root" +echo +echo Pete makes Uma an admin on her own DV +curl -s -H "Content-type:application/json" -H "X-Dataverse-key:$1" -X POST -d"{\"assignee\":\"@uma\",\"role\":\"admin\"}" $SERVER/dataverses/umaTop/assignments/ +echo +curl -s -H "Content-type:application/json" -H "X-Dataverse-key:$2" -X POST -d @data/dv-uma-sub1.json "$SERVER/dataverses/umaTop" +echo +curl -s -H "Content-type:application/json" -H "X-Dataverse-key:$2" -X POST -d @data/dv-uma-sub2.json "$SERVER/dataverses/umaTop" +echo diff --git a/dataversedock/testdata/scripts/api/setup-identity-providers.sh b/dataversedock/testdata/scripts/api/setup-identity-providers.sh new file mode 100755 index 0000000..89ac59d --- /dev/null +++ b/dataversedock/testdata/scripts/api/setup-identity-providers.sh @@ -0,0 +1,9 @@ +SERVER=http://localhost:8080/api + +# Setup the authentication providers +echo "Setting up internal user provider" +curl -H "Content-type:application/json" -d @data/authentication-providers/builtin.json http://localhost:8080/api/admin/authenticationProviders/ + +#echo "Setting up Echo providers" +#curl -H "Content-type:application/json" -d @data/authentication-providers/echo.json http://localhost:8080/api/admin/authenticationProviders/ +#curl -H "Content-type:application/json" -d @data/authentication-providers/echo-dignified.json http://localhost:8080/api/admin/authenticationProviders/ diff --git a/dataversedock/testdata/scripts/api/setup-optional-harvard.sh b/dataversedock/testdata/scripts/api/setup-optional-harvard.sh new file mode 100755 index 0000000..a5553a6 --- /dev/null +++ b/dataversedock/testdata/scripts/api/setup-optional-harvard.sh @@ -0,0 +1,55 @@ +#!/bin/bash +SERVER=http://localhost:8080/api + +echo "Setting up Harvard-specific settings" +echo "- Application Status header" 
+curl -s -X PUT -d 'Upgrade in progress...' $SERVER/admin/settings/:StatusMessageHeader +echo "- Application Status message" +curl -s -X PUT -d 'Dataverse is currently being upgraded. You can see the features, bug fixes, and other upgrades for this release in the Dataverse Roadmap.' $SERVER/admin/settings/:StatusMessageText +echo "- Harvard Privacy Policy" +curl -s -X PUT -d http://best-practices.dataverse.org/harvard-policies/harvard-privacy-policy.html $SERVER/admin/settings/:ApplicationPrivacyPolicyUrl +curl -s -X PUT -d http://best-practices.dataverse.org/harvard-policies/harvard-api-tou.html $SERVER/admin/settings/:ApiTermsOfUse +echo "- Configuring Harvard's password policy in Dataverse" +# Min length is 10 because that is the minimum Harvard requires without periodic expiration +curl -s -X PUT -d 10 $SERVER/admin/settings/:PVMinLength +# If password 20+ characters, other rules do not apply +curl -s -X PUT -d 20 $SERVER/admin/settings/:PVGoodStrength +# The character classes users can choose between and the number of each needed +curl -X PUT -d 'UpperCase:1,Digit:1,LowerCase:1,Special:1' $SERVER/admin/settings/:PVCharacterRules +# The number of character classes a password needs to be valid +curl -s -X PUT -d 3 $SERVER/admin/settings/:PVNumberOfCharacteristics +# The number of character classes a password needs to be valid +curl -s -X PUT -d 4 $SERVER/admin/settings/:PVNumberOfConsecutiveDigitsAllowed +# Harvard requires a dictionary check on common words & names. We use the unix 'words' file, removing ones less than 4 characters. Policy clarification received by Harvard Key was no words 4 characters or longer. 
+DIR="/usr/local/glassfish4/glassfish/domains/domain1/files" #this can be replaced with a different file path for storing the dictionary +sed '/^.\{,3\}$/d' /usr/share/dict/words > $DIR/pwdictionary +curl -s -X PUT -d "$DIR/pwdictionary" $SERVER/admin/settings/:PVDictionaries +echo "- Adjust Solr frag size" +curl -s -X PUT -d 320 $SERVER/admin/settings/:SearchHighlightFragmentSize +echo "- Google Analytics setting" +curl -X PUT -d true "$SERVER/admin/settings/:ScrubMigrationData" +echo "- Enabling Shibboleth" +curl -X POST -H "Content-type: application/json" http://localhost:8080/api/admin/authenticationProviders --upload-file ../../doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/shibAuthProvider.json +echo "- Enabling TwoRavens" +curl -s -X PUT -d true "$SERVER/admin/settings/:TwoRavensTabularView" +echo "- Enabling Geoconnect" +curl -s -X PUT -d true "$SERVER/admin/settings/:GeoconnectCreateEditMaps" +curl -s -X PUT -d true "$SERVER/admin/settings/:GeoconnectViewMaps" +echo "- Setting system email" +curl -X PUT -d "Harvard Dataverse Support " http://localhost:8080/api/admin/settings/:SystemEmail +curl -X PUT -d ", The President & Fellows of Harvard College" http://localhost:8080/api/admin/settings/:FooterCopyright +echo "- Setting up the Harvard Shibboleth institutional group" +curl -s -X POST -H 'Content-type:application/json' --upload-file data/shibGroupHarvard.json "$SERVER/admin/groups/shib?key=$adminKey" +echo +echo "- Setting up the MIT Shibboleth institutional group" +curl -s -X POST -H 'Content-type:application/json' --upload-file data/shibGroupMit.json "$SERVER/admin/groups/shib?key=$adminKey" +echo +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/customMRA.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/customGSD.tsv -H "Content-type: text/tab-separated-values" +curl 
http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/customARCS.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/customPSRI.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/customPSI.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/customCHIA.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/customDigaai.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/custom_hbgdki.tsv -H "Content-type: text/tab-separated-values" +echo diff --git a/dataversedock/testdata/scripts/api/setup-optional-publish-terms.sh b/dataversedock/testdata/scripts/api/setup-optional-publish-terms.sh new file mode 100644 index 0000000..f0bb0ff --- /dev/null +++ b/dataversedock/testdata/scripts/api/setup-optional-publish-terms.sh @@ -0,0 +1,8 @@ +#!/bin/bash + + +SERVER=http://localhost:8080/api + +echo "- Enabling Publish Popup Custom Text" +curl -s -X PUT -d true "$SERVER/admin/settings/:DatasetPublishPopupCustomTextOnAllVersions" +curl -X PUT -d "By default datasets are published with the CC0-“Public Domain Dedication” waiver. Learn more about the CC0 waiver here.

                            To publish with custom Terms of Use, click the Cancel button and go to the Terms tab for this dataset." $SERVER/admin/settings/:DatasetPublishPopupCustomText \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/setup-optional.sh b/dataversedock/testdata/scripts/api/setup-optional.sh new file mode 100755 index 0000000..9a8c852 --- /dev/null +++ b/dataversedock/testdata/scripts/api/setup-optional.sh @@ -0,0 +1,13 @@ +#!/bin/bash +command -v jq >/dev/null 2>&1 || { echo >&2 '`jq` ("sed for JSON") is required, but not installed. Download the binary for your platform from http://stedolan.github.io/jq/ and make sure it is in your $PATH (/usr/bin/jq is fine) and executable with `sudo chmod +x /usr/bin/jq`. On Mac, you can install it with `brew install jq` if you use homebrew: http://brew.sh . Aborting.'; exit 1; } + +# OPTIONAL USERS AND DATAVERSES +TMP=setup.temp +./setup-users.sh | tee $TMP + +PETE=$(cat $TMP | grep :result: | grep Pete | cut -d: -f4) +UMA=$(cat $TMP | grep :result: | grep Uma | cut -d: -f4) + +./setup-dvs.sh $PETE $UMA + +rm $TMP diff --git a/dataversedock/testdata/scripts/api/setup-users.sh b/dataversedock/testdata/scripts/api/setup-users.sh new file mode 100755 index 0000000..141e1b3 --- /dev/null +++ b/dataversedock/testdata/scripts/api/setup-users.sh @@ -0,0 +1,30 @@ +#!/bin/bash -f +command -v jq >/dev/null 2>&1 || { echo >&2 "jq required, but it's not installed. On mac, use brew (http://brew.sh) to install it. 
Aborting."; exit 1; } + +SERVER=http://localhost:8080/api +echo Setting up users on $SERVER +echo ============================================== + +curl -X PUT -d burrito $SERVER/admin/settings/BuiltinUsers.KEY + + +peteResp=$(curl -s -H "Content-type:application/json" -X POST -d @data/userPete.json "$SERVER/builtin-users?password=pete&key=burrito") +echo $peteResp + +umaResp=$(curl -s -H "Content-type:application/json" -X POST -d @data/userUma.json "$SERVER/builtin-users?password=uma&key=burrito") +echo $umaResp + +curl -s -H "Content-type:application/json" -X POST -d @data/userGabbi.json "$SERVER/builtin-users?password=gabbi&key=burrito" +echo + +curl -s -H "Content-type:application/json" -X POST -d @data/userCathy.json "$SERVER/builtin-users?password=cathy&key=burrito" +echo + +curl -s -H "Content-type:application/json" -X POST -d @data/userNick.json "$SERVER/builtin-users?password=nick&key=burrito" +echo + +echo reporting API keys +peteKey=$(echo $peteResp | jq .data.apiToken | tr -d \") +echo :result: Pete\'s key is: $peteKey +umaKey=$(echo $umaResp | jq .data.apiToken | tr -d \") +echo :result: Uma\'s key is: $umaKey \ No newline at end of file diff --git a/dataversedock/testdata/scripts/api/testBlockEndpoints.sh b/dataversedock/testdata/scripts/api/testBlockEndpoints.sh new file mode 100755 index 0000000..59d375e --- /dev/null +++ b/dataversedock/testdata/scripts/api/testBlockEndpoints.sh @@ -0,0 +1,53 @@ +#!/bin/bash + +ADMIN_KEY=$1 + +echo Testing Groups +curl http://localhost:8080/api/admin/groups/ip/?key=$ADMIN_KEY +echo + +echo blocking groups +curl -X PUT -d groups http://localhost:8080/api/admin/settings/:BlockedApiEndpoints +echo + +echo Testing Groups again - expecting 503 Unavailable +curl -v http://localhost:8080/api/admin/groups/ip/?key=$ADMIN_KEY +echo + +echo Unblocking groups +curl -X DELETE http://localhost:8080/api/admin/settings/:BlockedApiEndpoints +echo + +echo Testing Groups +curl http://localhost:8080/api/admin/groups/ip/?key=$ADMIN_KEY 
+echo + +echo blocking groups, Roles +curl -X PUT -d groups,roles http://localhost:8080/api/admin/settings/:BlockedApiEndpoints +echo + +echo Testing Groups again - expecting 503 Unavailable +curl -v http://localhost:8080/api/admin/groups/ip/?key=$ADMIN_KEY +echo + +echo Testing Roles - expecting 503 Unavailable +curl -v http://localhost:8080/api/roles/?key=$ADMIN_KEY +echo + +echo blocking Roles only +curl -X PUT -d roles http://localhost:8080/api/admin/settings/:BlockedApiEndpoints +echo + +echo Testing Groups again +curl -v http://localhost:8080/api/admin/groups/ip/?key=$ADMIN_KEY +echo + +echo Testing Roles - expecting 503 Unavailable +curl -v http://localhost:8080/api/roles/?key=$ADMIN_KEY +echo + +echo Unblocking all +curl -X DELETE http://localhost:8080/api/admin/settings/:BlockedApiEndpoints +echo + +echo DONE diff --git a/dataversedock/testdata/scripts/api/update-datasetfields.sh b/dataversedock/testdata/scripts/api/update-datasetfields.sh new file mode 100644 index 0000000..ae099f8 --- /dev/null +++ b/dataversedock/testdata/scripts/api/update-datasetfields.sh @@ -0,0 +1,7 @@ +#!/bin/sh +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/citation.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/geospatial.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/social_science.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/astrophysics.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/biomedical.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary 
@data/metadatablocks/journals.tsv -H "Content-type: text/tab-separated-values" \ No newline at end of file diff --git a/dataversedock/testdata/scripts/backup/run_backup/README_HOWTO.txt b/dataversedock/testdata/scripts/backup/run_backup/README_HOWTO.txt new file mode 100644 index 0000000..2e2a0a8 --- /dev/null +++ b/dataversedock/testdata/scripts/backup/run_backup/README_HOWTO.txt @@ -0,0 +1,205 @@ +Introduction +============ + +The script, run_backup.py is run on schedule (by a crontab, most +likely). It will back up the files stored in your Dataverse on a +remote storage system. + +As currently implemented, the script can read Dataverse files stored +either on the filesystem or S3; and back them up on a remote storage +server via ssh/scp. It can be easily expanded to support other storage +and backup types (more information is provided below). + +Requirements +============ + +The backup script is written in Python. It was tested with Python v. 2.6 and 2.7. +The following extra modules are required: + +psycopg2 [2.7.3.2] - PostgreSQL driver +boto3 [1.4.7] - AWS sdk, for accessing S3 storage +paramiko [2.2.1] - SSH client, for transferring files via SFTP + +(see below for the exact versions tested) + +Also, an incomplete implementation for backing up files on a remote +swift node is provided. To fully add swift support (left as an +exercise for the reader) an additional module, swiftclient will be +needed. + +Test platforms: + +MacOS 10.12 +----------- + +Python: 2.7.2 - part of standard distribution +paramiko: 2.2.1 - standard +psycopg2: 2.7.3.2 - built with "pip install psycopg2" +boto3: 1.4.7 - built with "pip install boto3" + +CentOS 6 +-------- + +Python: 2.6.6 (from the base distribution for CentOS 6; default /usr/bin/python) +paramiko: 1.7.5 (base distribution) + +distributed as an rpm, python-paramiko.noarch, via the yum repo "base". 
+if not installed: + yum install python-paramiko + +psycopg2: 2.0.14 (base distribution) +distributed as an rpm, python-psycopg2.x86_64, via the yum repo "base". +if not installed: + yum install python-psycopg2 + +boto3: 1.4.8 (built with "pip install boto3") + +- quick and easy build; +make sure you have pip installed. ("yum install python-pip", if not) + +NOTE: v. 2.6 of Python is considered obsolete; the only reason we are +using it is that it is the default version that comes with an equally +obsolete distribution v.6 of CentOS; which just happened to be what we +had available to test this setup on. Similarly, the versions of +paramiko and psycopg2, above, are quite old too. But everything +appears to be working. + +CentOS 7: +--------- + +(TODO) + + +Usage +===== + +In the default mode, the script will attempt to retrieve and back up +only the files that have been created in the Dataverse since the +createdate timestamp on the most recent file already in the backup +database; or all the files, if this is the first run (see the section +below on what the "backup databse" is). + +When run with the "--rerun" option (python run_backup.py --rerun) the +script will retrieve the list of ALL the files currently in the +dataverse, but will only attempt to back up the ones not yet backed up +successfully. (i.e. it will skip the files already in the backup +database with the 'OK' backup status) + + +Configuration +============= + +Access credentials, for the Dataverse +and the remote storage system are configured in the file config.ini. + +The following config.ini sections must be configured for the +whole thing to work: + +1. Database. + +The script needs to be able to access the Dataverse database, in order to +obtain the lists of files that have changed since the last backup and +need to be copied. The script can use PostgreSQL running on a +remote server. 
Just make sure that the remote server is configured to +allow connections from the host running the backup script; and that +PostgreSQL is allowing database access from this host too. + +Configure the access credentials as in the example below: + +[Database] +Host: localhost +Port: 5432 +Database: dvndb +Username: dvnapp +Password: xxxxx + +In addition to the main Dataverse database, the script maintains its +own database for keeping track of the backup status of individual +files. The name of the database is specified in the following setting: + +BackupDatabase: backupdb + +The database must be created prior to running of the script. For +example, on the command line: + createdb -U postgres backupdb --owner=dvnapp + +NOTE that the current assumption is that this Postgres database lives +on the same server as the main Dataverse database and is owned by the +same user. + +Also, one table must be created *in this database* (NOT in the main +Dataverse database) before the script can be run. The script +backupdb.sql is provided in this directory. NOTE that the Postgres +user name dvnapp is hard-coded in the script; change it to reflect the +name of the database user on your system, if necessary. + +You can use the standard psql command to create the table; for example: + + psql -d backupdb -f backupdb.sql + +(please note that the example above assumes "backupdb" as the name of +the backup database) + +2. Repository + +This section configures access to the datafiles stored in your +Dataverse. In its present form, the script can read files stored on +the filesystem and S3. There is no support for reading files stored +via swift as of yet. Adding swift support should be straightforward, +by supplying another storage module - similarly to the existing +storage_filesystem.py and storage_s3.py. If you'd like to work on +this, please get in touch. + +For the filesystem storage: the assumption is that the script has +direct access to the filesystem where the files live. 
Meaning that in +order for the script to work on a server that's different from the one +running the Dataverse application, the filesystem must be readable by +the server via NFS, or similarly shared with it. + +The filesystem access requires the single configuration setting, as in +the example below: + +[Repository] +FileSystemDirectory: /usr/local/glassfish4/glassfish/domains/domain1/files + +For S3, no configuration is needed in the config.ini. But AWS +access must be properly configured for the user running the backup +module, in the standard ~/.aws location. + + +3. Backup section. + +This section specifies the method for storing the files on the remote +("secondary") storage subsystem: + +[Backup] +StorageType: ssh + +The currently supported methods are "ssh" (the files are transferred +to the remote location via SSH/SFTP) and "swift" (untested, and +possibly incomplete implementation is provided; see +README_IMPLEMENTATION.txt for more details). + +For ssh access, the following configuration entries are needed: + +SshHost: yyy.zzz.edu +SshPort: 22 +SshUsername: xxxxx + +Additionally, SSH access to the remote server (SshHost, above) must be +provided for the user specified (SshUsername) via ssh keys. + +4. Email notifications + +Once the script completes a backup run it will send a (very minimal) +status report to the email address specified in the config.ini file; +for example: + +[Notifications] +Email: xxx@yyy.zzz.edu + +As currently implemented, the report will only specify how many files +have been processed, and how many succeeded or failed. In order to get +more detailed information about the individual files you'll need to +consult the datafilestatus table in the backup database. 
+ diff --git a/dataversedock/testdata/scripts/backup/run_backup/README_IMPLEMENTATION.txt b/dataversedock/testdata/scripts/backup/run_backup/README_IMPLEMENTATION.txt new file mode 100644 index 0000000..7e78e0e --- /dev/null +++ b/dataversedock/testdata/scripts/backup/run_backup/README_IMPLEMENTATION.txt @@ -0,0 +1,102 @@ +The backup script is implemented in Python (developed and tested with +v. 2.7.10). The following extra modules are needed: + +(versions tested as of the writing of this doc, 11.14.2017) + +psycopg2 [2.7.3.2] - PostgresQL driver +boto3 [1.4.7] - AWS sdk, for accessing S3 storage +paramiko [2.2.1] - SSH client, for transferring files via SFTP +swiftclient [2.7.0] - for reading [not yet implemented] and writing [incomplete implementation provided] swift objects. + +1. Database access. + +The module uses psycopg2 to access the Dataverse database, to obtain +the lists of files that have changed since the last backup that need +to be copied over. Additionally, it maintains its own database for +keeping track of the backup status of individual files. As of now, +this extra database must reside on the same server as the main +Dataverse database and is owned by the same Postgres user. + +Consult README_HOWTO.txt on how to set up this backup database (needs +to be done prior to running the backup script) + +2. Storage access + +Currently implemented storage access methods, for local filesystem and +S3 are isolated in the files storage_filesystem.py and storage_s3.py, +respectively. To add support for swift a similar fragment of code will +need to be provided, with an open_storage_object... method that can go +to the configured swift end node and return the byte stream associated +with the datafile. Use storage_filesystem.py as the model. Then the +top-level storage.py class will need to be modified to import and use +the extra storage method. + +3. Backup (write) access. 
+ +Similarly, storage type-specific code for writing backed up objects is +isolated in the backup_...py files. The currently implemented storage +methods are ssh/ftp (backup_ssh.py, default) and swift +(backup_swift.py; experimental, untested). To add support for other +storage systems, use backup_ssh.py as the model to create your own +backup_... classes, implementing similar methods, that a) copy the +byte stream associated with a Dataverse datafile onto this storage +system and b) verify the copy against the checksum (MD5 or SHA1) +provided by the Dataverse. In the SSH/SFTP implementation, we can do +the verification step by simply executing md5sum/sha1sum on the remote +server via ssh, once the file is copied. With swift, the only way to +verify against the checksum is to read the file *back* from the swift +end note, and calculate the checksum on the obtained stream. + +4. Keeping track of the backup status + +The module uses the table datafilestatus in the "backup database" to +maintain the backup status information for the individual +datafiles. For the successfully backed up files the 'OK' status is +stored. If the module fails to read the file from the Dataverse +storage, the status 'FAIL_READ' is stored; if it fails to copy over or +verify the backup copy against the checksum, the status 'FAIL_WRITE' +is stored. The Dataverse "createdate" timestamp of the Datafile is +also stored in the database; this way, for incremental backups, the +script tries to retrieve only the Datafiles created after the latest +createdate timestamp currently in the backup db. + +5. TODOs + + +As currently implemented, the status notification report will only +specify how many files have been processed, and how many succeeded or +failed. In order to get more detailed information about the individual +files you'll need to consult the datafilestatus table in the backup +database. 
+ +It could be useful to perhaps extend it to provide a list of specific +files that have been backed up successfully or failed. + +Note that the script relies on the *nix 'mail' command to send the +email notification. I chose to do it this way because it felt easier +than to require the user to configure which smtp server to use in +order to send it from python code... But this requires the mail +command to be there, and the system configured to be able to send +email from the command line. + +If for whatever reason this is not an option, and mail needs to be +sent via remote SMTP, the provided email_notification.py could be +easily modified to use something like + + +import smtplib +from email.mime.text import MIMEText + +... + +msg = MIMEText(text) + +msg['Subject'] = subject_str +msg['To'] = ConfigSectionMap("Notifications")['email'] + +... + +s = smtplib.SMTP(ConfigSectionMap("Notifications")['smtpserver']) +s.sendmail(from, ConfigSectionMap("Notifications")['email'], msg.as_string()) +s.quit() + diff --git a/dataversedock/testdata/scripts/backup/run_backup/backup.py b/dataversedock/testdata/scripts/backup/run_backup/backup.py new file mode 100644 index 0000000..6004f21 --- /dev/null +++ b/dataversedock/testdata/scripts/backup/run_backup/backup.py @@ -0,0 +1,17 @@ +import io +import re +#import backup_swift #TODO +from backup_ssh import (backup_file_ssh) +from config import (ConfigSectionMap) + +def backup_file (file_input, dataset_authority, dataset_identifier, storage_identifier, checksum_type, checksum_value, file_size): + storage_type = ConfigSectionMap("Backup")['storagetype'] + + if storage_type == 'swift': + #backup_file_swift(file_input, dataset_authority, dataset_identifier, storage_identifier, checksum_type, checksum_value, file_size) + raise NotImplementedError('no backup_swift yet') + elif storage_type == 'ssh': + backup_file_ssh(file_input, dataset_authority, dataset_identifier, storage_identifier, checksum_type, checksum_value, file_size) + 
else: + raise ValueError("only ssh/sftp and swift are supported as backup storage media") + diff --git a/dataversedock/testdata/scripts/backup/run_backup/backup_ssh.py b/dataversedock/testdata/scripts/backup/run_backup/backup_ssh.py new file mode 100644 index 0000000..3355b9c --- /dev/null +++ b/dataversedock/testdata/scripts/backup/run_backup/backup_ssh.py @@ -0,0 +1,149 @@ +# Dataverse backup, ssh io module + +import sys +import io +import paramiko +import os +import re +from config import (ConfigSectionMap) + +my_ssh_client = None + +def open_ssh_client(): + ssh_host = ConfigSectionMap("Backup")['sshhost'] + ssh_port = ConfigSectionMap("Backup")['sshport'] + ssh_username = ConfigSectionMap("Backup")['sshusername'] + + print "SSH Host: %s" % (ssh_host) + print "SSH Port: %s" % (ssh_port) + print "SSH Username: %s" % (ssh_username) + + + ssh_client=paramiko.SSHClient() + ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + ssh_client.connect(hostname=ssh_host,username=ssh_username) + + print "Connected!" + + return ssh_client + +# Transfers the file "local_flo" over ssh/sftp to the configured remote server. +# local_flo can be either a string specifying the file path, or a file-like object (stream). +# Note that if a stream is supplied, the method also needs the file size to be specified, +# via the parameter byte_size. 
+def transfer_file(local_flo, dataset_authority, dataset_identifier, storage_identifier, byte_size): + sftp_client=my_ssh_client.open_sftp() + + remote_dir = dataset_authority + "/" + dataset_identifier + + subdirs = remote_dir.split("/") + + cdir = ConfigSectionMap("Backup")['backupdirectory'] + "/" + for subdir in subdirs: + try: + cdir = cdir + subdir + "/" + sftpattr=sftp_client.stat(cdir) + except IOError: + #print "directory "+cdir+" does not exist (creating)" + sftp_client.mkdir(cdir) + #else: + # print "directory "+cdir+" already exists" + + m = re.search('^([a-z0-9]*)://(.*)$', storage_identifier) + if m is not None: + storageTag = m.group(1) + storage_identifier = re.sub('^.*:', '', storage_identifier) + + remote_file = cdir + storage_identifier + + if (type(local_flo) is str): + sftp_client.put(local_flo,remote_file) + else: + # assume it's a stream: + # sftp_client.putfo() is convenient, but appears to be unavailable in older + # versions of paramiko; so we'll be using .read() and .write() instead: + #sftp_client.putfo(local_flo,remote_file,byte_size) + sftp_stream = sftp_client.open(remote_file,"wb") + while True: + buffer = local_flo.read(32*1024) + if len(buffer) == 0: + break; + sftp_stream.write (buffer) + sftp_stream.close() + + sftp_client.close() + + print "File transfered." + + return remote_file + +def verify_remote_file(remote_file, checksum_type, checksum_value): + try: + stdin,stdout,stderr=my_ssh_client.exec_command("ls "+remote_file) + remote_file_checked = stdout.readlines()[0].rstrip("\n\r") + except: + raise ValueError("remote file check failed (" + remote_file + ")") + + if (remote_file != remote_file_checked): + raise ValueError("remote file NOT FOUND! 
(" + remote_file_checked + ")") + + if (checksum_type == "MD5"): + remote_command = "md5sum" + elif (checksum_type == "SHA1"): + remote_command = "sha1sum" + + try: + stdin,stdout,stderr=my_ssh_client.exec_command(remote_command+" "+remote_file) + remote_checksum_value = (stdout.readlines()[0]).split(" ")[0] + except: + raise ValueError("remote checksum check failed (" + remote_file + ")") + + if (checksum_value != remote_checksum_value): + raise ValueError("remote checksum BAD! (" + remote_checksum_value + ")") + + +def backup_file_ssh(file_input, dataset_authority, dataset_identifier, storage_identifier, checksum_type, checksum_value, byte_size=0): + global my_ssh_client + if (my_ssh_client is None): + my_ssh_client = open_ssh_client() + print "ssh client is not defined" + else: + print "reusing the existing ssh client" + + try: + file_transfered = transfer_file(file_input, dataset_authority, dataset_identifier, storage_identifier, byte_size) + except: + raise ValueError("failed to transfer file") + + verify_remote_file(file_transfered, checksum_type, checksum_value) + +def main(): + + print "entering ssh (standalone mode)" + + + print "testing local file:" + try: + file_path="config.ini" + backup_file_ssh("config.ini", "1902.1", "XYZ", "config.ini", "MD5", "8e6995806b1cf27df47c5900869fdd27") + except ValueError: + print "failed to verify file (\"config.ini\")" + else: + print "file ok" + + print "testing file stream:" + try: + file_size = os.stat(file_path).st_size + print ("file size: %d" % file_size) + file_stream = io.open("config.ini", "rb") + backup_file_ssh(file_stream, "1902.1", "XYZ", "config.ini", "MD5", "8e6995806b1cf27df47c5900869fdd27", file_size) + except ValueError: + print "failed to verify file (\"config.ini\")" + else: + print "file ok" + + +if __name__ == "__main__": + main() + + diff --git a/dataversedock/testdata/scripts/backup/run_backup/backup_swift.py b/dataversedock/testdata/scripts/backup/run_backup/backup_swift.py new file mode 100644 
index 0000000..463c8de --- /dev/null +++ b/dataversedock/testdata/scripts/backup/run_backup/backup_swift.py @@ -0,0 +1,25 @@ +import io +import re +import swiftclient +from config import (ConfigSectionMap) + +def backup_file_swift (file_input, dataset_authority, dataset_identifier, storage_identifier): + auth_url = ConfigSectionMap("Backup")['swiftauthurl'] + auth_version = ConfigSectionMap("Backup")['swiftauthversion'] + user = ConfigSectionMap("Backup")['swiftuser'] + tenant = ConfigSectionMap("Backup")['swifttenant'] + key = ConfigSectionMap("Backup")['swiftkey'] + + conn = swiftclient.Connection( + authurl=auth_url, + user=user, + key=key, + tenant_name=tenant, + auth_version=auth_version + ) + + container_name = dataset_authority + ":" + dataset_identifier + conn.put(container_name) + + conn.put_object(container_name, storage_identifier, file_input) + diff --git a/dataversedock/testdata/scripts/backup/run_backup/backupdb.sql b/dataversedock/testdata/scripts/backup/run_backup/backupdb.sql new file mode 100644 index 0000000..85acb2f --- /dev/null +++ b/dataversedock/testdata/scripts/backup/run_backup/backupdb.sql @@ -0,0 +1,31 @@ +CREATE TABLE datafilestatus ( + id integer NOT NULL, + datasetidentifier character varying(255), + storageidentifier character varying(255), + status character varying(255), + createdate timestamp without time zone, + lastbackuptime timestamp without time zone, + lastbackupmethod character varying(16) +); + +ALTER TABLE datafilestatus OWNER TO dvnapp; + +CREATE SEQUENCE datafilestatus_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE datafilestatus_id_seq OWNER TO dvnapp; + +ALTER SEQUENCE datafilestatus_id_seq OWNED BY datafilestatus.id; + +ALTER TABLE ONLY datafilestatus + ADD CONSTRAINT datafilestatus_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY datafilestatus ALTER COLUMN id SET DEFAULT nextval('datafilestatus_id_seq'::regclass); + +ALTER TABLE ONLY datafilestatus + ADD CONSTRAINT 
datafilestatus_storageidentifier_key UNIQUE (storageidentifier); \ No newline at end of file diff --git a/dataversedock/testdata/scripts/backup/run_backup/config.ini b/dataversedock/testdata/scripts/backup/run_backup/config.ini new file mode 100644 index 0000000..b6bc7a8 --- /dev/null +++ b/dataversedock/testdata/scripts/backup/run_backup/config.ini @@ -0,0 +1,66 @@ +[Database] +; Dataverse database access configuration +; Note that this section is REQUIRED! - +; you must be able to access the database in order to run the backup module. +; The database can run on a remote server; but make sure you configure the +; host and access creds (below) correctly, and make sure Postgres is accepting +; connections from this server address. + +Host: localhost +Port: 5432 +Database: dvndb +Username: dvnapp +Password: xxxxxx +BackupDatabase: backupdb + +[Repository] +; This section provides configuration for accessing (reading) the files stored +; in this Dataverse. Note that the files can be physicall stored on different +; physical media; if you have files in your Dataverse stored via different +; supported storage drivers - filesystem, swift, S3 - as long as access is properly +; configured here, this script should be able to back them up. 
+ +; configuration for files stored on the filesystem +; (the filesystem needs to be accessible by the system running the backup module) + +FileSystemDirectory: /usr/local/glassfish4/glassfish/domains/domain1/files + +; no configuration needed here for reading files stored on AWS/S3 +; (but the S3 authentication credentials need to be provided in the +; standard ~/.aws location) + +; configuration for files stored on openstack/swift: +; swift NOT SUPPORTED yet + +[Backup] +; ssh configuration: +; (i.e., backup to remote storage accessible via ssh/sftp; default) + +StorageType: ssh +SshHost: backup.dataverse.edu +; ssh port is optional, defaults to 22 +SshPort: 22 +SshUsername: backup +; (the remote server must have ssh key access configured for the user +; specified above) +; the directory on the remote server where the files will be copied to: +BackupDirectory: /dataverse_backup + +; Swift configuration: + +;StorageType: swift +SwiftAuthUrl: https://something.dataverse.edu/swift/v2.0/tokens +SwiftAuthVersion: 2 +SwiftUser: xxx +SwiftKey: yyy +; Note that the 'tenant' setting is only needed for Auth v.1 and 2. +SwiftTenant: zzz +SwiftEndPoint: https://something.dataverse.edu/swift/v1 + +; S3 configuration: +; Dataverse files will be backed up onto AWS/S3, in the bucket specified. 
+; S3 authentication credentials are stored in the +; standard ~/.aws location + +[Notifications] +Email: somebody@dataverse.edu diff --git a/dataversedock/testdata/scripts/backup/run_backup/config.py b/dataversedock/testdata/scripts/backup/run_backup/config.py new file mode 100644 index 0000000..8faaa4f --- /dev/null +++ b/dataversedock/testdata/scripts/backup/run_backup/config.py @@ -0,0 +1,17 @@ +import ConfigParser +import sys +Config = ConfigParser.ConfigParser() +Config.read("config.ini") + +def ConfigSectionMap(section): + dict1 = {} + options = Config.options(section) + for option in options: + try: + dict1[option] = Config.get(section, option) + if dict1[option] == -1: + sys.stderr.write("skip: %s\n" % option) + except: + print("exception on %s!" % option) + dict1[option] = None + return dict1 diff --git a/dataversedock/testdata/scripts/backup/run_backup/database.py b/dataversedock/testdata/scripts/backup/run_backup/database.py new file mode 100644 index 0000000..9c08038 --- /dev/null +++ b/dataversedock/testdata/scripts/backup/run_backup/database.py @@ -0,0 +1,138 @@ +import psycopg2 +import sys +import pprint +from time import (time) +from datetime import (datetime, timedelta) +from config import (ConfigSectionMap) + +dataverse_db_connection=None +backup_db_connection=None + +def create_database_connection(database='database'): + Host = ConfigSectionMap("Database")['host'] + Port = ConfigSectionMap("Database")['port'] + Database = ConfigSectionMap("Database")[database] + Username = ConfigSectionMap("Database")['username'] + Password = ConfigSectionMap("Database")['password'] + + #print "Database Host: %s" % (Host) + #print "Database Port: %s" % (Port) + #print "Database Name: %s" % (Database) + #print "Username: %s" % (Username) + #print "Password: %s" % (Password) + + #Define our connection string + conn_string = "host='"+Host+"' dbname='"+Database+"' user='"+Username+"' password='"+Password+"'" + + #print "Connecting to database\n->%s" % (conn_string) 
+ + # get a connection, if a connect cannot be made an exception will be raised here + conn = psycopg2.connect(conn_string) + + #print "Connected!\n" + + return conn + +def get_backupdb_connection(): + global backup_db_connection + + if backup_db_connection is None: + backup_db_connection = create_database_connection('backupdatabase') + + return backup_db_connection + +def query_database(sinceTimestamp=None): + global dataverse_db_connection + + dataverse_db_connection = create_database_connection() + + cursor = dataverse_db_connection.cursor() + + # Select data files from the database + # The query below is a bit monstrous, as we try to get all the information about the stored file + # from multiple tables in the single request. Note the "LEFT JOIN" in it - we want it to return + # the "datatable" object referencing this datafile, if such exists, or NULL otherwise. If the + # value is not NULL, we know this is a tabular data file. + dataverse_query="SELECT s.authority, s.identifier, o.storageidentifier, f.checksumtype, f.checksumvalue, f.filesize,o.createdate, datatable.id FROM datafile f LEFT JOIN datatable ON f.id = datatable.datafile_id, dataset s, dvobject o WHERE o.id = f.id AND o.owner_id = s.id AND s.harvestingclient_id IS null" + if sinceTimestamp is None: + cursor.execute(dataverse_query) + else: + dataverse_query = dataverse_query+" AND o.createdate > %s" + cursor.execute(dataverse_query, (sinceTimestamp,)) + + + records = cursor.fetchall() + + return records + +def get_last_timestamp(): + backup_db_connection = get_backupdb_connection() + + cursor = backup_db_connection.cursor() + + # select the last timestamp from the datafilestatus table: + dataverse_query="SELECT createdate FROM datafilestatus ORDER BY createdate DESC LIMIT 1" + + cursor.execute(dataverse_query) + + record = cursor.fetchone() + + if record is None: + #print "table is empty" + return None + + #timestamp = record[0] + timedelta(seconds=1) + timestamp = record[0] + # milliseconds are 
important! + timestamp_str = timestamp.strftime('%Y-%m-%d %H:%M:%S.%f') + + return timestamp_str + +def get_datafile_status(dataset_authority, dataset_identifier, storage_identifier): + backup_db_connection = get_backupdb_connection() + cursor = backup_db_connection.cursor() + + # select the last timestamp from the datafilestatus table: + + dataverse_query="SELECT status FROM datafilestatus WHERE datasetidentifier=%s AND storageidentifier=%s;" + + dataset_id=dataset_authority+"/"+dataset_identifier + + cursor.execute(dataverse_query, (dataset_id, storage_identifier)) + + record = cursor.fetchone() + + if record is None: + #print "no backup status for this file" + return None + + backupstatus = record[0] + #print "last backup status: "+backupstatus + return backupstatus + +def record_datafile_status(dataset_authority, dataset_identifier, storage_identifier, status, createdate): + current_status = get_datafile_status(dataset_authority, dataset_identifier, storage_identifier) + + backup_db_connection = get_backupdb_connection() + cursor = backup_db_connection.cursor() + + createdate_str = createdate.strftime('%Y-%m-%d %H:%M:%S.%f') + nowdate_str = datetime.fromtimestamp(time()).strftime('%Y-%m-%d %H:%M:%S') + + if current_status is None: + query = "INSERT INTO datafilestatus (status, createdate, lastbackuptime, lastbackupmethod, datasetidentifier, storageidentifier) VALUES (%s, %s, %s, %s, %s, %s);" + else: + query = "UPDATE datafilestatus SET status=%s, createdate=%s, lastbackuptime=%s, lastbackupmethod=%s WHERE datasetidentifier=%s AND storageidentifier=%s;" + + dataset_id=dataset_authority+"/"+dataset_identifier + backup_method = ConfigSectionMap("Backup")['storagetype'] + + cursor.execute(query, (status, createdate_str, nowdate_str, backup_method, dataset_id, storage_identifier)) + + # finalize transaction: + backup_db_connection.commit() + cursor.close() + + + + diff --git a/dataversedock/testdata/scripts/backup/run_backup/email_notification.py 
b/dataversedock/testdata/scripts/backup/run_backup/email_notification.py new file mode 100644 index 0000000..ed3504b --- /dev/null +++ b/dataversedock/testdata/scripts/backup/run_backup/email_notification.py @@ -0,0 +1,25 @@ +from config import (ConfigSectionMap) +from subprocess import Popen, PIPE, STDOUT +from time import (time) +from datetime import (datetime) + +def send_notification(text): + try: + notification_address = ConfigSectionMap("Notifications")['email'] + except: + notification_address = None + + if (notification_address is None): + raise ValueError('Notification email address is not configured') + + nowdate_str = datetime.fromtimestamp(time()).strftime('%Y-%m-%d %H:%M') + subject_str = ('Dataverse datafile backup report [%s]' % nowdate_str) + + p = Popen(['mail','-s',subject_str,notification_address], stdout=PIPE, stdin=PIPE, stderr=PIPE) + stdout_data = p.communicate(input=text)[0] + +def main(): + send_notification('backup report: test, please disregard') + +if __name__ == "__main__": + main() diff --git a/dataversedock/testdata/scripts/backup/run_backup/requirements.txt b/dataversedock/testdata/scripts/backup/run_backup/requirements.txt new file mode 100644 index 0000000..5696d13 --- /dev/null +++ b/dataversedock/testdata/scripts/backup/run_backup/requirements.txt @@ -0,0 +1,6 @@ +# python2 requirements + +psycopg2 +boto3 +paramiko +# TODO: where to get `swiftclient` from diff --git a/dataversedock/testdata/scripts/backup/run_backup/run_backup.py b/dataversedock/testdata/scripts/backup/run_backup/run_backup.py new file mode 100644 index 0000000..7124d21 --- /dev/null +++ b/dataversedock/testdata/scripts/backup/run_backup/run_backup.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python + +import ConfigParser +import psycopg2 +import sys +import io +import re +from database import (query_database, get_last_timestamp, record_datafile_status, get_datafile_status) +from storage import (open_dataverse_file) +from backup import (backup_file) +from 
email_notification import (send_notification) + +def main(): + rrmode = False + + if (len(sys.argv) > 1 and sys.argv[1] == '--rerun'): + rrmode = True + + if rrmode: + time_stamp = None + else: + time_stamp = get_last_timestamp() + + if time_stamp is None: + print "No time stamp! first run (or a full re-run)." + records = query_database() + else: + print "last backup: "+time_stamp + records = query_database(time_stamp) + + files_total=0 + files_success=0 + files_failed=0 + files_skipped=0 + + for result in records: + dataset_authority = result[0] + dataset_identifier = result[1] + storage_identifier = result[2] + checksum_type = result[3] + checksum_value = result[4] + file_size = result[5] + create_time = result[6] + is_tabular_data = result[7] + + if (checksum_value is None): + checksum_value = "MISSING" + + + if (storage_identifier is not None and dataset_identifier is not None and dataset_authority is not None): + files_total += 1 + print dataset_authority + "/" + dataset_identifier + "/" + storage_identifier + ", " + checksum_type + ": " + checksum_value + + file_input=None + + # if this is a re-run, we are only re-trying the files that have failed previously: + if (rrmode and get_datafile_status(dataset_authority, dataset_identifier, storage_identifier) == 'OK'): + files_skipped += 1 + continue + + try: + file_input = open_dataverse_file(dataset_authority, dataset_identifier, storage_identifier, is_tabular_data) + except: + print "failed to open file "+storage_identifier + file_input=None + + + if (file_input is not None): + try: + backup_file(file_input, dataset_authority, dataset_identifier, storage_identifier, checksum_type, checksum_value, file_size) + print "backed up file "+storage_identifier + record_datafile_status(dataset_authority, dataset_identifier, storage_identifier, 'OK', create_time) + files_success += 1 + except ValueError, ve: + exception_message = str(ve) + print "failed to back up file "+storage_identifier+": "+exception_message + if 
(re.match("^remote", exception_message) is not None): + record_datafile_status(dataset_authority, dataset_identifier, storage_identifier, 'FAIL_VERIFY', create_time) + else: + record_datafile_status(dataset_authority, dataset_identifier, storage_identifier, 'FAIL_WRITE', create_time) + files_failed += 1 + #TODO: add a separate failure status 'FAIL_VERIFY' - for when it looked like we were able to copy the file + # onto the remote storage system, but the checksum verification failed (?) + else: + record_datafile_status(dataset_authority, dataset_identifier, storage_identifier, 'FAIL_READ', create_time) + files_failed += 1 + + if (files_skipped > 0): + report = ('backup script run report: %d files processed; %d skipped (already backed up), %d success, %d failed' % (files_total, files_skipped, files_success, files_failed)) + else: + report = ('backup script run report: %d files processed; %d success, %d failed' % (files_total, files_success, files_failed)) + print report + send_notification(report) + +if __name__ == "__main__": + main() + + + diff --git a/dataversedock/testdata/scripts/backup/run_backup/storage.py b/dataversedock/testdata/scripts/backup/run_backup/storage.py new file mode 100644 index 0000000..b831e7e --- /dev/null +++ b/dataversedock/testdata/scripts/backup/run_backup/storage.py @@ -0,0 +1,28 @@ +import io +import re +import boto3 +from config import (ConfigSectionMap) +from storage_filesystem import (open_storage_object_filesystem) +from storage_s3 import (open_storage_object_s3) + + +def open_dataverse_file(dataset_authority, dataset_identifier, storage_identifier, is_tabular_data): + m = re.search('^([a-z0-9]*)://(.*)$', storage_identifier) + if m is None: + # no storage identifier tag. 
(defaulting to filesystem storage) + storageTag = 'file' + objectLocation = storage_identifier; + else: + storageTag = m.group(1) + objectLocation = m.group(2) + + if storageTag == 'file': + byteStream = open_storage_object_filesystem(dataset_authority, dataset_identifier, objectLocation, is_tabular_data) + return byteStream + elif storageTag == 's3': + byteStream = open_storage_object_s3(dataset_authority, dataset_identifier, objectLocation, is_tabular_data) + return byteStream + elif storageTag == 'swift': + raise ValueError("backup of swift objects not supported yet") + + raise ValueError("Unknown or unsupported storage method: "+storage_identifier) diff --git a/dataversedock/testdata/scripts/backup/run_backup/storage_filesystem.py b/dataversedock/testdata/scripts/backup/run_backup/storage_filesystem.py new file mode 100644 index 0000000..f5cff99 --- /dev/null +++ b/dataversedock/testdata/scripts/backup/run_backup/storage_filesystem.py @@ -0,0 +1,11 @@ +import io +import re +from config import (ConfigSectionMap) + +def open_storage_object_filesystem(dataset_authority, dataset_identifier, object_location, is_tabular_data): + filesystem_directory = ConfigSectionMap("Repository")['filesystemdirectory'] + if (is_tabular_data is not None): + object_location += ".orig" + file_path = filesystem_directory+"/"+dataset_authority+"/"+dataset_identifier+"/"+object_location + byte_stream = io.open(file_path, "rb") + return byte_stream diff --git a/dataversedock/testdata/scripts/backup/run_backup/storage_s3.py b/dataversedock/testdata/scripts/backup/run_backup/storage_s3.py new file mode 100644 index 0000000..94858ee --- /dev/null +++ b/dataversedock/testdata/scripts/backup/run_backup/storage_s3.py @@ -0,0 +1,13 @@ +import io +import re +import boto3 + +def open_storage_object_s3(dataset_authority, dataset_identifier, object_location, is_tabular_data): + s3 = boto3.resource('s3') + bucket_name,object_name = object_location.split(":",1) + key = dataset_authority + "/" + 
dataset_identifier + "/" + object_name; + if (is_tabular_data is not None): + key += ".orig" + s3_obj = s3.Object(bucket_name=bucket_name, key=key) + # "Body" is a byte stream associated with the object: + return s3_obj.get()['Body'] diff --git a/dataversedock/testdata/scripts/database/3561-update.sql b/dataversedock/testdata/scripts/database/3561-update.sql new file mode 100644 index 0000000..8ddd3d3 --- /dev/null +++ b/dataversedock/testdata/scripts/database/3561-update.sql @@ -0,0 +1,24 @@ +-- create the workflow tables +CREATE TABLE WORKFLOW (ID SERIAL NOT NULL, NAME VARCHAR(255), PRIMARY KEY (ID)); +CREATE TABLE PENDINGWORKFLOWINVOCATION (INVOCATIONID VARCHAR(255) NOT NULL, DOIPROVIDER VARCHAR(255), IPADDRESS VARCHAR(255), NEXTMINORVERSIONNUMBER BIGINT, NEXTVERSIONNUMBER BIGINT, PENDINGSTEPIDX INTEGER, TYPEORDINAL INTEGER, USERID VARCHAR(255), WORKFLOW_ID BIGINT, DATASET_ID BIGINT, PRIMARY KEY (INVOCATIONID)); + +CREATE INDEX INDEX_DATASETLOCK_user_id ON DATASETLOCK (user_id); +CREATE INDEX INDEX_DATASETLOCK_dataset_id ON DATASETLOCK (dataset_id); + +-- Alter Dataset lock +ALTER TABLE DATASETLOCK ADD CONSTRAINT FK_DATASETLOCK_DATASET_ID FOREIGN KEY (DATASET_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE DATASETLOCK ADD CONSTRAINT FK_DATASETLOCK_USER_ID FOREIGN KEY (USER_ID) REFERENCES AUTHENTICATEDUSER (ID); +ALTER TABLE DATASETLOCK ADD COLUMN REASON VARCHAR(255); + +-- All existing dataset locks are due to ingest. +UPDATE DATASETLOCK set REASON='Ingest'; + +-- /!\ Important! +-- change "1" to an admin user id. 
+-- +INSERT INTO datasetlock (info, starttime, dataset_id, user_id, reason) +SELECT '', localtimestamp, dataset_id, 1, 'InReview' +FROM datasetversion +WHERE inreview=true; + +ALTER TABLE DATASETVERSION DROP COLUMN inreview; diff --git a/dataversedock/testdata/scripts/database/drop-all.sh b/dataversedock/testdata/scripts/database/drop-all.sh new file mode 100755 index 0000000..782465c --- /dev/null +++ b/dataversedock/testdata/scripts/database/drop-all.sh @@ -0,0 +1,8 @@ +#!/bin/bash +PSQL=psql +DB_NAME=dvndb +SQL_FILENAME=dropall.sql + +$PSQL $DB_NAME -t -c"SELECT 'drop table \"' || tablename || '\" cascade;' FROM pg_tables WHERE schemaname='public';" > $SQL_FILENAME +$PSQL $DB_NAME -a -f $SQL_FILENAME +rm $SQL_FILENAME diff --git a/dataversedock/testdata/scripts/database/drop-create.sh b/dataversedock/testdata/scripts/database/drop-create.sh new file mode 100755 index 0000000..04138ee --- /dev/null +++ b/dataversedock/testdata/scripts/database/drop-create.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +# Drops and creates the database. Assumes pg_dump and psql are in $PATH, and that the db does not need a password. 
+DUMP=pg_dump +PSQL=psql +DB=dvndb +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +$DUMP -s $DB > temp-schema.sql +$PSQL -d $DB -f $DIR/drop-all.sql +$PSQL -d $DB -f temp-schema.sql +rm temp-schema.sql \ No newline at end of file diff --git a/dataversedock/testdata/scripts/database/facetlist.sql b/dataversedock/testdata/scripts/database/facetlist.sql new file mode 100644 index 0000000..83eb0f6 --- /dev/null +++ b/dataversedock/testdata/scripts/database/facetlist.sql @@ -0,0 +1,5 @@ +-- default facets defined in https://redmine.hmdc.harvard.edu/issues/3490 +-- show selected facets by displayorder +SELECT title,name,datasetfield.id FROM dataversefacet, datasetfield WHERE dataversefacet.datasetfield_id = datasetfield.id ORDER BY dataversefacet.displayorder; +-- more detail +-- SELECT dataversefacet.id, title, name, datasetfield.id, dataversefacet.displayorder, dataverse_id FROM dataversefacet, datasetfield WHERE dataversefacet.datasetfield_id = datasetfield.id ORDER BY displayorder; diff --git a/dataversedock/testdata/scripts/database/fedora/rebuild-and-test b/dataversedock/testdata/scripts/database/fedora/rebuild-and-test new file mode 100755 index 0000000..07e3b1c --- /dev/null +++ b/dataversedock/testdata/scripts/database/fedora/rebuild-and-test @@ -0,0 +1,20 @@ +#!/bin/sh +GLASSFISH_HOME=$HOME/tools/devguide-gf4/glassfish4 +ASADMIN=$GLASSFISH_HOME/glassfish/bin/asadmin +$ASADMIN stop-domain +psql -U postgres -c 'DROP DATABASE "dvnDb"' +scripts/search/clear +psql -U postgres -c 'CREATE DATABASE "dvnDb" WITH OWNER = "dvnApp"' +mvn package +$ASADMIN start-domain +# should probably use this instead: https://maven-glassfish-plugin.java.net/deploy-mojo.html +cp target/dataverse-4.0.war $GLASSFISH_HOME/glassfish/domains/domain1/autodeploy +sleep 30 +psql -U postgres dvnDb -f scripts/database/reference_data.sql +cd scripts/api +./datasetfields.sh +./setup-users.sh +./setup-dvs.sh +cd ../.. 
+scripts/search/tests/permissions +scripts/search/tests/delete-dataverse diff --git a/dataversedock/testdata/scripts/database/homebrew/convert b/dataversedock/testdata/scripts/database/homebrew/convert new file mode 100755 index 0000000..bd4ccbd --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/convert @@ -0,0 +1,4 @@ +#!/bin/sh +sed -i -e "s/postgres/$USER/" /tmp/dataverse_db.sql +sed -i -e 's/dvnapp/dataverse_app/' /tmp/dataverse_db.sql +sed -i -e 's/dvn-vm7.hmdc.harvard.edu:8983/localhost:8983/' /tmp/dataverse_db.sql diff --git a/dataversedock/testdata/scripts/database/homebrew/create-database b/dataversedock/testdata/scripts/database/homebrew/create-database new file mode 100755 index 0000000..20b1c75 --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/create-database @@ -0,0 +1,2 @@ +#!/bin/sh +~/.homebrew/bin/psql -c 'CREATE DATABASE "dataverse_db" WITH OWNER = "dataverse_app"' template1 diff --git a/dataversedock/testdata/scripts/database/homebrew/create-role b/dataversedock/testdata/scripts/database/homebrew/create-role new file mode 100755 index 0000000..114526f --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/create-role @@ -0,0 +1,2 @@ +#!/bin/sh +~/.homebrew/bin/psql -c "CREATE ROLE dataverse_app UNENCRYPTED PASSWORD 'secret' NOSUPERUSER CREATEDB CREATEROLE NOINHERIT LOGIN" template1 diff --git a/dataversedock/testdata/scripts/database/homebrew/create-role-superuser b/dataversedock/testdata/scripts/database/homebrew/create-role-superuser new file mode 100755 index 0000000..a8b2913 --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/create-role-superuser @@ -0,0 +1,3 @@ +#!/bin/sh +# so you don't have to sudo to postgres to create roles, etc. 
+~/.homebrew/bin/psql -c "CREATE ROLE $USER UNENCRYPTED PASSWORD 'secret' SUPERUSER CREATEDB CREATEROLE INHERIT LOGIN REPLICATION" template1 diff --git a/dataversedock/testdata/scripts/database/homebrew/custom-build-number b/dataversedock/testdata/scripts/database/homebrew/custom-build-number new file mode 100755 index 0000000..abc074e --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/custom-build-number @@ -0,0 +1,8 @@ +#!/bin/sh +if [ -z "$1" ]; then + BRANCH_COMMIT=$(git rev-parse --abbrev-ref HEAD)-$(git log --oneline | head -1 | awk '{print $1}') + echo "No custom build number specified. Using $BRANCH_COMMIT" + echo "build.number=$BRANCH_COMMIT" > src/main/java/BuildNumber.properties +else + echo "build.number=$@" > src/main/java/BuildNumber.properties +fi diff --git a/dataversedock/testdata/scripts/database/homebrew/delete-all b/dataversedock/testdata/scripts/database/homebrew/delete-all new file mode 100755 index 0000000..2d2c211 --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/delete-all @@ -0,0 +1,7 @@ +#!/bin/sh +/Applications/NetBeans/glassfish4/glassfish/bin/asadmin stop-domain +rm -rf /Applications/NetBeans/glassfish4/glassfish/domains/domain1/generated +scripts/database/homebrew/drop-database +scripts/search/clear +rm -rf ~/dataverse/files +scripts/database/homebrew/create-database diff --git a/dataversedock/testdata/scripts/database/homebrew/devinstall b/dataversedock/testdata/scripts/database/homebrew/devinstall new file mode 100755 index 0000000..89284b5 --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/devinstall @@ -0,0 +1,20 @@ +#!/bin/sh +cd scripts/installer +export DB_NAME=dataverse_db +export DB_PORT=5432 +export DB_HOST=localhost +export DB_USER=dataverse_app +export DB_PASS=secret +export RSERVE_HOST=localhost +export RSERVE_PORT=6311 +export RSERVE_USER=rserve +export RSERVE_PASS=rserve +export SMTP_SERVER=localhost +export HOST_ADDRESS=`hostname` +export 
FILES_DIR=$HOME/dataverse/files +export MEM_HEAP_SIZE=2048 +export GLASSFISH_DOMAIN=domain1 +export GLASSFISH_ROOT=/Applications/NetBeans/glassfish4 +cp pgdriver/postgresql-9.1-902.jdbc4.jar $GLASSFISH_ROOT/glassfish/lib +cp ../../conf/jhove/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf +./glassfish-setup.sh diff --git a/dataversedock/testdata/scripts/database/homebrew/drop-database b/dataversedock/testdata/scripts/database/homebrew/drop-database new file mode 100755 index 0000000..d0d4c3a --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/drop-database @@ -0,0 +1,2 @@ +#!/bin/sh +~/.homebrew/bin/psql -c 'DROP DATABASE "dataverse_db"' template1 diff --git a/dataversedock/testdata/scripts/database/homebrew/drop-role b/dataversedock/testdata/scripts/database/homebrew/drop-role new file mode 100755 index 0000000..a42711d --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/drop-role @@ -0,0 +1,2 @@ +#!/bin/sh +~/.homebrew/bin/psql -c "DROP ROLE dataverse_app" template1 diff --git a/dataversedock/testdata/scripts/database/homebrew/dump b/dataversedock/testdata/scripts/database/homebrew/dump new file mode 100755 index 0000000..94e71d1 --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/dump @@ -0,0 +1,2 @@ +#!/bin/sh +~/.homebrew/bin/pg_dump dataverse_db -f /tmp/dataverse_db.sql diff --git a/dataversedock/testdata/scripts/database/homebrew/keys2tmp b/dataversedock/testdata/scripts/database/homebrew/keys2tmp new file mode 100755 index 0000000..f91be78 --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/keys2tmp @@ -0,0 +1,17 @@ +#!/bin/sh +DIR=/tmp/keys +mkdir -p $DIR +key2tmp () { + #export $2=`grep apiToken /tmp/setup-all.sh.out | grep $1 | jq .data.apiToken | grep -v null | sed s/\"//g` + echo `grep apiToken /tmp/setup-all.sh.out | grep $1 | jq .data.apiToken | grep -v null | sed s/\"//g` > $DIR/$1 +} +key2tmp pete PETEKEY +key2tmp uma UMAKEY +key2tmp gabbi GABBIKEY 
+key2tmp cathy CATHYKEY +key2tmp nick NICKKEY +#echo "pete's key: $PETEKEY" +#echo "uma's key: $UMAKEY" +#echo "gabbi's key: $GABBIKEY" +#echo "cathy's key: $CATHYKEY" +#echo "nick's key: $NICKKEY" diff --git a/dataversedock/testdata/scripts/database/homebrew/kill9glassfish b/dataversedock/testdata/scripts/database/homebrew/kill9glassfish new file mode 100755 index 0000000..678a8a5 --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/kill9glassfish @@ -0,0 +1,2 @@ +#!/bin/sh +kill -9 `jps | grep ASMain | awk '{print $1}'` diff --git a/dataversedock/testdata/scripts/database/homebrew/rebuild-and-test b/dataversedock/testdata/scripts/database/homebrew/rebuild-and-test new file mode 100755 index 0000000..670fb84 --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/rebuild-and-test @@ -0,0 +1,11 @@ +#!/bin/sh +scripts/database/homebrew/run-post-create-post-deploy +echo "Publishing root dataverse" +scripts/search/tests/publish-dataverse-root +echo "---" +echo "Creating search users" +scripts/search/populate-users > /dev/null +scripts/search/create-users > /dev/null +scripts/search/tests/grant-authusers-add-on-root +scripts/search/tests/create-all-and-test +#scripts/search/tests/create-saved-search-and-test diff --git a/dataversedock/testdata/scripts/database/homebrew/restore b/dataversedock/testdata/scripts/database/homebrew/restore new file mode 100755 index 0000000..75e6a22 --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/restore @@ -0,0 +1,2 @@ +#!/bin/sh +~/.homebrew/bin/psql dataverse_db -f /tmp/dataverse_db.sql diff --git a/dataversedock/testdata/scripts/database/homebrew/run-post-create-post-deploy b/dataversedock/testdata/scripts/database/homebrew/run-post-create-post-deploy new file mode 100755 index 0000000..671958e --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/run-post-create-post-deploy @@ -0,0 +1,7 @@ +#!/bin/sh +scripts/database/homebrew/run-reference_data.sql > 
/tmp/run-reference_data.sql +psql dataverse_db -f doc/sphinx-guides/source/_static/util/createsequence.sql +psql -c 'ALTER TABLE datasetidentifier_seq OWNER TO "dataverse_app";' dataverse_db +cd scripts/api +./setup-all.sh --insecure > /tmp/setup-all.sh.out 2> /tmp/setup-all.sh.err +cd ../.. diff --git a/dataversedock/testdata/scripts/database/homebrew/run-reference_data.sql b/dataversedock/testdata/scripts/database/homebrew/run-reference_data.sql new file mode 100755 index 0000000..99fa05b --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/run-reference_data.sql @@ -0,0 +1,2 @@ +#!/bin/sh +~/.homebrew/bin/psql dataverse_db -f $HOME/NetBeansProjects/dataverse/scripts/database/reference_data.sql diff --git a/dataversedock/testdata/scripts/database/homebrew/set-env-for-setup b/dataversedock/testdata/scripts/database/homebrew/set-env-for-setup new file mode 100755 index 0000000..98cbb98 --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/set-env-for-setup @@ -0,0 +1,22 @@ +#!/bin/bash +# put these variables into your env with `source path/to/script` + +export GLASSFISH_ROOT='/Applications/NetBeans/glassfish4' +export -n GLASSFISH_ROOT +unset GLASSFISH_ROOT +echo $GLASSFISH_ROOT + +export DB_NAME_CUSTOM='dataverse_db' +#export -n DB_NAME_CUSTOM +#unset DB_NAME_CUSTOM +echo $DB_NAME_CUSTOM + +export DB_USER_CUSTOM='dataverse_app' +#export -n DB_USER_CUSTOM +#unset DB_USER_CUSTOM +echo $DB_USER_CUSTOM + +export DB_PASS_CUSTOM='secret' +#export -n DB_PASS_CUSTOM +#unset DB_PASS_CUSTOM +echo $DB_PASS_CUSTOM diff --git a/dataversedock/testdata/scripts/database/homebrew/superuser-password-update b/dataversedock/testdata/scripts/database/homebrew/superuser-password-update new file mode 100755 index 0000000..a955ce6 --- /dev/null +++ b/dataversedock/testdata/scripts/database/homebrew/superuser-password-update @@ -0,0 +1,5 @@ +#!/bin/sh +# default "admin" password +# $2a$10$H8jca9BBbvCQAs2fU6TaseQeyD6ho3vZuIBKdlknDaR5lh69effde 
+~/.homebrew/bin/psql -c 'select username,encryptedpassword from builtinuser where id = 1' dataverse_db +~/.homebrew/bin/psql -c "update builtinuser set encryptedpassword='\$2a\$10\$H8jca9BBbvCQAs2fU6TaseQeyD6ho3vZuIBKdlknDaR5lh69effde' where id = 1" dataverse_db diff --git a/dataversedock/testdata/scripts/database/reference_data.sql b/dataversedock/testdata/scripts/database/reference_data.sql new file mode 100644 index 0000000..2e71f0b --- /dev/null +++ b/dataversedock/testdata/scripts/database/reference_data.sql @@ -0,0 +1,51 @@ +-- using http://dublincore.org/schemas/xmls/qdc/dcterms.xsd because at http://dublincore.org/schemas/xmls/ it's the schema location for http://purl.org/dc/terms/ which is referenced in http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html +INSERT INTO foreignmetadataformatmapping(id, name, startelement, displayName, schemalocation) VALUES (1, 'http://purl.org/dc/terms/', 'entry', 'dcterms: DCMI Metadata Terms', 'http://dublincore.org/schemas/xmls/qdc/dcterms.xsd'); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (1, ':title', 'title', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (2, ':identifier', 'otherIdValue', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (3, ':creator', 'authorName', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (4, ':date', 'productionDate', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES 
(5, ':subject', 'keywordValue', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (6, ':description', 'dsDescriptionValue', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (7, ':relation', 'relatedMaterial', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (8, ':isReferencedBy', 'publicationCitation', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (9, 'holdingsURI', 'publicationURL', TRUE, 8, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (10, 'agency', 'publicationIDType', TRUE, 8, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (11, 'IDNo', 'publicationIDNumber', TRUE, 8, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (12, ':coverage', 'otherGeographicCoverage', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (13, ':type', 'kindOfData', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (14, ':source', 'dataSources', FALSE, NULL, 1 ); +INSERT INTO 
foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (15, 'affiliation', 'authorAffiliation', TRUE, 3, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (16, ':contributor', 'contributorName', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (17, 'type', 'contributorType', TRUE, 16, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (18, ':publisher', 'producerName', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (19, ':language', 'language', FALSE, NULL, 1 ); + +INSERT INTO guestbook( + emailrequired, enabled, institutionrequired, createtime, + "name", namerequired, positionrequired, dataverse_id) + VALUES ( false, true, false, now(), + 'Default', false, false, null); + +-- TODO: Remove if http://stackoverflow.com/questions/25743191/how-to-add-a-case-insensitive-jpa-unique-constraint +-- gets an answer. 
See also https://github.com/IQSS/dataverse/issues/2598#issuecomment-158219334 +CREATE UNIQUE INDEX dataverse_alias_unique_idx on dataverse (LOWER(alias)); +CREATE UNIQUE INDEX index_authenticateduser_lower_email ON authenticateduser (lower(email)); +CREATE UNIQUE INDEX index_builtinuser_lower_email ON builtinuser (lower(email)); + +--Edit Dataset: Investigate and correct multiple draft issue: https://github.com/IQSS/dataverse/issues/2132 +--This unique index will prevent the multiple draft issue +CREATE UNIQUE INDEX one_draft_version_per_dataset ON datasetversion +(dataset_id) WHERE versionstate='DRAFT'; + + +INSERT INTO worldmapauth_tokentype +( name, + created, + contactemail, hostname, ipaddress, + mapitlink, md5, + modified, timelimitminutes) + VALUES ( 'GEOCONNECT', current_timestamp, + 'support@dataverse.org', 'geoconnect.datascience.iq.harvard.edu', '140.247.115.127', + 'http://geoconnect.datascience.iq.harvard.edu/shapefile/map-it', + '38c0a931b2d582a5c43fc79405b30c22', + current_timestamp, 30); diff --git a/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.0.1_to_v4.1.sql b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.0.1_to_v4.1.sql new file mode 100644 index 0000000..4ac1789 --- /dev/null +++ b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.0.1_to_v4.1.sql @@ -0,0 +1,10 @@ +/* ---------------------------------------- + Add unique constraint to prevent multiple drafts + Ticket 2132 +*/ ---------------------------------------- + +ALTER TABLE datasetversion +ADD CONSTRAINT uq_datasetversion UNIQUE(dataset_id, versionnumber, minorversionnumber); + +-- make sure Member role has DownloadFilePermission +update dataverserole set permissionbits=28 where alias='member'; \ No newline at end of file diff --git a/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.0_to_v4.0.1.sql b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.0_to_v4.0.1.sql new file mode 100644 index 0000000..c72962a --- /dev/null +++ 
b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.0_to_v4.0.1.sql @@ -0,0 +1,196 @@ +/* ---------------------------------------- + Description: These SQL statements in this file relate to the following tickets + + (1) "Index Check" - https://github.com/IQSS/dataverse/issues/1880 + Summary: Add indices to existing columns. + +*/ ---------------------------------------- +/* ---------------------------------------- + actionlogrecord indices (ActionLogRecord.java) +*/ ---------------------------------------- +CREATE INDEX index_actionlogrecord_useridentifier ON actionlogrecord (useridentifier); +CREATE INDEX index_actionlogrecord_actiontype ON actionlogrecord (actiontype); +CREATE INDEX index_actionlogrecord_starttime ON actionlogrecord (starttime); +/* ---------------------------------------- + authenticationproviderrow index (AuthenticationProviderRow.java) +*/ ---------------------------------------- +CREATE INDEX index_authenticationproviderrow_enabled ON authenticationproviderrow (enabled); +/* ---------------------------------------- + builtinuser index (BuiltInUser.java) +*/ ---------------------------------------- +CREATE INDEX index_builtinuser_lastname ON builtinuser (lastname); +/* ---------------------------------------- + controlledvocabalternate indices (ControlledVocabAlternate.java) +*/ ---------------------------------------- +CREATE INDEX index_controlledvocabalternate_controlledvocabularyvalue_id ON controlledvocabalternate (controlledvocabularyvalue_id); +CREATE INDEX index_controlledvocabalternate_datasetfieldtype_id ON controlledvocabalternate (datasetfieldtype_id); +/* ---------------------------------------- + controlledvocabularyvalue indices (ControlledVocabularyValue.java) +*/ ---------------------------------------- +CREATE INDEX index_controlledvocabularyvalue_datasetfieldtype_id ON controlledvocabularyvalue (datasetfieldtype_id); +CREATE INDEX index_controlledvocabularyvalue_displayorder ON controlledvocabularyvalue 
(displayorder); +/* ---------------------------------------- + customfieldmap indices (CustomFieldMap.java) +*/ ---------------------------------------- +CREATE INDEX index_customfieldmap_sourcedatasetfield ON customfieldmap (sourcedatasetfield); +CREATE INDEX index_customfieldmap_sourcetemplate ON customfieldmap (sourcetemplate); +/* ---------------------------------------- + datafile indices (DataFile.java) +*/ ---------------------------------------- +CREATE INDEX index_datafile_ingeststatus ON datafile (ingeststatus); +CREATE INDEX index_datafile_md5 ON datafile (md5); +CREATE INDEX index_datafile_contenttype ON datafile (contenttype); +CREATE INDEX index_datafile_restricted ON datafile (restricted); +/* ---------------------------------------- + datasetfielddefaultvalue indices (DatasetFieldDefaultValue.java) +*/ ---------------------------------------- +CREATE INDEX index_datasetfielddefaultvalue_datasetfield_id ON datasetfielddefaultvalue (datasetfield_id); +CREATE INDEX index_datasetfielddefaultvalue_defaultvalueset_id ON datasetfielddefaultvalue (defaultvalueset_id); +CREATE INDEX index_datasetfielddefaultvalue_parentdatasetfielddefaultvalue_id ON datasetfielddefaultvalue (parentdatasetfielddefaultvalue_id); +CREATE INDEX index_datasetfielddefaultvalue_displayorder ON datasetfielddefaultvalue (displayorder); +/* ---------------------------------------- + datasetlock indices (DatasetLock.java) +*/ ---------------------------------------- +CREATE INDEX index_datasetlock_user_id ON datasetlock (user_id); +CREATE INDEX index_datasetlock_dataset_id ON datasetlock (dataset_id); +/* ---------------------------------------- + datasetversionuser indices (DatasetVersionUser.java) +*/ ---------------------------------------- +CREATE INDEX index_datasetversionuser_authenticateduser_id ON datasetversionuser (authenticateduser_id); +CREATE INDEX index_datasetversionuser_datasetversion_id ON datasetversionuser (datasetversion_id); +/* 
---------------------------------------- + dataverse indices (Dataverse.java) +*/ ---------------------------------------- +CREATE INDEX index_dataverse_fk_dataverse_id ON dataverse (fk_dataverse_id); +CREATE INDEX index_dataverse_defaultcontributorrole_id ON dataverse (defaultcontributorrole_id); +CREATE INDEX index_dataverse_defaulttemplate_id ON dataverse (defaulttemplate_id); +CREATE INDEX index_dataverse_alias ON dataverse (alias); +CREATE INDEX index_dataverse_affiliation ON dataverse (affiliation); +CREATE INDEX index_dataverse_dataversetype ON dataverse (dataversetype); +CREATE INDEX index_dataverse_facetroot ON dataverse (facetroot); +CREATE INDEX index_dataverse_guestbookroot ON dataverse (guestbookroot); +CREATE INDEX index_dataverse_metadatablockroot ON dataverse (metadatablockroot); +CREATE INDEX index_dataverse_templateroot ON dataverse (templateroot); +CREATE INDEX index_dataverse_permissionroot ON dataverse (permissionroot); +CREATE INDEX index_dataverse_themeroot ON dataverse (themeroot); +/* ---------------------------------------- + dataversecontact indices (DataverseContact.java) +*/ ---------------------------------------- +CREATE INDEX index_dataversecontact_dataverse_id ON dataversecontact (dataverse_id); +CREATE INDEX index_dataversecontact_contactemail ON dataversecontact (contactemail); +CREATE INDEX index_dataversecontact_displayorder ON dataversecontact (displayorder); +/* ---------------------------------------- + dataversefacet indices (DataverseFacet.java) +*/ ---------------------------------------- +CREATE INDEX index_dataversefacet_dataverse_id ON dataversefacet (dataverse_id); +CREATE INDEX index_dataversefacet_datasetfieldtype_id ON dataversefacet (datasetfieldtype_id); +CREATE INDEX index_dataversefacet_displayorder ON dataversefacet (displayorder); +/* ---------------------------------------- + dataversefeatureddataverse indices (DataverseFeaturedDataverse.java) +*/ ---------------------------------------- +CREATE INDEX 
index_dataversefeatureddataverse_dataverse_id ON dataversefeatureddataverse (dataverse_id); +CREATE INDEX index_dataversefeatureddataverse_featureddataverse_id ON dataversefeatureddataverse (featureddataverse_id); +CREATE INDEX index_dataversefeatureddataverse_displayorder ON dataversefeatureddataverse (displayorder); +/* ---------------------------------------- + dataversefieldtypeinputlevel indices (DataverseFieldTypeInputLevel.java) +*/ ---------------------------------------- +CREATE INDEX index_dataversefieldtypeinputlevel_dataverse_id ON dataversefieldtypeinputlevel (dataverse_id); +CREATE INDEX index_dataversefieldtypeinputlevel_datasetfieldtype_id ON dataversefieldtypeinputlevel (datasetfieldtype_id); +CREATE INDEX index_dataversefieldtypeinputlevel_required ON dataversefieldtypeinputlevel (required); +/* ---------------------------------------- + dataverserole indices (DataverseRole.java) +*/ ---------------------------------------- +CREATE INDEX index_dataverserole_owner_id ON dataverserole (owner_id); +CREATE INDEX index_dataverserole_name ON dataverserole (name); +CREATE INDEX index_dataverserole_alias ON dataverserole (alias); +/* ---------------------------------------- + dvobject indices (DvObject.java) +*/ ---------------------------------------- +CREATE INDEX index_dvobject_dtype ON dvobject (dtype); +/* Should already exist: +CREATE INDEX index_dvobject_owner_id ON dvobject (owner_id); +CREATE INDEX index_dvobject_creator_id ON dvobject (creator_id); +CREATE INDEX index_dvobject_releaseuser_id ON dvobject (releaseuser_id); +*/ +/* ---------------------------------------- + explicitgroup indices (ExplicitGroup.java) +*/ ---------------------------------------- +CREATE INDEX index_explicitgroup_owner_id ON explicitgroup (owner_id); +CREATE INDEX index_explicitgroup_groupalias ON explicitgroup (groupalias); +CREATE INDEX index_explicitgroup_groupaliasinowner ON explicitgroup (groupaliasinowner); +/* ---------------------------------------- + 
foreignmetadatafieldmapping indices (ForeignMetadataFieldMapping.java) +*/ ---------------------------------------- +CREATE INDEX index_foreignmetadatafieldmapping_foreignmetadataformatmapping_id ON foreignmetadatafieldmapping (foreignmetadataformatmapping_id); +CREATE INDEX index_foreignmetadatafieldmapping_foreignfieldxpath ON foreignmetadatafieldmapping (foreignfieldxpath); +CREATE INDEX index_foreignmetadatafieldmapping_parentfieldmapping_id ON foreignmetadatafieldmapping (parentfieldmapping_id); +/* ---------------------------------------- + foreignmetadataformatmapping index (ForeignMetadataFormatMapping.java) +*/ ---------------------------------------- +CREATE INDEX index_foreignmetadataformatmapping_name ON foreignmetadataformatmapping (name); +/* ---------------------------------------- + harvestingdataverseconfig indices (HarvestingDataverseConfig.java) +*/ ---------------------------------------- +CREATE INDEX index_harvestingdataverseconfig_dataverse_id ON harvestingdataverseconfig (dataverse_id); +CREATE INDEX index_harvestingdataverseconfig_harvesttype ON harvestingdataverseconfig (harvesttype); +CREATE INDEX index_harvestingdataverseconfig_harveststyle ON harvestingdataverseconfig (harveststyle); +CREATE INDEX index_harvestingdataverseconfig_harvestingurl ON harvestingdataverseconfig (harvestingurl); +/* ---------------------------------------- + ipv4range index (IPv4Range.java) +*/ ---------------------------------------- +CREATE INDEX index_ipv4range_owner_id ON ipv4range (owner_id); +/* ---------------------------------------- + ipv6range index (IPv6Range.java) +*/ ---------------------------------------- +CREATE INDEX index_ipv6range_owner_id ON ipv6range (owner_id); +/* ---------------------------------------- + maplayermetadata indices (MapLayerMetadata.java) +*/ ---------------------------------------- +CREATE INDEX index_maplayermetadata_dataset_id ON maplayermetadata (dataset_id); +CREATE INDEX index_maplayermetadata_datafile_id ON 
maplayermetadata (datafile_id); +/* ---------------------------------------- + metadatablock indices (MetadataBlock.java) +*/ ---------------------------------------- +CREATE INDEX index_metadatablock_name ON metadatablock (name); +CREATE INDEX index_metadatablock_owner_id ON metadatablock (owner_id); +/* ---------------------------------------- + passwordresetdata indices (PasswordResetData.java) +*/ ---------------------------------------- +CREATE INDEX index_passwordresetdata_token ON passwordresetdata (token); +CREATE INDEX index_passwordresetdata_builtinuser_id ON passwordresetdata (builtinuser_id); +/* ---------------------------------------- + persistedglobalgroup indices (PersistedGlobalGroup.java) +*/ ---------------------------------------- +CREATE INDEX index_persistedglobalgroup_persistedgroupalias ON persistedglobalgroup (persistedgroupalias); +CREATE INDEX index_persistedglobalgroup_dtype ON persistedglobalgroup (dtype); +/* ---------------------------------------- + roleassignment indices (RoleAssignment.java) +*/ ---------------------------------------- +CREATE INDEX index_roleassignment_assigneeidentifier ON roleassignment (assigneeidentifier); +CREATE INDEX index_roleassignment_definitionpoint_id ON roleassignment (definitionpoint_id); +CREATE INDEX index_roleassignment_role_id ON roleassignment (role_id); +/* ---------------------------------------- + savedsearch indices (SavedSearch.java) +*/ ---------------------------------------- +CREATE INDEX index_savedsearch_definitionpoint_id ON savedsearch (definitionpoint_id); +CREATE INDEX index_savedsearch_creator_id ON savedsearch (creator_id); +/* ---------------------------------------- + savedsearchfilterquery index (SavedSearchFilterQuery.java) +*/ ---------------------------------------- +CREATE INDEX index_savedsearchfilterquery_savedsearch_id ON savedsearchfilterquery (savedsearch_id); +/* ---------------------------------------- + template index (Template.java) +*/ 
---------------------------------------- +CREATE INDEX index_template_dataverse_id ON template (dataverse_id); +/* ---------------------------------------- + worldmapauth_token indices (WorldMapToken.java) +*/ ---------------------------------------- +CREATE INDEX index_worldmapauth_token_application_id ON worldmapauth_token (application_id); +CREATE INDEX index_worldmapauth_token_datafile_id ON worldmapauth_token (datafile_id); +CREATE INDEX index_worldmapauth_token_dataverseuser_id ON worldmapauth_token (dataverseuser_id); +/*------------------------------------------ + Add Compound Unique Constraint to dataversefieldtypeinputlevel + combining dataverse_id and datasetfieldtype_id +*/------------------------------------------ +ALTER TABLE dataversefieldtypeinputlevel + ADD CONSTRAINT unq_dataversefieldtypeinputlevel_add UNIQUE (dataverse_id, datasetfieldtype_id); \ No newline at end of file diff --git a/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.1_to_v4.2.sql b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.1_to_v4.2.sql new file mode 100644 index 0000000..89a8954 --- /dev/null +++ b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.1_to_v4.2.sql @@ -0,0 +1,36 @@ +/* --------------------------------------- +Separate Terms of Use and Access from Dataset Version +and add to Template +*/ --------------------------------------- +ALTER TABLE template +ADD termsofuseandaccess_id bigint; + +ALTER TABLE datasetversion +ADD termsofuseandaccess_id bigint; + +/* ------------------------------------------------- +Migrate terms of use and access to the new table +reset counter of the id for the new table +*/ ------------------------------------------------- + +INSERT INTO termsofuseandaccess + (id, availabilitystatus, citationrequirements, conditions, confidentialitydeclaration, +contactforaccess, depositorrequirements, disclaimer, fileaccessrequest, license, originalarchive, restrictions, sizeofcollection, +specialpermissions, 
studycompletion, termsofaccess, termsofuse) +SELECT id, availabilitystatus, citationrequirements, conditions, confidentialitydeclaration, +contactforaccess, depositorrequirements, disclaimer, fileaccessrequest, license, originalarchive, restrictions, sizeofcollection, +specialpermissions, studycompletion, termsofaccess, termsofuse + FROM datasetversion; + +update datasetversion set termsofuseandaccess_id = id; + +SELECT setval(pg_get_serial_sequence('termsofuseandaccess', 'id'), coalesce(max(id),0) + 1, false) FROM datasetversion; + +/*------------------------------------------- +Clean up bad data where datasets in review +did NOT have their flags reset +on publish +*/------------------------------------------- + +UPDATE datasetversion SET inreview = false where inreview = true +and versionstate = 'RELEASED'; \ No newline at end of file diff --git a/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.2.1_to_v4.2.2.sql b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.2.1_to_v4.2.2.sql new file mode 100644 index 0000000..f58a763 --- /dev/null +++ b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.2.1_to_v4.2.2.sql @@ -0,0 +1,23 @@ +-- A dataverse alias should not be case sensitive: https://github.com/IQSS/dataverse/issues/2598 +CREATE UNIQUE INDEX dataverse_alias_unique_idx on dataverse (LOWER(alias)); +-- If creating the index fails, check for dataverse with the same alias using this query: +-- select alias from dataverse where lower(alias) in (select lower(alias) from dataverse group by lower(alias) having count(*) >1) order by lower(alias); + + +--Edit Dataset: Investigate and correct multiple draft issue: https://github.com/IQSS/dataverse/issues/2132 +--This unique index will prevent the multiple draft issue +CREATE UNIQUE INDEX one_draft_version_per_dataset ON datasetversion +(dataset_id) WHERE versionstate='DRAFT'; +--It may not be applied until all of the datasets with +--multiple drafts have been resolved + + +--Guestbook: 
Entering more text in any textbox field, custom or not, fails to write to db but still downloads file.: https://github.com/IQSS/dataverse/issues/2752 +--Modify column to allow essay responses to guestbook custom questions +ALTER TABLE customquestionresponse + ALTER COLUMN response TYPE text; + +-- A new boolean in the DvObject table, to indicate that we have a generated thumbnail/preview image +-- for this object. +-- Added by Leonid, Nov. 23 2015 +ALTER TABLE dvobject ADD COLUMN previewImageAvailable BOOLEAN; diff --git a/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.2.4_to_4.3.sql b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.2.4_to_4.3.sql new file mode 100644 index 0000000..8950619 --- /dev/null +++ b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.2.4_to_4.3.sql @@ -0,0 +1,59 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. 
+ */ +/** + * Author: skraffmi + * Created: Mar 4, 2016 + */ + + +-- remove non used columns from datasetversion +alter table +datasetversion +drop column if exists availabilitystatus, +drop column if exists citationrequirements, +drop column if exists conditions, +drop column if exists confidentialitydeclaration, +drop column if exists contactforaccess, +drop column if exists dataaccessplace, +drop column if exists depositorrequirements, +drop column if exists disclaimer, +drop column if exists fileaccessrequest, +drop column if exists license, +drop column if exists originalarchive, +drop column if exists restrictions, +drop column if exists sizeofcollection, +drop column if exists specialpermissions, +drop column if exists studycompletion, +drop column if exists termsofaccess, +drop column if exists termsofuse; + + +-- Add new foreign key to dataset for citation date (from datasetfieldtype) +ALTER TABLE dataset ADD COLUMN citationdatedatasetfieldtype_id bigint; + +ALTER TABLE dataset + ADD CONSTRAINT fk_dataset_citationdatedatasetfieldtype_id FOREIGN KEY (citationdatedatasetfieldtype_id) + REFERENCES datasetfieldtype (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION; + + +-- Add new indices for case insensitive e-mails +CREATE UNIQUE INDEX index_authenticateduser_lower_email ON authenticateduser (lower(email)); +CREATE UNIQUE INDEX index_builtinuser_lower_email ON builtinuser (lower(email)); + + +/* + For ticket #2957, additional columns for mapping of tabular data +*/ +-- > Distinguishes a mapped Tabular file from a shapefile +ALTER TABLE maplayermetadata ADD COLUMN isjoinlayer BOOLEAN default false; +-- > Description of the tabular join. e.g. joined to layer XYZ on column TRACT, etc +ALTER TABLE maplayermetadata ADD COLUMN joindescription TEXT default NULL; +-- > For all maps, store the WorldMap links to generate alternative versions, +-- e.g. 
PNG, zipped shapefile, GeoJSON, Excel, etc +ALTER TABLE maplayermetadata ADD COLUMN maplayerlinks TEXT default NULL; + + diff --git a/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.4_to_v4.5.sql b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.4_to_v4.5.sql new file mode 100644 index 0000000..6f9a0ca --- /dev/null +++ b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.4_to_v4.5.sql @@ -0,0 +1,11 @@ +-- A Private URL is a specialized role assignment with a token. +ALTER TABLE roleassignment ADD COLUMN privateurltoken character varying(255); +-- "Last Export Time" added to the dataset: +ALTER TABLE dataset ADD COLUMN lastExportTime TIMESTAMP; +-- Direct link to the harvesting configuration, for harvested datasets: +ALTER TABLE dataset ADD COLUMN harvestingClient_id bigint; +-- For harveted datasets, native OAI identifier used by the original OAI server +ALTER TABLE dataset ADD COLUMN harvestIdentifier VARCHAR(255); +-- Add extra rules to the Dublin Core import logic: +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (18, ':publisher', 'producerName', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (19, ':language', 'language', FALSE, NULL, 1 ); diff --git a/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql new file mode 100644 index 0000000..eb7d954 --- /dev/null +++ b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql @@ -0,0 +1,8 @@ +ALTER TABLE datafile ADD COLUMN checksumtype character varying(255); +UPDATE datafile SET checksumtype = 'MD5'; +ALTER TABLE datafile ALTER COLUMN checksumtype SET NOT NULL; +-- alternate statement for sbgrid.org and others interested in 
SHA-1 support +-- note that in the database we use "SHA1" (no hyphen) but the GUI will show "SHA-1" +--UPDATE datafile SET checksumtype = 'SHA1'; +ALTER TABLE datafile RENAME md5 TO checksumvalue; +ALTER TABLE filemetadata ADD COLUMN directorylabel character varying(255); diff --git a/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.5_to_v4.5.1.sql b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.5_to_v4.5.1.sql new file mode 100644 index 0000000..6296fca --- /dev/null +++ b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.5_to_v4.5.1.sql @@ -0,0 +1 @@ +ALTER TABLE authenticateduser ADD COLUMN emailconfirmed timestamp without time zone; diff --git a/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.6.1_to_v4.6.2.sql b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.6.1_to_v4.6.2.sql new file mode 100644 index 0000000..bc06f11 --- /dev/null +++ b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.6.1_to_v4.6.2.sql @@ -0,0 +1,3 @@ +ALTER TABLE dataset ADD COLUMN useGenericThumbnail boolean; +ALTER TABLE maplayermetadata ADD COLUMN lastverifiedtime timestamp without time zone; +ALTER TABLE maplayermetadata ADD COLUMN lastverifiedstatus bigint; diff --git a/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.6.2_to_v4.7.sql b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.6.2_to_v4.7.sql new file mode 100644 index 0000000..08d73a6 --- /dev/null +++ b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.6.2_to_v4.7.sql @@ -0,0 +1,2 @@ +--Uncomment to preserve "Dataverse" at end of each dataverse name. 
+--UPDATE dataverse SET name = name || ' Dataverse'; \ No newline at end of file diff --git a/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.6_to_v4.6.1.sql b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.6_to_v4.6.1.sql new file mode 100644 index 0000000..d4da4c2 --- /dev/null +++ b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.6_to_v4.6.1.sql @@ -0,0 +1,7 @@ +DELETE FROM authenticationproviderrow where id = 'echo-simple'; +DELETE FROM authenticationproviderrow where id = 'echo-dignified'; +-- For DataFile, file replace functionality: +ALTER TABLE datafile ADD COLUMN rootdatafileid bigint default -1; +ALTER TABLE datafile ADD COLUMN previousdatafileid bigint default null; +-- For existing DataFile objects, update rootDataFileId values: +UPDATE datafile SET rootdatafileid = -1; diff --git a/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.7.1_to_v4.8.sql b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.7.1_to_v4.8.sql new file mode 100644 index 0000000..2ec3121 --- /dev/null +++ b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.7.1_to_v4.8.sql @@ -0,0 +1,15 @@ +-- Updates the database to add a storage identifier to each DvObject +ALTER TABLE dvobject ADD COLUMN storageidentifier character varying(255); + +UPDATE dvobject +SET storageidentifier=(SELECT datafile.filesystemname +FROM datafile +WHERE datafile.id=dvobject.id AND dvobject.dtype='DataFile') where dvobject.dtype='DataFile'; + +UPDATE dvobject +SET storageidentifier=(select concat('file://',authority::text,ds.doiseparator::text,ds.identifier::text) +FROM dataset ds +WHERE dvobject.id=ds.id) +WHERE storageidentifier IS NULL; + +ALTER TABLE datafile DROP COLUMN filesystemname; diff --git a/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.7_to_v4.7.1.sql b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.7_to_v4.7.1.sql new file mode 100644 index 0000000..5eef4a2 --- /dev/null +++ 
b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.7_to_v4.7.1.sql @@ -0,0 +1,35 @@ +-- Adding new columns for "createdtime", "lastlogintime", and "lastapiusetime" +-- Default "createdtime" to 1/1/2000 +-- Dropping "modificationtime" as it is inconsistent between user auths and best replaced by the new columns. +ALTER TABLE authenticateduser ADD COLUMN createdtime TIMESTAMP NOT NULL DEFAULT '01-01-2000 00:00:00'; +ALTER TABLE authenticateduser ADD COLUMN lastlogintime TIMESTAMP DEFAULT NULL; +ALTER TABLE authenticateduser ADD COLUMN lastapiusetime TIMESTAMP DEFAULT NULL; +ALTER TABLE authenticateduser DROP COLUMN modificationtime; + +-- Removing authenticated builtin users who do not exist in the builtin table because they were created through faulty validation +-- creates view containing authentication ids that you will be deleting +CREATE TEMP VIEW useridstodelete AS (SELECT DISTINCT a.id FROM authenticateduserlookup al, authenticateduser a WHERE al.authenticateduser_id = a.id AND al.authenticationproviderid = 'builtin' AND a.useridentifier NOT IN (SELECT username FROM builtinuser)); +-- commands to remove the users from the appropriate tables +DELETE FROM confirmemaildata WHERE authenticateduser_id IN (SELECT * FROM useridstodelete); +DELETE FROM usernotification WHERE user_id IN (SELECT * FROM useridstodelete); +DELETE FROM guestbookresponse WHERE authenticateduser_id IN (SELECT * FROM useridstodelete); +DELETE FROM authenticateduserlookup WHERE authenticateduser_id IN (SELECT * FROM useridstodelete); +DELETE FROM authenticateduser WHERE id NOT IN (SELECT authenticateduser_id FROM authenticateduserlookup); + +/* +Add validationFormat to DatasetFieldType to + */ +ALTER TABLE datasetfieldtype +ADD COLUMN validationFormat character varying(255); + +/* +for testing display format +This adds a display format that links out to an outside site. 
The format of the #VALUE is +four characters alpha numeric (3fki works) + +update datasetfieldtype +set displayformat = 'PDB (RCSB) #VALUE', +fieldType= 'TEXT' +where id = xxx; + +*/ diff --git a/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.8.3_to_v4.8.4.sql b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.8.3_to_v4.8.4.sql new file mode 100644 index 0000000..670a2d1 --- /dev/null +++ b/dataversedock/testdata/scripts/database/upgrades/upgrade_v4.8.3_to_v4.8.4.sql @@ -0,0 +1,2 @@ +-- Google login has used 131 characters. 64 is not enough. +ALTER TABLE oauth2tokendata ALTER COLUMN accesstoken TYPE text; diff --git a/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/cert.md b/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/cert.md new file mode 100644 index 0000000..d68910f --- /dev/null +++ b/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/cert.md @@ -0,0 +1,13 @@ +Note that `-sha256` is used but the important thing is making sure SHA-1 is not selected when uploading the CSR to https://cert-manager.com/customer/InCommon + + openssl genrsa -out phoenix.dataverse.org.key 2048 + + openssl req -new -sha256 -key phoenix.dataverse.org.key -out phoenix.dataverse.org.csr + + Country Name (2 letter code) [XX]:US + State or Province Name (full name) []:Massachusetts + Locality Name (eg, city) [Default City]:Cambridge + Organization Name (eg, company) [Default Company Ltd]:Harvard College + Organizational Unit Name (eg, section) []:IQSS + Common Name (eg, your name or your server's hostname) []:phoenix.dataverse.org + Email Address []:support@dataverse.org diff --git a/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/deploy b/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/deploy new file mode 100755 index 0000000..f45d7d6 --- /dev/null +++ b/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/deploy @@ -0,0 +1,4 @@ +#!/bin/sh +scripts/deploy/phoenix.dataverse.org/prep +sudo 
/home/jenkins/dataverse/scripts/deploy/phoenix.dataverse.org/rebuild +scripts/deploy/phoenix.dataverse.org/post diff --git a/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/dv-root.json b/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/dv-root.json new file mode 100644 index 0000000..20fa890 --- /dev/null +++ b/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/dv-root.json @@ -0,0 +1,15 @@ +{ + "alias": "root", + "name": "Root", + "permissionRoot": false, + "facetRoot": true, + "description": "Welcome! phoenix.dataverse.org is so named because data here is deleted on every build of the latest Dataverse code: http://guides.dataverse.org/en/latest/developers", + "dataverseSubjects": [ + "Other" + ], + "dataverseContacts": [ + { + "contactEmail": "root@mailinator.com" +} + ] +} diff --git a/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/install b/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/install new file mode 100755 index 0000000..f3df88a --- /dev/null +++ b/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/install @@ -0,0 +1,20 @@ +#!/bin/sh +export HOST_ADDRESS=phoenix.dataverse.org +export GLASSFISH_ROOT=/usr/local/glassfish4 +export FILES_DIR=/usr/local/glassfish4/glassfish/domains/domain1/files +export DB_NAME=dvndb +export DB_PORT=5432 +export DB_HOST=localhost +export DB_USER=dvnapp +export DB_PASS=secret +export RSERVE_HOST=localhost +export RSERVE_PORT=6311 +export RSERVE_USER=rserve +export RSERVE_PASS=rserve +export SMTP_SERVER=localhost +export MEM_HEAP_SIZE=2048 +export GLASSFISH_DOMAIN=domain1 +cd scripts/installer +cp pgdriver/postgresql-8.4-703.jdbc4.jar $GLASSFISH_ROOT/glassfish/lib +cp ../../conf/jhove/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf +./glassfish-setup.sh diff --git a/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/post b/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/post new file mode 100755 index 
0000000..7716fa8 --- /dev/null +++ b/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/post @@ -0,0 +1,15 @@ +#/bin/sh +cd scripts/api +./setup-all.sh --insecure | tee /tmp/setup-all.sh.out +cd ../.. +psql -U dvnapp dvndb -f scripts/database/reference_data.sql +psql -U dvnapp dvndb -f doc/sphinx-guides/source/_static/util/pg8-createsequence-prep.sql +psql -U dvnapp dvndb -f doc/sphinx-guides/source/_static/util/createsequence.sql +scripts/search/tests/publish-dataverse-root +git checkout scripts/api/data/dv-root.json +scripts/search/tests/grant-authusers-add-on-root +scripts/search/populate-users +scripts/search/create-users +scripts/search/tests/create-all-and-test +scripts/search/tests/publish-spruce1-and-test +java -jar downloads/schemaSpy_5.0.0.jar -t pgsql -host localhost -db dvndb -u postgres -p secret -s public -dp scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar -o /var/www/html/schemaspy/latest diff --git a/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/prep b/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/prep new file mode 100755 index 0000000..4660125 --- /dev/null +++ b/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/prep @@ -0,0 +1,2 @@ +#/bin/bash -x +cp scripts/deploy/phoenix.dataverse.org/dv-root.json scripts/api/data/dv-root.json diff --git a/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/rebuild b/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/rebuild new file mode 100755 index 0000000..ca92ef5 --- /dev/null +++ b/dataversedock/testdata/scripts/deploy/phoenix.dataverse.org/rebuild @@ -0,0 +1,18 @@ +#!/bin/sh +LIST_APP=$(/usr/local/glassfish4/glassfish/bin/asadmin list-applications -t) +echo "deployed: $LIST_APP" +OLD_WAR=$(echo $LIST_APP | awk '{print $1}') +NEW_WAR=/tmp/dataverse.war +/usr/local/glassfish4/glassfish/bin/asadmin undeploy $OLD_WAR +/usr/local/glassfish4/glassfish/bin/asadmin stop-domain +# blow away "generated" directory to avoid EJB Timer Service is 
not available" https://github.com/IQSS/dataverse/issues/3336 +rm -rf /usr/local/glassfish4/glassfish/domains/domain1/generated +rm -rf /usr/local/glassfish4/glassfish/domains/domain1/files +#psql -U postgres -c "CREATE ROLE dvnapp UNENCRYPTED PASSWORD 'secret' SUPERUSER CREATEDB CREATEROLE INHERIT LOGIN" template1 +psql -U dvnapp -c 'DROP DATABASE "dvndb"' template1 +echo $? +curl http://localhost:8983/solr/update/json?commit=true -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"*:*\"}}" +psql -U dvnapp -c 'CREATE DATABASE "dvndb" WITH OWNER = "dvnapp"' template1 +echo $? +/usr/local/glassfish4/glassfish/bin/asadmin start-domain +/usr/local/glassfish4/glassfish/bin/asadmin deploy $NEW_WAR diff --git a/dataversedock/testdata/scripts/installer/Makefile b/dataversedock/testdata/scripts/installer/Makefile new file mode 100644 index 0000000..046e6cb --- /dev/null +++ b/dataversedock/testdata/scripts/installer/Makefile @@ -0,0 +1,76 @@ +INSTALLER_ZIP_DIR=dvinstall +DISTRIBUTION_WAR_FILE=${INSTALLER_ZIP_DIR}/dataverse.war +GLASSFISH_SETUP_SCRIPT=${INSTALLER_ZIP_DIR}/glassfish-setup.sh +POSTGRES_DRIVERS=${INSTALLER_ZIP_DIR}/pgdriver +API_SCRIPTS=${INSTALLER_ZIP_DIR}/setup-datasetfields.sh ${INSTALLER_ZIP_DIR}/setup-users.sh ${INSTALLER_ZIP_DIR}/setup-builtin-roles.sh ${INSTALLER_ZIP_DIR}/setup-dvs.sh ${INSTALLER_ZIP_DIR}/data ${INSTALLER_ZIP_DIR}/setup-identity-providers.sh ${INSTALLER_ZIP_DIR}/setup-all.sh ${INSTALLER_ZIP_DIR}/post-install-api-block.sh +DB_SCRIPT=${INSTALLER_ZIP_DIR}/reference_data.sql +JHOVE_CONFIG=${INSTALLER_ZIP_DIR}/jhove.conf +JHOVE_SCHEMA=${INSTALLER_ZIP_DIR}/jhoveConfig.xsd +SOLR_SCHEMA=${INSTALLER_ZIP_DIR}/schema.xml +INSTALL_SCRIPT=${INSTALLER_ZIP_DIR}/install + +installer: dvinstall.zip + +clean: + /bin/rm -rf ${INSTALLER_ZIP_DIR} dvinstall.zip + +dvinstall.zip: ${GLASSFISH_SETUP_SCRIPT} ${POSTGRES_DRIVERS} ${DISTRIBUTION_WAR_FILE} ${API_SCRIPTS} ${DB_SCRIPT} ${JHOVE_CONFIG} ${JHOVE_SCHEMA} ${SOLR_SCHEMA} 
${INSTALL_SCRIPT} + @echo making installer... + zip -r dvinstall.zip ${GLASSFISH_SETUP_SCRIPT} ${POSTGRES_DRIVERS} ${DISTRIBUTION_WAR_FILE} ${API_SCRIPTS} ${DB_SCRIPT} ${JHOVE_CONFIG} ${JHOVE_SCHEMA} ${SOLR_SCHEMA} ${INSTALL_SCRIPT} + @echo + @echo "Done!" + +${INSTALL_SCRIPT}: install + @echo copying the installer script + mkdir -p ${INSTALLER_ZIP_DIR} + /bin/cp install ${INSTALLER_ZIP_DIR} + +${DISTRIBUTION_WAR_FILE}: + @echo copying war file... + @mkdir -p ${INSTALLER_ZIP_DIR} + @VERSION_NUMBER=`grep -m1 '' ../../pom.xml | sed 's/ *<\/*version>//g'`; export VERSION_NUMBER; \ + BUILT_WAR_FILE=../../target/dataverse-$$VERSION_NUMBER.war; export BUILT_WAR_FILE; \ + if [ -f $$BUILT_WAR_FILE ]; \ + then \ + /bin/cp $$BUILT_WAR_FILE ${DISTRIBUTION_WAR_FILE}; \ + else \ + echo "ERROR: can't find application .war file ($${BUILT_WAR_FILE})!"; \ + echo "Build the Dataverse application war file, then try again."; \ + exit 1; \ + fi + +${GLASSFISH_SETUP_SCRIPT}: glassfish-setup.sh + @echo copying glassfish setup + mkdir -p ${INSTALLER_ZIP_DIR} + /bin/cp glassfish-setup.sh ${INSTALLER_ZIP_DIR} + + +${POSTGRES_DRIVERS}: pgdriver/postgresql-8.4-703.jdbc4.jar pgdriver/postgresql-9.0-802.jdbc4.jar pgdriver/postgresql-9.1-902.jdbc4.jar pgdriver/postgresql-9.2-1004.jdbc4.jar pgdriver/postgresql-9.3-1104.jdbc4.jar pgdriver/postgresql-9.4.1212.jar pgdriver/postgresql-42.1.4.jar + @echo copying postgres drviers + @mkdir -p ${POSTGRES_DRIVERS} + /bin/cp pgdriver/postgresql-8.4-703.jdbc4.jar pgdriver/postgresql-9.0-802.jdbc4.jar pgdriver/postgresql-9.1-902.jdbc4.jar pgdriver/postgresql-9.2-1004.jdbc4.jar pgdriver/postgresql-9.3-1104.jdbc4.jar pgdriver/postgresql-9.4.1212.jar pgdriver/postgresql-42.1.4.jar ${INSTALLER_ZIP_DIR}/pgdriver + +${API_SCRIPTS}: ../api/setup-datasetfields.sh ../api/setup-users.sh ../api/setup-dvs.sh ../api/setup-identity-providers.sh ../api/setup-all.sh ../api/post-install-api-block.sh ../api/setup-builtin-roles.sh ../api/data + @echo copying api scripts + 
@mkdir -p ${INSTALLER_ZIP_DIR} + /bin/cp -R ../api/setup-datasetfields.sh ../api/setup-users.sh ../api/setup-dvs.sh ../api/setup-identity-providers.sh ../api/setup-all.sh ../api/post-install-api-block.sh ../api/setup-builtin-roles.sh ../api/data ${INSTALLER_ZIP_DIR} + +${DB_SCRIPT}: ../database/reference_data.sql + @echo copying reference data sql script + @mkdir -p ${INSTALLER_ZIP_DIR} + /bin/cp ../database/reference_data.sql ${INSTALLER_ZIP_DIR} + +${JHOVE_CONFIG}: ../../conf/jhove/jhove.conf + @echo copying jhove config file + @mkdir -p ${INSTALLER_ZIP_DIR} + /bin/cp ../../conf/jhove/jhove.conf ${INSTALLER_ZIP_DIR} + +${JHOVE_SCHEMA}: ../../conf/jhove/jhoveConfig.xsd + @echo copying jhove schema file + @mkdir -p ${INSTALLER_ZIP_DIR} + /bin/cp ../../conf/jhove/jhoveConfig.xsd ${INSTALLER_ZIP_DIR} + +${SOLR_SCHEMA}: ../../conf/solr/4.6.0/schema.xml + @echo copying Solr schema file + @mkdir -p ${INSTALLER_ZIP_DIR} + /bin/cp ../../conf/solr/4.6.0/schema.xml ${INSTALLER_ZIP_DIR} diff --git a/dataversedock/testdata/scripts/installer/README.txt b/dataversedock/testdata/scripts/installer/README.txt new file mode 100644 index 0000000..fb41f4f --- /dev/null +++ b/dataversedock/testdata/scripts/installer/README.txt @@ -0,0 +1,45 @@ +The installer script (install) can be run either by a developer (inside the source tree), or by an end-user installing the Dataverse. The latter will obtain the script as part of the distribution bundle; and they will be running it inside the unzipped bundle directory. + +In the former (developer) case, the installer will be looking for the files it needs in the other directories in the source tree. +For example, the war file (once built) can be found in ../../target/. The name of the war file will be dataverse-{VERSION}.war, where +{VERSION} is the version number of the Dataverse, obtained from the pom file (../../pom.xml). 
For example, as of writing this README.txt (July 2015) the war file is ../../target/dataverse-4.1.war/ + +When building a distribution archive, the Makefile will pile all the files that the installer needs in one directory (./dvinstall here) and then zip it up. We upload the resulting zip bundle on github as the actual software release. This way the end user only gets the files they actually need to install the Dataverse app. So they can do so without pulling the entire source tree. + + +The installer script itself (the perl script ./install) knows to look for all these files in 2 places (for example, it will look for the war file in ../../target/; if it's not there, it'll assume this is a distribution bundle and look for it as ./dataverse.war) + +Here's the list of the files that the installer needs: + +the war file: +target/dataverse-{VERSION}.war + +and also: + +from scripts/installer (this directory): + +install +glassfish-setup.sh +pgdriver (the entire directory with all its contents) + +from scripts/api: + +setup-all.sh +setup-builtin-roles.sh +setup-datasetfields.sh +setup-dvs.sh +setup-identity-providers.sh +setup-users.sh +data (the entire directory with all its contents) + +from scripts/database: + +reference_data.sql + +from conf/jhove: + +jhove.conf + +SOLR schema file, from conf/solr/4.6.0: + +schema.xml diff --git a/dataversedock/testdata/scripts/installer/dvinstall/glassfish-setup.sh b/dataversedock/testdata/scripts/installer/dvinstall/glassfish-setup.sh new file mode 100755 index 0000000..397cebf --- /dev/null +++ b/dataversedock/testdata/scripts/installer/dvinstall/glassfish-setup.sh @@ -0,0 +1,261 @@ +#!/bin/bash +# YOU (THE HUMAN USER) SHOULD NEVER RUN THIS SCRIPT DIRECTLY! +# It should be run by higher-level installers. +# The following arguments should be passed to it +# as environmental variables: +# (no defaults for these values are provided here!) 
+# +# glassfish configuration: +# GLASSFISH_ROOT +# GLASSFISH_DOMAIN +# ASADMIN_OPTS +# MEM_HEAP_SIZE +# +# database configuration: +# DB_PORT +# DB_HOST +# DB_NAME +# DB_USER +# DB_PASS +# +# Rserve configuration: +# RSERVE_HOST +# RSERVE_PORT +# RSERVE_USER +# RSERVE_PASS +# +# other local configuration: +# HOST_ADDRESS +# SMTP_SERVER +# FILES_DIR + +# The script is going to fail and exit if any of the +# parameters aren't supplied. It is the job of the +# parent script to set all these env. variables, +# providing default values, if none are supplied by +# the user, etc. + +if [ -z "$DB_NAME" ] + then + echo "You must specify database name (DB_NAME)." + echo "PLEASE NOTE THAT YOU (THE HUMAN USER) SHOULD NEVER RUN THIS SCRIPT DIRECTLY!" + echo "IT SHOULD ONLY BE RUN BY OTHER SCRIPTS." + exit 1 +fi + +if [ -z "$DB_PORT" ] + then + echo "You must specify database port (DB_PORT)." + exit 1 +fi + +if [ -z "$DB_HOST" ] + then + echo "You must specify database host (DB_HOST)." + exit 1 +fi + +if [ -z "$DB_USER" ] + then + echo "You must specify database user (DB_USER)." + exit 1 +fi + +if [ -z "$DB_PASS" ] + then + echo "You must specify database password (DB_PASS)." + exit 1 +fi + +if [ -z "$RSERVE_HOST" ] + then + echo "You must specify Rserve host (RSERVE_HOST)." + exit 1 +fi + +if [ -z "$RSERVE_PORT" ] + then + echo "You must specify Rserve port (RSERVE_PORT)." + exit 1 +fi + +if [ -z "$RSERVE_USER" ] + then + echo "You must specify Rserve user (RSERVE_USER)." + exit 1 +fi + +if [ -z "$RSERVE_PASS" ] + then + echo "You must specify Rserve password (RSERVE_PASS)." + exit 1 +fi + +if [ -z "$SMTP_SERVER" ] + then + echo "You must specify smtp server (SMTP_SERVER)." + exit 1 +fi + +if [ -z "$HOST_ADDRESS" ] + then + echo "You must specify host address (HOST_ADDRESS)." + exit 1 +fi + +if [ -z "$FILES_DIR" ] + then + echo "You must specify files directory (FILES_DIR)." 
+ exit 1 +fi + +if [ -z "$MEM_HEAP_SIZE" ] + then + echo "You must specify the memory heap size for glassfish (MEM_HEAP_SIZE)." + exit 1 +fi + +if [ -z "$GLASSFISH_DOMAIN" ] + then + echo "You must specify glassfish domain (GLASSFISH_DOMAIN)." + exit 1 +fi + +echo "checking glassfish root:"${GLASSFISH_ROOT} + +if [ ! -d "$GLASSFISH_ROOT" ] + then + echo Glassfish root '$GLASSFISH_ROOT' does not exist + exit 1 +fi +GLASSFISH_BIN_DIR=$GLASSFISH_ROOT/bin + +echo "checking glassfish domain:"${GLASSFISH_ROOT}/glassfish/domains/$GLASSFISH_DOMAIN + +DOMAIN_DIR=$GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN +if [ ! -d "$DOMAIN_DIR" ] + then + echo Domain directory '$DOMAIN_DIR' does not exist + exit 2 +fi + +echo "Setting up your glassfish4 to support Dataverse" +echo "Glassfish directory: "$GLASSFISH_ROOT +echo "Domain directory: "$DOMAIN_DIR + +# Move to the glassfish dir +pushd $GLASSFISH_BIN_DIR + +### +# take the domain up, if needed. +DOMAIN_DOWN=$(./asadmin list-domains | grep "$DOMAIN " | grep "not running") +if [ $(echo $DOMAIN_DOWN|wc -c) -ne 1 ]; + then + echo Trying to start domain $GLASSFISH_DOMAIN up... 
+ ./asadmin $ASADMIN_OPTS start-domain $GLASSFISH_DOMAIN + else + echo domain running +fi + +# undeploy the app, if running: + +./asadmin $ASADMIN_OPTS undeploy dataverse-4.0 + +# avoid OutOfMemoryError: PermGen per http://eugenedvorkin.com/java-lang-outofmemoryerror-permgen-space-error-during-deployment-to-glassfish/ +#./asadmin $ASADMIN_OPTS list-jvm-options +./asadmin $ASADMIN_OPTS delete-jvm-options "-XX\:MaxPermSize=192m" +./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:MaxPermSize=512m" +./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:PermSize=256m" +./asadmin $ASADMIN_OPTS delete-jvm-options -Xmx512m +./asadmin $ASADMIN_OPTS create-jvm-options "-Xmx${MEM_HEAP_SIZE}m" +./asadmin $ASADMIN_OPTS delete-jvm-options -client +./asadmin $ASADMIN_OPTS create-jvm-options "-server" + +### +# JDBC connection pool + +# we'll try to delete a pool with this name, if already exists. +# - in case the database name has changed since the last time it +# was configured. +./asadmin $ASADMIN_OPTS delete-jdbc-connection-pool --cascade=true dvnDbPool + + +./asadmin $ASADMIN_OPTS create-jdbc-connection-pool --restype javax.sql.DataSource \ + --datasourceclassname org.postgresql.ds.PGPoolingDataSource \ + --property create=true:User=$DB_USER:PortNumber=$DB_PORT:databaseName=$DB_NAME:password=$DB_PASS:ServerName=$DB_HOST \ + dvnDbPool + +### +# Create data sources +./asadmin $ASADMIN_OPTS create-jdbc-resource --connectionpoolid dvnDbPool jdbc/VDCNetDS + +### +# Set up the data source for the timers +./asadmin $ASADMIN_OPTS set configs.config.server-config.ejb-container.ejb-timer-service.timer-datasource=jdbc/VDCNetDS + +### +# Add the necessary JVM options: +# +# location of the datafiles directory: +# (defaults to dataverse/files in the users home directory) +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files.directory=${FILES_DIR}" +# Rserve-related JVM options: +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.rserve.host=${RSERVE_HOST}" +./asadmin 
$ASADMIN_OPTS create-jvm-options "\-Ddataverse.rserve.port=${RSERVE_PORT}" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.rserve.user=${RSERVE_USER}" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.rserve.password=${RSERVE_PASS}" +# Data Deposit API options +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.fqdn=${HOST_ADDRESS}" +# password reset token timeout in minutes +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.auth.password-reset-timeout-in-minutes=60" + +./asadmin $ASADMIN_OPTS create-jvm-options "\-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl" + +# EZID DOI Settings +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.password=apitest" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.username=apitest" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.baseurlstring=https\://ezid.cdlib.org" +# "I am the timer server" option: +./asadmin $ASADMIN_OPTS create-jvm-options "-Ddataverse.timerServer=true" + +# enable comet support +./asadmin $ASADMIN_OPTS set server-config.network-config.protocols.protocol.http-listener-1.http.comet-support-enabled="true" + +./asadmin $ASADMIN_OPTS delete-connector-connection-pool --cascade=true jms/__defaultConnectionFactory-Connection-Pool + +# no need to explicitly delete the connector resource for the connection pool deleted in the step +# above - the cascade delete takes care of it. 
+#./asadmin $ASADMIN_OPTS delete-connector-resource jms/__defaultConnectionFactory-Connection-Pool + +# http://docs.oracle.com/cd/E19798-01/821-1751/gioce/index.html +./asadmin $ASADMIN_OPTS create-connector-connection-pool --steadypoolsize 1 --maxpoolsize 250 --poolresize 2 --maxwait 60000 --raname jmsra --connectiondefinition javax.jms.QueueConnectionFactory jms/IngestQueueConnectionFactoryPool + +# http://docs.oracle.com/cd/E18930_01/html/821-2416/abllx.html#giogt +./asadmin $ASADMIN_OPTS create-connector-resource --poolname jms/IngestQueueConnectionFactoryPool --description "ingest connector resource" jms/IngestQueueConnectionFactory + +# http://docs.oracle.com/cd/E18930_01/html/821-2416/ablmc.html#giolr +./asadmin $ASADMIN_OPTS create-admin-object --restype javax.jms.Queue --raname jmsra --description "sample administered object" --property Name=DataverseIngest jms/DataverseIngest + +# no need to explicitly create the resource reference for the connection factory created above - +# the "create-connector-resource" creates the reference automatically. +#./asadmin $ASADMIN_OPTS create-resource-ref --target Cluster1 jms/IngestQueueConnectionFactory + +# created mail configuration: + +./asadmin $ASADMIN_OPTS create-javamail-resource --mailhost "$SMTP_SERVER" --mailuser "dataversenotify" --fromaddress "do-not-reply@${HOST_ADDRESS}" mail/notifyMailSession + +# so we can front with apache httpd ( ProxyPass / ajp://localhost:8009/ ) +./asadmin $ASADMIN_OPTS create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector + +### +# Restart +echo Updates done. Restarting... 
+./asadmin $ASADMIN_OPTS restart-domain $GLASSFISH_DOMAIN + +### +# Clean up +popd + +echo "Glassfish setup complete" +date + diff --git a/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-42.1.4.jar b/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-42.1.4.jar new file mode 100644 index 0000000..08a54b1 Binary files /dev/null and b/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-42.1.4.jar differ diff --git a/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-8.4-703.jdbc4.jar b/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-8.4-703.jdbc4.jar new file mode 100644 index 0000000..7c8d5f8 Binary files /dev/null and b/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-8.4-703.jdbc4.jar differ diff --git a/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.0-802.jdbc4.jar b/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.0-802.jdbc4.jar new file mode 100644 index 0000000..9e16af0 Binary files /dev/null and b/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.0-802.jdbc4.jar differ diff --git a/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.1-902.jdbc4.jar b/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.1-902.jdbc4.jar new file mode 100644 index 0000000..078f379 Binary files /dev/null and b/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.1-902.jdbc4.jar differ diff --git a/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.2-1004.jdbc4.jar b/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.2-1004.jdbc4.jar new file mode 100644 index 0000000..b9270d2 Binary files /dev/null and b/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.2-1004.jdbc4.jar differ diff --git 
a/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.3-1104.jdbc4.jar b/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.3-1104.jdbc4.jar new file mode 100644 index 0000000..a79525d Binary files /dev/null and b/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.3-1104.jdbc4.jar differ diff --git a/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.4.1212.jar b/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.4.1212.jar new file mode 100644 index 0000000..b0de752 Binary files /dev/null and b/dataversedock/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.4.1212.jar differ diff --git a/dataversedock/testdata/scripts/installer/glassfish-setup.sh b/dataversedock/testdata/scripts/installer/glassfish-setup.sh new file mode 100755 index 0000000..397cebf --- /dev/null +++ b/dataversedock/testdata/scripts/installer/glassfish-setup.sh @@ -0,0 +1,261 @@ +#!/bin/bash +# YOU (THE HUMAN USER) SHOULD NEVER RUN THIS SCRIPT DIRECTLY! +# It should be run by higher-level installers. +# The following arguments should be passed to it +# as environmental variables: +# (no defaults for these values are provided here!) +# +# glassfish configuration: +# GLASSFISH_ROOT +# GLASSFISH_DOMAIN +# ASADMIN_OPTS +# MEM_HEAP_SIZE +# +# database configuration: +# DB_PORT +# DB_HOST +# DB_NAME +# DB_USER +# DB_PASS +# +# Rserve configuration: +# RSERVE_HOST +# RSERVE_PORT +# RSERVE_USER +# RSERVE_PASS +# +# other local configuration: +# HOST_ADDRESS +# SMTP_SERVER +# FILES_DIR + +# The script is going to fail and exit if any of the +# parameters aren't supplied. It is the job of the +# parent script to set all these env. variables, +# providing default values, if none are supplied by +# the user, etc. + +if [ -z "$DB_NAME" ] + then + echo "You must specify database name (DB_NAME)." + echo "PLEASE NOTE THAT YOU (THE HUMAN USER) SHOULD NEVER RUN THIS SCRIPT DIRECTLY!" 
+ echo "IT SHOULD ONLY BE RUN BY OTHER SCRIPTS." + exit 1 +fi + +if [ -z "$DB_PORT" ] + then + echo "You must specify database port (DB_PORT)." + exit 1 +fi + +if [ -z "$DB_HOST" ] + then + echo "You must specify database host (DB_HOST)." + exit 1 +fi + +if [ -z "$DB_USER" ] + then + echo "You must specify database user (DB_USER)." + exit 1 +fi + +if [ -z "$DB_PASS" ] + then + echo "You must specify database password (DB_PASS)." + exit 1 +fi + +if [ -z "$RSERVE_HOST" ] + then + echo "You must specify Rserve host (RSERVE_HOST)." + exit 1 +fi + +if [ -z "$RSERVE_PORT" ] + then + echo "You must specify Rserve port (RSERVE_PORT)." + exit 1 +fi + +if [ -z "$RSERVE_USER" ] + then + echo "You must specify Rserve user (RSERVE_USER)." + exit 1 +fi + +if [ -z "$RSERVE_PASS" ] + then + echo "You must specify Rserve password (RSERVE_PASS)." + exit 1 +fi + +if [ -z "$SMTP_SERVER" ] + then + echo "You must specify smtp server (SMTP_SERVER)." + exit 1 +fi + +if [ -z "$HOST_ADDRESS" ] + then + echo "You must specify host address (HOST_ADDRESS)." + exit 1 +fi + +if [ -z "$FILES_DIR" ] + then + echo "You must specify files directory (FILES_DIR)." + exit 1 +fi + +if [ -z "$MEM_HEAP_SIZE" ] + then + echo "You must specify the memory heap size for glassfish (MEM_HEAP_SIZE)." + exit 1 +fi + +if [ -z "$GLASSFISH_DOMAIN" ] + then + echo "You must specify glassfish domain (GLASSFISH_DOMAIN)." + exit 1 +fi + +echo "checking glassfish root:"${GLASSFISH_ROOT} + +if [ ! -d "$GLASSFISH_ROOT" ] + then + echo Glassfish root '$GLASSFISH_ROOT' does not exist + exit 1 +fi +GLASSFISH_BIN_DIR=$GLASSFISH_ROOT/bin + +echo "checking glassfish domain:"${GLASSFISH_ROOT}/glassfish/domains/$GLASSFISH_DOMAIN + +DOMAIN_DIR=$GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN +if [ ! 
-d "$DOMAIN_DIR" ] + then + echo Domain directory '$DOMAIN_DIR' does not exist + exit 2 +fi + +echo "Setting up your glassfish4 to support Dataverse" +echo "Glassfish directory: "$GLASSFISH_ROOT +echo "Domain directory: "$DOMAIN_DIR + +# Move to the glassfish dir +pushd $GLASSFISH_BIN_DIR + +### +# take the domain up, if needed. +DOMAIN_DOWN=$(./asadmin list-domains | grep "$DOMAIN " | grep "not running") +if [ $(echo $DOMAIN_DOWN|wc -c) -ne 1 ]; + then + echo Trying to start domain $GLASSFISH_DOMAIN up... + ./asadmin $ASADMIN_OPTS start-domain $GLASSFISH_DOMAIN + else + echo domain running +fi + +# undeploy the app, if running: + +./asadmin $ASADMIN_OPTS undeploy dataverse-4.0 + +# avoid OutOfMemoryError: PermGen per http://eugenedvorkin.com/java-lang-outofmemoryerror-permgen-space-error-during-deployment-to-glassfish/ +#./asadmin $ASADMIN_OPTS list-jvm-options +./asadmin $ASADMIN_OPTS delete-jvm-options "-XX\:MaxPermSize=192m" +./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:MaxPermSize=512m" +./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:PermSize=256m" +./asadmin $ASADMIN_OPTS delete-jvm-options -Xmx512m +./asadmin $ASADMIN_OPTS create-jvm-options "-Xmx${MEM_HEAP_SIZE}m" +./asadmin $ASADMIN_OPTS delete-jvm-options -client +./asadmin $ASADMIN_OPTS create-jvm-options "-server" + +### +# JDBC connection pool + +# we'll try to delete a pool with this name, if already exists. +# - in case the database name has changed since the last time it +# was configured. 
+./asadmin $ASADMIN_OPTS delete-jdbc-connection-pool --cascade=true dvnDbPool + + +./asadmin $ASADMIN_OPTS create-jdbc-connection-pool --restype javax.sql.DataSource \ + --datasourceclassname org.postgresql.ds.PGPoolingDataSource \ + --property create=true:User=$DB_USER:PortNumber=$DB_PORT:databaseName=$DB_NAME:password=$DB_PASS:ServerName=$DB_HOST \ + dvnDbPool + +### +# Create data sources +./asadmin $ASADMIN_OPTS create-jdbc-resource --connectionpoolid dvnDbPool jdbc/VDCNetDS + +### +# Set up the data source for the timers +./asadmin $ASADMIN_OPTS set configs.config.server-config.ejb-container.ejb-timer-service.timer-datasource=jdbc/VDCNetDS + +### +# Add the necessary JVM options: +# +# location of the datafiles directory: +# (defaults to dataverse/files in the users home directory) +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files.directory=${FILES_DIR}" +# Rserve-related JVM options: +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.rserve.host=${RSERVE_HOST}" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.rserve.port=${RSERVE_PORT}" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.rserve.user=${RSERVE_USER}" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.rserve.password=${RSERVE_PASS}" +# Data Deposit API options +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.fqdn=${HOST_ADDRESS}" +# password reset token timeout in minutes +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.auth.password-reset-timeout-in-minutes=60" + +./asadmin $ASADMIN_OPTS create-jvm-options "\-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl" + +# EZID DOI Settings +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.password=apitest" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.username=apitest" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.baseurlstring=https\://ezid.cdlib.org" +# "I am the timer server" option: +./asadmin $ASADMIN_OPTS create-jvm-options 
"-Ddataverse.timerServer=true" + +# enable comet support +./asadmin $ASADMIN_OPTS set server-config.network-config.protocols.protocol.http-listener-1.http.comet-support-enabled="true" + +./asadmin $ASADMIN_OPTS delete-connector-connection-pool --cascade=true jms/__defaultConnectionFactory-Connection-Pool + +# no need to explicitly delete the connector resource for the connection pool deleted in the step +# above - the cascade delete takes care of it. +#./asadmin $ASADMIN_OPTS delete-connector-resource jms/__defaultConnectionFactory-Connection-Pool + +# http://docs.oracle.com/cd/E19798-01/821-1751/gioce/index.html +./asadmin $ASADMIN_OPTS create-connector-connection-pool --steadypoolsize 1 --maxpoolsize 250 --poolresize 2 --maxwait 60000 --raname jmsra --connectiondefinition javax.jms.QueueConnectionFactory jms/IngestQueueConnectionFactoryPool + +# http://docs.oracle.com/cd/E18930_01/html/821-2416/abllx.html#giogt +./asadmin $ASADMIN_OPTS create-connector-resource --poolname jms/IngestQueueConnectionFactoryPool --description "ingest connector resource" jms/IngestQueueConnectionFactory + +# http://docs.oracle.com/cd/E18930_01/html/821-2416/ablmc.html#giolr +./asadmin $ASADMIN_OPTS create-admin-object --restype javax.jms.Queue --raname jmsra --description "sample administered object" --property Name=DataverseIngest jms/DataverseIngest + +# no need to explicitly create the resource reference for the connection factory created above - +# the "create-connector-resource" creates the reference automatically. 
+#./asadmin $ASADMIN_OPTS create-resource-ref --target Cluster1 jms/IngestQueueConnectionFactory + +# created mail configuration: + +./asadmin $ASADMIN_OPTS create-javamail-resource --mailhost "$SMTP_SERVER" --mailuser "dataversenotify" --fromaddress "do-not-reply@${HOST_ADDRESS}" mail/notifyMailSession + +# so we can front with apache httpd ( ProxyPass / ajp://localhost:8009/ ) +./asadmin $ASADMIN_OPTS create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector + +### +# Restart +echo Updates done. Restarting... +./asadmin $ASADMIN_OPTS restart-domain $GLASSFISH_DOMAIN + +### +# Clean up +popd + +echo "Glassfish setup complete" +date + diff --git a/dataversedock/testdata/scripts/installer/install b/dataversedock/testdata/scripts/installer/install new file mode 100755 index 0000000..9edb8d6 --- /dev/null +++ b/dataversedock/testdata/scripts/installer/install @@ -0,0 +1,1544 @@ +#!/usr/bin/perl + +use strict; +use warnings; +use Getopt::Long; +use Socket; +use File::Copy; + +my $verbose; +my $pg_only; +my $hostname; +my $gfuser; +my $gfdir; +my $mailserver; +my $yes; +my $force; +my $nogfpasswd; +my $admin_email; +my ($rez) = GetOptions( + #"length=i" => \$length, # numeric + #"file=s" => \$data, # string + "verbose" => \$verbose, + "pg_only" => \$pg_only, + "hostname=s" => \$hostname, + "gfuser=s" => \$gfuser, + "gfdir=s" => \$gfdir, + "mailserver=s" => \$mailserver, + "y|yes" => \$yes, + "f|force" => \$force, + "nogfpasswd" => \$nogfpasswd, + "admin_email=s" => \$admin_email, +); + +my @CONFIG_VARIABLES; + +my $postgresonly = 0; + +if ($pg_only) +{ + @CONFIG_VARIABLES = + ( 'POSTGRES_SERVER', 'POSTGRES_PORT', 'POSTGRES_DATABASE', 'POSTGRES_USER', 'POSTGRES_PASSWORD', 'POSTGRES_ADMIN_PASSWORD' ); + + $postgresonly = 1; +} +else +{ + + @CONFIG_VARIABLES = ( + 'HOST_DNS_ADDRESS', + 'GLASSFISH_USER', + 'GLASSFISH_DIRECTORY', + 'ADMIN_EMAIL', + 'MAIL_SERVER', + + 'POSTGRES_SERVER', + 'POSTGRES_PORT', + 'POSTGRES_ADMIN_PASSWORD', 
+ 'POSTGRES_DATABASE', + 'POSTGRES_USER', + 'POSTGRES_PASSWORD', + + 'SOLR_LOCATION', + + 'TWORAVENS_LOCATION', + + 'RSERVE_HOST', + 'RSERVE_PORT', + 'RSERVE_USER', + 'RSERVE_PASSWORD' + + ); +} + +my %CONFIG_DEFAULTS = ( + 'HOST_DNS_ADDRESS', 'localhost', + 'GLASSFISH_USER', '', + 'GLASSFISH_DIRECTORY', '/usr/local/glassfish4', + 'GLASSFISH_USER', '', + 'ADMIN_EMAIL', '', + 'MAIL_SERVER', 'mail.hmdc.harvard.edu', + + 'POSTGRES_ADMIN_PASSWORD', 'secret', + 'POSTGRES_SERVER', '127.0.0.1', + 'POSTGRES_PORT', 5432, + 'POSTGRES_DATABASE', 'dvndb', + 'POSTGRES_USER', 'dvnapp', + 'POSTGRES_PASSWORD', 'secret', + + 'SOLR_LOCATION', 'LOCAL', + + 'TWORAVENS_LOCATION', 'NOT INSTALLED', + + 'RSERVE_HOST', 'localhost', + 'RSERVE_PORT', 6311, + 'RSERVE_USER', 'rserve', + 'RSERVE_PASSWORD', 'rserve' + +); +my %CONFIG_PROMPTS = ( + 'HOST_DNS_ADDRESS', 'Fully Qualified Domain Name of your host', + 'GLASSFISH_USER', 'Glassfish service account username', + 'GLASSFISH_DIRECTORY', 'Glassfish Directory', + 'ADMIN_EMAIL', 'Administrator email address for this Dataverse', + 'MAIL_SERVER', 'SMTP (mail) server to relay notification messages', + + 'POSTGRES_SERVER', 'Postgres Server Address', + 'POSTGRES_PORT', 'Postgres Server Port', + 'POSTGRES_ADMIN_PASSWORD', 'Postgres ADMIN password', + 'POSTGRES_DATABASE', 'Name of the Postgres Database', + 'POSTGRES_USER', 'Name of the Postgres User', + 'POSTGRES_PASSWORD', 'Postgres user password', + + 'SOLR_LOCATION', 'Remote SOLR indexing service', + + 'TWORAVENS_LOCATION', 'Will this Dataverse be using TwoRavens application', + + 'RSERVE_HOST', 'Rserve Server', + 'RSERVE_PORT', 'Rserve Server Port', + 'RSERVE_USER', 'Rserve User Name', + 'RSERVE_PASSWORD', 'Rserve User Password' + +); + + +my %CONFIG_COMMENTS = ( + 'HOST_DNS_ADDRESS', ":\n(enter numeric IP address, if FQDN is unavailable) ", + 'GLASSFISH_USER', ":\nThis user will be running Glassfish service on your system.\n - If this is a dev. 
environment, this should be your own username; \n - In production, we suggest \"glassfish\" or another unprivileged user\n: ", + 'GLASSFISH_DIRECTORY', '', + 'ADMIN_EMAIL', ":\n(please enter a valid email address!) ", + 'MAIL_SERVER', '', + + 'POSTGRES_SERVER', '', + 'POSTGRES_PORT', '', + 'POSTGRES_ADMIN_PASSWORD', ":\n - We will need this to create the user and database that the Dataverse application will be using.\n (Hit RETURN if access control is set to \"trust\" for this connection in pg_hba.conf)\n: ", + 'POSTGRES_USER', ":\n - This is the Postgres user that the Dataverse app will be using to talk to the database\n: ", + 'POSTGRES_DATABASE', '', + 'POSTGRES_PASSWORD', '', + + 'SOLR_LOCATION', "? \n - Leave this set to \"LOCAL\" if the SOLR will be running on the same (this) server.\n Otherwise, please enter the host AND THE PORT NUMBER of the remote SOLR service, colon-separated\n (for example: foo.edu:8983)\n: ", + + 'TWORAVENS_LOCATION', "? \n - If so, please provide the complete URL of the TwoRavens GUI under rApache,\n for example, \"https://foo.edu/dataexplore/gui.html\".\n (PLEASE NOTE, TwoRavens will need to be installed separately! 
- see the installation docs for more info)\n: ", + + 'RSERVE_HOST', '', + 'RSERVE_PORT', '', + 'RSERVE_USER', '', + 'RSERVE_PASSWORD', '' + +); + + +my $API_URL = "http://localhost:8080/api"; + +# Supported Posstgres JDBC drivers: +# (have to be configured explicitely, so that Perl "taint" (security) mode +# doesn't get paranoid) + +my %POSTGRES_DRIVERS = ( + "8_4", "postgresql-8.4-703.jdbc4.jar", + "9_0", "postgresql-9.0-802.jdbc4.jar", + "9_1", "postgresql-9.1-902.jdbc4.jar", + "9_2", "postgresql-9.2-1004.jdbc4.jar", + "9_3", "postgresql-9.3-1104.jdbc4.jar", + "9_4", "postgresql-9.4.1212.jar", + "9_5", "postgresql-42.1.4.jar", + "9_6", "postgresql-42.1.4.jar" +); + +# A few preliminary checks: + +# OS: + +my $uname_out = `uname -a`; + +# hostname: + +my $hostname_from_cmdline = `hostname`; +chop $hostname_from_cmdline; + +if ($hostname) { + $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} = $hostname; +} +else { + $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} = $hostname_from_cmdline; +} + +# read default configuration values from tab separated file "default.config" if it exists +# moved after the $hostname_from_cmdline section to avoid excessively complicating the logic +# of command line argument, automatic selection, or config file. +sub trim { my $s = shift; $s =~ s/^\s+|\s+$//g; return $s }; +my $config_default_file = "default.config"; +if ( -e $config_default_file ) +{ + print("loading default configuration values from $config_default_file\n"); + open( my $inp_cfg, $config_default_file ); + while( my $ln = <$inp_cfg> ) + { + my @xs = split('\t', $ln ); + if ( 2 == @xs ) + { + my $k = $xs[0]; + my $v = trim($xs[1]); + $CONFIG_DEFAULTS{$k}=$v; + } + } +} +else +{ + print("using hard-coded default configuration values ($config_default_file not found)\n"); +} + +# get current user. first one wins. 
+my $current_user = $ENV{LOGNAME} || $ENV{USER} || getpwuid($<); + +if (!$CONFIG_DEFAULTS{'GLASSFISH_USER'}) { + $CONFIG_DEFAULTS{'GLASSFISH_USER'} = $current_user; + print "No pre-configured user found; using $current_user.\n"; +} + +# command-line argument takes precendence +if ($gfuser) { + print "Using CLI-specified user $gfuser.\n"; + $CONFIG_DEFAULTS{'GLASSFISH_USER'} = $gfuser; +} + +# prefer that we not install as root. +unless ( $< != 0 ) { +print "####################################################################\n"; +print " It is recommended that this script not be run as root.\n"; +print " Consider creating a glassfish service account, giving it ownership\n"; +print " on the glassfish/domains/domain1/ and glassfish/lib/ directories,\n"; +print " along with the JVM-specified files.dir location, and running\n"; +print " this installer as the user who will launch Glassfish.\n"; +print "####################################################################\n"; +} + +# ensure $gfuser exists or bail +my $gfidcmd="id $CONFIG_DEFAULTS{'GLASSFISH_USER'}"; +my $gfreturncode=system($gfidcmd); +if ($gfreturncode != 0) { + die "Couldn't find user $gfuser. 
Please ensure the account exists and is readable by the user running this installer.\n"; +} + +if ($mailserver) { + $CONFIG_DEFAULTS{'MAIL_SERVER'} = $mailserver; +} + +if ($gfdir) { + $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} = $gfdir; +} + +print "\nWelcome to the Dataverse installer.\n"; +unless ($postgresonly) { + print "You will be guided through the process of setting up a NEW\n"; + print "instance of the dataverse application\n"; +} +else { + print "You will be guided through the process of configuring the\n"; + print "LOCAL instance of PostgreSQL database for use by the DVN\n"; + print "application.\n"; +} + +my @uname_tokens = split( " ", $uname_out ); + +my $WORKING_OS; +if ( $uname_tokens[0] eq "Darwin" ) { + print "\nThis appears to be a MacOS X system; good.\n"; + # TODO: check the OS version + + $WORKING_OS = "MacOSX"; +} +elsif ( $uname_tokens[0] eq "Linux" ) { + if ( -f "/etc/redhat-release" ) { + print "\nThis appears to be a RedHat system; good.\n"; + $WORKING_OS = "RedHat"; + # TODO: check the distro version + } + else { + print "\nThis appears to be a non-RedHat Linux system;\n"; + print "this installation *may* succeed; but we're not making any promises!\n"; + $WORKING_OS = "Linux"; + } +} +else { + print "\nWARNING: This appears to be neither a Linux or MacOS X system!\n"; + print "This installer script will most likely fail. 
Please refer to the\n";
+    print "DVN Installers Guide for more information.\n\n";
+
+    $WORKING_OS = "Unknown";
+
+    print "Do you wish to continue?\n [y/n] ";
+
+    my $yesnocont;
+
+    if ($yes) {
+        $yesnocont = "y";
+    }
+    else {
+        $yesnocont = <>;
+        chop $yesnocont;
+    }
+
+    while ( $yesnocont ne "y" && $yesnocont ne "n" ) {
+        print "Please enter 'y' or 'n'!\n";
+        print "(or ctrl-C to exit the installer)\n";
+        $yesnocont = <>;
+        chop $yesnocont;
+    }
+
+    if ( $yesnocont eq "n" ) {
+        exit 0;
+    }
+
+}
+
+ENTERCONFIG:
+
+print "\n";
+print "Please enter the following configuration values:\n";
+print "(hit [RETURN] to accept the default value)\n";
+print "\n";
+
+for my $ENTRY (@CONFIG_VARIABLES)
+{
+    my $config_prompt = $CONFIG_PROMPTS{$ENTRY};
+    my $config_comment = $CONFIG_COMMENTS{$ENTRY};
+
+    if ( $config_comment eq '' )
+    {
+        print $config_prompt . ": ";
+        print "[" . $CONFIG_DEFAULTS{$ENTRY} . "] ";
+    }
+    else
+    {
+        print $config_prompt . $config_comment;
+        print "[" . $CONFIG_DEFAULTS{$ENTRY} . "] ";
+    }
+
+    my $user_entry = "";
+
+    unless ($yes)
+    {
+        $user_entry = <>;
+        chop $user_entry;
+
+        if ( $user_entry ne "" ) {
+            $CONFIG_DEFAULTS{$ENTRY} = $user_entry;
+        }
+
+
+        # for some values, we'll try to do some validation right here, in real time:
+
+        if ($ENTRY eq 'ADMIN_EMAIL')
+        {
+            $user_entry = $CONFIG_DEFAULTS{$ENTRY};
+            my $attempts = 0;
+            while ($user_entry !~/[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}/)
+            {
+                $attempts++;
+                print "Please enter a valid email address: ";
+                $user_entry = <>;
+                chop $user_entry;
+            }
+
+            if ($attempts)
+            {
+                print "OK, looks legit.\n";
+                $CONFIG_DEFAULTS{$ENTRY} = $user_entry;
+            }
+
+        }
+        elsif ($ENTRY eq 'GLASSFISH_DIRECTORY')
+        {
+            # 5a. CHECK IF GLASSFISH DIR LOOKS OK:
+            print "\nChecking your Glassfish installation...";
+
+            my $g_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'};
+
+
+            unless ( -d $g_dir . 
"/glassfish/domains/domain1" ) + { + # TODO: need better check than this + + while ( !( -d $g_dir . "/glassfish/domains/domain1" ) ) + { + print "\nInvalid Glassfish directory " . $g_dir . "!\n"; + print "Enter the root directory of your Glassfish installation:\n"; + print "(Or ctrl-C to exit the installer): "; + + $g_dir = <>; + chop $g_dir; + } + + # TODO: + # verify that we can write in the Glassfish directory + # (now that we are no longer requiring to run the installer as root) + + my $g_testdir = $g_dir . "/glassfish/domains/domain1"; + my $g_libdir = $g_dir . "/glassfish/lib"; + if (!(-w $g_testdir)) { + die("$g_testdir not writable. Have you created a glassfish user, and given it write permission on $g_testdir?\n"); + } elsif (!(-w $g_libdir)) { + die("$g_libdir not writable. Have you created a glassfish user, and given it write permission on $g_libdir?\n"); + } + + } + + print "OK!\n"; + $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} = $g_dir; + + } + elsif ($ENTRY eq 'MAIL_SERVER') + { + my $smtp_server = ""; + while (! &validate_smtp_server() ) + { + print "Enter a valid SMTP (mail) server:\n"; + print "(Or ctrl-C to exit the installer): "; + + $smtp_server = <>; + chop $smtp_server; + + $CONFIG_DEFAULTS{'MAIL_SERVER'} = $smtp_server unless $smtp_server eq ''; + } + } + } + + print "\n"; +} + +# CONFIRM VALUES ENTERED: + +print "\nOK, please confirm what you've entered:\n\n"; + +for my $ENTRY (@CONFIG_VARIABLES) { + print $CONFIG_PROMPTS{$ENTRY} . ": " . $CONFIG_DEFAULTS{$ENTRY} . "\n"; +} + +my $yesno; +if ($yes) { + $yesno = "y"; +} +else { + print "\nIs this correct? [y/n] "; + $yesno = <>; + chop $yesno; +} + +while ( $yesno ne "y" && $yesno ne "n" ) { + print "Please enter 'y' or 'n'!\n"; + print "(or ctrl-C to exit the installer)\n"; + $yesno = <>; + chop $yesno; +} + +if ( $yesno eq "n" ) { + goto ENTERCONFIG; +} + +# VALIDATION/VERIFICATION OF THE CONFIGURATION VALUES: +# 1. 
VERIFY/VALIDATE THE MAIL SERVER THEY CONFIGURED:
+# (has been moved to the top, so that it's validated in real time, when the user enters the value)
+
+# 2. CHECK IF THE WAR FILE IS AVAILABLE:
+
+print "\nChecking if the application .war file is available... ";
+
+# if this installation is running out of the installer zip bundle directory,
+# the war file will be sitting right here, named "dataverse.war":
+
+my $WARFILE_LOCATION = "dataverse.war";
+
+# but if it's not here, this is probably a personal development
+# setup, so their build should be up in their source tree:
+
+unless ( -f $WARFILE_LOCATION ) {
+    my $DATAVERSE_VERSION = "";
+    my $DATAVERSE_POM_FILE = "../../pom.xml";
+    if ( -f $DATAVERSE_POM_FILE )
+    {
+        open DPF, $DATAVERSE_POM_FILE;
+        my $pom_line;
+        while ($pom_line=<DPF>)
+        {
+            chop $pom_line;
+            if ($pom_line =~/^[ \t]*<version>([0-9\.]+)<\/version>/)
+            {
+                $DATAVERSE_VERSION=$1;
+                last;
+            }
+        }
+        close DPF;
+
+        if ($DATAVERSE_VERSION ne "") {
+            $WARFILE_LOCATION = "../../target/dataverse-" . $DATAVERSE_VERSION . ".war";
+        }
+    }
+}
+
+# But, if the war file cannot be found in either of the 2
+# places - we'll just have to give up:
+
+unless ( -f $WARFILE_LOCATION ) {
+    print "\nWARNING: Can't find the project .war file!\n";
+    print "\tAre you running the installer in the right directory?\n";
+    print "\tHave you built the war file?\n";
+    print "\t(if not, build the project and run the installer again)\n";
+
+    exit 0;
+}
+
+print " Yes, it is!\n";
+
+
+# check the working (installer) dir:
+my $cwd;
+chomp( $cwd = `pwd` );
+
+# 2b. 
CHECK IF THE SQL TEMPLATE IS IN PLACE AND CREATE THE SQL FILE + +#my $SQL_REFERENCE_DATA = "reference_data_filtered.sql"; +my $SQL_REFERENCE_TEMPLATE = "../database/reference_data.sql"; + +unless ( -f $SQL_REFERENCE_TEMPLATE ) { + $SQL_REFERENCE_TEMPLATE = "reference_data.sql"; +} + +unless ( -f $SQL_REFERENCE_TEMPLATE ) { + print "\nWARNING: Can't find .sql data template!\n"; + print "(are you running the installer in the right directory?)\n"; + + exit 0; +} + +#open DATATEMPLATEIN, $SQL_REFERENCE_TEMPLATE || die $@; +#open SQLDATAOUT, '>' . $SQL_REFERENCE_DATA || die $@; +# +#while () { +# s/dvnapp/$CONFIG_DEFAULTS{'POSTGRES_USER'}/g; +# print SQLDATAOUT $_; +#} + +#close DATATEMPLATEIN; +#close SQLDATAOUT; + +# 3. CHECK POSTGRES AND JQ AVAILABILITY: + +my $pg_local_connection = 0; +my $psql_exec; +my $jq_exec = ""; +my $pg_major_version = 0; +my $pg_minor_version = 0; + +my $POSTGRES_SYS_UID; +if ( $CONFIG_DEFAULTS{'POSTGRES_SERVER'} eq 'localhost' || $CONFIG_DEFAULTS{'POSTGRES_SERVER'} eq '127.0.0.1' ) +{ + $pg_local_connection = 1; +} +elsif ($pg_only) +{ + print "In the --pg_only mode the script can only be run LOCALLY,\n"; + print "i.e., on the server where PostgresQL is running, with the\n"; + print "Postgres server address as localhost - \"127.0.0.1\".\n"; + exit 1; +} + +### 3a. CHECK FOR USER postgres: (NO LONGER USED!) + +###print "\nChecking system user \"postgres\"... "; + +###my $POSTGRES_SYS_NAME = "postgres"; +###$POSTGRES_SYS_UID = ( getpwnam("postgres") )[2]; + +# 3b. LOCATE THE EXECUTABLES, FOR jq AND psql: + +my $sys_path = $ENV{'PATH'}; +my @sys_path_dirs = split( ":", $sys_path ); + +for my $sys_path_dir (@sys_path_dirs) { + if ( -x $sys_path_dir . "/jq" ) { + $jq_exec = $sys_path_dir; + last; + } +} +if ( $jq_exec eq "" ) { + print STDERR "\nERROR: I haven't been able to find the jq command in your PATH! 
Please install it from http://stedolan.github.io/jq/\n";
+    exit 1;
+
+}
+
+
+$psql_exec = "";
+
+for my $sys_path_dir (@sys_path_dirs) {
+    if ( -x $sys_path_dir . "/psql" ) {
+        $psql_exec = $sys_path_dir;
+        last;
+    }
+}
+
+
+my $psql_major_version = 0;
+my $psql_minor_version = 0;
+
+# 3c. IF PSQL WAS FOUND IN THE PATH, CHECK ITS VERSION:
+
+unless ( $psql_exec eq "" ) {
+    open( PSQLOUT, $psql_exec . "/psql --version|" );
+
+    my $psql_version_line = <PSQLOUT>;
+    chop $psql_version_line;
+    close PSQLOUT;
+
+    my ( $postgresName, $postgresNameLong, $postgresVersion ) = split( " ", $psql_version_line );
+
+    unless ( $postgresName eq "psql" && $postgresVersion =~ /^[0-9][0-9\.]*$/ ) {
+        print STDERR "\nWARNING: Unexpected output from psql command!\n";
+    }
+    else {
+        my (@psql_version_tokens) = split( '\.', $postgresVersion );
+
+        print "\n\nFound Postgres psql command, version $postgresVersion.\n\n";
+
+        $psql_major_version = $psql_version_tokens[0];
+        $psql_minor_version = $psql_version_tokens[1];
+
+        $pg_major_version = $psql_major_version;
+        $pg_minor_version = $psql_minor_version;
+
+    }
+}
+
+# a frequent problem with MacOSX is that the copy of psql found in the PATH
+# belongs to the older version of PostgresQL supplied with the OS, which happens
+# to be incompatible with the newer builds from the Postgres project; which are
+# recommended to be used with Dataverse. So if this is a MacOSX box, we'll
+# check what other versions of PG are available, and select the highest version
+# we can find:
+
+if ( $WORKING_OS eq "MacOSX" ) {
+    my $macos_pg_major_version = 0;
+    my $macos_pg_minor_version = 0;
+
+    for $macos_pg_minor_version ( "5", "4", "3", "2", "1", "0" ) {
+        if ( -x "/Library/PostgreSQL/9." . $macos_pg_minor_version . "/bin/psql" ) {
+            $macos_pg_major_version = 9;
+            if ( ( $macos_pg_major_version > $psql_major_version )
+                || ( $macos_pg_minor_version >= $psql_minor_version ) )
+            {
+                $psql_exec = "/Library/PostgreSQL/9." . $macos_pg_minor_version . 
"/bin"; + $pg_major_version = $macos_pg_major_version; + $pg_minor_version = $macos_pg_minor_version; + } + last; + } + } + + # And if we haven't found an 9.* version of postgresql installed, we'll also check + # for version 8.* available: + + if ( $macos_pg_major_version < 9 ) { + for $macos_pg_minor_version ( "4", "3" ) + # TODO: + # Do we even want to support postgres 8.3? + { + if ( -x "/Library/PostgreSQL/8." . $macos_pg_minor_version . "/bin/psql" ) { + $macos_pg_major_version = 8; + if ( $macos_pg_major_version > $psql_major_version + || $macos_pg_minor_version > $psql_minor_version ) + { + $psql_exec = "/Library/PostgreSQL/8." . $macos_pg_minor_version . "/bin"; + $pg_major_version = $macos_pg_major_version; + $pg_minor_version = $macos_pg_minor_version; + } + last; + } + } + } +} + + + +my $psql_admin_exec = ""; + +if ( $psql_exec eq "" ) +{ + if ( $pg_local_connection ) + { + print STDERR "\nERROR: I haven't been able to find the psql command in your PATH!\n"; + print STDERR "Please make sure PostgresQL is properly installed; if necessary, add\n"; + print STDERR "the location of psql to the PATH, then try again.\n\n"; + + exit 1; + } + else + { + print "WARNING: I haven't been able to find the psql command in your PATH!\n"; + print "But since we are configuring a Dataverse instance to use a remote Postgres server,\n"; + print "we can still set up the database by running a setup script on that remote server\n"; + print "(see below for instructions).\n"; + } +} else { + + if ( $pg_major_version == 0 ) { + # hmm? + } + + print "(We'll be Using psql version " . $pg_major_version . "." . $pg_minor_version . ")\n"; + + + $psql_admin_exec = "PGPASSWORD=" . $CONFIG_DEFAULTS{'POSTGRES_ADMIN_PASSWORD'} . "; export PGPASSWORD; " . $psql_exec; + $psql_exec = "PGPASSWORD=" . $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'} . "; export PGPASSWORD; " . $psql_exec; + + print "Checking if we can talk to Postgres as the admin user...\n"; +} + +# 4. CONFIGURE POSTGRES: + +# 4a. 
BUT FIRST, CHECK IF WE CAN TALK TO POSTGRES AS THE ADMIN: + +if ( $psql_admin_exec eq "" || system( $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U postgres -d postgres -c 'SELECT * FROM pg_roles' > /dev/null 2>&1" ) ) +{ + # No, we can't. :( + if ($pg_local_connection) + { + # If Postgres is running locally, this is a fatal condition. + # We'll give them some (potentially) helpful pointers and exit. + print "Nope, I haven't been able to connect to the local instance of PostgresQL as the admin user.\n"; + print "\nIs postgresql running? \n"; + print " On a RedHat-like system, you can check the status of the daemon with\n\n"; + print " service postgresql status\n\n"; + print " and, if it's not running, start the daemon with\n\n"; + print " service postgresql start\n\n"; + print " On MacOSX, use Applications -> PostgresQL -> Start Server.\n"; + print " (or, if there's no \"Start Server\" item in your PostgresQL folder, \n"; + print " simply restart your MacOSX system!)\n"; + print "\nAlso, please make sure that the daemon is listening to network connections!\n"; + print " - at least on the localhost interface. (See \"Installing Postgres\" section\n"; + print " of the installation manual).\n"; + print "\nFinally, did you supply the correct admin password?\n"; + print " Don't know the admin password for your Postgres installation?\n"; + print " - then simply set the access level to \"trust\" temporarily (for localhost only!)\n"; + print " in your pg_hba.conf file. Again, please consult the \n"; + print " installation manual).\n"; + exit 1; + } + else + { + # If we are configuring the Dataverse instance to use a Postgres server + # running on a remote host, it is possible to configure the database + # without opening remote access for the admin user. 
They will simply
+        # have to run this script in the "postgres-only" mode on that server, locally,
+        # then resume the installation here:
+
+        print "Nope, I haven't been able to connect to the remote Postgres server as the admin user.\n";
+        print "(Or you simply don't have psql installed on this server)\n";
+        print "It IS possible to configure a database for your Dataverse on a remote server,\n";
+        print "without having admin access to that remote Postgres installation.\n\n";
+        print "In order to do that, please copy the installer (the entire package) to the server\n";
+        print "where PostgresQL is running and run the installer with the \"--pg_only\" option:\n\n";
+        print "    ./install --pg_only\n\n";
+
+        print "Press any key to continue the installation process once that has been\n";
+        print "done. Or press ctrl-C to exit the installer.\n\n";
+
+        system "stty cbreak /dev/tty 2>&1";
+        my $key = getc(STDIN);
+        system "stty -cbreak /dev/tty 2>&1";
+        print "\n";
+
+        # Find out what Postgres version is running remotely:
+
+        $pg_major_version = 9;
+        $pg_minor_version = 1;
+
+        print "What version of PostgresQL is installed on the remote server?\n ["
+            . $pg_major_version . "."
+            . $pg_minor_version . 
"] "; + + my $postgresVersion = <>; + chop $postgresVersion; + + while ( $postgresVersion ne "" && !( $postgresVersion =~ /^[0-9]+\.[0-9]+$/ ) ) { + print "Please enter valid Postgres version!\n"; + print "(or ctrl-C to exit the installer)\n"; + $postgresVersion = <>; + chop $postgresVersion; + } + + unless ( $postgresVersion eq "" ) { + my (@postgres_version_tokens) = split( '\.', $postgresVersion ); + + unless ( ( $postgres_version_tokens[0] == 8 && $postgres_version_tokens[1] >= 4 ) + || ( $postgres_version_tokens[0] >= 9 ) ) + { + print STDERR "\nERROR: PostgresQL version 8.4, or newer, is required!\n"; + print STDERR "Please make sure the right version of PostgresQL is properly installed\n"; + print STDERR "on the remote server, then try again.\n"; + + exit 1; + } + + $pg_major_version = $postgres_version_tokens[0]; + $pg_minor_version = $postgres_version_tokens[1]; + } + } +} +else +{ + print "Yes, we can!\n"; + + # ok, we can proceed with configuring things... + + print "\nConfiguring Postgres Database:\n"; + + # 4c. CHECK IF THIS DB ALREADY EXISTS: + + my $psql_command_dbcheck = + $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U postgres -c \"\" -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . ">/dev/null 2>&1"; + + if ( ( my $exitcode = system($psql_command_dbcheck) ) == 0 ) + { + if ($force) + { + print "WARNING! Database " + . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} + . " already exists but --force given... continuing.\n"; + } + else + { + print "WARNING! Database " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " already exists!\n"; + print "\nPlease note that you can only use this installer to create a blank, \n"; + print "new and shiny Dataverse database. I.e., you cannot install on top of an \n"; + print "existing one. 
Please enter a different name for the DVN database.\n"; + print "\nPress any key to continue, or ctrl-C to exit the installer...\n\n"; + + system "stty cbreak /dev/tty 2>&1"; + my $key = getc(STDIN); + system "stty -cbreak /dev/tty 2>&1"; + print "\n"; + + goto ENTERCONFIG; + } + } + + # 4d. CHECK IF THIS USER ALREADY EXISTS: + + my $psql_command_rolecheck = + $psql_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -c \"\" -d postgres " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " >/dev/null 2>&1"; + if ( ( my $exitcode = system($psql_command_rolecheck) ) == 0 ) + { + print "User (role) " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " already exists;\n"; + print "Proceeding."; + } + else + { + # 4e. CREATE DVN DB USER: + + print "\nCreating Postgres user (role) for the DVN:\n"; + + open TMPCMD, ">/tmp/pgcmd.$$.tmp"; + + # with md5-encrypted password: + my $pg_password_md5 = + &create_pg_hash( $CONFIG_DEFAULTS{'POSTGRES_USER'}, $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'} ); + my $sql_command = + "CREATE ROLE \"" + . $CONFIG_DEFAULTS{'POSTGRES_USER'} + . "\" PASSWORD 'md5" + . $pg_password_md5 + . "' NOSUPERUSER CREATEDB CREATEROLE INHERIT LOGIN"; + + print TMPCMD $sql_command; + close TMPCMD; + + my $psql_commandline = $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U postgres -d postgres -f /tmp/pgcmd.$$.tmp >/dev/null 2>&1"; + + my $out = qx($psql_commandline 2>&1); + my $exitcode = $?; + unless ( $exitcode == 0 ) + { + print STDERR "Could not create the DVN Postgres user role!\n"; + print STDERR "(SQL: " . $sql_command . ")\n"; + print STDERR "(psql exit code: " . $exitcode . ")\n"; + print STDERR "(STDERR and STDOUT was: " . $out . ")\n"; + exit 1; + } + + unlink "/tmp/pgcmd.$$.tmp"; + print "done.\n"; + } + + # 4f. CREATE DVN DB: + + print "\nCreating Postgres database:\n"; + + my $psql_command = + $psql_exec + . "/createdb -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U $CONFIG_DEFAULTS{'POSTGRES_USER'} " + . 
$CONFIG_DEFAULTS{'POSTGRES_DATABASE'} + . " --owner=" + . $CONFIG_DEFAULTS{'POSTGRES_USER'}; + + my $out = qx($psql_command 2>&1); + my $exitcode = $?; + unless ( $exitcode == 0 ) + { + print STDERR "Could not create Postgres database for the Dataverse app!\n"; + print STDERR "(command: " . $psql_command . ")\n"; + print STDERR "(psql exit code: " . $exitcode . ")\n"; + print STDERR "(STDOUT and STDERR: " . $out . ")\n"; + if ($force) + { + print STDERR "\n--force called, continuing\n"; + } + else + { + print STDERR "\naborting the installation (sorry!)\n\n"; + exit 1; + } + } + +} + +if ($postgresonly) { + print "\nOK, done.\n"; + print "You can now resume the installation on the main Dataverse host.\n\n"; + + exit 0; +} + + +# Whether the user and the database were created locally or remotely, we'll now +# verify that we can talk to that database, with the credentials of the database +# user that we want the Dataverse application to be using: + +if ( system( $psql_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " -c 'SELECT * FROM pg_roles' > /dev/null 2>&1" ) ) +{ + print STDERR "Oops, haven't been able to connect to the database " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . ",\n"; + print STDERR "running on " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . ", as user " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . ".\n\n"; + print STDERR "Aborting the installation (sorry!)\n"; + exit 1; +} + + +# 5. CONFIGURE GLASSFISH + +my $glassfish_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}; + +print "\nProceeding with the Glassfish setup.\n"; + +# 5b. 
DETERMINE HOW MUCH MEMORY TO GIVE TO GLASSFISH AS HEAP:
+
+my $gf_heap_default = "2048m";
+my $sys_mem_total = 0;
+
+if ( -e "/proc/meminfo" && open MEMINFO, "/proc/meminfo" ) {
+    # Linux
+
+    while ( my $mline = <MEMINFO> ) {
+        if ( $mline =~ /MemTotal:[ \t]*([0-9]*) kB/ ) {
+            $sys_mem_total = $1;
+        }
+    }
+
+    close MEMINFO;
+
+# TODO: Figure out how to determine the amount of memory when running in Docker
+# because we're wondering if Dataverse can run in the free OpenShift Online
+# offering that only gives you 1 GB of memory. Obviously, if this is someone's
+# first impression of Dataverse, we want it to run well! What if you try to
+# ingest a large file or perform other memory-intensive operations? For more
+# context, see https://github.com/IQSS/dataverse/issues/4040#issuecomment-331282286
+    if ( -e "/sys/fs/cgroup/memory/memory.limit_in_bytes" && open CGROUPMEM, "/sys/fs/cgroup/memory/memory.limit_in_bytes" ) {
+        print "We must be running in Docker! Fancy!\n";
+        while ( my $limitline = <CGROUPMEM> ) {
+            # The goal of this cgroup check is for
+            # "Setting the heap limit for Glassfish to 750MB"
+            # to change to some other value, based on memory available.
+            print "/sys/fs/cgroup/memory/memory.limit_in_bytes: $limitline\n";
+            my $limit_in_kb = $limitline / 1024;
+            print "Docker limit_in_kb = $limit_in_kb but ignoring\n";
+            # In openshift.json, notice how PostgreSQL and Solr have
+            # resources.limits.memory set to "256Mi".
+            # If you try to give the Dataverse/Glassfish container twice
+            # as much memory (512 MB) and allow $sys_mem_total to
+            # be set below, you should see the following:
+            # "Setting the heap limit for Glassfish to 192MB."
+            # FIXME: dataverse.war will not deploy with only 512 MB of memory.
+            # Again, the goal is 1 GB total (512MB + 256MB + 256MB) for
+            # Glassfish, PostgreSQL, and Solr to fit in the free OpenShift tier. 
+ #print "setting sys_mem_total to: $limit_in_kb\n"; + #$sys_mem_total = $limit_in_kb; + } + close CGROUPMEM; + } +} +elsif ( -x "/usr/sbin/sysctl" ) { + # MacOS X, probably... + + $sys_mem_total = `/usr/sbin/sysctl -n hw.memsize`; + chop $sys_mem_total; + if ( $sys_mem_total > 0 ) { + $sys_mem_total = int( $sys_mem_total / 1024 ); + # size in kb + } +} + +if ( $sys_mem_total > 0 ) { + # setting the default heap size limit to 3/8 of the available + # amount of memory: + $gf_heap_default = ( int( $sys_mem_total / ( 8 / 3 * 1024 ) ) ); + + print "\nSetting the heap limit for Glassfish to " . $gf_heap_default . "MB. \n"; + print "You may need to adjust this setting to better suit \n"; + print "your system.\n\n"; + + #$gf_heap_default .= "m"; + +} +else { + print "\nCould not determine the amount of memory on your system.\n"; + print "Setting the heap limit for Glassfish to 2GB. You may need \n"; + print "to adjust the value to better suit your system.\n\n"; +} + +push @CONFIG_VARIABLES, "DEF_MEM_SIZE"; +$CONFIG_DEFAULTS{"DEF_MEM_SIZE"} = $gf_heap_default; + +# TODO: +# if the system has more than 4GB of memory (I believe), glassfish must +# be run with the 64 bit flag set explicitly (at least that was the case +# with the MacOS glassfish build...). Verify, and if still the case, +# add a check. + +print "\nInstalling the Glassfish PostgresQL driver... "; + +my $install_driver_jar = ""; + +$install_driver_jar = $POSTGRES_DRIVERS{ $pg_major_version . "_" . $pg_minor_version }; + +unless ( $install_driver_jar && -e "pgdriver/" . $install_driver_jar ) { + die "Installer could not find POSTGRES JDBC driver for your version of PostgresQL!\n(" + . $pg_major_version . "." + . $pg_minor_version . ")"; + +} + +system( "/bin/cp", "pgdriver/" . $install_driver_jar, $glassfish_dir . "/glassfish/lib" ); +# more diagnostics needed? 
+ +print "done!\n"; + +print "\n*********************\n"; +print "PLEASE NOTE, SOME OF THE ASADMIN COMMANDS ARE GOING TO FAIL,\n"; +print "FOR EXAMPLE, IF A CONFIGURATION SETTING THAT WE ARE TRYING\n"; +print "TO CREATE ALREADY EXISTS; OR IF A JVM OPTION THAT WE ARE\n"; +print "DELETING DOESN'T. THESE \"FAILURES\" ARE NORMAL!\n"; +print "*********************\n\n"; +print "When/if asadmin asks you to \"Enter admin user name\",\n"; +print "it should be safe to hit return and accept the default\n"; +print "(which is \"admin\").\n"; + +print "\nPress any key to continue...\n\n"; + +system "stty cbreak /dev/tty 2>&1"; +unless ($yes) { + my $key = getc(STDIN); +} +system "stty -cbreak /dev/tty 2>&1"; +print "\n"; + +# start domain, if not running: + +my $javacheck = `java -version`; +my $exitcode = $?; +unless ( $exitcode == 0 ) { + print STDERR "$javacheck\n" if $javacheck; + print STDERR "Do you have java installed?\n"; + exit 1; +} +my $DOMAIN = "domain1"; +my $DOMAIN_DOWN = + `$CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}/bin/asadmin list-domains | grep "$DOMAIN " | grep "not running"`; +print STDERR $DOMAIN_DOWN . "\n"; +if ($DOMAIN_DOWN) { + print "Trying to start domain up...\n"; + system( "sudo -u $CONFIG_DEFAULTS{'GLASSFISH_USER'} " . $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . "/bin/asadmin start-domain domain1" ); + + # TODO: (?) - retest that the domain is running now? +} +else { + print "domain appears to be up...\n"; +} + +# create asadmin login, so that the user doesn't have to enter +# the username and password for every asadmin command, if +# access to :4848 is password-protected: + +system( $glassfish_dir. "/bin/asadmin login" ); + +# NEW: configure glassfish using ASADMIN commands: + +my $success = &setup_glassfish(); + +# CHECK EXIT STATUS, BARF IF SETUP SCRIPT FAILED: + +unless ($success) { + print "\nERROR! 
Failed to configure Glassfish domain!\n"; + print "(see the error messages above - if any)\n"; + print "Aborting...\n"; + + exit 1; +} + +# Additional config files: + +my $JHOVE_CONFIG = "jhove.conf"; +my $JHOVE_CONF_SCHEMA = "jhoveConfig.xsd"; + + +my $JHOVE_CONFIG_DIST = $JHOVE_CONFIG; +my $JHOVE_CONF_SCHEMA_DIST = $JHOVE_CONF_SCHEMA; + +# (if the installer is being run NOT as part of a distribution zipped bundle, but +# from inside the source tree - adjust the locations of the jhove config files: + +unless ( -f $JHOVE_CONFIG ) { + $JHOVE_CONFIG_DIST = "../../conf/jhove/jhove.conf"; + $JHOVE_CONF_SCHEMA_DIST = "../../conf/jhove/jhoveConfig.xsd"; +} + +# but if we can't find the files in either location, it must mean +# that they are not running the script in the correct directory - so +# nothing else left for us to do but give up: + +unless ( -f $JHOVE_CONFIG_DIST && -f $JHOVE_CONF_SCHEMA_DIST ) { + print "\nERROR! JHOVE configuration files not found in the config dir!\n"; + print "(are you running the installer in the right directory?\n"; + print "Aborting...\n"; + exit 1; +} + +print "\nCopying additional configuration files... "; + +system( "/bin/cp -f " . $JHOVE_CONF_SCHEMA_DIST . " " . $glassfish_dir . "/glassfish/domains/domain1/config" ); + +# The JHOVE conf file has an absolute PATH of the JHOVE config schema file (uh, yeah...) +# - so it may need to be readjusted here: + +if ( $glassfish_dir ne "/usr/local/glassfish4" ) +{ + system( "sed 's:/usr/local/glassfish4:$glassfish_dir:g' < " . $JHOVE_CONFIG_DIST . " > " . $glassfish_dir . "/glassfish/domains/domain1/config/" . $JHOVE_CONFIG); +} +else +{ + system( "/bin/cp -f " . $JHOVE_CONFIG_DIST . " " . $glassfish_dir . "/glassfish/domains/domain1/config" ); +} + +print "done!\n"; + +# check if glassfish is running: +# TODO. + +# 6. DEPLOY THE APPLICATION: + +print "\nAttempting to deploy the application.\n"; +print "Command line: " . $glassfish_dir . "/bin/asadmin deploy " . $WARFILE_LOCATION . 
"\n"; +unless (( + my $exit_code = + system( $glassfish_dir . "/bin/asadmin deploy " . $WARFILE_LOCATION ) + ) == 0 ) +{ + print STDERR "Failed to deploy the application! WAR file: " . $WARFILE_LOCATION . ".\n"; + print STDERR "(exit code: " . $exit_code . ")\n"; + print STDERR "Aborting.\n"; + exit 1; +} + + +# 7. PRE-POPULATE THE DATABASE: +# (in this step some pre-supplied content is inserted into the database that we have just created; +# it is not *necessary* for the application to run in the very basic mode; but some features - certain +# types of metadata imports, for example - will be unavailable if it's not done. + +print "\nPre-populating the database:\n\n"; + +my $psql_command = $psql_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f $SQL_REFERENCE_TEMPLATE"; + +unless ( ( my $exitcode = system("$psql_command") ) == 0 ) +{ + print "WARNING: Could not pre-populate Postgres database for the Dataverse application!\n"; + print "(command: " . $psql_command . ")\n"; + print "(psql exit code: " . $exitcode . ")\n"; + print "\nYou must populate the database in order for all the features of your \n"; + print "new Dataverse to be available. \n"; + print "\n"; + print "You can try this again, by executing the following on the command line:\n"; + print " psql -U $CONFIG_DEFAULTS{'POSTGRES_USER'} -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f $SQL_REFERENCE_TEMPLATE\n"; + print "then re-start glassfish with \n\n"; + print " " . $glassfish_dir . "/bin/asadmin stop-domain domain1\n\n"; + print " " . $glassfish_dir . "/bin/asadmin start-domain domain1\n\n"; + print "\n"; + print "If it's still failing, please consult the installation manual and/or\n"; + print "seek support from the Dataverse team.\n\n"; + + print "Press any key to continue... 
"; + + system "stty cbreak /dev/tty 2>&1"; + my $key = getc(STDIN); + system "stty -cbreak /dev/tty 2>&1"; + print "\n"; +} +else +{ + print "\nOK, done!\n"; +} + +# Check if the App is running: + +unless (( + my $exit_code = + system( $glassfish_dir . "/bin/asadmin list-applications | grep -q '^dataverse'" ) + ) == 0 ) +{ + # If the "asadmin list-applications" has failed, it may only mean that an earlier + # "asadmin login" had failed, and asadmin is now failing to run without the user + # supplying the username and password. (And the fact that we are trying to pile the + # output to grep prevents it from providing the prompts). + # So before we give up, we'll try an alternative: + + unless (( + my $exit_code_2 = + system( "curl http://localhost:8080/robots.txt | grep -q '^User-agent'" ) + ) == 0 ) + { + print STDERR "It appears that the Dataverse application is not running...\n"; + print STDERR "Even though the \"asadmin deploy\" command had succeeded earlier.\n\n"; + print STDERR "Aborting - sorry...\n\n"; + } +} + + +print "\nOK, the Dataverse application appears to be running...\n\n"; + +# Run the additional setup scripts, that populate the metadata block field values, create users +# and dataverses, etc. 
+ +unless ( -d "data" && -f "setup-datasetfields.sh" && -f "setup-users.sh" && -f "setup-dvs.sh" && -f "setup-all.sh" ) { + chdir("../api"); +} + +unless ( -d "data" && -f "setup-datasetfields.sh" && -f "setup-users.sh" && -f "setup-dvs.sh" && -f "setup-builtin-roles.sh" && -f "setup-all.sh" ) { + print "\nERROR: Can't find the metadata and user/dataverse setup scripts!\n"; + print "\tAre you running the installer in the right directory?\n"; + exit 1; +} + +# if there's an admin_email set from arguments, replace the value in `dv-root.json` (called by `setup-all.sh`) +if ($admin_email) +{ + print "setting contact email for root dataverse to: $admin_email\n"; + set_root_contact_email( $admin_email ); +} +else +{ + print "using default contact email for root dataverse\n"; +} + +for my $script ( "setup-all.sh" ) { + # (there's only 1 setup script to run now - it runs all the other required scripts) + print "Executing post-deployment setup script " . $script . "... "; + + my $my_hostname = $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'}; + + # We used to filter the supplied scripts, replacing "localhost" and the port, in + # case they are running Dataverse on a different port... Now we are simply requiring + # that the port 8080 is still configured in domain.xml when they are running the + # installer: + my $run_script; + #if ( $my_hostname ne "localhost" ) { + # system( "sed 's/localhost:8080/$my_hostname/g' < " . $script . " > tmpscript.sh; chmod +x tmpscript.sh" ); + # $run_script = "tmpscript.sh"; + #} + #else { + $run_script = $script; + #} + + unless ( my $exit_code = system( "./" . $run_script . " > $run_script.$$.log 2>&1") == 0 ) + { + print "\nERROR executing script " . $script . "!\n"; + exit 1; + } + print "done!\n"; +} + +# SOME ADDITIONAL SETTINGS THAT ARE NOT TAKEN CARE OF BY THE setup-all SCRIPT +# NEED TO BE CONFIGURED HERE: + +print "Making additional configuration changes...\n\n"; + + +# a. 
Configure the Admin email in the Dataverse settings: + +print "Executing " . "curl -X PUT -d " . $CONFIG_DEFAULTS{'ADMIN_EMAIL'} . " " . $API_URL . "/admin/settings/:SystemEmail" . "\n"; + +my $exit_code = system("curl -X PUT -d " . $CONFIG_DEFAULTS{'ADMIN_EMAIL'} . " " . $API_URL . "/admin/settings/:SystemEmail"); +if ( $exit_code ) +{ + print "WARNING: failed to configure the admin email in the Dataverse settings!\n\n"; +} +else +{ + print "OK.\n\n"; +} + +# b. If this installation is going to be using TwoRavens, configure its location in the Dataverse settings; +# Otherwise, set the "NO TwoRavens FOR YOU!" option in the settings: + + +if ($CONFIG_DEFAULTS{'TWORAVENS_LOCATION'} ne 'NOT INSTALLED') +{ + print "Executing " . "curl -X PUT -d " . $CONFIG_DEFAULTS{'TWORAVENS_LOCATION'} . " " . $API_URL . "/admin/settings/:TwoRavensUrl" . "\n"; + my $exit_code = system("curl -X PUT -d " . $CONFIG_DEFAULTS{'TWORAVENS_LOCATION'} . " " . $API_URL . "/admin/settings/:TwoRavensUrl"); + if ( $exit_code ) + { + print "WARNING: failed to configure the location of the TwoRavens app in the Dataverse settings!\n\n"; + } + else + { + print "OK.\n\n"; + } + + # (and, we also need to explicitly set the tworavens option to "true": + $exit_code = system("curl -X PUT -d true " . $API_URL . "/admin/settings/:TwoRavensTabularView"); + +} else { + print "Executing " . "curl -X PUT -d false " . $API_URL . "/admin/settings/:TwoRavensTabularView" . "\n"; + my $exit_code = system("curl -X PUT -d false " . $API_URL . "/admin/settings/:TwoRavensTabularView"); + if ( $exit_code ) + { + print "WARNING: failed to disable the TwoRavens app in the Dataverse settings!\n\n"; + } + else + { + print "OK.\n\n"; + } +} + +# c. If this installation is going to be using a remote SOLR search engine service, configure its location in the settings: + +if ($CONFIG_DEFAULTS{'SOLR_LOCATION'} ne 'LOCAL') +{ + print "Executing " . "curl -X PUT -d " . $CONFIG_DEFAULTS{'SOLR_LOCATION'} . " " . $API_URL . 
"/admin/settings/:SolrHostColonPort" . "\n"; + my $exit_code = system("curl -X PUT -d " . $CONFIG_DEFAULTS{'SOLR_LOCATION'} . " " . $API_URL . "/admin/settings/:SolrHostColonPort"); + if ( $exit_code ) + { + print "WARNING: failed to configure the location of the remote SOLR service!\n\n"; + } + else + { + print "OK.\n\n"; + } +} + + + +chdir($cwd); + +print "\n\nYou should now have a running DVN instance at\n"; +print " http://" . $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} . ":8080\n\n\n"; + +# (going to skip the Rserve check, for now) + +exit 0; + +# 9. FINALLY, CHECK IF RSERVE IS RUNNING: +print "\n\nFinally, checking if Rserve is running and accessible...\n"; + +unless ( $CONFIG_DEFAULTS{'RSERVE_PORT'} =~ /^[0-9][0-9]*$/ ) { + print $CONFIG_DEFAULTS{'RSERVE_HOST'} . " does not look like a valid port number,\n"; + print "defaulting to 6311.\n\n"; + + $CONFIG_DEFAULTS{'RSERVE_PORT'} = 6311; +} + +my ( $rserve_iaddr, $rserve_paddr, $rserve_proto ); + +unless ( $rserve_iaddr = inet_aton( $CONFIG_DEFAULTS{'RSERVE_HOST'} ) ) { + print STDERR "Could not look up $CONFIG_DEFAULTS{'RSERVE_HOST'},\n"; + print STDERR "the host you specified as your R server.\n"; + print STDERR "\nDVN can function without a working R server, but\n"; + print STDERR "much of the functionality concerning running statistics\n"; + print STDERR "and analysis on quantitative data will not be available.\n"; + print STDERR "Please consult the Installers guide for more info.\n"; + + exit 0; +} + +$rserve_paddr = sockaddr_in( $CONFIG_DEFAULTS{'RSERVE_PORT'}, $rserve_iaddr ); +$rserve_proto = getprotobyname('tcp'); + +unless ( socket( SOCK, PF_INET, SOCK_STREAM, $rserve_proto ) + && connect( SOCK, $rserve_paddr ) ) +{ + print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'RSERVE_HOST'}\n"; + print STDERR "on port $CONFIG_DEFAULTS{'RSERVE_PORT'}, the address you provided\n"; + print STDERR "for your R server.\n"; + print STDERR "DVN can function without a working R server, but\n"; + print STDERR 
"much of the functionality concerning running statistics\n"; + print STDERR "and analysis on quantitative data will not be available.\n"; + print STDERR "Please consult the \"Installing R\" section in the Installers guide\n"; + print STDERR "for more info.\n"; + + exit 0; + +} + +close(SOCK); +print "\nOK!\n"; + +sub setup_glassfish { + my $success = 1; + my $failure = 0; + + # We are going to run a standalone shell script with a bunch of asadmin + # commands to set up all the glassfish components for the application. + # All the parameters must be passed to that script as environmental + # variables: + + $ENV{'GLASSFISH_ROOT'} = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}; + $ENV{'GLASSFISH_DOMAIN'} = "domain1"; + $ENV{'ASADMIN_OPTS'} = ""; + $ENV{'MEM_HEAP_SIZE'} = $CONFIG_DEFAULTS{'DEF_MEM_SIZE'}; + + $ENV{'DB_PORT'} = $CONFIG_DEFAULTS{'POSTGRES_PORT'}; + $ENV{'DB_HOST'} = $CONFIG_DEFAULTS{'POSTGRES_SERVER'}; + $ENV{'DB_NAME'} = $CONFIG_DEFAULTS{'POSTGRES_DATABASE'}; + $ENV{'DB_USER'} = $CONFIG_DEFAULTS{'POSTGRES_USER'}; + $ENV{'DB_PASS'} = $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'}; + + $ENV{'RSERVE_HOST'} = $CONFIG_DEFAULTS{'RSERVE_HOST'}; + $ENV{'RSERVE_PORT'} = $CONFIG_DEFAULTS{'RSERVE_PORT'}; + $ENV{'RSERVE_USER'} = $CONFIG_DEFAULTS{'RSERVE_USER'}; + $ENV{'RSERVE_PASS'} = $CONFIG_DEFAULTS{'RSERVE_PASSWORD'}; + + $ENV{'HOST_ADDRESS'} = $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'}; + $ENV{'SMTP_SERVER'} = $CONFIG_DEFAULTS{'MAIL_SERVER'}; + $ENV{'FILES_DIR'} = + $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . "/glassfish/domains/" . $ENV{'GLASSFISH_DOMAIN'} . "/files"; + + system("./glassfish-setup.sh"); + + if ($?) { + return $failure; + } + return $success; +} + +sub create_pg_hash { + my $pg_username = shift @_; + my $pg_password = shift @_; + + my $encode_line = $pg_password . 
$pg_username; + + # for Redhat: + + ##print STDERR "executing /bin/echo -n $encode_line | md5sum\n"; + + my $hash; + if ( $WORKING_OS eq "MacOSX" ) { + $hash = `/bin/echo -n $encode_line | md5`; + } + else { + $hash = `/bin/echo -n $encode_line | md5sum`; + } + + chop $hash; + + $hash =~ s/ \-$//; + + if ( ( length($hash) != 32 ) || ( $hash !~ /^[0-9a-f]*$/ ) ) { + print STDERR "Failed to generate a MD5-encrypted password hash for the Postgres database.\n"; + exit 1; + } + + return $hash; +} + +sub validate_smtp_server { + my ( $mail_server_iaddr, $mail_server__paddr, $mail_server_proto, $mail_server_status ); + + $mail_server_status = 1; + + unless ( $mail_server_iaddr = inet_aton( $CONFIG_DEFAULTS{'MAIL_SERVER'} ) ) { + print STDERR "Could not look up $CONFIG_DEFAULTS{'MAIL_SERVER'},\n"; + print STDERR "the host you specified as your mail server\n"; + $mail_server_status = 0; + } + + if ($mail_server_status) { + my $mail_server_paddr = sockaddr_in( 25, $mail_server_iaddr ); + $mail_server_proto = getprotobyname('tcp'); + + unless ( socket( SOCK, PF_INET, SOCK_STREAM, $mail_server_proto ) + && connect( SOCK, $mail_server_paddr ) ) + { + print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'MAIL_SERVER'},\n"; + print STDERR "the address you provided for your Mail server.\n"; + print STDERR "Please select a valid mail server, and try again.\n\n"; + + $mail_server_status = 0; + } + + close(SOCK); + } + + return $mail_server_status; +} + +# support function for set_root_contact_email +sub search_replace_file +{ + my ($infile, $pattern, $replacement, $outfile) = @_; + open (my $inp, $infile); + local $/ = undef; + my $txt = <$inp>; + close $inp; + $txt =~s/$pattern/$replacement/g; + open (my $opf, '>:encoding(UTF-8)', $outfile); + print $opf $txt; + close $opf; + return; +} +# set the email address for the default `dataverseAdmin` account +sub set_root_contact_email +{ + my ($contact_email) = @_; + my $config_json = "data/user-admin.json"; + 
search_replace_file($config_json,"\"email\":\"dataverse\@mailinator.com\"","\"email\":\"$contact_email\"",$config_json); + return; +} + diff --git a/dataversedock/testdata/scripts/installer/pgdriver/postgresql-42.1.4.jar b/dataversedock/testdata/scripts/installer/pgdriver/postgresql-42.1.4.jar new file mode 100644 index 0000000..08a54b1 Binary files /dev/null and b/dataversedock/testdata/scripts/installer/pgdriver/postgresql-42.1.4.jar differ diff --git a/dataversedock/testdata/scripts/installer/pgdriver/postgresql-8.4-703.jdbc4.jar b/dataversedock/testdata/scripts/installer/pgdriver/postgresql-8.4-703.jdbc4.jar new file mode 100644 index 0000000..7c8d5f8 Binary files /dev/null and b/dataversedock/testdata/scripts/installer/pgdriver/postgresql-8.4-703.jdbc4.jar differ diff --git a/dataversedock/testdata/scripts/installer/pgdriver/postgresql-9.0-802.jdbc4.jar b/dataversedock/testdata/scripts/installer/pgdriver/postgresql-9.0-802.jdbc4.jar new file mode 100644 index 0000000..9e16af0 Binary files /dev/null and b/dataversedock/testdata/scripts/installer/pgdriver/postgresql-9.0-802.jdbc4.jar differ diff --git a/dataversedock/testdata/scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar b/dataversedock/testdata/scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar new file mode 100644 index 0000000..078f379 Binary files /dev/null and b/dataversedock/testdata/scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar differ diff --git a/dataversedock/testdata/scripts/installer/pgdriver/postgresql-9.2-1004.jdbc4.jar b/dataversedock/testdata/scripts/installer/pgdriver/postgresql-9.2-1004.jdbc4.jar new file mode 100644 index 0000000..b9270d2 Binary files /dev/null and b/dataversedock/testdata/scripts/installer/pgdriver/postgresql-9.2-1004.jdbc4.jar differ diff --git a/dataversedock/testdata/scripts/installer/pgdriver/postgresql-9.3-1104.jdbc4.jar b/dataversedock/testdata/scripts/installer/pgdriver/postgresql-9.3-1104.jdbc4.jar new file mode 100644 index 0000000..a79525d 
Binary files /dev/null and b/dataversedock/testdata/scripts/installer/pgdriver/postgresql-9.3-1104.jdbc4.jar differ diff --git a/dataversedock/testdata/scripts/installer/pgdriver/postgresql-9.4.1212.jar b/dataversedock/testdata/scripts/installer/pgdriver/postgresql-9.4.1212.jar new file mode 100644 index 0000000..b0de752 Binary files /dev/null and b/dataversedock/testdata/scripts/installer/pgdriver/postgresql-9.4.1212.jar differ diff --git a/dataversedock/testdata/scripts/issues/1262/create-sparrow1 b/dataversedock/testdata/scripts/issues/1262/create-sparrow1 new file mode 100755 index 0000000..6837ca5 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1262/create-sparrow1 @@ -0,0 +1,2 @@ +#!/bin/sh +curl -s -X POST -H "Content-type:application/json" -d @scripts/issues/1262/sparrow1.json "http://localhost:8080/api/dataverses/sparrows/datasets/?key=$SPARROWKEY" diff --git a/dataversedock/testdata/scripts/issues/1262/search-sparrow b/dataversedock/testdata/scripts/issues/1262/search-sparrow new file mode 100755 index 0000000..393baf7 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1262/search-sparrow @@ -0,0 +1,3 @@ +#!/bin/sh +# relies on experimental SearchApiNonPublicAllowed feature, see https://github.com/IQSS/dataverse/issues/1299 +curl "http://localhost:8080/api/search?key=$SPARROWKEY&show_relevance=true&q=sparrow" diff --git a/dataversedock/testdata/scripts/issues/1262/sparrow1.json b/dataversedock/testdata/scripts/issues/1262/sparrow1.json new file mode 100644 index 0000000..9235d60 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1262/sparrow1.json @@ -0,0 +1,78 @@ +{ + "datasetVersion": { + "metadataBlocks": { + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "typeName": "title", + "multiple": false, + "typeClass": "primitive", + "value": "The Sparrow" + }, + { + "typeName": "author", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "authorName": { + "typeName": "authorName", + "multiple": 
false, + "typeClass": "primitive", + "value": "Hoxha, Adil" + } + } + ] + }, + { + "typeName": "datasetContact", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "datasetContactEmail": { + "value": "sparrow@mailinator.com", + "typeName": "datasetContactEmail", + "multiple": false, + "typeClass": "primitive" + } + } + ] + }, + { + "typeName": "dsDescription", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "typeClass": "primitive", + "multiple": false, + "value": "The habits and habitats of Albanian sparrows." + } + }, + { + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "typeClass": "primitive", + "multiple": false, + "value": "Sparrows as shutterbugs." + } + } + ] + }, + { + "typeName": "subject", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "Medicine, Health & Life Sciences" + ] + } + ] + } + } + } +} diff --git a/dataversedock/testdata/scripts/issues/1380/01-add.localhost.sh b/dataversedock/testdata/scripts/issues/1380/01-add.localhost.sh new file mode 100755 index 0000000..331011d --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/01-add.localhost.sh @@ -0,0 +1,2 @@ +# Add the localhost group to the system. +curl -X POST -H"Content-Type:application/json" -d@../../api/data/ipGroup-localhost.json localhost:8080/api/admin/groups/ip diff --git a/dataversedock/testdata/scripts/issues/1380/02-build-dv-structure.sh b/dataversedock/testdata/scripts/issues/1380/02-build-dv-structure.sh new file mode 100755 index 0000000..f0936e3 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/02-build-dv-structure.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +echo Run this after running setup-users.sh, and making Pete an +echo admin on the root dataverse. 
+ + +PETE=$(grep :result: users.out | grep Pete | cut -f4 -d: | tr -d \ ) +UMA=$(grep :result: users.out | grep Uma | cut -f4 -d: | tr -d \ ) + +pushd ../../api +./setup-dvs.sh $PETE $UMA +popd diff --git a/dataversedock/testdata/scripts/issues/1380/add-ip-group.sh b/dataversedock/testdata/scripts/issues/1380/add-ip-group.sh new file mode 100755 index 0000000..2fba944 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/add-ip-group.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +# Add the passed group to the system. +curl -X POST -H"Content-Type:application/json" -d@../../api/data/$1 localhost:8080/api/admin/groups/ip diff --git a/dataversedock/testdata/scripts/issues/1380/add-user b/dataversedock/testdata/scripts/issues/1380/add-user new file mode 100755 index 0000000..1781181 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/add-user @@ -0,0 +1,3 @@ +#!/bin/bash +# add-user dv group user api-token +curl -H "Content-type:application/json" -X POST -d"[$3]" localhost:8080/api/dataverses/$1/groups/$2/roleAssignees?key=$4 diff --git a/dataversedock/testdata/scripts/issues/1380/data/3-eg1.json b/dataversedock/testdata/scripts/issues/1380/data/3-eg1.json new file mode 100644 index 0000000..a874d69 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/data/3-eg1.json @@ -0,0 +1 @@ +["&explicit/3-eg1"] diff --git a/dataversedock/testdata/scripts/issues/1380/data/guest.json b/dataversedock/testdata/scripts/issues/1380/data/guest.json new file mode 100644 index 0000000..3e4188a --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/data/guest.json @@ -0,0 +1 @@ +[":guest"] diff --git a/dataversedock/testdata/scripts/issues/1380/data/locals.json b/dataversedock/testdata/scripts/issues/1380/data/locals.json new file mode 100644 index 0000000..8bb5e3e --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/data/locals.json @@ -0,0 +1 @@ +["&ip/localhost"] diff --git a/dataversedock/testdata/scripts/issues/1380/data/pete.json 
b/dataversedock/testdata/scripts/issues/1380/data/pete.json new file mode 100644 index 0000000..298e813 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/data/pete.json @@ -0,0 +1 @@ +["@pete"] diff --git a/dataversedock/testdata/scripts/issues/1380/data/uma.json b/dataversedock/testdata/scripts/issues/1380/data/uma.json new file mode 100644 index 0000000..3caf8c5 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/data/uma.json @@ -0,0 +1 @@ +["@uma"] diff --git a/dataversedock/testdata/scripts/issues/1380/db-list-dvs b/dataversedock/testdata/scripts/issues/1380/db-list-dvs new file mode 100755 index 0000000..4161f7f --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/db-list-dvs @@ -0,0 +1 @@ +psql dvndb -c "select dvobject.id, name, alias, owner_id from dvobject inner join dataverse on dvobject.id = dataverse.id" diff --git a/dataversedock/testdata/scripts/issues/1380/delete-ip-group b/dataversedock/testdata/scripts/issues/1380/delete-ip-group new file mode 100755 index 0000000..b6138d9 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/delete-ip-group @@ -0,0 +1,9 @@ +#/bin/bahx +if [ $# -eq 0 ] + then + echo "Please provide IP group id" + echo "e.g $0 845" + exit 1 +fi + +curl -X DELETE http://localhost:8080/api/admin/groups/ip/$1 diff --git a/dataversedock/testdata/scripts/issues/1380/dvs.gv b/dataversedock/testdata/scripts/issues/1380/dvs.gv new file mode 100644 index 0000000..5260660 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/dvs.gv @@ -0,0 +1,19 @@ +digraph { +d1[label="Root"] +d2[label="Top dataverse of Pete"] +d3[label="Pete's public place"] +d4[label="Pete's restricted data"] +d5[label="Pete's secrets"] +d6[label="Top dataverse of Uma"] +d7[label="Uma's first"] +d8[label="Uma's restricted"] + +d1 -> d2 +d2 -> d3 +d2 -> d4 +d2 -> d5 +d1 -> d6 +d6 -> d7 +d6 -> d8 + +} diff --git a/dataversedock/testdata/scripts/issues/1380/dvs.pdf b/dataversedock/testdata/scripts/issues/1380/dvs.pdf new 
file mode 100644 index 0000000..5169f44 Binary files /dev/null and b/dataversedock/testdata/scripts/issues/1380/dvs.pdf differ diff --git a/dataversedock/testdata/scripts/issues/1380/explicitGroup1.json b/dataversedock/testdata/scripts/issues/1380/explicitGroup1.json new file mode 100644 index 0000000..337a0b6 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/explicitGroup1.json @@ -0,0 +1,5 @@ +{ + "description":"Sample Explicit Group", + "displayName":"Close Collaborators", + "aliasInOwner":"eg1" +} diff --git a/dataversedock/testdata/scripts/issues/1380/explicitGroup2.json b/dataversedock/testdata/scripts/issues/1380/explicitGroup2.json new file mode 100644 index 0000000..fbac263 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/explicitGroup2.json @@ -0,0 +1,5 @@ +{ + "description":"Sample Explicit Group", + "displayName":"Not-So-Close Collaborators", + "aliasInOwner":"eg2" +} diff --git a/dataversedock/testdata/scripts/issues/1380/keys.txt b/dataversedock/testdata/scripts/issues/1380/keys.txt new file mode 100644 index 0000000..9dc47d3 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/keys.txt @@ -0,0 +1,3 @@ +Keys for P e t e and U m a. Produced by running setup-all.sh from the /scripts/api folder. +Pete:757a6493-456a-4bf0-943e-9b559d551a3f +Uma:8797f19b-b8aa-4f96-a789-1b99506f2eab diff --git a/dataversedock/testdata/scripts/issues/1380/list-groups-for b/dataversedock/testdata/scripts/issues/1380/list-groups-for new file mode 100755 index 0000000..063b92c --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/list-groups-for @@ -0,0 +1,2 @@ +#!/bin/bash +curl -s -X GET http://localhost:8080/api/test/explicitGroups/$1 | jq . 
diff --git a/dataversedock/testdata/scripts/issues/1380/list-ip-groups.sh b/dataversedock/testdata/scripts/issues/1380/list-ip-groups.sh new file mode 100755 index 0000000..fba29cc --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/list-ip-groups.sh @@ -0,0 +1,2 @@ +#!/bin/bash +curl -X GET http://localhost:8080/api/admin/groups/ip | jq . diff --git a/dataversedock/testdata/scripts/issues/1380/truth-table.numbers b/dataversedock/testdata/scripts/issues/1380/truth-table.numbers new file mode 100644 index 0000000..86f6738 Binary files /dev/null and b/dataversedock/testdata/scripts/issues/1380/truth-table.numbers differ diff --git a/dataversedock/testdata/scripts/issues/1380/users.out b/dataversedock/testdata/scripts/issues/1380/users.out new file mode 100644 index 0000000..337b9e2 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/1380/users.out @@ -0,0 +1,6 @@ +{"status":"OK","data":{"user":{"id":4,"firstName":"Gabbi","lastName":"Guest","userName":"gabbi","affiliation":"low","position":"A Guest","email":"gabbi@malinator.com"},"authenticatedUser":{"id":4,"identifier":"@gabbi","displayName":"Gabbi Guest","firstName":"Gabbi","lastName":"Guest","email":"gabbi@malinator.com","superuser":false,"affiliation":"low","position":"A Guest","persistentUserId":"gabbi","authenticationProviderId":"builtin"},"apiToken":"d1940786-c315-491e-9812-a8ff809289cc"}} +{"status":"OK","data":{"user":{"id":5,"firstName":"Cathy","lastName":"Collaborator","userName":"cathy","affiliation":"mid","position":"Data Scientist","email":"cathy@malinator.com"},"authenticatedUser":{"id":5,"identifier":"@cathy","displayName":"Cathy Collaborator","firstName":"Cathy","lastName":"Collaborator","email":"cathy@malinator.com","superuser":false,"affiliation":"mid","position":"Data Scientist","persistentUserId":"cathy","authenticationProviderId":"builtin"},"apiToken":"0ddfcb1e-fb51-4ce7-88ab-308b23e13e9a"}} 
+{"status":"OK","data":{"user":{"id":6,"firstName":"Nick","lastName":"NSA","userName":"nick","affiliation":"gov","position":"Signals Intelligence","email":"nick@malinator.com"},"authenticatedUser":{"id":6,"identifier":"@nick","displayName":"Nick NSA","firstName":"Nick","lastName":"NSA","email":"nick@malinator.com","superuser":false,"affiliation":"gov","position":"Signals Intelligence","persistentUserId":"nick","authenticationProviderId":"builtin"},"apiToken":"6d74745d-1733-459a-ae29-422110056ec0"}} +reporting API keys +:result: Pete's key is: 757a6493-456a-4bf0-943e-9b559d551a3f +:result: Uma's key is: 8797f19b-b8aa-4f96-a789-1b99506f2eab \ No newline at end of file diff --git a/dataversedock/testdata/scripts/issues/2013/download-zip.sh b/dataversedock/testdata/scripts/issues/2013/download-zip.sh new file mode 100755 index 0000000..dd801d4 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2013/download-zip.sh @@ -0,0 +1,5 @@ +#!/bin/bash +APACHE_PORT=8888 +GLASSFISH_PORT=8088 +PORT=$APACHE_PORT +count=0; while true; do echo "downloading 4 GB file as zip attempt $((++count))"; curl -s http://127.0.0.1:$PORT/api/access/datafiles/3 > /tmp/3; done diff --git a/dataversedock/testdata/scripts/issues/2013/hit-homepage.sh b/dataversedock/testdata/scripts/issues/2013/hit-homepage.sh new file mode 100755 index 0000000..41be470 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2013/hit-homepage.sh @@ -0,0 +1,2 @@ +#!/bin/bash +count=0; while true; echo "hitting homepage attempt $((++count))"; do (curl -s -i http://127.0.0.1:8888 | head -9); sleep 3; done diff --git a/dataversedock/testdata/scripts/issues/2021/sort-files b/dataversedock/testdata/scripts/issues/2021/sort-files new file mode 100755 index 0000000..e3abc6b --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2021/sort-files @@ -0,0 +1,4 @@ +#!/bin/bash -x +OUT=`curl -s "http://localhost:8080/api/admin/index/filemetadata/50825?maxResults=0&sort=$1&order=$2"` +echo $OUT +echo $OUT | jq . 
diff --git a/dataversedock/testdata/scripts/issues/2036/delete-ned-assignment b/dataversedock/testdata/scripts/issues/2036/delete-ned-assignment new file mode 100755 index 0000000..0b5fe43 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2036/delete-ned-assignment @@ -0,0 +1,7 @@ +#!/bin/sh +~/.homebrew/bin/psql -c " +select * from roleassignment where assigneeidentifier = '@ned' +" dataverse_db +~/.homebrew/bin/psql -c " +delete from roleassignment where assigneeidentifier = '@ned' +" dataverse_db diff --git a/dataversedock/testdata/scripts/issues/2036/grant-role-then-revoke b/dataversedock/testdata/scripts/issues/2036/grant-role-then-revoke new file mode 100755 index 0000000..4049b73 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2036/grant-role-then-revoke @@ -0,0 +1,22 @@ +#!/bin/sh +SERVER=http://localhost:8080 +if [ -z "$1" ]; then + DATAVERSE=togo +else + DATAVERSE=$1 +fi +USERID="@pdurbin" +ROLE=admin + +echo "Assigning $ROLE to $USERID on $DATAVERSE..." +OUT_ASSIGN=`time curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \"$USERID\",\"role\": \"$ROLE\"}" "$SERVER/api/dataverses/$DATAVERSE/assignments?key=$API_TOKEN"` +echo $OUT_ASSIGN | jq '.data | {assignee,_roleAlias}' + +echo "Retrieving ID of role to revoke..." +ASSIGNMENTS=`time curl -s "$SERVER/api/dataverses/$DATAVERSE/assignments?key=$API_TOKEN"` +echo $ASSIGNMENTS | jq ".data[] | select(.assignee==\"$USERID\") | ." + +echo "Revoking $ROLE from $USERID on $DATAVERSE..." 
+ID_TO_REVOKE=`echo $ASSIGNMENTS | jq ".data[] | select(.assignee==\"$USERID\") | .id"` +OUT_REVOKE=`time curl -s -X DELETE "$SERVER/api/dataverses/$DATAVERSE/assignments/$ID_TO_REVOKE?key=$API_TOKEN"` +echo $OUT_REVOKE | jq '.data.message' diff --git a/dataversedock/testdata/scripts/issues/2102/dataset-metadata-next.json b/dataversedock/testdata/scripts/issues/2102/dataset-metadata-next.json new file mode 100644 index 0000000..a7619d7 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2102/dataset-metadata-next.json @@ -0,0 +1,105 @@ +{ + "id": 1, + "versionNumber": 1, + "versionMinorNumber": 0, + "versionState": "RELEASED", + "productionDate": "Production Date", + "lastUpdateTime": "2015-06-08T19:40:14Z", + "releaseTime": "2015-06-08T19:40:14Z", + "createTime": "2015-06-08T17:30:09Z", + "metadataBlocks": { + "citation": { + "displayName": "Citation Metadata", + "fields": [{ + "typeName": "title", + "multiple": false, + "typeClass": "primitive", + "value": "Second Version of Sample Dataset" + }, { + "typeName": "author", + "multiple": true, + "typeClass": "compound", + "value": [{ + "authorName": { + "typeName": "authorName", + "multiple": false, + "typeClass": "primitive", + "value": "Admin, Dataverse" + }, + "authorAffiliation": { + "typeName": "authorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Dataverse.org" + } + }] + }, { + "typeName": "datasetContact", + "multiple": true, + "typeClass": "compound", + "value": [{ + "datasetContactName": { + "typeName": "datasetContactName", + "multiple": false, + "typeClass": "primitive", + "value": "Admin, Dataverse" + }, + "datasetContactAffiliation": { + "typeName": "datasetContactAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Dataverse.org" + }, + "datasetContactEmail": { + "typeName": "datasetContactEmail", + "multiple": false, + "typeClass": "primitive", + "value": "dataverse@mailinator.com" + } + }] + }, { + "typeName": "dsDescription", + "multiple": 
true, + "typeClass": "compound", + "value": [{ + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "multiple": false, + "typeClass": "primitive", + "value": "We need to add files to this Dataset." + } + }] + }, { + "typeName": "subject", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": ["Arts and Humanities"] + }, { + "typeName": "depositor", + "multiple": false, + "typeClass": "primitive", + "value": "Admin, Dataverse" + }, { + "typeName": "dateOfDeposit", + "multiple": false, + "typeClass": "primitive", + "value": "2015-06-08" + }] + } + }, + "files": [{ + "description": "This is a description of the file.", + "label": "2001, Palestinian Proposal at the Taba Conference.kmz", + "version": 1, + "datasetVersionId": 1, + "datafile": { + "id": 4, + "name": "2001, Palestinian Proposal at the Taba Conference.kmz", + "contentType": "application/vnd.google-earth.kmz", + "filename": "14dd48f37d9-68789d517db2", + "originalFormatLabel": "UNKNOWN", + "md5": "cfaad1e9562443bb07119fcdbe11ccd2", + "description": "This is a description of the file." 
+ } + }] +} diff --git a/dataversedock/testdata/scripts/issues/2102/dataset-metadata.json b/dataversedock/testdata/scripts/issues/2102/dataset-metadata.json new file mode 100644 index 0000000..7b92d27 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2102/dataset-metadata.json @@ -0,0 +1 @@ +{"status":"OK","data":{"id":1,"versionNumber":1,"versionMinorNumber":0,"versionState":"RELEASED","productionDate":"Production Date","lastUpdateTime":"2015-06-08T19:40:14Z","releaseTime":"2015-06-08T19:40:14Z","createTime":"2015-06-08T17:30:09Z","metadataBlocks":{"citation":{"displayName":"Citation Metadata","fields":[{"typeName":"title","multiple":false,"typeClass":"primitive","value":"Sample Dataset"},{"typeName":"author","multiple":true,"typeClass":"compound","value":[{"authorName":{"typeName":"authorName","multiple":false,"typeClass":"primitive","value":"Admin, Dataverse"},"authorAffiliation":{"typeName":"authorAffiliation","multiple":false,"typeClass":"primitive","value":"Dataverse.org"}}]},{"typeName":"datasetContact","multiple":true,"typeClass":"compound","value":[{"datasetContactName":{"typeName":"datasetContactName","multiple":false,"typeClass":"primitive","value":"Admin, Dataverse"},"datasetContactAffiliation":{"typeName":"datasetContactAffiliation","multiple":false,"typeClass":"primitive","value":"Dataverse.org"},"datasetContactEmail":{"typeName":"datasetContactEmail","multiple":false,"typeClass":"primitive","value":"dataverse@mailinator.com"}}]},{"typeName":"dsDescription","multiple":true,"typeClass":"compound","value":[{"dsDescriptionValue":{"typeName":"dsDescriptionValue","multiple":false,"typeClass":"primitive","value":"We need to add files to this Dataset."}}]},{"typeName":"subject","multiple":true,"typeClass":"controlledVocabulary","value":["Arts and Humanities"]},{"typeName":"depositor","multiple":false,"typeClass":"primitive","value":"Admin, 
Dataverse"},{"typeName":"dateOfDeposit","multiple":false,"typeClass":"primitive","value":"2015-06-08"}]}},"files":[{"description":"This is a description of the file.","label":"2001, Palestinian Proposal at the Taba Conference.kmz","version":1,"datasetVersionId":1,"datafile":{"id":4,"name":"2001, Palestinian Proposal at the Taba Conference.kmz","contentType":"application/vnd.google-earth.kmz","filename":"14dd48f37d9-68789d517db2","originalFormatLabel":"UNKNOWN","md5":"cfaad1e9562443bb07119fcdbe11ccd2","description":"This is a description of the file."}}]}} \ No newline at end of file diff --git a/dataversedock/testdata/scripts/issues/2102/ready-state.sql b/dataversedock/testdata/scripts/issues/2102/ready-state.sql new file mode 100644 index 0000000..96ccf58 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2102/ready-state.sql @@ -0,0 +1,7269 @@ +-- +-- PostgreSQL database dump +-- + +SET statement_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SET check_function_bodies = false; +SET client_min_messages = warning; + +-- +-- Name: plpgsql; Type: EXTENSION; Schema: -; Owner: +-- + +CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog; + + +-- +-- Name: EXTENSION plpgsql; Type: COMMENT; Schema: -; Owner: +-- + +COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language'; + + +SET search_path = public, pg_catalog; + +SET default_tablespace = ''; + +SET default_with_oids = false; + +-- +-- Name: actionlogrecord; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE actionlogrecord ( + id character varying(36) NOT NULL, + actionresult character varying(255), + actionsubtype character varying(255), + actiontype character varying(255), + endtime timestamp without time zone, + info character varying(1024), + starttime timestamp without time zone, + useridentifier character varying(255) +); + + +ALTER TABLE public.actionlogrecord OWNER TO dataverse_app; + +-- +-- Name: apitoken; Type: TABLE; 
Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE apitoken ( + id integer NOT NULL, + createtime timestamp without time zone NOT NULL, + disabled boolean NOT NULL, + expiretime timestamp without time zone NOT NULL, + tokenstring character varying(255) NOT NULL, + authenticateduser_id bigint NOT NULL +); + + +ALTER TABLE public.apitoken OWNER TO dataverse_app; + +-- +-- Name: apitoken_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE apitoken_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.apitoken_id_seq OWNER TO dataverse_app; + +-- +-- Name: apitoken_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE apitoken_id_seq OWNED BY apitoken.id; + + +-- +-- Name: apitoken_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('apitoken_id_seq', 1, true); + + +-- +-- Name: authenticateduser; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE authenticateduser ( + id integer NOT NULL, + affiliation character varying(255), + email character varying(255) NOT NULL, + firstname character varying(255), + lastname character varying(255), + modificationtime timestamp without time zone, + name character varying(255), + "position" character varying(255), + superuser boolean, + useridentifier character varying(255) NOT NULL +); + + +ALTER TABLE public.authenticateduser OWNER TO dataverse_app; + +-- +-- Name: authenticateduser_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE authenticateduser_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.authenticateduser_id_seq OWNER TO dataverse_app; + +-- +-- Name: authenticateduser_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE authenticateduser_id_seq OWNED BY authenticateduser.id; + + +-- +-- 
Name: authenticateduser_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('authenticateduser_id_seq', 1, true); + + +-- +-- Name: authenticateduserlookup; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE authenticateduserlookup ( + id integer NOT NULL, + authenticationproviderid character varying(255), + persistentuserid character varying(255), + authenticateduser_id bigint NOT NULL +); + + +ALTER TABLE public.authenticateduserlookup OWNER TO dataverse_app; + +-- +-- Name: authenticateduserlookup_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE authenticateduserlookup_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.authenticateduserlookup_id_seq OWNER TO dataverse_app; + +-- +-- Name: authenticateduserlookup_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE authenticateduserlookup_id_seq OWNED BY authenticateduserlookup.id; + + +-- +-- Name: authenticateduserlookup_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('authenticateduserlookup_id_seq', 1, true); + + +-- +-- Name: authenticationproviderrow; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE authenticationproviderrow ( + id character varying(255) NOT NULL, + enabled boolean, + factoryalias character varying(255), + factorydata text, + subtitle character varying(255), + title character varying(255) +); + + +ALTER TABLE public.authenticationproviderrow OWNER TO dataverse_app; + +-- +-- Name: builtinuser; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE builtinuser ( + id integer NOT NULL, + affiliation character varying(255), + email character varying(255) NOT NULL, + encryptedpassword character varying(255), + firstname character varying(255), + lastname character varying(255), + 
passwordencryptionversion integer, + "position" character varying(255), + username character varying(255) NOT NULL +); + + +ALTER TABLE public.builtinuser OWNER TO dataverse_app; + +-- +-- Name: builtinuser_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE builtinuser_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.builtinuser_id_seq OWNER TO dataverse_app; + +-- +-- Name: builtinuser_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE builtinuser_id_seq OWNED BY builtinuser.id; + + +-- +-- Name: builtinuser_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('builtinuser_id_seq', 1, true); + + +-- +-- Name: controlledvocabalternate; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE controlledvocabalternate ( + id integer NOT NULL, + strvalue text, + controlledvocabularyvalue_id bigint NOT NULL, + datasetfieldtype_id bigint NOT NULL +); + + +ALTER TABLE public.controlledvocabalternate OWNER TO dataverse_app; + +-- +-- Name: controlledvocabalternate_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE controlledvocabalternate_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.controlledvocabalternate_id_seq OWNER TO dataverse_app; + +-- +-- Name: controlledvocabalternate_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE controlledvocabalternate_id_seq OWNED BY controlledvocabalternate.id; + + +-- +-- Name: controlledvocabalternate_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('controlledvocabalternate_id_seq', 24, true); + + +-- +-- Name: controlledvocabularyvalue; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE controlledvocabularyvalue ( + id integer NOT 
NULL, + displayorder integer, + identifier character varying(255), + strvalue text, + datasetfieldtype_id bigint +); + + +ALTER TABLE public.controlledvocabularyvalue OWNER TO dataverse_app; + +-- +-- Name: controlledvocabularyvalue_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE controlledvocabularyvalue_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.controlledvocabularyvalue_id_seq OWNER TO dataverse_app; + +-- +-- Name: controlledvocabularyvalue_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE controlledvocabularyvalue_id_seq OWNED BY controlledvocabularyvalue.id; + + +-- +-- Name: controlledvocabularyvalue_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('controlledvocabularyvalue_id_seq', 824, true); + + +-- +-- Name: customfieldmap; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE customfieldmap ( + id integer NOT NULL, + sourcedatasetfield character varying(255), + sourcetemplate character varying(255), + targetdatasetfield character varying(255) +); + + +ALTER TABLE public.customfieldmap OWNER TO dataverse_app; + +-- +-- Name: customfieldmap_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE customfieldmap_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.customfieldmap_id_seq OWNER TO dataverse_app; + +-- +-- Name: customfieldmap_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE customfieldmap_id_seq OWNED BY customfieldmap.id; + + +-- +-- Name: customfieldmap_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('customfieldmap_id_seq', 1, false); + + +-- +-- Name: customquestion; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE customquestion 
( + id integer NOT NULL, + displayorder integer, + hidden boolean, + questionstring character varying(255) NOT NULL, + questiontype character varying(255) NOT NULL, + required boolean, + guestbook_id bigint NOT NULL +); + + +ALTER TABLE public.customquestion OWNER TO dataverse_app; + +-- +-- Name: customquestion_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE customquestion_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.customquestion_id_seq OWNER TO dataverse_app; + +-- +-- Name: customquestion_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE customquestion_id_seq OWNED BY customquestion.id; + + +-- +-- Name: customquestion_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('customquestion_id_seq', 1, false); + + +-- +-- Name: customquestionresponse; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE customquestionresponse ( + id integer NOT NULL, + response character varying(255), + customquestion_id bigint NOT NULL, + guestbookresponse_id bigint NOT NULL +); + + +ALTER TABLE public.customquestionresponse OWNER TO dataverse_app; + +-- +-- Name: customquestionresponse_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE customquestionresponse_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.customquestionresponse_id_seq OWNER TO dataverse_app; + +-- +-- Name: customquestionresponse_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE customquestionresponse_id_seq OWNED BY customquestionresponse.id; + + +-- +-- Name: customquestionresponse_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('customquestionresponse_id_seq', 1, false); + + +-- +-- Name: customquestionvalue; Type: TABLE; Schema: 
public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE customquestionvalue ( + id integer NOT NULL, + displayorder integer, + valuestring character varying(255) NOT NULL, + customquestion_id bigint NOT NULL +); + + +ALTER TABLE public.customquestionvalue OWNER TO dataverse_app; + +-- +-- Name: customquestionvalue_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE customquestionvalue_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.customquestionvalue_id_seq OWNER TO dataverse_app; + +-- +-- Name: customquestionvalue_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE customquestionvalue_id_seq OWNED BY customquestionvalue.id; + + +-- +-- Name: customquestionvalue_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('customquestionvalue_id_seq', 1, false); + + +-- +-- Name: datafile; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datafile ( + id bigint NOT NULL, + contenttype character varying(255) NOT NULL, + filesystemname character varying(255) NOT NULL, + filesize bigint, + ingeststatus character(1), + md5 character varying(255) NOT NULL, + name character varying(255), + restricted boolean +); + + +ALTER TABLE public.datafile OWNER TO dataverse_app; + +-- +-- Name: datafilecategory; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datafilecategory ( + id integer NOT NULL, + name character varying(255) NOT NULL, + dataset_id bigint NOT NULL +); + + +ALTER TABLE public.datafilecategory OWNER TO dataverse_app; + +-- +-- Name: datafilecategory_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datafilecategory_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datafilecategory_id_seq OWNER TO dataverse_app; + +-- +-- Name: 
datafilecategory_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datafilecategory_id_seq OWNED BY datafilecategory.id; + + +-- +-- Name: datafilecategory_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datafilecategory_id_seq', 1, true); + + +-- +-- Name: datafiletag; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datafiletag ( + id integer NOT NULL, + type integer NOT NULL, + datafile_id bigint NOT NULL +); + + +ALTER TABLE public.datafiletag OWNER TO dataverse_app; + +-- +-- Name: datafiletag_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datafiletag_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datafiletag_id_seq OWNER TO dataverse_app; + +-- +-- Name: datafiletag_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datafiletag_id_seq OWNED BY datafiletag.id; + + +-- +-- Name: datafiletag_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datafiletag_id_seq', 1, false); + + +-- +-- Name: dataset; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataset ( + id bigint NOT NULL, + authority character varying(255), + doiseparator character varying(255), + fileaccessrequest boolean, + globalidcreatetime timestamp without time zone, + identifier character varying(255) NOT NULL, + protocol character varying(255), + guestbook_id bigint, + thumbnailfile_id bigint +); + + +ALTER TABLE public.dataset OWNER TO dataverse_app; + +-- +-- Name: datasetfield; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetfield ( + id integer NOT NULL, + datasetfieldtype_id bigint NOT NULL, + datasetversion_id bigint, + parentdatasetfieldcompoundvalue_id bigint, + template_id bigint +); + + +ALTER TABLE 
public.datasetfield OWNER TO dataverse_app; + +-- +-- Name: datasetfield_controlledvocabularyvalue; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetfield_controlledvocabularyvalue ( + datasetfield_id bigint NOT NULL, + controlledvocabularyvalues_id bigint NOT NULL +); + + +ALTER TABLE public.datasetfield_controlledvocabularyvalue OWNER TO dataverse_app; + +-- +-- Name: datasetfield_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetfield_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datasetfield_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetfield_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetfield_id_seq OWNED BY datasetfield.id; + + +-- +-- Name: datasetfield_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datasetfield_id_seq', 14, true); + + +-- +-- Name: datasetfieldcompoundvalue; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetfieldcompoundvalue ( + id integer NOT NULL, + displayorder integer, + parentdatasetfield_id bigint +); + + +ALTER TABLE public.datasetfieldcompoundvalue OWNER TO dataverse_app; + +-- +-- Name: datasetfieldcompoundvalue_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetfieldcompoundvalue_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datasetfieldcompoundvalue_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetfieldcompoundvalue_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetfieldcompoundvalue_id_seq OWNED BY datasetfieldcompoundvalue.id; + + +-- +-- Name: datasetfieldcompoundvalue_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT 
pg_catalog.setval('datasetfieldcompoundvalue_id_seq', 3, true); + + +-- +-- Name: datasetfielddefaultvalue; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetfielddefaultvalue ( + id integer NOT NULL, + displayorder integer, + strvalue text, + datasetfield_id bigint NOT NULL, + defaultvalueset_id bigint NOT NULL, + parentdatasetfielddefaultvalue_id bigint +); + + +ALTER TABLE public.datasetfielddefaultvalue OWNER TO dataverse_app; + +-- +-- Name: datasetfielddefaultvalue_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetfielddefaultvalue_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datasetfielddefaultvalue_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetfielddefaultvalue_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetfielddefaultvalue_id_seq OWNED BY datasetfielddefaultvalue.id; + + +-- +-- Name: datasetfielddefaultvalue_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datasetfielddefaultvalue_id_seq', 1, false); + + +-- +-- Name: datasetfieldtype; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetfieldtype ( + id integer NOT NULL, + advancedsearchfieldtype boolean, + allowcontrolledvocabulary boolean, + allowmultiples boolean, + description text, + displayformat character varying(255), + displayoncreate boolean, + displayorder integer, + facetable boolean, + fieldtype character varying(255) NOT NULL, + name text, + required boolean, + title text, + watermark character varying(255), + metadatablock_id bigint, + parentdatasetfieldtype_id bigint +); + + +ALTER TABLE public.datasetfieldtype OWNER TO dataverse_app; + +-- +-- Name: datasetfieldtype_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetfieldtype_id_seq + START WITH 1 + INCREMENT BY 1 + NO 
MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datasetfieldtype_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetfieldtype_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetfieldtype_id_seq OWNED BY datasetfieldtype.id; + + +-- +-- Name: datasetfieldtype_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datasetfieldtype_id_seq', 154, true); + + +-- +-- Name: datasetfieldvalue; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetfieldvalue ( + id integer NOT NULL, + displayorder integer, + value text, + datasetfield_id bigint NOT NULL +); + + +ALTER TABLE public.datasetfieldvalue OWNER TO dataverse_app; + +-- +-- Name: datasetfieldvalue_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetfieldvalue_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datasetfieldvalue_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetfieldvalue_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetfieldvalue_id_seq OWNED BY datasetfieldvalue.id; + + +-- +-- Name: datasetfieldvalue_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datasetfieldvalue_id_seq', 9, true); + + +-- +-- Name: datasetlinkingdataverse; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetlinkingdataverse ( + id integer NOT NULL, + linkcreatetime timestamp without time zone NOT NULL, + dataset_id bigint NOT NULL, + linkingdataverse_id bigint NOT NULL +); + + +ALTER TABLE public.datasetlinkingdataverse OWNER TO dataverse_app; + +-- +-- Name: datasetlinkingdataverse_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetlinkingdataverse_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 
1; + + +ALTER TABLE public.datasetlinkingdataverse_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetlinkingdataverse_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetlinkingdataverse_id_seq OWNED BY datasetlinkingdataverse.id; + + +-- +-- Name: datasetlinkingdataverse_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datasetlinkingdataverse_id_seq', 1, false); + + +-- +-- Name: datasetlock; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetlock ( + id integer NOT NULL, + info character varying(255), + starttime timestamp without time zone, + user_id bigint NOT NULL, + dataset_id bigint NOT NULL +); + + +ALTER TABLE public.datasetlock OWNER TO dataverse_app; + +-- +-- Name: datasetlock_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetlock_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datasetlock_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetlock_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetlock_id_seq OWNED BY datasetlock.id; + + +-- +-- Name: datasetlock_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datasetlock_id_seq', 1, false); + + +-- +-- Name: datasetversion; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetversion ( + id integer NOT NULL, + unf character varying(255), + archivenote character varying(1000), + archivetime timestamp without time zone, + availabilitystatus text, + citationrequirements text, + conditions text, + confidentialitydeclaration text, + contactforaccess text, + createtime timestamp without time zone NOT NULL, + dataaccessplace text, + deaccessionlink character varying(255), + depositorrequirements text, + disclaimer text, + fileaccessrequest 
boolean, + inreview boolean, + lastupdatetime timestamp without time zone NOT NULL, + license character varying(255), + minorversionnumber bigint, + originalarchive text, + releasetime timestamp without time zone, + restrictions text, + sizeofcollection text, + specialpermissions text, + studycompletion text, + termsofaccess text, + termsofuse text, + version bigint, + versionnote character varying(1000), + versionnumber bigint, + versionstate character varying(255), + dataset_id bigint +); + + +ALTER TABLE public.datasetversion OWNER TO dataverse_app; + +-- +-- Name: datasetversion_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetversion_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datasetversion_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetversion_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetversion_id_seq OWNED BY datasetversion.id; + + +-- +-- Name: datasetversion_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datasetversion_id_seq', 1, true); + + +-- +-- Name: datasetversionuser; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetversionuser ( + id integer NOT NULL, + lastupdatedate timestamp without time zone NOT NULL, + authenticateduser_id bigint, + datasetversion_id bigint +); + + +ALTER TABLE public.datasetversionuser OWNER TO dataverse_app; + +-- +-- Name: datasetversionuser_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetversionuser_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datasetversionuser_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetversionuser_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetversionuser_id_seq OWNED BY datasetversionuser.id; 
+ + +-- +-- Name: datasetversionuser_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datasetversionuser_id_seq', 1, true); + + +-- +-- Name: datatable; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datatable ( + id integer NOT NULL, + casequantity bigint, + originalfileformat character varying(255), + originalformatversion character varying(255), + recordspercase bigint, + unf character varying(255) NOT NULL, + varquantity bigint, + datafile_id bigint NOT NULL +); + + +ALTER TABLE public.datatable OWNER TO dataverse_app; + +-- +-- Name: datatable_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datatable_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datatable_id_seq OWNER TO dataverse_app; + +-- +-- Name: datatable_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datatable_id_seq OWNED BY datatable.id; + + +-- +-- Name: datatable_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datatable_id_seq', 1, false); + + +-- +-- Name: datavariable; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datavariable ( + id integer NOT NULL, + fileendposition bigint, + fileorder integer, + filestartposition bigint, + format character varying(255), + formatcategory character varying(255), + "interval" integer, + label text, + name character varying(255), + numberofdecimalpoints bigint, + orderedfactor boolean, + recordsegmentnumber bigint, + type integer, + unf character varying(255), + universe character varying(255), + weighted boolean, + datatable_id bigint NOT NULL +); + + +ALTER TABLE public.datavariable OWNER TO dataverse_app; + +-- +-- Name: datavariable_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datavariable_id_seq + START WITH 1 + 
INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datavariable_id_seq OWNER TO dataverse_app; + +-- +-- Name: datavariable_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datavariable_id_seq OWNED BY datavariable.id; + + +-- +-- Name: datavariable_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datavariable_id_seq', 1, false); + + +-- +-- Name: dataverse; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataverse ( + id bigint NOT NULL, + affiliation character varying(255), + alias character varying(255) NOT NULL, + dataversetype character varying(255) NOT NULL, + description text, + facetroot boolean, + guestbookroot boolean, + metadatablockroot boolean, + name character varying(255) NOT NULL, + permissionroot boolean, + templateroot boolean, + themeroot boolean, + defaultcontributorrole_id bigint NOT NULL, + defaulttemplate_id bigint +); + + +ALTER TABLE public.dataverse OWNER TO dataverse_app; + +-- +-- Name: dataverse_metadatablock; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataverse_metadatablock ( + dataverse_id bigint NOT NULL, + metadatablocks_id bigint NOT NULL +); + + +ALTER TABLE public.dataverse_metadatablock OWNER TO dataverse_app; + +-- +-- Name: dataversecontact; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataversecontact ( + id integer NOT NULL, + contactemail character varying(255) NOT NULL, + displayorder integer, + dataverse_id bigint +); + + +ALTER TABLE public.dataversecontact OWNER TO dataverse_app; + +-- +-- Name: dataversecontact_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE dataversecontact_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.dataversecontact_id_seq OWNER TO dataverse_app; + +-- +-- Name: 
dataversecontact_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE dataversecontact_id_seq OWNED BY dataversecontact.id; + + +-- +-- Name: dataversecontact_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('dataversecontact_id_seq', 2, true); + + +-- +-- Name: dataversefacet; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataversefacet ( + id integer NOT NULL, + displayorder integer, + datasetfieldtype_id bigint, + dataverse_id bigint +); + + +ALTER TABLE public.dataversefacet OWNER TO dataverse_app; + +-- +-- Name: dataversefacet_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE dataversefacet_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.dataversefacet_id_seq OWNER TO dataverse_app; + +-- +-- Name: dataversefacet_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE dataversefacet_id_seq OWNED BY dataversefacet.id; + + +-- +-- Name: dataversefacet_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('dataversefacet_id_seq', 4, true); + + +-- +-- Name: dataversefeatureddataverse; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataversefeatureddataverse ( + id integer NOT NULL, + displayorder integer, + dataverse_id bigint, + featureddataverse_id bigint +); + + +ALTER TABLE public.dataversefeatureddataverse OWNER TO dataverse_app; + +-- +-- Name: dataversefeatureddataverse_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE dataversefeatureddataverse_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.dataversefeatureddataverse_id_seq OWNER TO dataverse_app; + +-- +-- Name: dataversefeatureddataverse_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: 
dataverse_app +-- + +ALTER SEQUENCE dataversefeatureddataverse_id_seq OWNED BY dataversefeatureddataverse.id; + + +-- +-- Name: dataversefeatureddataverse_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('dataversefeatureddataverse_id_seq', 1, false); + + +-- +-- Name: dataversefieldtypeinputlevel; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataversefieldtypeinputlevel ( + id integer NOT NULL, + include boolean, + required boolean, + datasetfieldtype_id bigint, + dataverse_id bigint +); + + +ALTER TABLE public.dataversefieldtypeinputlevel OWNER TO dataverse_app; + +-- +-- Name: dataversefieldtypeinputlevel_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE dataversefieldtypeinputlevel_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.dataversefieldtypeinputlevel_id_seq OWNER TO dataverse_app; + +-- +-- Name: dataversefieldtypeinputlevel_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE dataversefieldtypeinputlevel_id_seq OWNED BY dataversefieldtypeinputlevel.id; + + +-- +-- Name: dataversefieldtypeinputlevel_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('dataversefieldtypeinputlevel_id_seq', 1, false); + + +-- +-- Name: dataverselinkingdataverse; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataverselinkingdataverse ( + id integer NOT NULL, + linkcreatetime timestamp without time zone, + dataverse_id bigint NOT NULL, + linkingdataverse_id bigint NOT NULL +); + + +ALTER TABLE public.dataverselinkingdataverse OWNER TO dataverse_app; + +-- +-- Name: dataverselinkingdataverse_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE dataverselinkingdataverse_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER 
TABLE public.dataverselinkingdataverse_id_seq OWNER TO dataverse_app; + +-- +-- Name: dataverselinkingdataverse_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE dataverselinkingdataverse_id_seq OWNED BY dataverselinkingdataverse.id; + + +-- +-- Name: dataverselinkingdataverse_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('dataverselinkingdataverse_id_seq', 1, false); + + +-- +-- Name: dataverserole; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataverserole ( + id integer NOT NULL, + alias character varying(255) NOT NULL, + description character varying(255), + name character varying(255) NOT NULL, + permissionbits bigint, + owner_id bigint +); + + +ALTER TABLE public.dataverserole OWNER TO dataverse_app; + +-- +-- Name: dataverserole_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE dataverserole_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.dataverserole_id_seq OWNER TO dataverse_app; + +-- +-- Name: dataverserole_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE dataverserole_id_seq OWNED BY dataverserole.id; + + +-- +-- Name: dataverserole_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('dataverserole_id_seq', 8, true); + + +-- +-- Name: dataversesubjects; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataversesubjects ( + dataverse_id bigint NOT NULL, + controlledvocabularyvalue_id bigint NOT NULL +); + + +ALTER TABLE public.dataversesubjects OWNER TO dataverse_app; + +-- +-- Name: dataversetheme; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataversetheme ( + id integer NOT NULL, + backgroundcolor character varying(255), + linkcolor character varying(255), + linkurl character 
varying(255), + logo character varying(255), + logoalignment character varying(255), + logobackgroundcolor character varying(255), + logoformat character varying(255), + tagline character varying(255), + textcolor character varying(255), + dataverse_id bigint +); + + +ALTER TABLE public.dataversetheme OWNER TO dataverse_app; + +-- +-- Name: dataversetheme_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE dataversetheme_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.dataversetheme_id_seq OWNER TO dataverse_app; + +-- +-- Name: dataversetheme_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE dataversetheme_id_seq OWNED BY dataversetheme.id; + + +-- +-- Name: dataversetheme_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('dataversetheme_id_seq', 1, false); + + +-- +-- Name: defaultvalueset; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE defaultvalueset ( + id integer NOT NULL, + name character varying(255) NOT NULL +); + + +ALTER TABLE public.defaultvalueset OWNER TO dataverse_app; + +-- +-- Name: defaultvalueset_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE defaultvalueset_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.defaultvalueset_id_seq OWNER TO dataverse_app; + +-- +-- Name: defaultvalueset_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE defaultvalueset_id_seq OWNED BY defaultvalueset.id; + + +-- +-- Name: defaultvalueset_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('defaultvalueset_id_seq', 1, false); + + +-- +-- Name: dvobject; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dvobject ( + id integer NOT NULL, + dtype character 
varying(31), + createdate timestamp without time zone NOT NULL, + indextime timestamp without time zone, + modificationtime timestamp without time zone NOT NULL, + permissionindextime timestamp without time zone, + permissionmodificationtime timestamp without time zone, + publicationdate timestamp without time zone, + creator_id bigint, + owner_id bigint, + releaseuser_id bigint +); + + +ALTER TABLE public.dvobject OWNER TO dataverse_app; + +-- +-- Name: dvobject_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE dvobject_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.dvobject_id_seq OWNER TO dataverse_app; + +-- +-- Name: dvobject_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE dvobject_id_seq OWNED BY dvobject.id; + + +-- +-- Name: dvobject_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('dvobject_id_seq', 4, true); + + +-- +-- Name: explicitgroup; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE explicitgroup ( + id integer NOT NULL, + description character varying(1024), + displayname character varying(255), + groupalias character varying(255), + groupaliasinowner character varying(255), + owner_id bigint +); + + +ALTER TABLE public.explicitgroup OWNER TO dataverse_app; + +-- +-- Name: explicitgroup_authenticateduser; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE explicitgroup_authenticateduser ( + explicitgroup_id bigint NOT NULL, + containedauthenticatedusers_id bigint NOT NULL +); + + +ALTER TABLE public.explicitgroup_authenticateduser OWNER TO dataverse_app; + +-- +-- Name: explicitgroup_containedroleassignees; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE explicitgroup_containedroleassignees ( + explicitgroup_id bigint, + containedroleassignees character varying(255) +); + 
+ +ALTER TABLE public.explicitgroup_containedroleassignees OWNER TO dataverse_app; + +-- +-- Name: explicitgroup_explicitgroup; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE explicitgroup_explicitgroup ( + explicitgroup_id bigint NOT NULL, + containedexplicitgroups_id bigint NOT NULL +); + + +ALTER TABLE public.explicitgroup_explicitgroup OWNER TO dataverse_app; + +-- +-- Name: explicitgroup_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE explicitgroup_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.explicitgroup_id_seq OWNER TO dataverse_app; + +-- +-- Name: explicitgroup_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE explicitgroup_id_seq OWNED BY explicitgroup.id; + + +-- +-- Name: explicitgroup_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('explicitgroup_id_seq', 1, false); + + +-- +-- Name: fileaccessrequests; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE fileaccessrequests ( + datafile_id bigint NOT NULL, + authenticated_user_id bigint NOT NULL +); + + +ALTER TABLE public.fileaccessrequests OWNER TO dataverse_app; + +-- +-- Name: filemetadata; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE filemetadata ( + id integer NOT NULL, + description text, + label character varying(255) NOT NULL, + restricted boolean, + version bigint, + datafile_id bigint NOT NULL, + datasetversion_id bigint NOT NULL +); + + +ALTER TABLE public.filemetadata OWNER TO dataverse_app; + +-- +-- Name: filemetadata_datafilecategory; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE filemetadata_datafilecategory ( + filecategories_id bigint NOT NULL, + filemetadatas_id bigint NOT NULL +); + + +ALTER TABLE public.filemetadata_datafilecategory OWNER TO dataverse_app; + 
+-- +-- Name: filemetadata_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE filemetadata_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.filemetadata_id_seq OWNER TO dataverse_app; + +-- +-- Name: filemetadata_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE filemetadata_id_seq OWNED BY filemetadata.id; + + +-- +-- Name: filemetadata_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('filemetadata_id_seq', 1, true); + + +-- +-- Name: foreignmetadatafieldmapping; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE foreignmetadatafieldmapping ( + id integer NOT NULL, + datasetfieldname text, + foreignfieldxpath text, + isattribute boolean, + foreignmetadataformatmapping_id bigint, + parentfieldmapping_id bigint +); + + +ALTER TABLE public.foreignmetadatafieldmapping OWNER TO dataverse_app; + +-- +-- Name: foreignmetadatafieldmapping_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE foreignmetadatafieldmapping_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.foreignmetadatafieldmapping_id_seq OWNER TO dataverse_app; + +-- +-- Name: foreignmetadatafieldmapping_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE foreignmetadatafieldmapping_id_seq OWNED BY foreignmetadatafieldmapping.id; + + +-- +-- Name: foreignmetadatafieldmapping_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('foreignmetadatafieldmapping_id_seq', 1, false); + + +-- +-- Name: foreignmetadataformatmapping; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE foreignmetadataformatmapping ( + id integer NOT NULL, + displayname character varying(255) NOT NULL, + name character varying(255) NOT 
NULL, + schemalocation character varying(255), + startelement character varying(255) +); + + +ALTER TABLE public.foreignmetadataformatmapping OWNER TO dataverse_app; + +-- +-- Name: foreignmetadataformatmapping_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE foreignmetadataformatmapping_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.foreignmetadataformatmapping_id_seq OWNER TO dataverse_app; + +-- +-- Name: foreignmetadataformatmapping_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE foreignmetadataformatmapping_id_seq OWNED BY foreignmetadataformatmapping.id; + + +-- +-- Name: foreignmetadataformatmapping_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('foreignmetadataformatmapping_id_seq', 1, false); + + +-- +-- Name: guestbook; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE guestbook ( + id integer NOT NULL, + createtime timestamp without time zone NOT NULL, + emailrequired boolean, + enabled boolean, + institutionrequired boolean, + name character varying(255), + namerequired boolean, + positionrequired boolean, + dataverse_id bigint +); + + +ALTER TABLE public.guestbook OWNER TO dataverse_app; + +-- +-- Name: guestbook_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE guestbook_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.guestbook_id_seq OWNER TO dataverse_app; + +-- +-- Name: guestbook_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE guestbook_id_seq OWNED BY guestbook.id; + + +-- +-- Name: guestbook_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('guestbook_id_seq', 1, false); + + +-- +-- Name: guestbookresponse; Type: TABLE; Schema: public; Owner: dataverse_app; 
Tablespace: +-- + +CREATE TABLE guestbookresponse ( + id integer NOT NULL, + downloadtype character varying(255), + email character varying(255), + institution character varying(255), + name character varying(255), + "position" character varying(255), + responsetime timestamp without time zone, + sessionid character varying(255), + authenticateduser_id bigint, + datafile_id bigint NOT NULL, + dataset_id bigint NOT NULL, + datasetversion_id bigint, + guestbook_id bigint NOT NULL +); + + +ALTER TABLE public.guestbookresponse OWNER TO dataverse_app; + +-- +-- Name: guestbookresponse_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE guestbookresponse_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.guestbookresponse_id_seq OWNER TO dataverse_app; + +-- +-- Name: guestbookresponse_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE guestbookresponse_id_seq OWNED BY guestbookresponse.id; + + +-- +-- Name: guestbookresponse_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('guestbookresponse_id_seq', 1, false); + + +-- +-- Name: harvestingdataverseconfig; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE harvestingdataverseconfig ( + id bigint NOT NULL, + archivedescription text, + archiveurl character varying(255), + harveststyle character varying(255), + harvesttype character varying(255), + harvestingset character varying(255), + harvestingurl character varying(255), + dataverse_id bigint +); + + +ALTER TABLE public.harvestingdataverseconfig OWNER TO dataverse_app; + +-- +-- Name: ingestreport; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE ingestreport ( + id integer NOT NULL, + endtime timestamp without time zone, + report character varying(255), + starttime timestamp without time zone, + status integer, + type integer, + 
datafile_id bigint NOT NULL +); + + +ALTER TABLE public.ingestreport OWNER TO dataverse_app; + +-- +-- Name: ingestreport_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE ingestreport_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.ingestreport_id_seq OWNER TO dataverse_app; + +-- +-- Name: ingestreport_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE ingestreport_id_seq OWNED BY ingestreport.id; + + +-- +-- Name: ingestreport_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('ingestreport_id_seq', 1, false); + + +-- +-- Name: ingestrequest; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE ingestrequest ( + id integer NOT NULL, + controlcard character varying(255), + labelsfile character varying(255), + textencoding character varying(255), + datafile_id bigint +); + + +ALTER TABLE public.ingestrequest OWNER TO dataverse_app; + +-- +-- Name: ingestrequest_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE ingestrequest_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.ingestrequest_id_seq OWNER TO dataverse_app; + +-- +-- Name: ingestrequest_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE ingestrequest_id_seq OWNED BY ingestrequest.id; + + +-- +-- Name: ingestrequest_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('ingestrequest_id_seq', 1, false); + + +-- +-- Name: ipv4range; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE ipv4range ( + id bigint NOT NULL, + bottomaslong bigint, + topaslong bigint, + owner_id bigint +); + + +ALTER TABLE public.ipv4range OWNER TO dataverse_app; + +-- +-- Name: ipv6range; Type: TABLE; Schema: public; Owner: 
dataverse_app; Tablespace: +-- + +CREATE TABLE ipv6range ( + id bigint NOT NULL, + bottoma bigint, + bottomb bigint, + bottomc bigint, + bottomd bigint, + topa bigint, + topb bigint, + topc bigint, + topd bigint, + owner_id bigint +); + + +ALTER TABLE public.ipv6range OWNER TO dataverse_app; + +-- +-- Name: maplayermetadata; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE maplayermetadata ( + id integer NOT NULL, + embedmaplink character varying(255) NOT NULL, + layerlink character varying(255) NOT NULL, + layername character varying(255) NOT NULL, + mapimagelink character varying(255), + worldmapusername character varying(255) NOT NULL, + dataset_id bigint NOT NULL, + datafile_id bigint NOT NULL +); + + +ALTER TABLE public.maplayermetadata OWNER TO dataverse_app; + +-- +-- Name: maplayermetadata_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE maplayermetadata_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.maplayermetadata_id_seq OWNER TO dataverse_app; + +-- +-- Name: maplayermetadata_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE maplayermetadata_id_seq OWNED BY maplayermetadata.id; + + +-- +-- Name: maplayermetadata_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('maplayermetadata_id_seq', 1, false); + + +-- +-- Name: metadatablock; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE metadatablock ( + id integer NOT NULL, + displayname character varying(255) NOT NULL, + name character varying(255) NOT NULL, + owner_id bigint +); + + +ALTER TABLE public.metadatablock OWNER TO dataverse_app; + +-- +-- Name: metadatablock_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE metadatablock_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE 
public.metadatablock_id_seq OWNER TO dataverse_app; + +-- +-- Name: metadatablock_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE metadatablock_id_seq OWNED BY metadatablock.id; + + +-- +-- Name: metadatablock_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('metadatablock_id_seq', 6, true); + + +-- +-- Name: passwordresetdata; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE passwordresetdata ( + id integer NOT NULL, + created timestamp without time zone NOT NULL, + expires timestamp without time zone NOT NULL, + reason character varying(255), + token character varying(255), + builtinuser_id bigint NOT NULL +); + + +ALTER TABLE public.passwordresetdata OWNER TO dataverse_app; + +-- +-- Name: passwordresetdata_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE passwordresetdata_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.passwordresetdata_id_seq OWNER TO dataverse_app; + +-- +-- Name: passwordresetdata_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE passwordresetdata_id_seq OWNED BY passwordresetdata.id; + + +-- +-- Name: passwordresetdata_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('passwordresetdata_id_seq', 1, false); + + +-- +-- Name: persistedglobalgroup; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE persistedglobalgroup ( + id bigint NOT NULL, + dtype character varying(31), + description character varying(255), + displayname character varying(255), + persistedgroupalias character varying(255) +); + + +ALTER TABLE public.persistedglobalgroup OWNER TO dataverse_app; + +-- +-- Name: roleassignment; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE roleassignment ( + id integer NOT 
NULL, + assigneeidentifier character varying(255) NOT NULL, + definitionpoint_id bigint NOT NULL, + role_id bigint NOT NULL +); + + +ALTER TABLE public.roleassignment OWNER TO dataverse_app; + +-- +-- Name: roleassignment_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE roleassignment_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.roleassignment_id_seq OWNER TO dataverse_app; + +-- +-- Name: roleassignment_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE roleassignment_id_seq OWNED BY roleassignment.id; + + +-- +-- Name: roleassignment_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('roleassignment_id_seq', 3, true); + + +-- +-- Name: savedsearch; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE savedsearch ( + id integer NOT NULL, + query text, + creator_id bigint NOT NULL, + definitionpoint_id bigint NOT NULL +); + + +ALTER TABLE public.savedsearch OWNER TO dataverse_app; + +-- +-- Name: savedsearch_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE savedsearch_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.savedsearch_id_seq OWNER TO dataverse_app; + +-- +-- Name: savedsearch_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE savedsearch_id_seq OWNED BY savedsearch.id; + + +-- +-- Name: savedsearch_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('savedsearch_id_seq', 1, false); + + +-- +-- Name: savedsearchfilterquery; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE savedsearchfilterquery ( + id integer NOT NULL, + filterquery text, + savedsearch_id bigint NOT NULL +); + + +ALTER TABLE public.savedsearchfilterquery OWNER TO dataverse_app; + 
+-- +-- Name: savedsearchfilterquery_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE savedsearchfilterquery_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.savedsearchfilterquery_id_seq OWNER TO dataverse_app; + +-- +-- Name: savedsearchfilterquery_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE savedsearchfilterquery_id_seq OWNED BY savedsearchfilterquery.id; + + +-- +-- Name: savedsearchfilterquery_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('savedsearchfilterquery_id_seq', 1, false); + + +-- +-- Name: sequence; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE sequence ( + seq_name character varying(50) NOT NULL, + seq_count numeric(38,0) +); + + +ALTER TABLE public.sequence OWNER TO dataverse_app; + +-- +-- Name: setting; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE setting ( + name character varying(255) NOT NULL, + content text +); + + +ALTER TABLE public.setting OWNER TO dataverse_app; + +-- +-- Name: shibgroup; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE shibgroup ( + id integer NOT NULL, + attribute character varying(255) NOT NULL, + name character varying(255) NOT NULL, + pattern character varying(255) NOT NULL +); + + +ALTER TABLE public.shibgroup OWNER TO dataverse_app; + +-- +-- Name: shibgroup_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE shibgroup_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.shibgroup_id_seq OWNER TO dataverse_app; + +-- +-- Name: shibgroup_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE shibgroup_id_seq OWNED BY shibgroup.id; + + +-- +-- Name: shibgroup_id_seq; Type: SEQUENCE SET; Schema: public; Owner: 
dataverse_app +-- + +SELECT pg_catalog.setval('shibgroup_id_seq', 1, false); + + +-- +-- Name: summarystatistic; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE summarystatistic ( + id integer NOT NULL, + type integer, + value character varying(255), + datavariable_id bigint NOT NULL +); + + +ALTER TABLE public.summarystatistic OWNER TO dataverse_app; + +-- +-- Name: summarystatistic_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE summarystatistic_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.summarystatistic_id_seq OWNER TO dataverse_app; + +-- +-- Name: summarystatistic_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE summarystatistic_id_seq OWNED BY summarystatistic.id; + + +-- +-- Name: summarystatistic_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('summarystatistic_id_seq', 1, false); + + +-- +-- Name: template; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE template ( + id integer NOT NULL, + createtime timestamp without time zone NOT NULL, + name character varying(255) NOT NULL, + usagecount bigint, + dataverse_id bigint +); + + +ALTER TABLE public.template OWNER TO dataverse_app; + +-- +-- Name: template_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE template_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.template_id_seq OWNER TO dataverse_app; + +-- +-- Name: template_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE template_id_seq OWNED BY template.id; + + +-- +-- Name: template_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('template_id_seq', 1, false); + + +-- +-- Name: usernotification; Type: TABLE; Schema: public; 
Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE usernotification ( + id integer NOT NULL, + emailed boolean, + objectid bigint, + readnotification boolean, + senddate timestamp without time zone, + type integer NOT NULL, + user_id bigint NOT NULL +); + + +ALTER TABLE public.usernotification OWNER TO dataverse_app; + +-- +-- Name: usernotification_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE usernotification_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.usernotification_id_seq OWNER TO dataverse_app; + +-- +-- Name: usernotification_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE usernotification_id_seq OWNED BY usernotification.id; + + +-- +-- Name: usernotification_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('usernotification_id_seq', 2, true); + + +-- +-- Name: variablecategory; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE variablecategory ( + id integer NOT NULL, + catorder integer, + frequency double precision, + label character varying(255), + missing boolean, + value character varying(255), + datavariable_id bigint NOT NULL +); + + +ALTER TABLE public.variablecategory OWNER TO dataverse_app; + +-- +-- Name: variablecategory_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE variablecategory_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.variablecategory_id_seq OWNER TO dataverse_app; + +-- +-- Name: variablecategory_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE variablecategory_id_seq OWNED BY variablecategory.id; + + +-- +-- Name: variablecategory_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('variablecategory_id_seq', 1, false); + + +-- +-- 
Name: variablerange; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE variablerange ( + id integer NOT NULL, + beginvalue character varying(255), + beginvaluetype integer, + endvalue character varying(255), + endvaluetype integer, + datavariable_id bigint NOT NULL +); + + +ALTER TABLE public.variablerange OWNER TO dataverse_app; + +-- +-- Name: variablerange_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE variablerange_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.variablerange_id_seq OWNER TO dataverse_app; + +-- +-- Name: variablerange_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE variablerange_id_seq OWNED BY variablerange.id; + + +-- +-- Name: variablerange_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('variablerange_id_seq', 1, false); + + +-- +-- Name: variablerangeitem; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE variablerangeitem ( + id integer NOT NULL, + value numeric(38,0), + datavariable_id bigint NOT NULL +); + + +ALTER TABLE public.variablerangeitem OWNER TO dataverse_app; + +-- +-- Name: variablerangeitem_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE variablerangeitem_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.variablerangeitem_id_seq OWNER TO dataverse_app; + +-- +-- Name: variablerangeitem_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE variablerangeitem_id_seq OWNED BY variablerangeitem.id; + + +-- +-- Name: variablerangeitem_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('variablerangeitem_id_seq', 1, false); + + +-- +-- Name: worldmapauth_token; Type: TABLE; Schema: public; Owner: dataverse_app; 
Tablespace: +-- + +CREATE TABLE worldmapauth_token ( + id integer NOT NULL, + created timestamp without time zone NOT NULL, + hasexpired boolean NOT NULL, + lastrefreshtime timestamp without time zone NOT NULL, + modified timestamp without time zone NOT NULL, + token character varying(255), + application_id bigint NOT NULL, + datafile_id bigint NOT NULL, + dataverseuser_id bigint NOT NULL +); + + +ALTER TABLE public.worldmapauth_token OWNER TO dataverse_app; + +-- +-- Name: worldmapauth_token_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE worldmapauth_token_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.worldmapauth_token_id_seq OWNER TO dataverse_app; + +-- +-- Name: worldmapauth_token_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE worldmapauth_token_id_seq OWNED BY worldmapauth_token.id; + + +-- +-- Name: worldmapauth_token_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('worldmapauth_token_id_seq', 1, false); + + +-- +-- Name: worldmapauth_tokentype; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE worldmapauth_tokentype ( + id integer NOT NULL, + contactemail character varying(255), + created timestamp without time zone NOT NULL, + hostname character varying(255), + ipaddress character varying(255), + mapitlink character varying(255) NOT NULL, + md5 character varying(255) NOT NULL, + modified timestamp without time zone NOT NULL, + name character varying(255) NOT NULL, + timelimitminutes integer DEFAULT 30, + timelimitseconds bigint DEFAULT 1800 +); + + +ALTER TABLE public.worldmapauth_tokentype OWNER TO dataverse_app; + +-- +-- Name: worldmapauth_tokentype_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE worldmapauth_tokentype_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + 
+ALTER TABLE public.worldmapauth_tokentype_id_seq OWNER TO dataverse_app; + +-- +-- Name: worldmapauth_tokentype_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE worldmapauth_tokentype_id_seq OWNED BY worldmapauth_tokentype.id; + + +-- +-- Name: worldmapauth_tokentype_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('worldmapauth_tokentype_id_seq', 1, false); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY apitoken ALTER COLUMN id SET DEFAULT nextval('apitoken_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY authenticateduser ALTER COLUMN id SET DEFAULT nextval('authenticateduser_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY authenticateduserlookup ALTER COLUMN id SET DEFAULT nextval('authenticateduserlookup_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY builtinuser ALTER COLUMN id SET DEFAULT nextval('builtinuser_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY controlledvocabalternate ALTER COLUMN id SET DEFAULT nextval('controlledvocabalternate_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY controlledvocabularyvalue ALTER COLUMN id SET DEFAULT nextval('controlledvocabularyvalue_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY customfieldmap ALTER COLUMN id SET DEFAULT nextval('customfieldmap_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY customquestion ALTER COLUMN id SET DEFAULT nextval('customquestion_id_seq'::regclass); + + +-- +-- Name: 
id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY customquestionresponse ALTER COLUMN id SET DEFAULT nextval('customquestionresponse_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY customquestionvalue ALTER COLUMN id SET DEFAULT nextval('customquestionvalue_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datafilecategory ALTER COLUMN id SET DEFAULT nextval('datafilecategory_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datafiletag ALTER COLUMN id SET DEFAULT nextval('datafiletag_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfield ALTER COLUMN id SET DEFAULT nextval('datasetfield_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfieldcompoundvalue ALTER COLUMN id SET DEFAULT nextval('datasetfieldcompoundvalue_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfielddefaultvalue ALTER COLUMN id SET DEFAULT nextval('datasetfielddefaultvalue_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfieldtype ALTER COLUMN id SET DEFAULT nextval('datasetfieldtype_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfieldvalue ALTER COLUMN id SET DEFAULT nextval('datasetfieldvalue_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetlinkingdataverse ALTER COLUMN id SET DEFAULT nextval('datasetlinkingdataverse_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- 
+ +ALTER TABLE ONLY datasetlock ALTER COLUMN id SET DEFAULT nextval('datasetlock_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetversion ALTER COLUMN id SET DEFAULT nextval('datasetversion_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetversionuser ALTER COLUMN id SET DEFAULT nextval('datasetversionuser_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datatable ALTER COLUMN id SET DEFAULT nextval('datatable_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datavariable ALTER COLUMN id SET DEFAULT nextval('datavariable_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversecontact ALTER COLUMN id SET DEFAULT nextval('dataversecontact_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefacet ALTER COLUMN id SET DEFAULT nextval('dataversefacet_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefeatureddataverse ALTER COLUMN id SET DEFAULT nextval('dataversefeatureddataverse_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefieldtypeinputlevel ALTER COLUMN id SET DEFAULT nextval('dataversefieldtypeinputlevel_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverselinkingdataverse ALTER COLUMN id SET DEFAULT nextval('dataverselinkingdataverse_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverserole ALTER COLUMN id SET DEFAULT 
nextval('dataverserole_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversetheme ALTER COLUMN id SET DEFAULT nextval('dataversetheme_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY defaultvalueset ALTER COLUMN id SET DEFAULT nextval('defaultvalueset_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dvobject ALTER COLUMN id SET DEFAULT nextval('dvobject_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY explicitgroup ALTER COLUMN id SET DEFAULT nextval('explicitgroup_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY filemetadata ALTER COLUMN id SET DEFAULT nextval('filemetadata_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY foreignmetadatafieldmapping ALTER COLUMN id SET DEFAULT nextval('foreignmetadatafieldmapping_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY foreignmetadataformatmapping ALTER COLUMN id SET DEFAULT nextval('foreignmetadataformatmapping_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY guestbook ALTER COLUMN id SET DEFAULT nextval('guestbook_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY guestbookresponse ALTER COLUMN id SET DEFAULT nextval('guestbookresponse_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY ingestreport ALTER COLUMN id SET DEFAULT nextval('ingestreport_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app 
+-- + +ALTER TABLE ONLY ingestrequest ALTER COLUMN id SET DEFAULT nextval('ingestrequest_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY maplayermetadata ALTER COLUMN id SET DEFAULT nextval('maplayermetadata_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY metadatablock ALTER COLUMN id SET DEFAULT nextval('metadatablock_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY passwordresetdata ALTER COLUMN id SET DEFAULT nextval('passwordresetdata_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY roleassignment ALTER COLUMN id SET DEFAULT nextval('roleassignment_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY savedsearch ALTER COLUMN id SET DEFAULT nextval('savedsearch_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY savedsearchfilterquery ALTER COLUMN id SET DEFAULT nextval('savedsearchfilterquery_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY shibgroup ALTER COLUMN id SET DEFAULT nextval('shibgroup_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY summarystatistic ALTER COLUMN id SET DEFAULT nextval('summarystatistic_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY template ALTER COLUMN id SET DEFAULT nextval('template_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY usernotification ALTER COLUMN id SET DEFAULT nextval('usernotification_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; 
Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY variablecategory ALTER COLUMN id SET DEFAULT nextval('variablecategory_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY variablerange ALTER COLUMN id SET DEFAULT nextval('variablerange_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY variablerangeitem ALTER COLUMN id SET DEFAULT nextval('variablerangeitem_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY worldmapauth_token ALTER COLUMN id SET DEFAULT nextval('worldmapauth_token_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY worldmapauth_tokentype ALTER COLUMN id SET DEFAULT nextval('worldmapauth_tokentype_id_seq'::regclass); + + +-- +-- Data for Name: actionlogrecord; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY actionlogrecord (id, actionresult, actionsubtype, actiontype, endtime, info, starttime, useridentifier) FROM stdin; +111734e5-cc21-4ef1-917c-d5100e596be5 OK loadDatasetFields Admin 2015-06-08 13:08:17.955 rep4508757747349037455tmp 2015-06-08 13:08:15.768 \N +7e484d19-611e-47c6-b0d2-f9f50b63f2f3 OK loadDatasetFields Admin 2015-06-08 13:08:19.44 rep937678722988769217tmp 2015-06-08 13:08:17.985 \N +d6dc80fd-2d43-416e-9df8-2d7b3d552c73 OK loadDatasetFields Admin 2015-06-08 13:08:19.58 rep3716520730701613426tmp 2015-06-08 13:08:19.465 \N +64431d29-3993-4750-aaae-349df637f7a4 OK loadDatasetFields Admin 2015-06-08 13:08:19.825 rep6974913189748432210tmp 2015-06-08 13:08:19.601 \N +ec39e535-02db-4ea3-b92c-24232dc58ce2 OK loadDatasetFields Admin 2015-06-08 13:08:21.104 rep851714502082007892tmp 2015-06-08 13:08:19.863 \N +fbea7dcb-4903-4066-8ac9-df6a2679a9ae OK loadDatasetFields Admin 2015-06-08 13:08:21.268 rep342120996714352751tmp 2015-06-08 13:08:21.127 \N 
+c5dc0649-80a3-4fe0-953d-8d919558ddbf OK createBuiltInRole Admin 2015-06-08 13:08:21.571 admin:A person who has all permissions for dataverses, datasets, and files. 2015-06-08 13:08:21.557 \N +3f8be9a1-9a63-4205-b083-e9037cd2313d OK createBuiltInRole Admin 2015-06-08 13:08:21.602 fileDownloader:A person who can download a file. 2015-06-08 13:08:21.599 \N +1578195e-87b3-4482-a3ee-3496d92ef66a OK createBuiltInRole Admin 2015-06-08 13:08:21.628 fullContributor:A person who can add subdataverses and datasets within a dataverse. 2015-06-08 13:08:21.625 \N +d9e83295-2c89-44cd-afbe-2f555e48e00e OK createBuiltInRole Admin 2015-06-08 13:08:21.652 dvContributor:A person who can add subdataverses within a dataverse. 2015-06-08 13:08:21.65 \N +59661f33-746a-4d69-a412-92c9c4b1d66e OK createBuiltInRole Admin 2015-06-08 13:08:21.675 dsContributor:A person who can add datasets within a dataverse. 2015-06-08 13:08:21.672 \N +c027269c-e06b-4685-97ce-a16ea73e7307 OK createBuiltInRole Admin 2015-06-08 13:08:21.7 editor:For datasets, a person who can edit License + Terms, and then submit them for review. 2015-06-08 13:08:21.698 \N +c6989a37-1b0f-4d10-aad6-6cdc3369d72b OK createBuiltInRole Admin 2015-06-08 13:08:21.754 curator:For datasets, a person who can edit License + Terms, edit Permissions, and publish datasets. 2015-06-08 13:08:21.752 \N +ba926273-10b2-4c19-a945-f48e50d9a6f8 OK createBuiltInRole Admin 2015-06-08 13:08:21.778 member:A person who can view both unpublished dataverses and datasets. 
2015-06-08 13:08:21.776 \N +d842e3ac-3982-4c20-ba5a-64486e08c0c1 OK deregisterProvider Auth 2015-06-08 13:08:21.823 builtin 2015-06-08 13:08:21.823 \N +b1cdc146-466e-4a12-bb00-2d46e318f0c2 OK registerProvider Auth 2015-06-08 13:08:21.827 builtin:Build-in Provider 2015-06-08 13:08:21.826 \N +a7a100fa-de33-4fb9-9892-f7452a8aaa5c OK deregisterProvider Auth 2015-06-08 13:08:21.856 echo-simple 2015-06-08 13:08:21.856 \N +63ce8b53-9abe-4cde-b7bf-c2b3afc178c8 OK registerProvider Auth 2015-06-08 13:08:21.858 echo-simple:Echo provider 2015-06-08 13:08:21.858 \N +949b0e25-b0e6-40a5-905d-3a693d209f82 OK deregisterProvider Auth 2015-06-08 13:08:21.879 echo-dignified 2015-06-08 13:08:21.879 \N +0d8067b9-2bc0-4ec1-be8b-f00a2ec6dac8 OK registerProvider Auth 2015-06-08 13:08:21.881 echo-dignified:Dignified Echo provider 2015-06-08 13:08:21.881 \N +0b4e73b1-f5a1-4dcd-9b4a-00ada47cdc62 OK set Setting 2015-06-08 13:08:21.908 :AllowSignUp: yes 2015-06-08 13:08:21.908 \N +036e7053-7ca0-4500-9e74-0b2754cb7f4f OK set Setting 2015-06-08 13:08:21.932 :SignUpUrl: /dataverseuser.xhtml?editMode=CREATE 2015-06-08 13:08:21.932 \N +206f3de4-c5be-4912-a17d-38647b22ccfd OK set Setting 2015-06-08 13:08:21.953 :Protocol: doi 2015-06-08 13:08:21.953 \N +9280cf0a-fe45-4a99-8fc5-91c26ce88fac OK set Setting 2015-06-08 13:08:21.977 :Authority: 10.5072/FK2 2015-06-08 13:08:21.977 \N +41ecc86a-d851-4738-8347-ebb3fd06da30 OK set Setting 2015-06-08 13:08:22.002 :DoiProvider: EZID 2015-06-08 13:08:22.001 \N +96eca709-071d-4ce8-8af4-43fca2b01595 OK set Setting 2015-06-08 13:08:22.023 :DoiSeparator: / 2015-06-08 13:08:22.023 \N +4a5b8a1a-af57-49f5-8c52-8e9331f85723 OK set Setting 2015-06-08 13:08:22.043 BuiltinUsers.KEY: burrito 2015-06-08 13:08:22.043 \N +8651ac19-16a7-4cf1-88d2-54d949d52b0b OK set Setting 2015-06-08 13:08:22.064 :BlockedApiKey: empanada 2015-06-08 13:08:22.064 \N +68db6078-d857-4e3b-93a4-67286e18bcdc OK set Setting 2015-06-08 13:08:22.083 :BlockedApiPolicy: localhost-only 2015-06-08 
13:08:22.083 \N +908e955e-1b95-4811-b1a1-b5388382f192 OK createUser Auth 2015-06-08 13:08:22.253 @dataverseAdmin 2015-06-08 13:08:22.253 \N +7e06b039-4e9b-45ef-b332-6dbb63755761 OK create BuiltinUser 2015-06-08 13:08:22.276 builtinUser:dataverseAdmin authenticatedUser:@dataverseAdmin 2015-06-08 13:08:22.116 \N +dbe67569-f670-492f-a894-60a6c580ce6b OK toggleSuperuser Admin 2015-06-08 13:08:22.302 dataverseAdmin 2015-06-08 13:08:22.296 \N +00a8631d-83fe-4e72-a0b3-642e7aa2a94a OK edu.harvard.iq.dataverse.engine.command.impl.CreateDataverseCommand Command 2015-06-08 13:08:22.462 : 2015-06-08 13:08:22.367 @dataverseAdmin +3b5e97dd-502e-48ce-a5ff-0a424e9b5ae2 OK edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseMetadataBlocksCommand.SetRoot Command 2015-06-08 13:08:22.589 :[1 Root] 2015-06-08 13:08:22.578 @dataverseAdmin +58a1dc66-778c-4522-ad5c-de1fa7c477cd OK edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseMetadataBlocksCommand.SetBlocks Command 2015-06-08 13:08:22.591 :[1 Root] 2015-06-08 13:08:22.516 @dataverseAdmin +0c31f247-fdfe-4a5e-a3e5-791ce25f8c2e OK edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseCommand Command 2015-06-08 13:08:22.666 :[1 Root] 2015-06-08 13:08:22.629 @dataverseAdmin +14acfd86-aade-4ea5-aed9-a5d7f5d0ff4d OK updateUser Auth 2015-06-08 13:21:29.017 @dataverseAdmin 2015-06-08 13:21:29.017 \N +4a9910ce-ab98-4918-9baa-96fe423e4195 OK login SessionManagement 2015-06-08 13:21:29.023 \N 2015-06-08 13:21:29.023 @dataverseAdmin +c5180d89-c5b9-47c9-a961-e3e4a3879d56 OK edu.harvard.iq.dataverse.engine.command.impl.CreateDataverseCommand Command 2015-06-08 13:29:07.634 :[1 Root] 2015-06-08 13:29:07.303 @dataverseAdmin +b5706636-4797-4202-9cd0-ff2a8a079958 OK edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand Command 2015-06-08 13:29:18.388 :[1 Root] 2015-06-08 13:29:18.363 @dataverseAdmin +49d75936-04bb-4237-823a-7535cdd76ec5 OK edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand Command 
2015-06-08 13:30:10.011 :[2 testDV] 2015-06-08 13:30:08.565 @dataverseAdmin +d1310999-acb9-4b09-831c-2b8ad4f2b00e OK registerProvider Auth 2015-06-08 14:27:00.22 builtin:Build-in Provider 2015-06-08 14:27:00.214 \N +989f37fb-48b7-4fb5-ae0f-9302cd5e87d0 OK registerProvider Auth 2015-06-08 14:27:00.231 echo-simple:Echo provider 2015-06-08 14:27:00.231 \N +03541856-1c8e-4267-9461-ce1328fc29d4 OK registerProvider Auth 2015-06-08 14:27:00.233 echo-dignified:Dignified Echo provider 2015-06-08 14:27:00.233 \N +3d683e6c-2a75-441d-893a-cb302725ad7f OK updateUser Auth 2015-06-08 14:27:07.812 @dataverseAdmin 2015-06-08 14:27:07.811 \N +30710fd9-5947-46b4-8829-5a8eccf9c58d OK login SessionManagement 2015-06-08 14:27:07.824 \N 2015-06-08 14:27:07.823 @dataverseAdmin +e2a0c5d5-d91a-460b-88ed-89ceb6339c6a OK edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand Command 2015-06-08 15:05:01.065 :[3 Sample Dataset] 2015-06-08 15:05:00.469 @dataverseAdmin +42d5c863-48ca-4bee-b0ba-ad9f00a3487f OK registerProvider Auth 2015-06-08 15:40:06.501 builtin:Build-in Provider 2015-06-08 15:40:06.5 \N +628a317e-2b61-406e-89ce-6c05452f2007 OK registerProvider Auth 2015-06-08 15:40:06.506 echo-simple:Echo provider 2015-06-08 15:40:06.506 \N +7025d857-4e0d-43e3-9c84-8e3112279a88 OK registerProvider Auth 2015-06-08 15:40:06.508 echo-dignified:Dignified Echo provider 2015-06-08 15:40:06.508 \N +3a7f405a-7223-48cb-9059-4aa757089367 OK updateUser Auth 2015-06-08 15:40:09.28 @dataverseAdmin 2015-06-08 15:40:09.279 \N +48c9ad6d-1ab2-4886-8d59-fc0a909edde8 OK login SessionManagement 2015-06-08 15:40:09.285 \N 2015-06-08 15:40:09.285 @dataverseAdmin +5b0570c8-a702-46a5-a346-50bf76ead788 OK edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand Command 2015-06-08 15:40:14.328 :[2 testDV] 2015-06-08 15:40:14.147 @dataverseAdmin +b7f4217c-8c53-486f-b3d4-7e42536be1c6 OK edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand Command 2015-06-08 15:40:17.632 :[3 Sample 
Dataset] 2015-06-08 15:40:14.334 @dataverseAdmin +\. + + +-- +-- Data for Name: apitoken; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY apitoken (id, createtime, disabled, expiretime, tokenstring, authenticateduser_id) FROM stdin; +1 2015-06-08 13:08:22.264 f 2016-06-08 13:08:22.264 a65048f8-875c-4479-a91d-33cb8cd12821 1 +\. + + +-- +-- Data for Name: authenticateduser; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY authenticateduser (id, affiliation, email, firstname, lastname, modificationtime, name, "position", superuser, useridentifier) FROM stdin; +1 Dataverse.org dataverse@mailinator.com Dataverse Admin 2015-06-08 15:40:09.283 \N Admin t dataverseAdmin +\. + + +-- +-- Data for Name: authenticateduserlookup; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY authenticateduserlookup (id, authenticationproviderid, persistentuserid, authenticateduser_id) FROM stdin; +1 builtin dataverseAdmin 1 +\. + + +-- +-- Data for Name: authenticationproviderrow; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY authenticationproviderrow (id, enabled, factoryalias, factorydata, subtitle, title) FROM stdin; +builtin t BuiltinAuthenticationProvider Datavers' Internal Authentication provider Dataverse Local +echo-simple t Echo , Approves everyone, based on their credentials Echo provider +echo-dignified t Echo Sir,Esq. Approves everyone, based on their credentials, and adds some flair Dignified Echo provider +\. + + +-- +-- Data for Name: builtinuser; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY builtinuser (id, affiliation, email, encryptedpassword, firstname, lastname, passwordencryptionversion, "position", username) FROM stdin; +1 Dataverse.org dataverse@mailinator.com $2a$10$NGp3jxhSh4IBfiGIb5CPsOUovwfZ2xT7sklweW.LInjKtAZcbWokO Dataverse Admin 1 Admin dataverseAdmin +\. 
+ + +-- +-- Data for Name: controlledvocabalternate; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY controlledvocabalternate (id, strvalue, controlledvocabularyvalue_id, datasetfieldtype_id) FROM stdin; +1 arxiv 17 30 +2 BOTSWANA 266 79 +3 Brasil 268 79 +4 Gambia, The 317 79 +5 Germany (Federal Republic of) 319 79 +6 GHANA 320 79 +7 INDIA 339 79 +8 Sumatra 340 79 +9 Iran 341 79 +10 Iran (Islamic Republic of) 341 79 +11 IRAQ 342 79 +12 Laos 358 79 +13 LESOTHO 361 79 +14 MOZAMBIQUE 388 79 +15 NAMIBIA 390 79 +16 SWAZILAND 450 79 +17 Taiwan 454 79 +18 Tanzania 456 79 +19 UAE 470 79 +20 USA 472 79 +21 U.S.A 472 79 +22 United States of America 472 79 +23 U.S.A. 472 79 +24 YEMEN 483 79 +\. + + +-- +-- Data for Name: controlledvocabularyvalue; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY controlledvocabularyvalue (id, displayorder, identifier, strvalue, datasetfieldtype_id) FROM stdin; +1 0 \N N/A \N +2 0 D01 Agricultural Sciences 19 +3 1 D0 Arts and Humanities 19 +4 2 D1 Astronomy and Astrophysics 19 +5 3 D2 Business and Management 19 +6 4 D3 Chemistry 19 +7 5 D4 Earth and Environmental Sciences 19 +8 6 D5 Engineering 19 +9 7 D6 Medicine, Health and Life Sciences 19 +10 8 D7 Computer and Information Science 19 +11 9 D8 Law 19 +12 10 D9 Mathematical Sciences 19 +13 11 D10 Physics 19 +14 12 D11 Social Sciences 19 +15 13 D12 Other 19 +16 0 ark 30 +17 1 arXiv 30 +18 2 bibcode 30 +19 3 doi 30 +20 4 ean13 30 +21 5 eissn 30 +22 6 handle 30 +23 7 isbn 30 +24 8 issn 30 +25 9 istc 30 +26 10 lissn 30 +27 11 lsid 30 +28 12 pmid 30 +29 13 purl 30 +30 14 upc 30 +31 15 url 30 +32 16 urn 30 +33 0 Data Collector 44 +34 1 Data Curator 44 +35 2 Data Manager 44 +36 3 Editor 44 +37 4 Funder 44 +38 5 Hosting Institution 44 +39 6 Project Leader 44 +40 7 Project Manager 44 +41 8 Project Member 44 +42 9 Related Person 44 +43 10 Researcher 44 +44 11 Research Group 44 +45 12 Rights Holder 44 +46 13 Sponsor 44 +47 14 Supervisor 44 +48 15 Work Package Leader 
44 +49 16 Other 44 +50 0 ORCID 10 +51 1 ISNI 10 +52 2 LCNA 10 +53 0 Abkhaz 34 +54 1 Afar 34 +55 2 Afrikaans 34 +56 3 Akan 34 +57 4 Albanian 34 +58 5 Amharic 34 +59 6 Arabic 34 +60 7 Aragonese 34 +61 8 Armenian 34 +62 9 Assamese 34 +63 10 Avaric 34 +64 11 Avestan 34 +65 12 Aymara 34 +66 13 Azerbaijani 34 +67 14 Bambara 34 +68 15 Bashkir 34 +69 16 Basque 34 +70 17 Belarusian 34 +71 18 Bengali, Bangla 34 +72 19 Bihari 34 +73 20 Bislama 34 +74 21 Bosnian 34 +75 22 Breton 34 +76 23 Bulgarian 34 +77 24 Burmese 34 +78 25 Catalan,Valencian 34 +79 26 Chamorro 34 +80 27 Chechen 34 +81 28 Chichewa, Chewa, Nyanja 34 +82 29 Chinese 34 +83 30 Chuvash 34 +84 31 Cornish 34 +85 32 Corsican 34 +86 33 Cree 34 +87 34 Croatian 34 +88 35 Czech 34 +89 36 Danish 34 +90 37 Divehi, Dhivehi, Maldivian 34 +91 38 Dutch 34 +92 39 Dzongkha 34 +93 40 English 34 +94 41 Esperanto 34 +95 42 Estonian 34 +96 43 Ewe 34 +97 44 Faroese 34 +98 45 Fijian 34 +99 46 Finnish 34 +100 47 French 34 +101 48 Fula, Fulah, Pulaar, Pular 34 +102 49 Galician 34 +103 50 Georgian 34 +104 51 German 34 +105 52 Greek (modern) 34 +106 53 Guaraní 34 +107 54 Gujarati 34 +108 55 Haitian, Haitian Creole 34 +109 56 Hausa 34 +110 57 Hebrew (modern) 34 +111 58 Herero 34 +112 59 Hindi 34 +113 60 Hiri Motu 34 +114 61 Hungarian 34 +115 62 Interlingua 34 +116 63 Indonesian 34 +117 64 Interlingue 34 +118 65 Irish 34 +119 66 Igbo 34 +120 67 Inupiaq 34 +121 68 Ido 34 +122 69 Icelandic 34 +123 70 Italian 34 +124 71 Inuktitut 34 +125 72 Japanese 34 +126 73 Javanese 34 +127 74 Kalaallisut, Greenlandic 34 +128 75 Kannada 34 +129 76 Kanuri 34 +130 77 Kashmiri 34 +131 78 Kazakh 34 +132 79 Khmer 34 +133 80 Kikuyu, Gikuyu 34 +134 81 Kinyarwanda 34 +135 82 Kyrgyz 34 +136 83 Komi 34 +137 84 Kongo 34 +138 85 Korean 34 +139 86 Kurdish 34 +140 87 Kwanyama, Kuanyama 34 +141 88 Latin 34 +142 89 Luxembourgish, Letzeburgesch 34 +143 90 Ganda 34 +144 91 Limburgish, Limburgan, Limburger 34 +145 92 Lingala 34 +146 93 Lao 34 +147 94 Lithuanian 34 +148 95 
Luba-Katanga 34 +149 96 Latvian 34 +150 97 Manx 34 +151 98 Macedonian 34 +152 99 Malagasy 34 +153 100 Malay 34 +154 101 Malayalam 34 +155 102 Maltese 34 +156 103 Māori 34 +157 104 Marathi (Marāṭhī) 34 +158 105 Marshallese 34 +159 106 Mongolian 34 +160 107 Nauru 34 +161 108 Navajo, Navaho 34 +162 109 Northern Ndebele 34 +163 110 Nepali 34 +164 111 Ndonga 34 +165 112 Norwegian Bokmål 34 +166 113 Norwegian Nynorsk 34 +167 114 Norwegian 34 +168 115 Nuosu 34 +169 116 Southern Ndebele 34 +170 117 Occitan 34 +171 118 Ojibwe, Ojibwa 34 +172 119 Old Church Slavonic,Church Slavonic,Old Bulgarian 34 +173 120 Oromo 34 +174 121 Oriya 34 +175 122 Ossetian, Ossetic 34 +176 123 Panjabi, Punjabi 34 +177 124 Pāli 34 +178 125 Persian (Farsi) 34 +179 126 Polish 34 +180 127 Pashto, Pushto 34 +181 128 Portuguese 34 +182 129 Quechua 34 +183 130 Romansh 34 +184 131 Kirundi 34 +185 132 Romanian 34 +186 133 Russian 34 +187 134 Sanskrit (Saṁskṛta) 34 +188 135 Sardinian 34 +189 136 Sindhi 34 +190 137 Northern Sami 34 +191 138 Samoan 34 +192 139 Sango 34 +193 140 Serbian 34 +194 141 Scottish Gaelic, Gaelic 34 +195 142 Shona 34 +196 143 Sinhala, Sinhalese 34 +197 144 Slovak 34 +198 145 Slovene 34 +199 146 Somali 34 +200 147 Southern Sotho 34 +201 148 Spanish, Castilian 34 +202 149 Sundanese 34 +203 150 Swahili 34 +204 151 Swati 34 +205 152 Swedish 34 +206 153 Tamil 34 +207 154 Telugu 34 +208 155 Tajik 34 +209 156 Thai 34 +210 157 Tigrinya 34 +211 158 Tibetan Standard, Tibetan, Central 34 +212 159 Turkmen 34 +213 160 Tagalog 34 +214 161 Tswana 34 +215 162 Tonga (Tonga Islands) 34 +216 163 Turkish 34 +217 164 Tsonga 34 +218 165 Tatar 34 +219 166 Twi 34 +220 167 Tahitian 34 +221 168 Uyghur, Uighur 34 +222 169 Ukrainian 34 +223 170 Urdu 34 +224 171 Uzbek 34 +225 172 Venda 34 +226 173 Vietnamese 34 +227 174 Volapük 34 +228 175 Walloon 34 +229 176 Welsh 34 +230 177 Wolof 34 +231 178 Western Frisian 34 +232 179 Xhosa 34 +233 180 Yiddish 34 +234 181 Yoruba 34 +235 182 Zhuang, Chuang 34 +236 183 Zulu 34 
+237 184 Not applicable 34 +238 0 Afghanistan 79 +239 1 Albania 79 +240 2 Algeria 79 +241 3 American Samoa 79 +242 4 Andorra 79 +243 5 Angola 79 +244 6 Anguilla 79 +245 7 Antarctica 79 +246 8 Antigua and Barbuda 79 +247 9 Argentina 79 +248 10 Armenia 79 +249 11 Aruba 79 +250 12 Australia 79 +251 13 Austria 79 +252 14 Azerbaijan 79 +253 15 Bahamas 79 +254 16 Bahrain 79 +255 17 Bangladesh 79 +256 18 Barbados 79 +257 19 Belarus 79 +258 20 Belgium 79 +259 21 Belize 79 +260 22 Benin 79 +261 23 Bermuda 79 +262 24 Bhutan 79 +263 25 Bolivia, Plurinational State of 79 +264 26 Bonaire, Sint Eustatius and Saba 79 +265 27 Bosnia and Herzegovina 79 +266 28 Botswana 79 +267 29 Bouvet Island 79 +268 30 Brazil 79 +269 31 British Indian Ocean Territory 79 +270 32 Brunei Darussalam 79 +271 33 Bulgaria 79 +272 34 Burkina Faso 79 +273 35 Burundi 79 +274 36 Cambodia 79 +275 37 Cameroon 79 +276 38 Canada 79 +277 39 Cape Verde 79 +278 40 Cayman Islands 79 +279 41 Central African Republic 79 +280 42 Chad 79 +281 43 Chile 79 +282 44 China 79 +283 45 Christmas Island 79 +284 46 Cocos (Keeling) Islands 79 +285 47 Colombia 79 +286 48 Comoros 79 +287 49 Congo 79 +288 50 Congo, the Democratic Republic of the 79 +289 51 Cook Islands 79 +290 52 Costa Rica 79 +291 53 Croatia 79 +292 54 Cuba 79 +293 55 Curaçao 79 +294 56 Cyprus 79 +295 57 Czech Republic 79 +296 58 Côte d'Ivoire 79 +297 59 Denmark 79 +298 60 Djibouti 79 +299 61 Dominica 79 +300 62 Dominican Republic 79 +301 63 Ecuador 79 +302 64 Egypt 79 +303 65 El Salvador 79 +304 66 Equatorial Guinea 79 +305 67 Eritrea 79 +306 68 Estonia 79 +307 69 Ethiopia 79 +308 70 Falkland Islands (Malvinas) 79 +309 71 Faroe Islands 79 +310 72 Fiji 79 +311 73 Finland 79 +312 74 France 79 +313 75 French Guiana 79 +314 76 French Polynesia 79 +315 77 French Southern Territories 79 +316 78 Gabon 79 +317 79 Gambia 79 +318 80 Georgia 79 +319 81 Germany 79 +320 82 Ghana 79 +321 83 Gibraltar 79 +322 84 Greece 79 +323 85 Greenland 79 +324 86 Grenada 79 +325 87 
Guadeloupe 79 +326 88 Guam 79 +327 89 Guatemala 79 +328 90 Guernsey 79 +329 91 Guinea 79 +330 92 Guinea-Bissau 79 +331 93 Guyana 79 +332 94 Haiti 79 +333 95 Heard Island and Mcdonald Islands 79 +334 96 Holy See (Vatican City State) 79 +335 97 Honduras 79 +336 98 Hong Kong 79 +337 99 Hungary 79 +338 100 Iceland 79 +339 101 India 79 +340 102 Indonesia 79 +341 103 Iran, Islamic Republic of 79 +342 104 Iraq 79 +343 105 Ireland 79 +344 106 Isle of Man 79 +345 107 Israel 79 +346 108 Italy 79 +347 109 Jamaica 79 +348 110 Japan 79 +349 111 Jersey 79 +350 112 Jordan 79 +351 113 Kazakhstan 79 +352 114 Kenya 79 +353 115 Kiribati 79 +354 116 Korea, Democratic People's Republic of 79 +355 117 Korea, Republic of 79 +356 118 Kuwait 79 +357 119 Kyrgyzstan 79 +358 120 Lao People's Democratic Republic 79 +359 121 Latvia 79 +360 122 Lebanon 79 +361 123 Lesotho 79 +362 124 Liberia 79 +363 125 Libya 79 +364 126 Liechtenstein 79 +365 127 Lithuania 79 +366 128 Luxembourg 79 +367 129 Macao 79 +368 130 Macedonia, the Former Yugoslav Republic of 79 +369 131 Madagascar 79 +370 132 Malawi 79 +371 133 Malaysia 79 +372 134 Maldives 79 +373 135 Mali 79 +374 136 Malta 79 +375 137 Marshall Islands 79 +376 138 Martinique 79 +377 139 Mauritania 79 +378 140 Mauritius 79 +379 141 Mayotte 79 +380 142 Mexico 79 +381 143 Micronesia, Federated States of 79 +382 144 Moldova, Republic of 79 +383 145 Monaco 79 +384 146 Mongolia 79 +385 147 Montenegro 79 +386 148 Montserrat 79 +387 149 Morocco 79 +388 150 Mozambique 79 +389 151 Myanmar 79 +390 152 Namibia 79 +391 153 Nauru 79 +392 154 Nepal 79 +393 155 Netherlands 79 +394 156 New Caledonia 79 +395 157 New Zealand 79 +396 158 Nicaragua 79 +397 159 Niger 79 +398 160 Nigeria 79 +399 161 Niue 79 +400 162 Norfolk Island 79 +401 163 Northern Mariana Islands 79 +402 164 Norway 79 +403 165 Oman 79 +404 166 Pakistan 79 +405 167 Palau 79 +406 168 Palestine, State of 79 +407 169 Panama 79 +408 170 Papua New Guinea 79 +409 171 Paraguay 79 +410 172 Peru 79 +411 173 
Philippines 79 +412 174 Pitcairn 79 +413 175 Poland 79 +414 176 Portugal 79 +415 177 Puerto Rico 79 +416 178 Qatar 79 +417 179 Romania 79 +418 180 Russian Federation 79 +419 181 Rwanda 79 +420 182 Réunion 79 +421 183 Saint Barthélemy 79 +422 184 Saint Helena, Ascension and Tristan da Cunha 79 +423 185 Saint Kitts and Nevis 79 +424 186 Saint Lucia 79 +425 187 Saint Martin (French part) 79 +426 188 Saint Pierre and Miquelon 79 +427 189 Saint Vincent and the Grenadines 79 +428 190 Samoa 79 +429 191 San Marino 79 +430 192 Sao Tome and Principe 79 +431 193 Saudi Arabia 79 +432 194 Senegal 79 +433 195 Serbia 79 +434 196 Seychelles 79 +435 197 Sierra Leone 79 +436 198 Singapore 79 +437 199 Sint Maarten (Dutch part) 79 +438 200 Slovakia 79 +439 201 Slovenia 79 +440 202 Solomon Islands 79 +441 203 Somalia 79 +442 204 South Africa 79 +443 205 South Georgia and the South Sandwich Islands 79 +444 206 South Sudan 79 +445 207 Spain 79 +446 208 Sri Lanka 79 +447 209 Sudan 79 +448 210 Suriname 79 +449 211 Svalbard and Jan Mayen 79 +450 212 Swaziland 79 +451 213 Sweden 79 +452 214 Switzerland 79 +453 215 Syrian Arab Republic 79 +454 216 Taiwan, Province of China 79 +455 217 Tajikistan 79 +456 218 Tanzania, United Republic of 79 +457 219 Thailand 79 +458 220 Timor-Leste 79 +459 221 Togo 79 +460 222 Tokelau 79 +461 223 Tonga 79 +462 224 Trinidad and Tobago 79 +463 225 Tunisia 79 +464 226 Turkey 79 +465 227 Turkmenistan 79 +466 228 Turks and Caicos Islands 79 +467 229 Tuvalu 79 +468 230 Uganda 79 +469 231 Ukraine 79 +470 232 United Arab Emirates 79 +471 233 United Kingdom 79 +472 234 United States 79 +473 235 United States Minor Outlying Islands 79 +474 236 Uruguay 79 +475 237 Uzbekistan 79 +476 238 Vanuatu 79 +477 239 Venezuela, Bolivarian Republic of 79 +478 240 Viet Nam 79 +479 241 Virgin Islands, British 79 +480 242 Virgin Islands, U.S. 
79 +481 243 Wallis and Futuna 79 +482 244 Western Sahara 79 +483 245 Yemen 79 +484 246 Zambia 79 +485 247 Zimbabwe 79 +486 248 Åland Islands 79 +487 0 Image 115 +488 1 Mosaic 115 +489 2 EventList 115 +490 3 Spectrum 115 +491 4 Cube 115 +492 5 Table 115 +493 6 Catalog 115 +494 7 LightCurve 115 +495 8 Simulation 115 +496 9 Figure 115 +497 10 Artwork 115 +498 11 Animation 115 +499 12 PrettyPicture 115 +500 13 Documentation 115 +501 14 Other 115 +502 15 Library 115 +503 16 Press Release 115 +504 17 Facsimile 115 +505 18 Historical 115 +506 19 Observation 115 +507 20 Object 115 +508 21 Value 115 +509 22 ValuePair 115 +510 23 Survey 115 +511 0 EFO_0001427 Case Control 141 +512 1 EFO_0001428 Cross Sectional 141 +513 2 OCRE100078 Cohort Study 141 +514 3 NCI_C48202 Nested Case Control Design 141 +515 4 OTHER_DESIGN Not Specified 141 +516 5 OBI_0500006 Parallel Group Design 141 +517 6 OBI_0001033 Perturbation Design 141 +518 7 MESH_D016449 Randomized Controlled Trial 141 +519 8 TECH_DESIGN Technological Design 141 +520 0 EFO_0000246 Age 142 +521 1 BIOMARKERS Biomarkers 142 +522 2 CELL_SURFACE_M Cell Surface Markers 142 +523 3 EFO_0000324;EFO_0000322 Cell Type/Cell Line 142 +524 4 EFO_0000399 Developmental Stage 142 +525 5 OBI_0001293 Disease State 142 +526 6 IDO_0000469 Drug Susceptibility 142 +527 7 FBcv_0010001 Extract Molecule 142 +528 8 OBI_0001404 Genetic Characteristics 142 +529 9 OBI_0000690 Immunoprecipitation Antibody 142 +530 10 OBI_0100026 Organism 142 +531 11 OTHER_FACTOR Other 142 +532 12 PASSAGES_FACTOR Passages 142 +533 13 OBI_0000050 Platform 142 +534 14 EFO_0000695 Sex 142 +535 15 EFO_0005135 Strain 142 +536 16 EFO_0000724 Time Point 142 +537 17 BTO_0001384 Tissue Type 142 +538 18 EFO_0000369 Treatment Compound 142 +539 19 EFO_0000727 Treatment Type 142 +540 0 ERO_0001899 cell counting 145 +541 1 CHMO_0001085 cell sorting 145 +542 2 OBI_0000520 clinical chemistry analysis 145 +543 3 OBI_0000537 copy number variation profiling 145 +544 4 OBI_0000634 DNA 
methylation profiling 145 +545 5 OBI_0000748 DNA methylation profiling (Bisulfite-Seq) 145 +546 6 _OBI_0000634 DNA methylation profiling (MeDIP-Seq) 145 +547 7 _IDO_0000469 drug susceptibility 145 +548 8 ENV_GENE_SURVEY environmental gene survey 145 +549 9 ERO_0001183 genome sequencing 145 +550 10 OBI_0000630 hematology 145 +551 11 OBI_0600020 histology 145 +552 12 OBI_0002017 Histone Modification (ChIP-Seq) 145 +553 13 SO_0001786 loss of heterozygosity profiling 145 +554 14 OBI_0000366 metabolite profiling 145 +555 15 METAGENOME_SEQ metagenome sequencing 145 +556 16 OBI_0000615 protein expression profiling 145 +557 17 ERO_0000346 protein identification 145 +558 18 PROTEIN_DNA_BINDING protein-DNA binding site identification 145 +559 19 OBI_0000288 protein-protein interaction detection 145 +560 20 PROTEIN_RNA_BINDING protein-RNA binding (RIP-Seq) 145 +561 21 OBI_0000435 SNP analysis 145 +562 22 TARGETED_SEQ targeted sequencing 145 +563 23 OBI_0002018 transcription factor binding (ChIP-Seq) 145 +564 24 OBI_0000291 transcription factor binding site identification 145 +565 25 OBI_0000424 transcription profiling 145 +566 26 EFO_0001032 transcription profiling 145 +567 27 TRANSCRIPTION_PROF transcription profiling (Microarray) 145 +568 28 OBI_0001271 transcription profiling (RNA-Seq) 145 +569 29 TRAP_TRANS_PROF TRAP translational profiling 145 +570 30 OTHER_MEASUREMENT Other 145 +571 0 NCBITaxon_3702 Arabidopsis thaliana 143 +572 1 NCBITaxon_9913 Bos taurus 143 +573 2 NCBITaxon_6239 Caenorhabditis elegans 143 +574 3 NCBITaxon_3055 Chlamydomonas reinhardtii 143 +575 4 NCBITaxon_7955 Danio rerio (zebrafish) 143 +576 5 NCBITaxon_44689 Dictyostelium discoideum 143 +577 6 NCBITaxon_7227 Drosophila melanogaster 143 +578 7 NCBITaxon_562 Escherichia coli 143 +579 8 NCBITaxon_11103 Hepatitis C virus 143 +580 9 NCBITaxon_9606 Homo sapiens 143 +581 10 NCBITaxon_10090 Mus musculus 143 +582 11 NCBITaxon_33894 Mycobacterium africanum 143 +583 12 NCBITaxon_78331 Mycobacterium canetti 
143 +584 13 NCBITaxon_1773 Mycobacterium tuberculosis 143 +585 14 NCBITaxon_2104 Mycoplasma pneumoniae 143 +586 15 NCBITaxon_4530 Oryza sativa 143 +587 16 NCBITaxon_5833 Plasmodium falciparum 143 +588 17 NCBITaxon_4754 Pneumocystis carinii 143 +589 18 NCBITaxon_10116 Rattus norvegicus 143 +590 19 NCBITaxon_4932 Saccharomyces cerevisiae (brewer's yeast) 143 +591 20 NCBITaxon_4896 Schizosaccharomyces pombe 143 +592 21 NCBITaxon_31033 Takifugu rubripes 143 +593 22 NCBITaxon_8355 Xenopus laevis 143 +594 23 NCBITaxon_4577 Zea mays 143 +595 24 OTHER_TAXONOMY Other 143 +596 0 CULTURE_DRUG_TEST_SINGLE culture based drug susceptibility testing, single concentration 147 +597 1 CULTURE_DRUG_TEST_TWO culture based drug susceptibility testing, two concentrations 147 +598 2 CULTURE_DRUG_TEST_THREE culture based drug susceptibility testing, three or more concentrations (minimium inhibitory concentration measurement) 147 +599 3 OBI_0400148 DNA microarray 147 +600 4 OBI_0000916 flow cytometry 147 +601 5 OBI_0600053 gel electrophoresis 147 +602 6 OBI_0000470 mass spectrometry 147 +603 7 OBI_0000623 NMR spectroscopy 147 +604 8 OBI_0000626 nucleotide sequencing 147 +605 9 OBI_0400149 protein microarray 147 +606 10 OBI_0000893 real time PCR 147 +607 11 NO_TECHNOLOGY no technology required 147 +608 12 OTHER_TECHNOLOGY Other 147 +609 0 210_MS_GC 210-MS GC Ion Trap (Varian) 148 +610 1 220_MS_GC 220-MS GC Ion Trap (Varian) 148 +611 2 225_MS_GC 225-MS GC Ion Trap (Varian) 148 +612 3 240_MS_GC 240-MS GC Ion Trap (Varian) 148 +613 4 300_MS_GCMS 300-MS quadrupole GC/MS (Varian) 148 +614 5 320_MS_LCMS 320-MS LC/MS (Varian) 148 +615 6 325_MS_LCMS 325-MS LC/MS (Varian) 148 +616 7 500_MS_GCMS 320-MS GC/MS (Varian) 148 +617 8 500_MS_LCMS 500-MS LC/MS (Varian) 148 +618 9 800D 800D (Jeol) 148 +619 10 910_MS_TQFT 910-MS TQ-FT (Varian) 148 +620 11 920_MS_TQFT 920-MS TQ-FT (Varian) 148 +621 12 3100_MASS_D 3100 Mass Detector (Waters) 148 +622 13 6110_QUAD_LCMS 6110 Quadrupole LC/MS (Agilent) 148 +623 14 
6120_QUAD_LCMS 6120 Quadrupole LC/MS (Agilent) 148 +624 15 6130_QUAD_LCMS 6130 Quadrupole LC/MS (Agilent) 148 +625 16 6140_QUAD_LCMS 6140 Quadrupole LC/MS (Agilent) 148 +626 17 6310_ION_LCMS 6310 Ion Trap LC/MS (Agilent) 148 +627 18 6320_ION_LCMS 6320 Ion Trap LC/MS (Agilent) 148 +628 19 6330_ION_LCMS 6330 Ion Trap LC/MS (Agilent) 148 +629 20 6340_ION_LCMS 6340 Ion Trap LC/MS (Agilent) 148 +630 21 6410_TRIPLE_LCMS 6410 Triple Quadrupole LC/MS (Agilent) 148 +631 22 6430_TRIPLE_LCMS 6430 Triple Quadrupole LC/MS (Agilent) 148 +632 23 6460_TRIPLE_LCMS 6460 Triple Quadrupole LC/MS (Agilent) 148 +633 24 6490_TRIPLE_LCMS 6490 Triple Quadrupole LC/MS (Agilent) 148 +634 25 6530_Q_TOF_LCMS 6530 Q-TOF LC/MS (Agilent) 148 +635 26 6540_Q_TOF_LCMS 6540 Q-TOF LC/MS (Agilent) 148 +636 27 6210_Q_TOF_LCMS 6210 TOF LC/MS (Agilent) 148 +637 28 6220_Q_TOF_LCMS 6220 TOF LC/MS (Agilent) 148 +638 29 6230_Q_TOF_LCMS 6230 TOF LC/MS (Agilent) 148 +639 30 700B_TRIPLE_GCMS 7000B Triple Quadrupole GC/MS (Agilent) 148 +640 31 ACCUTO_DART AccuTO DART (Jeol) 148 +641 32 ACCUTOF_GC AccuTOF GC (Jeol) 148 +642 33 ACCUTOF_LC AccuTOF LC (Jeol) 148 +643 34 ACQUITY_SQD ACQUITY SQD (Waters) 148 +644 35 ACQUITY_TQD ACQUITY TQD (Waters) 148 +645 36 AGILENT Agilent 148 +646 37 AGILENT_ 5975E_GCMSD Agilent 5975E GC/MSD (Agilent) 148 +647 38 AGILENT_5975T_LTM_GCMSD Agilent 5975T LTM GC/MSD (Agilent) 148 +648 39 5975C_GCMSD 5975C Series GC/MSD (Agilent) 148 +649 40 AFFYMETRIX Affymetrix 148 +650 41 AMAZON_ETD_ESI amaZon ETD ESI Ion Trap (Bruker) 148 +651 42 AMAZON_X_ESI amaZon X ESI Ion Trap (Bruker) 148 +652 43 APEX_ULTRA_QQ_FTMS apex-ultra hybrid Qq-FTMS (Bruker) 148 +653 44 API_2000 API 2000 (AB Sciex) 148 +654 45 API_3200 API 3200 (AB Sciex) 148 +655 46 API_3200_QTRAP API 3200 QTRAP (AB Sciex) 148 +656 47 API_4000 API 4000 (AB Sciex) 148 +657 48 API_4000_QTRAP API 4000 QTRAP (AB Sciex) 148 +658 49 API_5000 API 5000 (AB Sciex) 148 +659 50 API_5500 API 5500 (AB Sciex) 148 +660 51 API_5500_QTRAP API 5500 QTRAP 
(AB Sciex) 148 +661 52 APPLIED_BIOSYSTEMS Applied Biosystems Group (ABI) 148 +662 53 AQI_BIOSCIENCES AQI Biosciences 148 +663 54 ATMOS_GC Atmospheric Pressure GC (Waters) 148 +664 55 AUTOFLEX_III_MALDI_TOF_MS autoflex III MALDI-TOF MS (Bruker) 148 +665 56 AUTOFLEX_SPEED autoflex speed(Bruker) 148 +666 57 AUTOSPEC_PREMIER AutoSpec Premier (Waters) 148 +667 58 AXIMA_MEGA_TOF AXIMA Mega TOF (Shimadzu) 148 +668 59 AXIMA_PERF_MALDI_TOF AXIMA Performance MALDI TOF/TOF (Shimadzu) 148 +669 60 A_10_ANALYZER A-10 Analyzer (Apogee) 148 +670 61 A_40_MINIFCM A-40-MiniFCM (Apogee) 148 +671 62 BACTIFLOW Bactiflow (Chemunex SA) 148 +672 63 BASE4INNOVATION Base4innovation 148 +673 64 BD_BACTEC_MGIT_320 BD BACTEC MGIT 320 148 +674 65 BD_BACTEC_MGIT_960 BD BACTEC MGIT 960 148 +675 66 BD_RADIO_BACTEC_460TB BD Radiometric BACTEC 460TB 148 +676 67 BIONANOMATRIX BioNanomatrix 148 +677 68 CELL_LAB_QUANTA_SC Cell Lab Quanta SC (Becman Coulter) 148 +678 69 CLARUS_560_D_GCMS Clarus 560 D GC/MS (PerkinElmer) 148 +679 70 CLARUS_560_S_GCMS Clarus 560 S GC/MS (PerkinElmer) 148 +680 71 CLARUS_600_GCMS Clarus 600 GC/MS (PerkinElmer) 148 +681 72 COMPLETE_GENOMICS Complete Genomics 148 +682 73 CYAN Cyan (Dako Cytomation) 148 +683 74 CYFLOW_ML CyFlow ML (Partec) 148 +684 75 CYFLOW_SL Cyow SL (Partec) 148 +685 76 CYFLOW_SL3 CyFlow SL3 (Partec) 148 +686 77 CYTOBUOY CytoBuoy (Cyto Buoy Inc) 148 +687 78 CYTOSENCE CytoSence (Cyto Buoy Inc) 148 +688 79 CYTOSUB CytoSub (Cyto Buoy Inc) 148 +689 80 DANAHER Danaher 148 +690 81 DFS DFS (Thermo Scientific) 148 +691 82 EXACTIVE Exactive(Thermo Scientific) 148 +692 83 FACS_CANTO FACS Canto (Becton Dickinson) 148 +693 84 FACS_CANTO2 FACS Canto2 (Becton Dickinson) 148 +694 85 FACS_SCAN FACS Scan (Becton Dickinson) 148 +695 86 FC_500 FC 500 (Becman Coulter) 148 +696 87 GCMATE_II GCmate II GC/MS (Jeol) 148 +697 88 GCMS_QP2010_PLUS GCMS-QP2010 Plus (Shimadzu) 148 +698 89 GCMS_QP2010S_PLUS GCMS-QP2010S Plus (Shimadzu) 148 +699 90 GCT_PREMIER GCT Premier (Waters) 148 
+700 91 GENEQ GENEQ 148 +701 92 GENOME_CORP Genome Corp. 148 +702 93 GENOVOXX GenoVoxx 148 +703 94 GNUBIO GnuBio 148 +704 95 GUAVA_EASYCYTE_MINI Guava EasyCyte Mini (Millipore) 148 +705 96 GUAVA_EASYCYTE_PLUS Guava EasyCyte Plus (Millipore) 148 +706 97 GUAVA_PERSONAL_CELL Guava Personal Cell Analysis (Millipore) 148 +707 98 GUAVA_PERSONAL_CELL_96 Guava Personal Cell Analysis-96 (Millipore) 148 +708 99 HELICOS_BIO Helicos BioSciences 148 +709 100 ILLUMINA Illumina 148 +710 101 INDIRECT_LJ_MEDIUM Indirect proportion method on LJ medium 148 +711 102 INDIRECT_AGAR_7H9 Indirect proportion method on Middlebrook Agar 7H9 148 +712 103 INDIRECT_AGAR_7H10 Indirect proportion method on Middlebrook Agar 7H10 148 +713 104 INDIRECT_AGAR_7H11 Indirect proportion method on Middlebrook Agar 7H11 148 +714 105 INFLUX_ANALYZER inFlux Analyzer (Cytopeia) 148 +715 106 INTELLIGENT_BIOSYSTEMS Intelligent Bio-Systems 148 +716 107 ITQ_700 ITQ 700 (Thermo Scientific) 148 +717 108 ITQ_900 ITQ 900 (Thermo Scientific) 148 +718 109 ITQ_1100 ITQ 1100 (Thermo Scientific) 148 +719 110 JMS_53000_SPIRAL JMS-53000 SpiralTOF (Jeol) 148 +720 111 LASERGEN LaserGen 148 +721 112 LCMS_2020 LCMS-2020 (Shimadzu) 148 +722 113 LCMS_2010EV LCMS-2010EV (Shimadzu) 148 +723 114 LCMS_IT_TOF LCMS-IT-TOF (Shimadzu) 148 +724 115 LI_COR Li-Cor 148 +725 116 LIFE_TECH Life Tech 148 +726 117 LIGHTSPEED_GENOMICS LightSpeed Genomics 148 +727 118 LCT_PREMIER_XE LCT Premier XE (Waters) 148 +728 119 LCQ_DECA_XP_MAX LCQ Deca XP MAX (Thermo Scientific) 148 +729 120 LCQ_FLEET LCQ Fleet (Thermo Scientific) 148 +730 121 LXQ_THERMO LXQ (Thermo Scientific) 148 +731 122 LTQ_CLASSIC LTQ Classic (Thermo Scientific) 148 +732 123 LTQ_XL LTQ XL (Thermo Scientific) 148 +733 124 LTQ_VELOS LTQ Velos (Thermo Scientific) 148 +734 125 LTQ_ORBITRAP_CLASSIC LTQ Orbitrap Classic (Thermo Scientific) 148 +735 126 LTQ_ORBITRAP_XL LTQ Orbitrap XL (Thermo Scientific) 148 +736 127 LTQ_ORBITRAP_DISCOVERY LTQ Orbitrap Discovery (Thermo Scientific) 148 +737 
128 LTQ_ORBITRAP_VELOS LTQ Orbitrap Velos (Thermo Scientific) 148 +738 129 LUMINEX_100 Luminex 100 (Luminex) 148 +739 130 LUMINEX_200 Luminex 200 (Luminex) 148 +740 131 MACS_QUANT MACS Quant (Miltenyi) 148 +741 132 MALDI_SYNAPT_G2_HDMS MALDI SYNAPT G2 HDMS (Waters) 148 +742 133 MALDI_SYNAPT_G2_MS MALDI SYNAPT G2 MS (Waters) 148 +743 134 MALDI_SYNAPT_HDMS MALDI SYNAPT HDMS (Waters) 148 +744 135 MALDI_SYNAPT_MS MALDI SYNAPT MS (Waters) 148 +745 136 MALDI_MICROMX MALDI micro MX (Waters) 148 +746 137 MAXIS maXis (Bruker) 148 +747 138 MAXISG4 maXis G4 (Bruker) 148 +748 139 MICROFLEX_LT_MALDI_TOF_MS microflex LT MALDI-TOF MS (Bruker) 148 +749 140 MICROFLEX_LRF_MALDI_TOF_MS microflex LRF MALDI-TOF MS (Bruker) 148 +750 141 MICROFLEX_III_TOF_MS microflex III MALDI-TOF MS (Bruker) 148 +751 142 MICROTOF_II_ESI_TOF micrOTOF II ESI TOF (Bruker) 148 +752 143 MICROTOF_Q_II_ESI_QQ_TOF micrOTOF-Q II ESI-Qq-TOF (Bruker) 148 +753 144 MICROPLATE_ALAMAR_BLUE_COLORIMETRIC microplate Alamar Blue (resazurin) colorimetric method 148 +754 145 MSTATION Mstation (Jeol) 148 +755 146 MSQ_PLUS MSQ Plus (Thermo Scientific) 148 +756 147 NABSYS NABsys 148 +757 148 NANOPHOTONICS_BIOSCIENCES Nanophotonics Biosciences 148 +758 149 NETWORK_BIOSYSTEMS Network Biosystems 148 +759 150 NIMBLEGEN Nimblegen 148 +760 151 OXFORD_NANOPORE_TECHNOLOGIES Oxford Nanopore Technologies 148 +761 152 PACIFIC_BIOSCIENCES Pacific Biosciences 148 +762 153 POPULATION_GENETICS_TECHNOLOGIES Population Genetics Technologies 148 +763 154 Q1000GC_ULTRAQUAD Q1000GC UltraQuad (Jeol) 148 +764 155 QUATTRO_MICRO_API Quattro micro API (Waters) 148 +765 156 QUATTRO_MICRO_GC Quattro micro GC (Waters) 148 +766 157 QUATTRO_PREMIER_XE Quattro Premier XE (Waters) 148 +767 158 QSTAR QSTAR (AB Sciex) 148 +768 159 REVEO Reveo 148 +769 160 ROCHE Roche 148 +770 161 SEIRAD Seirad 148 +771 162 SOLARIX_HYBRID_QQ_FTMS solariX hybrid Qq-FTMS (Bruker) 148 +772 163 SOMACOUNT Somacount (Bently Instruments) 148 +773 164 SOMASCOPE SomaScope (Bently 
Instruments) 148 +774 165 SYNAPT_G2_HDMS SYNAPT G2 HDMS (Waters) 148 +775 166 SYNAPT_G2_MS SYNAPT G2 MS (Waters) 148 +776 167 SYNAPT_HDMS SYNAPT HDMS (Waters) 148 +777 168 SYNAPT_MS SYNAPT MS (Waters) 148 +778 169 TRIPLETOF_5600 TripleTOF 5600 (AB Sciex) 148 +779 170 TSQ_QUANTUM_ULTRA TSQ Quantum Ultra (Thermo Scientific) 148 +780 171 TSQ_QUANTUM_ACCESS TSQ Quantum Access (Thermo Scientific) 148 +781 172 TSQ_QUANTUM_ACCESS_MAX TSQ Quantum Access MAX (Thermo Scientific) 148 +782 173 TSQ_QUANTUM_DISCOVERY_MAX TSQ Quantum Discovery MAX (Thermo Scientific) 148 +783 174 TSQ_QUANTUM_GC TSQ Quantum GC (Thermo Scientific) 148 +784 175 TSQ_QUANTUM_XLS TSQ Quantum XLS (Thermo Scientific) 148 +785 176 TSQ_VANTAGE TSQ Vantage (Thermo Scientific) 148 +786 177 ULTRAFLEXTREME_MALDI_TOF_MS ultrafleXtreme MALDI-TOF MS (Bruker) 148 +787 178 VISIGEN_BIO VisiGen Biotechnologies 148 +788 179 XEVO_G2_QTOF Xevo G2 QTOF (Waters) 148 +789 180 XEVO_QTOF_MS Xevo QTof MS (Waters) 148 +790 181 XEVO_TQ_MS Xevo TQ MS (Waters) 148 +791 182 XEVO_TQ_S Xevo TQ-S (Waters) 148 +792 183 OTHER_PLATFORM Other 148 +793 0 abstract 154 +794 1 addendum 154 +795 2 announcement 154 +796 3 article-commentary 154 +797 4 book review 154 +798 5 books received 154 +799 6 brief report 154 +800 7 calendar 154 +801 8 case report 154 +802 9 collection 154 +803 10 correction 154 +804 11 data paper 154 +805 12 discussion 154 +806 13 dissertation 154 +807 14 editorial 154 +808 15 in brief 154 +809 16 introduction 154 +810 17 letter 154 +811 18 meeting report 154 +812 19 news 154 +813 20 obituary 154 +814 21 oration 154 +815 22 partial retraction 154 +816 23 product review 154 +817 24 rapid communication 154 +818 25 reply 154 +819 26 reprint 154 +820 27 research article 154 +821 28 retraction 154 +822 29 review article 154 +823 30 translation 154 +824 31 other 154 +\. 
+ + +-- +-- Data for Name: customfieldmap; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY customfieldmap (id, sourcedatasetfield, sourcetemplate, targetdatasetfield) FROM stdin; +\. + + +-- +-- Data for Name: customquestion; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY customquestion (id, displayorder, hidden, questionstring, questiontype, required, guestbook_id) FROM stdin; +\. + + +-- +-- Data for Name: customquestionresponse; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY customquestionresponse (id, response, customquestion_id, guestbookresponse_id) FROM stdin; +\. + + +-- +-- Data for Name: customquestionvalue; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY customquestionvalue (id, displayorder, valuestring, customquestion_id) FROM stdin; +\. + + +-- +-- Data for Name: datafile; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datafile (id, contenttype, filesystemname, filesize, ingeststatus, md5, name, restricted) FROM stdin; +4 application/vnd.google-earth.kmz 14dd48f37d9-68789d517db2 0 A cfaad1e9562443bb07119fcdbe11ccd2 \N f +\. + + +-- +-- Data for Name: datafilecategory; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datafilecategory (id, name, dataset_id) FROM stdin; +1 Code 3 +\. + + +-- +-- Data for Name: datafiletag; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datafiletag (id, type, datafile_id) FROM stdin; +\. + + +-- +-- Data for Name: dataset; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataset (id, authority, doiseparator, fileaccessrequest, globalidcreatetime, identifier, protocol, guestbook_id, thumbnailfile_id) FROM stdin; +3 10.5072/FK2 / f 2015-06-08 13:30:09.023 A0Y3TZ doi \N \N +\. 
+ + +-- +-- Data for Name: datasetfield; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetfield (id, datasetfieldtype_id, datasetversion_id, parentdatasetfieldcompoundvalue_id, template_id) FROM stdin; +1 16 1 \N \N +2 12 1 \N \N +3 7 1 \N \N +4 1 1 \N \N +5 14 \N 2 \N +6 8 \N 3 \N +7 19 1 \N \N +8 17 \N 1 \N +9 57 1 \N \N +10 10 \N 3 \N +11 13 \N 2 \N +12 15 \N 2 \N +13 9 \N 3 \N +14 56 1 \N \N +\. + + +-- +-- Data for Name: datasetfield_controlledvocabularyvalue; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetfield_controlledvocabularyvalue (datasetfield_id, controlledvocabularyvalues_id) FROM stdin; +7 3 +\. + + +-- +-- Data for Name: datasetfieldcompoundvalue; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetfieldcompoundvalue (id, displayorder, parentdatasetfield_id) FROM stdin; +1 0 1 +2 0 2 +3 0 3 +\. + + +-- +-- Data for Name: datasetfielddefaultvalue; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetfielddefaultvalue (id, displayorder, strvalue, datasetfield_id, defaultvalueset_id, parentdatasetfielddefaultvalue_id) FROM stdin; +\. + + +-- +-- Data for Name: datasetfieldtype; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetfieldtype (id, advancedsearchfieldtype, allowcontrolledvocabulary, allowmultiples, description, displayformat, displayoncreate, displayorder, facetable, fieldtype, name, required, title, watermark, metadatablock_id, parentdatasetfieldtype_id) FROM stdin; +1 t f f Full title by which the Dataset is known. t 0 f TEXT title t Title Enter title... 1 \N +2 f f f A secondary title used to amplify or state certain limitations on the main title. f 1 f TEXT subtitle f Subtitle 1 \N +3 f f f A title by which the work is commonly referred, or an abbreviation of the title. 
f 2 f TEXT alternativeTitle f Alternative Title 1 \N +4 f f t Another unique identifier that identifies this Dataset (e.g., producer's or another repository's number). : f 3 f NONE otherId f Other ID 1 \N +5 f f f Name of agency which generated this identifier. #VALUE f 4 f TEXT otherIdAgency f Agency 1 4 +6 f f f Other identifier that corresponds to this Dataset. #VALUE f 5 f TEXT otherIdValue f Identifier 1 4 +7 f f t The person(s), corporate body(ies), or agency(ies) responsible for creating the work. t 6 f NONE author f Author 1 \N +8 t f f The author's Family Name, Given Name or the name of the organization responsible for this Dataset. #VALUE t 7 t TEXT authorName t Name FamilyName, GivenName or Organization 1 7 +9 t f f The organization with which the author is affiliated. (#VALUE) t 8 t TEXT authorAffiliation f Affiliation 1 7 +10 f t f Name of the identifier scheme (ORCID, ISNI). - #VALUE: t 9 f TEXT authorIdentifierScheme f Identifier Scheme 1 7 +11 f f f Uniquely identifies an individual author or organization, according to various schemes. #VALUE t 10 f TEXT authorIdentifier f Identifier 1 7 +12 f f t The contact(s) for this Dataset. t 11 f NONE datasetContact f Contact 1 \N +13 f f f The contact's Family Name, Given Name or the name of the organization. #VALUE t 12 f TEXT datasetContactName f Name FamilyName, GivenName or Organization 1 12 +14 f f f The organization with which the contact is affiliated. (#VALUE) t 13 f TEXT datasetContactAffiliation f Affiliation 1 12 +15 f f f The e-mail address(es) of the contact(s) for the Dataset. This will not be displayed. #EMAIL t 14 f EMAIL datasetContactEmail t E-mail 1 12 +16 f f t A summary describing the purpose, nature, and scope of the Dataset. t 15 f NONE dsDescription f Description 1 \N +17 t f f A summary describing the purpose, nature, and scope of the Dataset. 
#VALUE t 16 f TEXTBOX dsDescriptionValue t Text 1 16 +18 f f f In cases where a Dataset contains more than one description (for example, one might be supplied by the data producer and another prepared by the data repository where the data are deposited), the date attribute is used to distinguish between the two descriptions. The date attribute follows the ISO convention of YYYY-MM-DD. (#VALUE) t 17 f DATE dsDescriptionDate f Date YYYY-MM-DD 1 16 +19 t t t Domain-specific Subject Categories that are topically relevant to the Dataset. t 18 t TEXT subject t Subject 1 \N +20 f f t Key terms that describe important aspects of the Dataset. t 19 f NONE keyword f Keyword 1 \N +21 t f f Key terms that describe important aspects of the Dataset. Can be used for building keyword indexes and for classification and retrieval purposes. A controlled vocabulary can be employed. The vocab attribute is provided for specification of the controlled vocabulary in use, such as LCSH, MeSH, or others. The vocabURI attribute specifies the location for the full controlled vocabulary. #VALUE t 20 t TEXT keywordValue f Term 1 20 +22 f f f For the specification of the keyword controlled vocabulary in use, such as LCSH, MeSH, or others. (#VALUE) t 21 f TEXT keywordVocabulary f Vocabulary 1 20 +23 f f f Keyword vocabulary URL points to the web presence that describes the keyword vocabulary, if appropriate. Enter an absolute URL where the keyword vocabulary web site is found, such as http://www.my.org. #VALUE t 22 f URL keywordVocabularyURI f Vocabulary URL Enter full URL, starting with http:// 1 20 +24 f f t The classification field indicates the broad important topic(s) and subjects that the data cover. Library of Congress subject terms may be used here. f 23 f NONE topicClassification f Topic Classification 1 \N +25 t f f Topic or Subject term that is relevant to this Dataset. 
#VALUE f 24 t TEXT topicClassValue f Term 1 24 +26 f f f Provided for specification of the controlled vocabulary in use, e.g., LCSH, MeSH, etc. (#VALUE) f 25 f TEXT topicClassVocab f Vocabulary 1 24 +27 f f f Specifies the URL location for the full controlled vocabulary. #VALUE f 26 f URL topicClassVocabURI f Vocabulary URL Enter full URL, starting with http:// 1 24 +28 f f t Publications that use the data from this Dataset. f 27 f NONE publication f Related Publication 1 \N +29 t f f The full bibliographic citation for this related publication. #VALUE f 28 f TEXTBOX publicationCitation f Citation 1 28 +30 t t f The type of digital identifier used for this publication (e.g., Digital Object Identifier (DOI)). #VALUE: f 29 f TEXT publicationIDType f ID Type 1 28 +31 t f f The identifier for the selected ID type. #VALUE f 30 f TEXT publicationIDNumber f ID Number 1 28 +32 f f f Link to the publication web page (e.g., journal article page, archive record page, or other). #VALUE f 31 f URL publicationURL f URL Enter full URL, starting with http:// 1 28 +33 f f f Additional important information about the Dataset. t 32 f TEXTBOX notesText f Notes 1 \N +34 t t t Language of the Dataset f 33 t TEXT language f Language 1 \N +35 f f t Person or organization with the financial or administrative responsibility over this Dataset f 34 f NONE producer f Producer 1 \N +36 t f f Producer name #VALUE f 35 t TEXT producerName f Name FamilyName, GivenName or Organization 1 35 +37 f f f The organization with which the producer is affiliated. (#VALUE) f 36 f TEXT producerAffiliation f Affiliation 1 35 +38 f f f The abbreviation by which the producer is commonly known. (ex. IQSS, ICPSR) (#VALUE) f 37 f TEXT producerAbbreviation f Abbreviation 1 35 +39 f f f Producer URL points to the producer's web presence, if appropriate. Enter an absolute URL where the producer's web site is found, such as http://www.my.org. 
#VALUE f 38 f URL producerURL f URL Enter full URL, starting with http:// 1 35 +40 f f f URL for the producer's logo, which points to this producer's web-accessible logo image. Enter an absolute URL where the producer's logo image is found, such as http://www.my.org/images/logo.gif.
                            f 39 f URL producerLogoURL f Logo URL Enter full URL for image, starting with http:// 1 35 +41 t f f Date when the data collection or other materials were produced (not distributed, published or archived). f 40 t DATE productionDate f Production Date YYYY-MM-DD 1 \N +42 f f f The location where the data collection and any other related materials were produced. f 41 f TEXT productionPlace f Production Place 1 \N +43 f f t The organization or person responsible for either collecting, managing, or otherwise contributing in some form to the development of the resource. : f 42 f NONE contributor f Contributor 1 \N +44 t t f The type of contributor of the resource. #VALUE f 43 t TEXT contributorType f Type 1 43 +45 t f f The Family Name, Given Name or organization name of the contributor. #VALUE f 44 t TEXT contributorName f Name FamilyName, GivenName or Organization 1 43 +46 f f t Grant Information : f 45 f NONE grantNumber f Grant Information 1 \N +47 f f f Grant Number Agency #VALUE f 46 f TEXT grantNumberAgency f Grant Agency 1 46 +48 f f f The grant or contract number of the project that sponsored the effort. #VALUE f 47 f TEXT grantNumberValue f Grant Number 1 46 +49 f f t The organization designated by the author or producer to generate copies of the particular work including any necessary editions or revisions. f 48 f NONE distributor f Distributor 1 \N +50 t f f Distributor name #VALUE f 49 t TEXT distributorName f Name FamilyName, GivenName or Organization 1 49 +51 f f f The organization with which the distributor contact is affiliated. (#VALUE) f 50 f TEXT distributorAffiliation f Affiliation 1 49 +52 f f f The abbreviation by which this distributor is commonly known (e.g., IQSS, ICPSR). (#VALUE) f 51 f TEXT distributorAbbreviation f Abbreviation 1 49 +53 f f f Distributor URL points to the distributor's web presence, if appropriate. Enter an absolute URL where the distributor's web site is found, such as http://www.my.org. 
#VALUE f 52 f URL distributorURL f URL Enter full URL, starting with http:// 1 49 +54 f f f URL of the distributor's logo, which points to this distributor's web-accessible logo image. Enter an absolute URL where the distributor's logo image is found, such as http://www.my.org/images/logo.gif.
                            f 53 f URL distributorLogoURL f Logo URL Enter full URL for image, starting with http:// 1 49 +55 t f f Date that the work was made available for distribution/presentation. f 54 t DATE distributionDate f Distribution Date YYYY-MM-DD 1 \N +56 f f f The person (Family Name, Given Name) or the name of the organization that deposited this Dataset to the repository. f 55 f TEXT depositor f Depositor 1 \N +57 f f f Date that the Dataset was deposited into the repository. f 56 t DATE dateOfDeposit f Deposit Date YYYY-MM-DD 1 \N +58 f f t Time period to which the data refer. This item reflects the time period covered by the data, not the dates of coding or making documents machine-readable or the dates the data were collected. Also known as span. ; f 57 f NONE timePeriodCovered f Time Period Covered 1 \N +59 t f f Start date which reflects the time period covered by the data, not the dates of coding or making documents machine-readable or the dates the data were collected. #NAME: #VALUE f 58 t DATE timePeriodCoveredStart f Start YYYY-MM-DD 1 58 +60 t f f End date which reflects the time period covered by the data, not the dates of coding or making documents machine-readable or the dates the data were collected. #NAME: #VALUE f 59 t DATE timePeriodCoveredEnd f End YYYY-MM-DD 1 58 +61 f f t Contains the date(s) when the data were collected. ; f 60 f NONE dateOfCollection f Date of Collection 1 \N +62 f f f Date when the data collection started. #NAME: #VALUE f 61 f DATE dateOfCollectionStart f Start YYYY-MM-DD 1 61 +63 f f f Date when the data collection ended. 
#NAME: #VALUE f 62 f DATE dateOfCollectionEnd f End YYYY-MM-DD 1 61 +64 t f t Type of data included in the file: survey data, census/enumeration data, aggregate data, clinical data, event/transaction data, program source code, machine-readable text, administrative records data, experimental data, psychological test, textual data, coded textual, coded documents, time budget diaries, observation data/ratings, process-produced data, or other. f 63 t TEXT kindOfData f Kind of Data 1 \N +65 f f f Information about the Dataset series. : f 64 f NONE series f Series 1 \N +66 t f f Name of the dataset series to which the Dataset belongs. #VALUE f 65 t TEXT seriesName f Name 1 65 +67 f f f History of the series and summary of those features that apply to the series as a whole. #VALUE f 66 f TEXTBOX seriesInformation f Information 1 65 +68 f f t Information about the software used to generate the Dataset. , f 67 f NONE software f Software 1 \N +69 f t f Name of software used to generate the Dataset. #VALUE f 68 f TEXT softwareName f Name 1 68 +70 f f f Version of the software used to generate the Dataset. #NAME: #VALUE f 69 f TEXT softwareVersion f Version 1 68 +71 f f t Any material related to this Dataset. f 70 f TEXTBOX relatedMaterial f Related Material 1 \N +72 f f t Any Datasets that are related to this Dataset, such as previous research on this subject. f 71 f TEXTBOX relatedDatasets f Related Datasets 1 \N +73 f f t Any references that would serve as background or supporting material to this Dataset. f 72 f TEXT otherReferences f Other References 1 \N +74 f f t List of books, articles, serials, or machine-readable data files that served as the sources of the data collection. f 73 f TEXTBOX dataSources f Data Sources 1 \N +75 f f f For historical materials, information about the origin of the sources and the rules followed in establishing the sources should be specified. 
f 74 f TEXTBOX originOfSources f Origin of Sources 1 \N +76 f f f Assessment of characteristics and source material. f 75 f TEXTBOX characteristicOfSources f Characteristic of Sources Noted 1 \N +77 f f f Level of documentation of the original sources. f 76 f TEXTBOX accessToSources f Documentation and Access to Sources 1 \N +78 f f t Information on the geographic coverage of the data. Includes the total geographic scope of the data. f 0 f NONE geographicCoverage f Geographic Coverage 2 \N +79 t t f The country or nation that the Dataset is about. f 1 t TEXT country f Country / Nation 2 78 +80 t f f The state or province that the Dataset is about. Use GeoNames for correct spelling and avoid abbreviations. f 2 t TEXT state f State / Province 2 78 +81 t f f The name of the city that the Dataset is about. Use GeoNames for correct spelling and avoid abbreviations. f 3 t TEXT city f City 2 78 +82 f f f Other information on the geographic coverage of the data. f 4 f TEXT otherGeographicCoverage f Other 2 78 +83 t f t Lowest level of geographic aggregation covered by the Dataset, e.g., village, county, region. f 5 t TEXT geographicUnit f Geographic Unit 2 \N +84 f f t The fundamental geometric description for any Dataset that models geography is the geographic bounding box. It describes the minimum box, defined by west and east longitudes and north and south latitudes, which includes the largest geographic extent of the Dataset's geographic coverage. This element is used in the first pass of a coordinate-based search. Inclusion of this element in the codebook is recommended, but is required if the bound polygon box is included. f 6 f NONE geographicBoundingBox f Geographic Bounding Box 2 \N +85 f f f Westernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180,0 <= West Bounding Longitude Value <= 180,0. 
f 7 f TEXT westLongitude f West Longitude 2 84 +86 f f f Easternmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180,0 <= East Bounding Longitude Value <= 180,0. f 8 f TEXT eastLongitude f East Longitude 2 84 +87 f f f Northernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90,0 <= North Bounding Latitude Value <= 90,0. f 9 f TEXT northLongitude f North Latitude 2 84 +88 f f f Southernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90,0 <= South Bounding Latitude Value <= 90,0. f 10 f TEXT southLongitude f South Latitude 2 84 +89 t f t Basic unit of analysis or observation that this Dataset describes, such as individuals, families/households, groups, institutions/organizations, administrative units, and more. For information about the DDI's controlled vocabulary for this element, please refer to the DDI web page at http://www.ddialliance.org/Specification/DDI-CV/. f 0 t TEXTBOX unitOfAnalysis f Unit of Analysis 3 \N +90 t f t Description of the population covered by the data in the file; the group of people or other elements that are the object of the study and to which the study results refer. Age, nationality, and residence commonly help to delineate a given universe, but any number of other factors may be used, such as age limits, sex, marital status, race, ethnic group, nationality, income, veteran status, criminal convictions, and more. The universe may consist of elements other than persons, such as housing units, court cases, deaths, countries, and so on. In general, it should be possible to tell from the description of the universe whether a given individual or element is a member of the population under study. Also known as the universe of interest, population of interest, and target population. 
f 1 t TEXTBOX universe f Universe 3 \N +91 t f f The time method or time dimension of the data collection, such as panel, cross-sectional, trend, time- series, or other. f 2 t TEXT timeMethod f Time Method 3 \N +92 f f f Individual, agency or organization responsible for administering the questionnaire or interview or compiling the data. f 3 f TEXT dataCollector f Data Collector FamilyName, GivenName or Organization 3 \N +93 f f f Type of training provided to the data collector f 4 f TEXT collectorTraining f Collector Training 3 \N +94 t f f If the data collected includes more than one point in time, indicate the frequency with which the data was collected; that is, monthly, quarterly, or other. f 5 t TEXT frequencyOfDataCollection f Frequency 3 \N +95 f f f Type of sample and sample design used to select the survey respondents to represent the population. May include reference to the target sample size and the sampling fraction. f 6 f TEXTBOX samplingProcedure f Sampling Procedure 3 \N +96 f f f Specific information regarding the target sample size, actual sample size, and the formula used to determine this. f 7 f NONE targetSampleSize f Target Sample Size 3 \N +97 f f f Actual sample size. f 8 f INT targetSampleActualSize f Actual Enter an integer... 3 96 +98 f f f Formula used to determine target sample size. f 9 f TEXT targetSampleSizeFormula f Formula 3 96 +99 f f f Show correspondence as well as discrepancies between the sampled units (obtained) and available statistics for the population (age, sex-ratio, marital status, etc.) as a whole. f 10 f TEXT deviationsFromSampleDesign f Major Deviations for Sample Design 3 \N +100 f f f Method used to collect the data; instrumentation characteristics (e.g., telephone interview, mail questionnaire, or other). f 11 f TEXTBOX collectionMode f Collection Mode 3 \N +101 f f f Type of data collection instrument used. 
Structured indicates an instrument in which all respondents are asked the same questions/tests, possibly with precoded answers. If a small portion of such a questionnaire includes open-ended questions, provide appropriate comments. Semi-structured indicates that the research instrument contains mainly open-ended questions. Unstructured indicates that in-depth interviews were conducted. f 12 f TEXT researchInstrument f Type of Research Instrument 3 \N +102 f f f Description of noteworthy aspects of the data collection situation. Includes information on factors such as cooperativeness of respondents, duration of interviews, number of call backs, or similar. f 13 f TEXTBOX dataCollectionSituation f Characteristics of Data Collection Situation 3 \N +103 f f f Summary of actions taken to minimize data loss. Include information on actions such as follow-up visits, supervisory checks, historical matching, estimation, and so on. f 14 f TEXT actionsToMinimizeLoss f Actions to Minimize Losses 3 \N +104 f f f Control OperationsMethods to facilitate data control performed by the primary investigator or by the data archive. f 15 f TEXT controlOperations f Control Operations 3 \N +105 f f f The use of sampling procedures might make it necessary to apply weights to produce accurate statistical results. Describes the criteria for using weights in analysis of a collection. If a weighting formula or coefficient was developed, the formula is provided, its elements are defined, and it is indicated how the formula was applied to the data. f 16 f TEXTBOX weighting f Weighting 3 \N +106 f f f Methods used to clean the data collection, such as consistency checking, wildcode checking, or other. f 17 f TEXT cleaningOperations f Cleaning Operations 3 \N +107 f f f Note element used for any information annotating or clarifying the methodology and processing of the study. 
f 18 f TEXT datasetLevelErrorNotes f Study Level Error Notes 3 \N +108 t f f Percentage of sample members who provided information. f 19 t TEXTBOX responseRate f Response Rate 3 \N +109 f f f Measure of how precisely one can estimate a population value from a given sample. f 20 f TEXT samplingErrorEstimates f Estimates of Sampling Error 3 \N +110 f f f Other issues pertaining to the data appraisal. Describe issues such as response variance, nonresponse rate and testing for bias, interviewer and response bias, confidence levels, question bias, or similar. f 21 f TEXT otherDataAppraisal f Other Forms of Data Appraisal 3 \N +111 f f f General notes about this Dataset. f 22 f NONE socialScienceNotes f Notes 3 \N +112 f f f Type of note. f 23 f TEXT socialScienceNotesType f Type 3 111 +113 f f f Note subject. f 24 f TEXT socialScienceNotesSubject f Subject 3 111 +114 f f f Text for this note. f 25 f TEXTBOX socialScienceNotesText f Text 3 111 +115 t t t The nature or genre of the content of the files in the dataset. f 0 t TEXT astroType f Type 4 \N +116 t t t The observatory or facility where the data was obtained. f 1 t TEXT astroFacility f Facility 4 \N +117 t t t The instrument used to collect the data. f 2 t TEXT astroInstrument f Instrument 4 \N +118 t f t Astronomical Objects represented in the data (Given as SIMBAD recognizable names preferred). f 3 t TEXT astroObject f Object 4 \N +119 t f f The spatial (angular) resolution that is typical of the observations, in decimal degrees. f 4 t TEXT resolution.Spatial f Spatial Resolution 4 \N +120 t f f The spectral resolution that is typical of the observations, given as the ratio λ/Δλ. f 5 t TEXT resolution.Spectral f Spectral Resolution 4 \N +121 f f f The temporal resolution that is typical of the observations, given in seconds. 
f 6 f TEXT resolution.Temporal f Time Resolution 4 \N +122 t t t Conventional bandpass name f 7 t TEXT coverage.Spectral.Bandpass f Bandpass 4 \N +123 t f t The central wavelength of the spectral bandpass, in meters. f 8 t FLOAT coverage.Spectral.CentralWavelength f Central Wavelength (m) Enter a floating-point number. 4 \N +124 f f t The minimum and maximum wavelength of the spectral bandpass. f 9 f NONE coverage.Spectral.Wavelength f Wavelength Range Enter a floating-point number. 4 \N +125 t f f The minimum wavelength of the spectral bandpass, in meters. f 10 t FLOAT coverage.Spectral.MinimumWavelength f Minimum (m) Enter a floating-point number. 4 124 +126 t f f The maximum wavelength of the spectral bandpass, in meters. f 11 t FLOAT coverage.Spectral.MaximumWavelength f Maximum (m) Enter a floating-point number. 4 124 +127 f f t Time period covered by the data. f 12 f NONE coverage.Temporal f Dataset Date Range 4 \N +128 t f f Dataset Start Date f 13 t DATE coverage.Temporal.StartTime f Start YYYY-MM-DD 4 127 +129 t f f Dataset End Date f 14 t DATE coverage.Temporal.StopTime f End YYYY-MM-DD 4 127 +130 f f t The sky coverage of the data object. f 15 f TEXT coverage.Spatial f Sky Coverage 4 \N +131 f f f The (typical) depth coverage, or sensitivity, of the data object in Jy. f 16 f FLOAT coverage.Depth f Depth Coverage Enter a floating-point number. 4 \N +132 f f f The (typical) density of objects, catalog entries, telescope pointings, etc., on the sky, in number per square degree. f 17 f FLOAT coverage.ObjectDensity f Object Density Enter a floating-point number. 4 \N +133 f f f The total number of objects, catalog entries, etc., in the data object. f 18 f INT coverage.ObjectCount f Object Count Enter an integer. 4 \N +134 f f f The fraction of the sky represented in the observations, ranging from 0 to 1. f 19 f FLOAT coverage.SkyFraction f Fraction of Sky Enter a floating-point number. 
4 \N +135 f f f The polarization coverage f 20 f TEXT coverage.Polarization f Polarization 4 \N +136 f f f RedshiftType string C "Redshift"; or "Optical" or "Radio" definitions of Doppler velocity used in the data object. f 21 f TEXT redshiftType f RedshiftType 4 \N +137 f f f The resolution in redshift (unitless) or Doppler velocity (km/s) in the data object. f 22 f FLOAT resolution.Redshift f Redshift Resolution Enter a floating-point number. 4 \N +138 f f t The value of the redshift (unitless) or Doppler velocity (km/s in the data object. f 23 f FLOAT coverage.RedshiftValue f Redshift Value Enter a floating-point number. 4 \N +139 f f f The minimum value of the redshift (unitless) or Doppler velocity (km/s in the data object. f 24 f FLOAT coverage.Redshift.MinimumValue f Minimum Enter a floating-point number. 4 138 +140 f f f The maximum value of the redshift (unitless) or Doppler velocity (km/s in the data object. f 25 f FLOAT coverage.Redshift.MaximumValue f Maximum Enter a floating-point number. 4 138 +141 t t t Design types that are based on the overall experimental design. f 0 t TEXT studyDesignType f Design Type 5 \N +142 t t t Factors used in the Dataset. f 1 t TEXT studyFactorType f Factor Type 5 \N +143 t t t The taxonomic name of the organism used in the Dataset or from which the starting biological material derives. f 2 t TEXT studyAssayOrganism f Organism 5 \N +144 t f t If Other was selected in Organism, list any other organisms that were used in this Dataset. Terms from the NCBI Taxonomy are recommended. f 3 t TEXT studyAssayOtherOrganism f Other Organism 5 \N +145 t t t A term to qualify the endpoint, or what is being measured (e.g. gene expression profiling; protein identification). f 4 t TEXT studyAssayMeasurementType f Measurement Type 5 \N +146 t f t If Other was selected in Measurement Type, list any other measurement types that were used. Terms from NCBO Bioportal are recommended. 
f 5 t TEXT studyAssayOtherMeasurmentType f Other Measurement Type 5 \N +147 t t t A term to identify the technology used to perform the measurement (e.g. DNA microarray; mass spectrometry). f 6 t TEXT studyAssayTechnologyType f Technology Type 5 \N +148 t t t The manufacturer and name of the technology platform used in the assay (e.g. Bruker AVANCE). f 7 t TEXT studyAssayPlatform f Technology Platform 5 \N +149 t t t The name of the cell line from which the source or sample derives. f 8 t TEXT studyAssayCellType f Cell Type 5 \N +150 f f t Indicates the volume, issue and date of a journal, which this Dataset is associated with. f 0 f NONE journalVolumeIssue f Journal 6 \N +151 t f f The journal volume which this Dataset is associated with (e.g., Volume 4). f 1 t TEXT journalVolume f Volume 6 150 +152 t f f The journal issue number which this Dataset is associated with (e.g., Number 2, Autumn). f 2 t TEXT journalIssue f Issue 6 150 +153 t f f The publication date for this journal volume/issue, which this Dataset is associated with (e.g., 1999). f 3 t DATE journalPubDate f Publication Date YYYY or YYYY-MM or YYYY-MM-DD 6 150 +154 t t f Indicates what kind of article this is, for example, a research article, a commentary, a book or product review, a case report, a calendar, etc (based on JATS). f 4 t TEXT journalArticleType f Type of Article 6 \N +\. + + +-- +-- Data for Name: datasetfieldvalue; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetfieldvalue (id, displayorder, value, datasetfield_id) FROM stdin; +1 0 We need to add files to this Dataset. 8 +2 0 Sample Dataset 4 +3 0 Dataverse.org 13 +4 0 Admin, Dataverse 6 +5 0 Admin, Dataverse 11 +6 0 2015-06-08 9 +7 0 dataverse@mailinator.com 12 +8 0 Admin, Dataverse 14 +9 0 Dataverse.org 5 +\. 
+ + +-- +-- Data for Name: datasetlinkingdataverse; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetlinkingdataverse (id, linkcreatetime, dataset_id, linkingdataverse_id) FROM stdin; +\. + + +-- +-- Data for Name: datasetlock; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetlock (id, info, starttime, user_id, dataset_id) FROM stdin; +\. + + +-- +-- Data for Name: datasetversion; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetversion (id, unf, archivenote, archivetime, availabilitystatus, citationrequirements, conditions, confidentialitydeclaration, contactforaccess, createtime, dataaccessplace, deaccessionlink, depositorrequirements, disclaimer, fileaccessrequest, inreview, lastupdatetime, license, minorversionnumber, originalarchive, releasetime, restrictions, sizeofcollection, specialpermissions, studycompletion, termsofaccess, termsofuse, version, versionnote, versionnumber, versionstate, dataset_id) FROM stdin; +1 \N \N \N \N \N \N \N \N 2015-06-08 13:30:09.023 \N \N \N \N f f 2015-06-08 15:40:14.341 CC0 0 \N 2015-06-08 15:40:14.341 \N \N \N \N \N \N 2 \N 1 RELEASED 3 +\. + + +-- +-- Data for Name: datasetversionuser; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetversionuser (id, lastupdatedate, authenticateduser_id, datasetversion_id) FROM stdin; +1 2015-06-08 15:40:14.341 1 1 +\. + + +-- +-- Data for Name: datatable; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datatable (id, casequantity, originalfileformat, originalformatversion, recordspercase, unf, varquantity, datafile_id) FROM stdin; +\. + + +-- +-- Data for Name: datavariable; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datavariable (id, fileendposition, fileorder, filestartposition, format, formatcategory, "interval", label, name, numberofdecimalpoints, orderedfactor, recordsegmentnumber, type, unf, universe, weighted, datatable_id) FROM stdin; +\. 
+ + +-- +-- Data for Name: dataverse; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataverse (id, affiliation, alias, dataversetype, description, facetroot, guestbookroot, metadatablockroot, name, permissionroot, templateroot, themeroot, defaultcontributorrole_id, defaulttemplate_id) FROM stdin; +1 \N root UNCATEGORIZED The root dataverse. t f t Root t f t 6 \N +2 Dataverse.org test-dv RESEARCHERS \N f f f testDV t f t 6 \N +\. + + +-- +-- Data for Name: dataverse_metadatablock; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataverse_metadatablock (dataverse_id, metadatablocks_id) FROM stdin; +1 1 +\. + + +-- +-- Data for Name: dataversecontact; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataversecontact (id, contactemail, displayorder, dataverse_id) FROM stdin; +1 root@mailinator.com 0 1 +2 dataverse@mailinator.com 0 2 +\. + + +-- +-- Data for Name: dataversefacet; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataversefacet (id, displayorder, datasetfieldtype_id, dataverse_id) FROM stdin; +1 3 57 1 +2 2 21 1 +3 0 8 1 +4 1 19 1 +\. + + +-- +-- Data for Name: dataversefeatureddataverse; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataversefeatureddataverse (id, displayorder, dataverse_id, featureddataverse_id) FROM stdin; +\. + + +-- +-- Data for Name: dataversefieldtypeinputlevel; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataversefieldtypeinputlevel (id, include, required, datasetfieldtype_id, dataverse_id) FROM stdin; +\. + + +-- +-- Data for Name: dataverselinkingdataverse; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataverselinkingdataverse (id, linkcreatetime, dataverse_id, linkingdataverse_id) FROM stdin; +\. 
+ + +-- +-- Data for Name: dataverserole; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataverserole (id, alias, description, name, permissionbits, owner_id) FROM stdin; +1 admin A person who has all permissions for dataverses, datasets, and files. Admin 8191 \N +2 fileDownloader A person who can download a file. File Downloader 16 \N +3 fullContributor A person who can add subdataverses and datasets within a dataverse. Dataverse + Dataset Creator 3 \N +4 dvContributor A person who can add subdataverses within a dataverse. Dataverse Creator 1 \N +5 dsContributor A person who can add datasets within a dataverse. Dataset Creator 2 \N +6 editor For datasets, a person who can edit License + Terms, and then submit them for review. Contributor 4184 \N +7 curator For datasets, a person who can edit License + Terms, edit Permissions, and publish datasets. Curator 5471 \N +8 member A person who can view both unpublished dataverses and datasets. Member 12 \N +\. + + +-- +-- Data for Name: dataversesubjects; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataversesubjects (dataverse_id, controlledvocabularyvalue_id) FROM stdin; +2 3 +1 3 +\. + + +-- +-- Data for Name: dataversetheme; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataversetheme (id, backgroundcolor, linkcolor, linkurl, logo, logoalignment, logobackgroundcolor, logoformat, tagline, textcolor, dataverse_id) FROM stdin; +\. + + +-- +-- Data for Name: defaultvalueset; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY defaultvalueset (id, name) FROM stdin; +\. 
+ + +-- +-- Data for Name: dvobject; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dvobject (id, dtype, createdate, indextime, modificationtime, permissionindextime, permissionmodificationtime, publicationdate, creator_id, owner_id, releaseuser_id) FROM stdin; +1 Dataverse 2015-06-08 13:08:22.373 \N 2015-06-08 13:29:18.365 2015-06-08 13:29:18.388 2015-06-08 13:08:22.45 2015-06-08 13:29:18.365 1 \N 1 +4 DataFile 2015-06-08 15:05:00.586 \N 2015-06-08 15:05:00.586 2015-06-08 15:40:14.657 2015-06-08 15:04:25.299 2015-06-08 15:40:14.341 1 3 \N +3 Dataset 2015-06-08 13:30:09.023 2015-06-08 15:40:14.504 2015-06-08 15:40:14.341 2015-06-08 15:40:14.691 2015-06-08 13:30:09.845 2015-06-08 15:40:14.341 1 2 1 +2 Dataverse 2015-06-08 13:29:07.308 2015-06-08 15:40:14.739 2015-06-08 15:40:14.152 2015-06-08 15:40:14.768 2015-06-08 13:29:07.485 2015-06-08 15:40:14.152 1 1 1 +\. + + +-- +-- Data for Name: explicitgroup; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY explicitgroup (id, description, displayname, groupalias, groupaliasinowner, owner_id) FROM stdin; +\. + + +-- +-- Data for Name: explicitgroup_authenticateduser; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY explicitgroup_authenticateduser (explicitgroup_id, containedauthenticatedusers_id) FROM stdin; +\. + + +-- +-- Data for Name: explicitgroup_containedroleassignees; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY explicitgroup_containedroleassignees (explicitgroup_id, containedroleassignees) FROM stdin; +\. + + +-- +-- Data for Name: explicitgroup_explicitgroup; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY explicitgroup_explicitgroup (explicitgroup_id, containedexplicitgroups_id) FROM stdin; +\. + + +-- +-- Data for Name: fileaccessrequests; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY fileaccessrequests (datafile_id, authenticated_user_id) FROM stdin; +\. 
+ + +-- +-- Data for Name: filemetadata; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY filemetadata (id, description, label, restricted, version, datafile_id, datasetversion_id) FROM stdin; +1 This is a description of the file. 2001, Palestinian Proposal at the Taba Conference.kmz f 1 4 1 +\. + + +-- +-- Data for Name: filemetadata_datafilecategory; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY filemetadata_datafilecategory (filecategories_id, filemetadatas_id) FROM stdin; +1 1 +\. + + +-- +-- Data for Name: foreignmetadatafieldmapping; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY foreignmetadatafieldmapping (id, datasetfieldname, foreignfieldxpath, isattribute, foreignmetadataformatmapping_id, parentfieldmapping_id) FROM stdin; +\. + + +-- +-- Data for Name: foreignmetadataformatmapping; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY foreignmetadataformatmapping (id, displayname, name, schemalocation, startelement) FROM stdin; +\. + + +-- +-- Data for Name: guestbook; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY guestbook (id, createtime, emailrequired, enabled, institutionrequired, name, namerequired, positionrequired, dataverse_id) FROM stdin; +\. + + +-- +-- Data for Name: guestbookresponse; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY guestbookresponse (id, downloadtype, email, institution, name, "position", responsetime, sessionid, authenticateduser_id, datafile_id, dataset_id, datasetversion_id, guestbook_id) FROM stdin; +\. + + +-- +-- Data for Name: harvestingdataverseconfig; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY harvestingdataverseconfig (id, archivedescription, archiveurl, harveststyle, harvesttype, harvestingset, harvestingurl, dataverse_id) FROM stdin; +\. 
+ + +-- +-- Data for Name: ingestreport; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY ingestreport (id, endtime, report, starttime, status, type, datafile_id) FROM stdin; +\. + + +-- +-- Data for Name: ingestrequest; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY ingestrequest (id, controlcard, labelsfile, textencoding, datafile_id) FROM stdin; +\. + + +-- +-- Data for Name: ipv4range; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY ipv4range (id, bottomaslong, topaslong, owner_id) FROM stdin; +\. + + +-- +-- Data for Name: ipv6range; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY ipv6range (id, bottoma, bottomb, bottomc, bottomd, topa, topb, topc, topd, owner_id) FROM stdin; +\. + + +-- +-- Data for Name: maplayermetadata; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY maplayermetadata (id, embedmaplink, layerlink, layername, mapimagelink, worldmapusername, dataset_id, datafile_id) FROM stdin; +\. + + +-- +-- Data for Name: metadatablock; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY metadatablock (id, displayname, name, owner_id) FROM stdin; +1 Citation Metadata citation \N +2 Geospatial Metadata geospatial \N +3 Social Science and Humanities Metadata socialscience \N +4 Astronomy and Astrophysics Metadata astrophysics \N +5 Life Sciences Metadata biomedical \N +6 Journal Metadata journal \N +\. + + +-- +-- Data for Name: passwordresetdata; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY passwordresetdata (id, created, expires, reason, token, builtinuser_id) FROM stdin; +\. + + +-- +-- Data for Name: persistedglobalgroup; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY persistedglobalgroup (id, dtype, description, displayname, persistedgroupalias) FROM stdin; +\. 
+ + +-- +-- Data for Name: roleassignment; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY roleassignment (id, assigneeidentifier, definitionpoint_id, role_id) FROM stdin; +1 @dataverseAdmin 1 1 +2 @dataverseAdmin 2 1 +3 @dataverseAdmin 3 6 +\. + + +-- +-- Data for Name: savedsearch; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY savedsearch (id, query, creator_id, definitionpoint_id) FROM stdin; +\. + + +-- +-- Data for Name: savedsearchfilterquery; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY savedsearchfilterquery (id, filterquery, savedsearch_id) FROM stdin; +\. + + +-- +-- Data for Name: sequence; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY sequence (seq_name, seq_count) FROM stdin; +SEQ_GEN 0 +\. + + +-- +-- Data for Name: setting; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY setting (name, content) FROM stdin; +:AllowSignUp yes +:SignUpUrl /dataverseuser.xhtml?editMode=CREATE +:Protocol doi +:Authority 10.5072/FK2 +:DoiProvider EZID +:DoiSeparator / +BuiltinUsers.KEY burrito +:BlockedApiKey empanada +:BlockedApiPolicy localhost-only +\. + + +-- +-- Data for Name: shibgroup; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY shibgroup (id, attribute, name, pattern) FROM stdin; +\. + + +-- +-- Data for Name: summarystatistic; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY summarystatistic (id, type, value, datavariable_id) FROM stdin; +\. + + +-- +-- Data for Name: template; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY template (id, createtime, name, usagecount, dataverse_id) FROM stdin; +\. + + +-- +-- Data for Name: usernotification; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY usernotification (id, emailed, objectid, readnotification, senddate, type, user_id) FROM stdin; +1 f 2 f 2015-06-08 13:29:07.308 0 1 +2 f 1 f 2015-06-08 13:30:09.023 1 1 +\. 
+ + +-- +-- Data for Name: variablecategory; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY variablecategory (id, catorder, frequency, label, missing, value, datavariable_id) FROM stdin; +\. + + +-- +-- Data for Name: variablerange; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY variablerange (id, beginvalue, beginvaluetype, endvalue, endvaluetype, datavariable_id) FROM stdin; +\. + + +-- +-- Data for Name: variablerangeitem; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY variablerangeitem (id, value, datavariable_id) FROM stdin; +\. + + +-- +-- Data for Name: worldmapauth_token; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY worldmapauth_token (id, created, hasexpired, lastrefreshtime, modified, token, application_id, datafile_id, dataverseuser_id) FROM stdin; +\. + + +-- +-- Data for Name: worldmapauth_tokentype; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY worldmapauth_tokentype (id, contactemail, created, hostname, ipaddress, mapitlink, md5, modified, name, timelimitminutes, timelimitseconds) FROM stdin; +\. 
+ + +-- +-- Name: actionlogrecord_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY actionlogrecord + ADD CONSTRAINT actionlogrecord_pkey PRIMARY KEY (id); + + +-- +-- Name: apitoken_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY apitoken + ADD CONSTRAINT apitoken_pkey PRIMARY KEY (id); + + +-- +-- Name: apitoken_tokenstring_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY apitoken + ADD CONSTRAINT apitoken_tokenstring_key UNIQUE (tokenstring); + + +-- +-- Name: authenticateduser_email_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY authenticateduser + ADD CONSTRAINT authenticateduser_email_key UNIQUE (email); + + +-- +-- Name: authenticateduser_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY authenticateduser + ADD CONSTRAINT authenticateduser_pkey PRIMARY KEY (id); + + +-- +-- Name: authenticateduser_useridentifier_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY authenticateduser + ADD CONSTRAINT authenticateduser_useridentifier_key UNIQUE (useridentifier); + + +-- +-- Name: authenticateduserlookup_authenticateduser_id_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY authenticateduserlookup + ADD CONSTRAINT authenticateduserlookup_authenticateduser_id_key UNIQUE (authenticateduser_id); + + +-- +-- Name: authenticateduserlookup_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY authenticateduserlookup + ADD CONSTRAINT authenticateduserlookup_pkey PRIMARY KEY (id); + + +-- +-- Name: authenticationproviderrow_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY authenticationproviderrow + ADD CONSTRAINT authenticationproviderrow_pkey 
PRIMARY KEY (id); + + +-- +-- Name: builtinuser_email_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY builtinuser + ADD CONSTRAINT builtinuser_email_key UNIQUE (email); + + +-- +-- Name: builtinuser_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY builtinuser + ADD CONSTRAINT builtinuser_pkey PRIMARY KEY (id); + + +-- +-- Name: builtinuser_username_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY builtinuser + ADD CONSTRAINT builtinuser_username_key UNIQUE (username); + + +-- +-- Name: controlledvocabalternate_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY controlledvocabalternate + ADD CONSTRAINT controlledvocabalternate_pkey PRIMARY KEY (id); + + +-- +-- Name: controlledvocabularyvalue_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY controlledvocabularyvalue + ADD CONSTRAINT controlledvocabularyvalue_pkey PRIMARY KEY (id); + + +-- +-- Name: customfieldmap_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY customfieldmap + ADD CONSTRAINT customfieldmap_pkey PRIMARY KEY (id); + + +-- +-- Name: customquestion_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY customquestion + ADD CONSTRAINT customquestion_pkey PRIMARY KEY (id); + + +-- +-- Name: customquestionresponse_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY customquestionresponse + ADD CONSTRAINT customquestionresponse_pkey PRIMARY KEY (id); + + +-- +-- Name: customquestionvalue_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY customquestionvalue + ADD CONSTRAINT customquestionvalue_pkey PRIMARY KEY (id); + + +-- +-- Name: datafile_pkey; Type: CONSTRAINT; Schema: public; 
Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datafile + ADD CONSTRAINT datafile_pkey PRIMARY KEY (id); + + +-- +-- Name: datafilecategory_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datafilecategory + ADD CONSTRAINT datafilecategory_pkey PRIMARY KEY (id); + + +-- +-- Name: datafiletag_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datafiletag + ADD CONSTRAINT datafiletag_pkey PRIMARY KEY (id); + + +-- +-- Name: dataset_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataset + ADD CONSTRAINT dataset_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetfield_controlledvocabularyvalue_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetfield_controlledvocabularyvalue + ADD CONSTRAINT datasetfield_controlledvocabularyvalue_pkey PRIMARY KEY (datasetfield_id, controlledvocabularyvalues_id); + + +-- +-- Name: datasetfield_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetfield + ADD CONSTRAINT datasetfield_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetfieldcompoundvalue_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetfieldcompoundvalue + ADD CONSTRAINT datasetfieldcompoundvalue_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetfielddefaultvalue_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetfielddefaultvalue + ADD CONSTRAINT datasetfielddefaultvalue_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetfieldtype_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetfieldtype + ADD CONSTRAINT datasetfieldtype_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetfieldvalue_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + 
+ALTER TABLE ONLY datasetfieldvalue + ADD CONSTRAINT datasetfieldvalue_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetlinkingdataverse_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetlinkingdataverse + ADD CONSTRAINT datasetlinkingdataverse_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetlock_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetlock + ADD CONSTRAINT datasetlock_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetversion_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetversion + ADD CONSTRAINT datasetversion_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetversionuser_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetversionuser + ADD CONSTRAINT datasetversionuser_pkey PRIMARY KEY (id); + + +-- +-- Name: datatable_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datatable + ADD CONSTRAINT datatable_pkey PRIMARY KEY (id); + + +-- +-- Name: datavariable_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datavariable + ADD CONSTRAINT datavariable_pkey PRIMARY KEY (id); + + +-- +-- Name: dataverse_alias_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataverse + ADD CONSTRAINT dataverse_alias_key UNIQUE (alias); + + +-- +-- Name: dataverse_metadatablock_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataverse_metadatablock + ADD CONSTRAINT dataverse_metadatablock_pkey PRIMARY KEY (dataverse_id, metadatablocks_id); + + +-- +-- Name: dataverse_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataverse + ADD CONSTRAINT dataverse_pkey PRIMARY KEY (id); + + +-- +-- Name: dataversecontact_pkey; Type: 
CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataversecontact + ADD CONSTRAINT dataversecontact_pkey PRIMARY KEY (id); + + +-- +-- Name: dataversefacet_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataversefacet + ADD CONSTRAINT dataversefacet_pkey PRIMARY KEY (id); + + +-- +-- Name: dataversefeatureddataverse_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataversefeatureddataverse + ADD CONSTRAINT dataversefeatureddataverse_pkey PRIMARY KEY (id); + + +-- +-- Name: dataversefieldtypeinputlevel_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataversefieldtypeinputlevel + ADD CONSTRAINT dataversefieldtypeinputlevel_pkey PRIMARY KEY (id); + + +-- +-- Name: dataverselinkingdataverse_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataverselinkingdataverse + ADD CONSTRAINT dataverselinkingdataverse_pkey PRIMARY KEY (id); + + +-- +-- Name: dataverserole_alias_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataverserole + ADD CONSTRAINT dataverserole_alias_key UNIQUE (alias); + + +-- +-- Name: dataverserole_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataverserole + ADD CONSTRAINT dataverserole_pkey PRIMARY KEY (id); + + +-- +-- Name: dataversesubjects_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataversesubjects + ADD CONSTRAINT dataversesubjects_pkey PRIMARY KEY (dataverse_id, controlledvocabularyvalue_id); + + +-- +-- Name: dataversetheme_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataversetheme + ADD CONSTRAINT dataversetheme_pkey PRIMARY KEY (id); + + +-- +-- Name: defaultvalueset_pkey; Type: CONSTRAINT; Schema: 
public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY defaultvalueset + ADD CONSTRAINT defaultvalueset_pkey PRIMARY KEY (id); + + +-- +-- Name: dvobject_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dvobject + ADD CONSTRAINT dvobject_pkey PRIMARY KEY (id); + + +-- +-- Name: explicitgroup_authenticateduser_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY explicitgroup_authenticateduser + ADD CONSTRAINT explicitgroup_authenticateduser_pkey PRIMARY KEY (explicitgroup_id, containedauthenticatedusers_id); + + +-- +-- Name: explicitgroup_explicitgroup_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY explicitgroup_explicitgroup + ADD CONSTRAINT explicitgroup_explicitgroup_pkey PRIMARY KEY (explicitgroup_id, containedexplicitgroups_id); + + +-- +-- Name: explicitgroup_groupalias_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY explicitgroup + ADD CONSTRAINT explicitgroup_groupalias_key UNIQUE (groupalias); + + +-- +-- Name: explicitgroup_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY explicitgroup + ADD CONSTRAINT explicitgroup_pkey PRIMARY KEY (id); + + +-- +-- Name: fileaccessrequests_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY fileaccessrequests + ADD CONSTRAINT fileaccessrequests_pkey PRIMARY KEY (datafile_id, authenticated_user_id); + + +-- +-- Name: filemetadata_datafilecategory_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY filemetadata_datafilecategory + ADD CONSTRAINT filemetadata_datafilecategory_pkey PRIMARY KEY (filecategories_id, filemetadatas_id); + + +-- +-- Name: filemetadata_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY filemetadata + ADD 
CONSTRAINT filemetadata_pkey PRIMARY KEY (id); + + +-- +-- Name: foreignmetadatafieldmapping_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY foreignmetadatafieldmapping + ADD CONSTRAINT foreignmetadatafieldmapping_pkey PRIMARY KEY (id); + + +-- +-- Name: foreignmetadataformatmapping_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY foreignmetadataformatmapping + ADD CONSTRAINT foreignmetadataformatmapping_pkey PRIMARY KEY (id); + + +-- +-- Name: guestbook_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY guestbook + ADD CONSTRAINT guestbook_pkey PRIMARY KEY (id); + + +-- +-- Name: guestbookresponse_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY guestbookresponse + ADD CONSTRAINT guestbookresponse_pkey PRIMARY KEY (id); + + +-- +-- Name: harvestingdataverseconfig_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY harvestingdataverseconfig + ADD CONSTRAINT harvestingdataverseconfig_pkey PRIMARY KEY (id); + + +-- +-- Name: ingestreport_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY ingestreport + ADD CONSTRAINT ingestreport_pkey PRIMARY KEY (id); + + +-- +-- Name: ingestrequest_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY ingestrequest + ADD CONSTRAINT ingestrequest_pkey PRIMARY KEY (id); + + +-- +-- Name: ipv4range_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY ipv4range + ADD CONSTRAINT ipv4range_pkey PRIMARY KEY (id); + + +-- +-- Name: ipv6range_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY ipv6range + ADD CONSTRAINT ipv6range_pkey PRIMARY KEY (id); + + +-- +-- Name: maplayermetadata_datafile_id_key; Type: CONSTRAINT; 
Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY maplayermetadata + ADD CONSTRAINT maplayermetadata_datafile_id_key UNIQUE (datafile_id); + + +-- +-- Name: maplayermetadata_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY maplayermetadata + ADD CONSTRAINT maplayermetadata_pkey PRIMARY KEY (id); + + +-- +-- Name: metadatablock_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY metadatablock + ADD CONSTRAINT metadatablock_pkey PRIMARY KEY (id); + + +-- +-- Name: passwordresetdata_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY passwordresetdata + ADD CONSTRAINT passwordresetdata_pkey PRIMARY KEY (id); + + +-- +-- Name: persistedglobalgroup_persistedgroupalias_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY persistedglobalgroup + ADD CONSTRAINT persistedglobalgroup_persistedgroupalias_key UNIQUE (persistedgroupalias); + + +-- +-- Name: persistedglobalgroup_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY persistedglobalgroup + ADD CONSTRAINT persistedglobalgroup_pkey PRIMARY KEY (id); + + +-- +-- Name: roleassignment_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY roleassignment + ADD CONSTRAINT roleassignment_pkey PRIMARY KEY (id); + + +-- +-- Name: savedsearch_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY savedsearch + ADD CONSTRAINT savedsearch_pkey PRIMARY KEY (id); + + +-- +-- Name: savedsearchfilterquery_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY savedsearchfilterquery + ADD CONSTRAINT savedsearchfilterquery_pkey PRIMARY KEY (id); + + +-- +-- Name: sequence_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER 
TABLE ONLY sequence + ADD CONSTRAINT sequence_pkey PRIMARY KEY (seq_name); + + +-- +-- Name: setting_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY setting + ADD CONSTRAINT setting_pkey PRIMARY KEY (name); + + +-- +-- Name: shibgroup_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY shibgroup + ADD CONSTRAINT shibgroup_pkey PRIMARY KEY (id); + + +-- +-- Name: summarystatistic_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY summarystatistic + ADD CONSTRAINT summarystatistic_pkey PRIMARY KEY (id); + + +-- +-- Name: template_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY template + ADD CONSTRAINT template_pkey PRIMARY KEY (id); + + +-- +-- Name: unq_authenticateduserlookup_0; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY authenticateduserlookup + ADD CONSTRAINT unq_authenticateduserlookup_0 UNIQUE (persistentuserid, authenticationproviderid); + + +-- +-- Name: unq_dataset_0; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataset + ADD CONSTRAINT unq_dataset_0 UNIQUE (authority, protocol, identifier, doiseparator); + + +-- +-- Name: unq_dataversefieldtypeinputlevel_0; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataversefieldtypeinputlevel + ADD CONSTRAINT unq_dataversefieldtypeinputlevel_0 UNIQUE (dataverse_id, datasetfieldtype_id); + + +-- +-- Name: unq_foreignmetadatafieldmapping_0; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY foreignmetadatafieldmapping + ADD CONSTRAINT unq_foreignmetadatafieldmapping_0 UNIQUE (foreignmetadataformatmapping_id, foreignfieldxpath); + + +-- +-- Name: unq_roleassignment_0; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER 
TABLE ONLY roleassignment + ADD CONSTRAINT unq_roleassignment_0 UNIQUE (assigneeidentifier, role_id, definitionpoint_id); + + +-- +-- Name: usernotification_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY usernotification + ADD CONSTRAINT usernotification_pkey PRIMARY KEY (id); + + +-- +-- Name: variablecategory_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY variablecategory + ADD CONSTRAINT variablecategory_pkey PRIMARY KEY (id); + + +-- +-- Name: variablerange_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY variablerange + ADD CONSTRAINT variablerange_pkey PRIMARY KEY (id); + + +-- +-- Name: variablerangeitem_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY variablerangeitem + ADD CONSTRAINT variablerangeitem_pkey PRIMARY KEY (id); + + +-- +-- Name: worldmapauth_token_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY worldmapauth_token + ADD CONSTRAINT worldmapauth_token_pkey PRIMARY KEY (id); + + +-- +-- Name: worldmapauth_tokentype_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY worldmapauth_tokentype + ADD CONSTRAINT worldmapauth_tokentype_pkey PRIMARY KEY (id); + + +-- +-- Name: application_name; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE UNIQUE INDEX application_name ON worldmapauth_tokentype USING btree (name); + + +-- +-- Name: index_actionlogrecord_actiontype; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_actionlogrecord_actiontype ON actionlogrecord USING btree (actiontype); + + +-- +-- Name: index_actionlogrecord_starttime; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_actionlogrecord_starttime ON actionlogrecord USING btree (starttime); + + 
+-- +-- Name: index_actionlogrecord_useridentifier; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_actionlogrecord_useridentifier ON actionlogrecord USING btree (useridentifier); + + +-- +-- Name: index_apitoken_authenticateduser_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_apitoken_authenticateduser_id ON apitoken USING btree (authenticateduser_id); + + +-- +-- Name: index_authenticationproviderrow_enabled; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_authenticationproviderrow_enabled ON authenticationproviderrow USING btree (enabled); + + +-- +-- Name: index_builtinuser_lastname; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_builtinuser_lastname ON builtinuser USING btree (lastname); + + +-- +-- Name: index_controlledvocabalternate_controlledvocabularyvalue_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_controlledvocabalternate_controlledvocabularyvalue_id ON controlledvocabalternate USING btree (controlledvocabularyvalue_id); + + +-- +-- Name: index_controlledvocabalternate_datasetfieldtype_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_controlledvocabalternate_datasetfieldtype_id ON controlledvocabalternate USING btree (datasetfieldtype_id); + + +-- +-- Name: index_controlledvocabularyvalue_datasetfieldtype_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_controlledvocabularyvalue_datasetfieldtype_id ON controlledvocabularyvalue USING btree (datasetfieldtype_id); + + +-- +-- Name: index_controlledvocabularyvalue_displayorder; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_controlledvocabularyvalue_displayorder ON controlledvocabularyvalue USING btree (displayorder); + + +-- +-- Name: 
index_customfieldmap_sourcedatasetfield; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_customfieldmap_sourcedatasetfield ON customfieldmap USING btree (sourcedatasetfield); + + +-- +-- Name: index_customfieldmap_sourcetemplate; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_customfieldmap_sourcetemplate ON customfieldmap USING btree (sourcetemplate); + + +-- +-- Name: index_customquestion_guestbook_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_customquestion_guestbook_id ON customquestion USING btree (guestbook_id); + + +-- +-- Name: index_customquestionresponse_guestbookresponse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_customquestionresponse_guestbookresponse_id ON customquestionresponse USING btree (guestbookresponse_id); + + +-- +-- Name: index_datafile_contenttype; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datafile_contenttype ON datafile USING btree (contenttype); + + +-- +-- Name: index_datafile_ingeststatus; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datafile_ingeststatus ON datafile USING btree (ingeststatus); + + +-- +-- Name: index_datafile_md5; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datafile_md5 ON datafile USING btree (md5); + + +-- +-- Name: index_datafile_restricted; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datafile_restricted ON datafile USING btree (restricted); + + +-- +-- Name: index_datafilecategory_dataset_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datafilecategory_dataset_id ON datafilecategory USING btree (dataset_id); + + +-- +-- Name: index_datafiletag_datafile_id; Type: INDEX; Schema: public; Owner: dataverse_app; 
Tablespace: +-- + +CREATE INDEX index_datafiletag_datafile_id ON datafiletag USING btree (datafile_id); + + +-- +-- Name: index_dataset_guestbook_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataset_guestbook_id ON dataset USING btree (guestbook_id); + + +-- +-- Name: index_dataset_thumbnailfile_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataset_thumbnailfile_id ON dataset USING btree (thumbnailfile_id); + + +-- +-- Name: index_datasetfield_controlledvocabularyvalue_controlledvocabula; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfield_controlledvocabularyvalue_controlledvocabula ON datasetfield_controlledvocabularyvalue USING btree (controlledvocabularyvalues_id); + + +-- +-- Name: index_datasetfield_controlledvocabularyvalue_datasetfield_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfield_controlledvocabularyvalue_datasetfield_id ON datasetfield_controlledvocabularyvalue USING btree (datasetfield_id); + + +-- +-- Name: index_datasetfield_datasetfieldtype_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfield_datasetfieldtype_id ON datasetfield USING btree (datasetfieldtype_id); + + +-- +-- Name: index_datasetfield_datasetversion_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfield_datasetversion_id ON datasetfield USING btree (datasetversion_id); + + +-- +-- Name: index_datasetfield_parentdatasetfieldcompoundvalue_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfield_parentdatasetfieldcompoundvalue_id ON datasetfield USING btree (parentdatasetfieldcompoundvalue_id); + + +-- +-- Name: index_datasetfield_template_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX 
index_datasetfield_template_id ON datasetfield USING btree (template_id); + + +-- +-- Name: index_datasetfieldcompoundvalue_parentdatasetfield_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfieldcompoundvalue_parentdatasetfield_id ON datasetfieldcompoundvalue USING btree (parentdatasetfield_id); + + +-- +-- Name: index_datasetfielddefaultvalue_datasetfield_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfielddefaultvalue_datasetfield_id ON datasetfielddefaultvalue USING btree (datasetfield_id); + + +-- +-- Name: index_datasetfielddefaultvalue_defaultvalueset_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfielddefaultvalue_defaultvalueset_id ON datasetfielddefaultvalue USING btree (defaultvalueset_id); + + +-- +-- Name: index_datasetfielddefaultvalue_displayorder; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfielddefaultvalue_displayorder ON datasetfielddefaultvalue USING btree (displayorder); + + +-- +-- Name: index_datasetfielddefaultvalue_parentdatasetfielddefaultvalue_i; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfielddefaultvalue_parentdatasetfielddefaultvalue_i ON datasetfielddefaultvalue USING btree (parentdatasetfielddefaultvalue_id); + + +-- +-- Name: index_datasetfieldtype_metadatablock_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfieldtype_metadatablock_id ON datasetfieldtype USING btree (metadatablock_id); + + +-- +-- Name: index_datasetfieldtype_parentdatasetfieldtype_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfieldtype_parentdatasetfieldtype_id ON datasetfieldtype USING btree (parentdatasetfieldtype_id); + + +-- +-- Name: index_datasetfieldvalue_datasetfield_id; Type: INDEX; Schema: 
public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfieldvalue_datasetfield_id ON datasetfieldvalue USING btree (datasetfield_id); + + +-- +-- Name: index_datasetlinkingdataverse_dataset_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetlinkingdataverse_dataset_id ON datasetlinkingdataverse USING btree (dataset_id); + + +-- +-- Name: index_datasetlinkingdataverse_linkingdataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetlinkingdataverse_linkingdataverse_id ON datasetlinkingdataverse USING btree (linkingdataverse_id); + + +-- +-- Name: index_datasetlock_dataset_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetlock_dataset_id ON datasetlock USING btree (dataset_id); + + +-- +-- Name: index_datasetlock_user_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetlock_user_id ON datasetlock USING btree (user_id); + + +-- +-- Name: index_datasetversion_dataset_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetversion_dataset_id ON datasetversion USING btree (dataset_id); + + +-- +-- Name: index_datasetversionuser_authenticateduser_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetversionuser_authenticateduser_id ON datasetversionuser USING btree (authenticateduser_id); + + +-- +-- Name: index_datasetversionuser_datasetversion_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetversionuser_datasetversion_id ON datasetversionuser USING btree (datasetversion_id); + + +-- +-- Name: index_datatable_datafile_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datatable_datafile_id ON datatable USING btree (datafile_id); + + +-- +-- Name: 
index_datavariable_datatable_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datavariable_datatable_id ON datavariable USING btree (datatable_id); + + +-- +-- Name: index_dataverse_affiliation; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_affiliation ON dataverse USING btree (affiliation); + + +-- +-- Name: index_dataverse_alias; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_alias ON dataverse USING btree (alias); + + +-- +-- Name: index_dataverse_dataversetype; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_dataversetype ON dataverse USING btree (dataversetype); + + +-- +-- Name: index_dataverse_defaultcontributorrole_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_defaultcontributorrole_id ON dataverse USING btree (defaultcontributorrole_id); + + +-- +-- Name: index_dataverse_defaulttemplate_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_defaulttemplate_id ON dataverse USING btree (defaulttemplate_id); + + +-- +-- Name: index_dataverse_facetroot; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_facetroot ON dataverse USING btree (facetroot); + + +-- +-- Name: index_dataverse_guestbookroot; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_guestbookroot ON dataverse USING btree (guestbookroot); + + +-- +-- Name: index_dataverse_metadatablockroot; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_metadatablockroot ON dataverse USING btree (metadatablockroot); + + +-- +-- Name: index_dataverse_permissionroot; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX 
index_dataverse_permissionroot ON dataverse USING btree (permissionroot); + + +-- +-- Name: index_dataverse_templateroot; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_templateroot ON dataverse USING btree (templateroot); + + +-- +-- Name: index_dataverse_themeroot; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_themeroot ON dataverse USING btree (themeroot); + + +-- +-- Name: index_dataversecontact_contactemail; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversecontact_contactemail ON dataversecontact USING btree (contactemail); + + +-- +-- Name: index_dataversecontact_dataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversecontact_dataverse_id ON dataversecontact USING btree (dataverse_id); + + +-- +-- Name: index_dataversecontact_displayorder; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversecontact_displayorder ON dataversecontact USING btree (displayorder); + + +-- +-- Name: index_dataversefacet_datasetfieldtype_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefacet_datasetfieldtype_id ON dataversefacet USING btree (datasetfieldtype_id); + + +-- +-- Name: index_dataversefacet_dataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefacet_dataverse_id ON dataversefacet USING btree (dataverse_id); + + +-- +-- Name: index_dataversefacet_displayorder; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefacet_displayorder ON dataversefacet USING btree (displayorder); + + +-- +-- Name: index_dataversefeatureddataverse_dataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefeatureddataverse_dataverse_id ON 
dataversefeatureddataverse USING btree (dataverse_id); + + +-- +-- Name: index_dataversefeatureddataverse_displayorder; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefeatureddataverse_displayorder ON dataversefeatureddataverse USING btree (displayorder); + + +-- +-- Name: index_dataversefeatureddataverse_featureddataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefeatureddataverse_featureddataverse_id ON dataversefeatureddataverse USING btree (featureddataverse_id); + + +-- +-- Name: index_dataversefieldtypeinputlevel_datasetfieldtype_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefieldtypeinputlevel_datasetfieldtype_id ON dataversefieldtypeinputlevel USING btree (datasetfieldtype_id); + + +-- +-- Name: index_dataversefieldtypeinputlevel_dataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefieldtypeinputlevel_dataverse_id ON dataversefieldtypeinputlevel USING btree (dataverse_id); + + +-- +-- Name: index_dataversefieldtypeinputlevel_required; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefieldtypeinputlevel_required ON dataversefieldtypeinputlevel USING btree (required); + + +-- +-- Name: index_dataverselinkingdataverse_dataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverselinkingdataverse_dataverse_id ON dataverselinkingdataverse USING btree (dataverse_id); + + +-- +-- Name: index_dataverselinkingdataverse_linkingdataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverselinkingdataverse_linkingdataverse_id ON dataverselinkingdataverse USING btree (linkingdataverse_id); + + +-- +-- Name: index_dataverserole_alias; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + 
+CREATE INDEX index_dataverserole_alias ON dataverserole USING btree (alias); + + +-- +-- Name: index_dataverserole_name; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverserole_name ON dataverserole USING btree (name); + + +-- +-- Name: index_dataverserole_owner_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverserole_owner_id ON dataverserole USING btree (owner_id); + + +-- +-- Name: index_dataversetheme_dataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversetheme_dataverse_id ON dataversetheme USING btree (dataverse_id); + + +-- +-- Name: index_dvobject_creator_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dvobject_creator_id ON dvobject USING btree (creator_id); + + +-- +-- Name: index_dvobject_dtype; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dvobject_dtype ON dvobject USING btree (dtype); + + +-- +-- Name: index_dvobject_owner_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dvobject_owner_id ON dvobject USING btree (owner_id); + + +-- +-- Name: index_dvobject_releaseuser_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dvobject_releaseuser_id ON dvobject USING btree (releaseuser_id); + + +-- +-- Name: index_explicitgroup_groupaliasinowner; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_explicitgroup_groupaliasinowner ON explicitgroup USING btree (groupaliasinowner); + + +-- +-- Name: index_explicitgroup_owner_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_explicitgroup_owner_id ON explicitgroup USING btree (owner_id); + + +-- +-- Name: index_filemetadata_datafile_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE 
INDEX index_filemetadata_datafile_id ON filemetadata USING btree (datafile_id); + + +-- +-- Name: index_filemetadata_datafilecategory_filecategories_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_filemetadata_datafilecategory_filecategories_id ON filemetadata_datafilecategory USING btree (filecategories_id); + + +-- +-- Name: index_filemetadata_datafilecategory_filemetadatas_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_filemetadata_datafilecategory_filemetadatas_id ON filemetadata_datafilecategory USING btree (filemetadatas_id); + + +-- +-- Name: index_filemetadata_datasetversion_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_filemetadata_datasetversion_id ON filemetadata USING btree (datasetversion_id); + + +-- +-- Name: index_foreignmetadatafieldmapping_foreignfieldxpath; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_foreignmetadatafieldmapping_foreignfieldxpath ON foreignmetadatafieldmapping USING btree (foreignfieldxpath); + + +-- +-- Name: index_foreignmetadatafieldmapping_foreignmetadataformatmapping_; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_foreignmetadatafieldmapping_foreignmetadataformatmapping_ ON foreignmetadatafieldmapping USING btree (foreignmetadataformatmapping_id); + + +-- +-- Name: index_foreignmetadatafieldmapping_parentfieldmapping_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_foreignmetadatafieldmapping_parentfieldmapping_id ON foreignmetadatafieldmapping USING btree (parentfieldmapping_id); + + +-- +-- Name: index_foreignmetadataformatmapping_name; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_foreignmetadataformatmapping_name ON foreignmetadataformatmapping USING btree (name); + + +-- +-- Name: 
index_guestbookresponse_datafile_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_guestbookresponse_datafile_id ON guestbookresponse USING btree (datafile_id); + + +-- +-- Name: index_guestbookresponse_dataset_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_guestbookresponse_dataset_id ON guestbookresponse USING btree (dataset_id); + + +-- +-- Name: index_guestbookresponse_guestbook_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_guestbookresponse_guestbook_id ON guestbookresponse USING btree (guestbook_id); + + +-- +-- Name: index_harvestingdataverseconfig_dataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_harvestingdataverseconfig_dataverse_id ON harvestingdataverseconfig USING btree (dataverse_id); + + +-- +-- Name: index_harvestingdataverseconfig_harvestingurl; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_harvestingdataverseconfig_harvestingurl ON harvestingdataverseconfig USING btree (harvestingurl); + + +-- +-- Name: index_harvestingdataverseconfig_harveststyle; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_harvestingdataverseconfig_harveststyle ON harvestingdataverseconfig USING btree (harveststyle); + + +-- +-- Name: index_harvestingdataverseconfig_harvesttype; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_harvestingdataverseconfig_harvesttype ON harvestingdataverseconfig USING btree (harvesttype); + + +-- +-- Name: index_ingestreport_datafile_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_ingestreport_datafile_id ON ingestreport USING btree (datafile_id); + + +-- +-- Name: index_ingestrequest_datafile_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX 
index_ingestrequest_datafile_id ON ingestrequest USING btree (datafile_id); + + +-- +-- Name: index_ipv4range_owner_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_ipv4range_owner_id ON ipv4range USING btree (owner_id); + + +-- +-- Name: index_ipv6range_owner_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_ipv6range_owner_id ON ipv6range USING btree (owner_id); + + +-- +-- Name: index_maplayermetadata_dataset_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_maplayermetadata_dataset_id ON maplayermetadata USING btree (dataset_id); + + +-- +-- Name: index_metadatablock_name; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_metadatablock_name ON metadatablock USING btree (name); + + +-- +-- Name: index_metadatablock_owner_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_metadatablock_owner_id ON metadatablock USING btree (owner_id); + + +-- +-- Name: index_passwordresetdata_builtinuser_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_passwordresetdata_builtinuser_id ON passwordresetdata USING btree (builtinuser_id); + + +-- +-- Name: index_passwordresetdata_token; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_passwordresetdata_token ON passwordresetdata USING btree (token); + + +-- +-- Name: index_persistedglobalgroup_dtype; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_persistedglobalgroup_dtype ON persistedglobalgroup USING btree (dtype); + + +-- +-- Name: index_roleassignment_assigneeidentifier; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_roleassignment_assigneeidentifier ON roleassignment USING btree (assigneeidentifier); + + +-- +-- Name: 
index_roleassignment_definitionpoint_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_roleassignment_definitionpoint_id ON roleassignment USING btree (definitionpoint_id); + + +-- +-- Name: index_roleassignment_role_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_roleassignment_role_id ON roleassignment USING btree (role_id); + + +-- +-- Name: index_savedsearch_creator_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_savedsearch_creator_id ON savedsearch USING btree (creator_id); + + +-- +-- Name: index_savedsearch_definitionpoint_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_savedsearch_definitionpoint_id ON savedsearch USING btree (definitionpoint_id); + + +-- +-- Name: index_savedsearchfilterquery_savedsearch_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_savedsearchfilterquery_savedsearch_id ON savedsearchfilterquery USING btree (savedsearch_id); + + +-- +-- Name: index_summarystatistic_datavariable_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_summarystatistic_datavariable_id ON summarystatistic USING btree (datavariable_id); + + +-- +-- Name: index_template_dataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_template_dataverse_id ON template USING btree (dataverse_id); + + +-- +-- Name: index_usernotification_user_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_usernotification_user_id ON usernotification USING btree (user_id); + + +-- +-- Name: index_variablecategory_datavariable_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_variablecategory_datavariable_id ON variablecategory USING btree (datavariable_id); + + +-- +-- Name: 
index_variablerange_datavariable_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_variablerange_datavariable_id ON variablerange USING btree (datavariable_id); + + +-- +-- Name: index_variablerangeitem_datavariable_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_variablerangeitem_datavariable_id ON variablerangeitem USING btree (datavariable_id); + + +-- +-- Name: index_worldmapauth_token_application_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_worldmapauth_token_application_id ON worldmapauth_token USING btree (application_id); + + +-- +-- Name: index_worldmapauth_token_datafile_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_worldmapauth_token_datafile_id ON worldmapauth_token USING btree (datafile_id); + + +-- +-- Name: index_worldmapauth_token_dataverseuser_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_worldmapauth_token_dataverseuser_id ON worldmapauth_token USING btree (dataverseuser_id); + + +-- +-- Name: token_value; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE UNIQUE INDEX token_value ON worldmapauth_token USING btree (token); + + +-- +-- Name: dtasetfieldcontrolledvocabularyvaluecntrolledvocabularyvaluesid; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfield_controlledvocabularyvalue + ADD CONSTRAINT dtasetfieldcontrolledvocabularyvaluecntrolledvocabularyvaluesid FOREIGN KEY (controlledvocabularyvalues_id) REFERENCES controlledvocabularyvalue(id); + + +-- +-- Name: explicitgroup_authenticateduser_containedauthenticatedusers_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY explicitgroup_authenticateduser + ADD CONSTRAINT explicitgroup_authenticateduser_containedauthenticatedusers_id FOREIGN KEY 
(containedauthenticatedusers_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_apitoken_authenticateduser_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY apitoken + ADD CONSTRAINT fk_apitoken_authenticateduser_id FOREIGN KEY (authenticateduser_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_authenticateduserlookup_authenticateduser_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY authenticateduserlookup + ADD CONSTRAINT fk_authenticateduserlookup_authenticateduser_id FOREIGN KEY (authenticateduser_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_controlledvocabalternate_controlledvocabularyvalue_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY controlledvocabalternate + ADD CONSTRAINT fk_controlledvocabalternate_controlledvocabularyvalue_id FOREIGN KEY (controlledvocabularyvalue_id) REFERENCES controlledvocabularyvalue(id); + + +-- +-- Name: fk_controlledvocabalternate_datasetfieldtype_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY controlledvocabalternate + ADD CONSTRAINT fk_controlledvocabalternate_datasetfieldtype_id FOREIGN KEY (datasetfieldtype_id) REFERENCES datasetfieldtype(id); + + +-- +-- Name: fk_controlledvocabularyvalue_datasetfieldtype_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY controlledvocabularyvalue + ADD CONSTRAINT fk_controlledvocabularyvalue_datasetfieldtype_id FOREIGN KEY (datasetfieldtype_id) REFERENCES datasetfieldtype(id); + + +-- +-- Name: fk_customquestion_guestbook_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY customquestion + ADD CONSTRAINT fk_customquestion_guestbook_id FOREIGN KEY (guestbook_id) REFERENCES guestbook(id); + + +-- +-- Name: fk_customquestionresponse_customquestion_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY 
customquestionresponse + ADD CONSTRAINT fk_customquestionresponse_customquestion_id FOREIGN KEY (customquestion_id) REFERENCES customquestion(id); + + +-- +-- Name: fk_customquestionresponse_guestbookresponse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY customquestionresponse + ADD CONSTRAINT fk_customquestionresponse_guestbookresponse_id FOREIGN KEY (guestbookresponse_id) REFERENCES guestbookresponse(id); + + +-- +-- Name: fk_customquestionvalue_customquestion_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY customquestionvalue + ADD CONSTRAINT fk_customquestionvalue_customquestion_id FOREIGN KEY (customquestion_id) REFERENCES customquestion(id); + + +-- +-- Name: fk_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datafile + ADD CONSTRAINT fk_datafile_id FOREIGN KEY (id) REFERENCES dvobject(id); + + +-- +-- Name: fk_datafilecategory_dataset_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datafilecategory + ADD CONSTRAINT fk_datafilecategory_dataset_id FOREIGN KEY (dataset_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_datafiletag_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datafiletag + ADD CONSTRAINT fk_datafiletag_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataset_guestbook_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataset + ADD CONSTRAINT fk_dataset_guestbook_id FOREIGN KEY (guestbook_id) REFERENCES guestbook(id); + + +-- +-- Name: fk_dataset_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataset + ADD CONSTRAINT fk_dataset_id FOREIGN KEY (id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataset_thumbnailfile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataset + ADD CONSTRAINT 
fk_dataset_thumbnailfile_id FOREIGN KEY (thumbnailfile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_datasetfield_controlledvocabularyvalue_datasetfield_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfield_controlledvocabularyvalue + ADD CONSTRAINT fk_datasetfield_controlledvocabularyvalue_datasetfield_id FOREIGN KEY (datasetfield_id) REFERENCES datasetfield(id); + + +-- +-- Name: fk_datasetfield_datasetfieldtype_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfield + ADD CONSTRAINT fk_datasetfield_datasetfieldtype_id FOREIGN KEY (datasetfieldtype_id) REFERENCES datasetfieldtype(id); + + +-- +-- Name: fk_datasetfield_datasetversion_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfield + ADD CONSTRAINT fk_datasetfield_datasetversion_id FOREIGN KEY (datasetversion_id) REFERENCES datasetversion(id); + + +-- +-- Name: fk_datasetfield_parentdatasetfieldcompoundvalue_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfield + ADD CONSTRAINT fk_datasetfield_parentdatasetfieldcompoundvalue_id FOREIGN KEY (parentdatasetfieldcompoundvalue_id) REFERENCES datasetfieldcompoundvalue(id); + + +-- +-- Name: fk_datasetfield_template_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfield + ADD CONSTRAINT fk_datasetfield_template_id FOREIGN KEY (template_id) REFERENCES template(id); + + +-- +-- Name: fk_datasetfieldcompoundvalue_parentdatasetfield_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfieldcompoundvalue + ADD CONSTRAINT fk_datasetfieldcompoundvalue_parentdatasetfield_id FOREIGN KEY (parentdatasetfield_id) REFERENCES datasetfield(id); + + +-- +-- Name: fk_datasetfielddefaultvalue_datasetfield_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfielddefaultvalue + ADD 
CONSTRAINT fk_datasetfielddefaultvalue_datasetfield_id FOREIGN KEY (datasetfield_id) REFERENCES datasetfieldtype(id); + + +-- +-- Name: fk_datasetfielddefaultvalue_defaultvalueset_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfielddefaultvalue + ADD CONSTRAINT fk_datasetfielddefaultvalue_defaultvalueset_id FOREIGN KEY (defaultvalueset_id) REFERENCES defaultvalueset(id); + + +-- +-- Name: fk_datasetfielddefaultvalue_parentdatasetfielddefaultvalue_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfielddefaultvalue + ADD CONSTRAINT fk_datasetfielddefaultvalue_parentdatasetfielddefaultvalue_id FOREIGN KEY (parentdatasetfielddefaultvalue_id) REFERENCES datasetfielddefaultvalue(id); + + +-- +-- Name: fk_datasetfieldtype_metadatablock_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfieldtype + ADD CONSTRAINT fk_datasetfieldtype_metadatablock_id FOREIGN KEY (metadatablock_id) REFERENCES metadatablock(id); + + +-- +-- Name: fk_datasetfieldtype_parentdatasetfieldtype_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfieldtype + ADD CONSTRAINT fk_datasetfieldtype_parentdatasetfieldtype_id FOREIGN KEY (parentdatasetfieldtype_id) REFERENCES datasetfieldtype(id); + + +-- +-- Name: fk_datasetfieldvalue_datasetfield_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfieldvalue + ADD CONSTRAINT fk_datasetfieldvalue_datasetfield_id FOREIGN KEY (datasetfield_id) REFERENCES datasetfield(id); + + +-- +-- Name: fk_datasetlinkingdataverse_dataset_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetlinkingdataverse + ADD CONSTRAINT fk_datasetlinkingdataverse_dataset_id FOREIGN KEY (dataset_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_datasetlinkingdataverse_linkingdataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: 
dataverse_app +-- + +ALTER TABLE ONLY datasetlinkingdataverse + ADD CONSTRAINT fk_datasetlinkingdataverse_linkingdataverse_id FOREIGN KEY (linkingdataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_datasetlock_dataset_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetlock + ADD CONSTRAINT fk_datasetlock_dataset_id FOREIGN KEY (dataset_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_datasetlock_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetlock + ADD CONSTRAINT fk_datasetlock_user_id FOREIGN KEY (user_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_datasetversion_dataset_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetversion + ADD CONSTRAINT fk_datasetversion_dataset_id FOREIGN KEY (dataset_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_datasetversionuser_authenticateduser_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetversionuser + ADD CONSTRAINT fk_datasetversionuser_authenticateduser_id FOREIGN KEY (authenticateduser_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_datasetversionuser_datasetversion_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetversionuser + ADD CONSTRAINT fk_datasetversionuser_datasetversion_id FOREIGN KEY (datasetversion_id) REFERENCES datasetversion(id); + + +-- +-- Name: fk_datatable_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datatable + ADD CONSTRAINT fk_datatable_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_datavariable_datatable_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datavariable + ADD CONSTRAINT fk_datavariable_datatable_id FOREIGN KEY (datatable_id) REFERENCES datatable(id); + + +-- +-- Name: fk_dataverse_defaultcontributorrole_id; Type: FK 
CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverse + ADD CONSTRAINT fk_dataverse_defaultcontributorrole_id FOREIGN KEY (defaultcontributorrole_id) REFERENCES dataverserole(id); + + +-- +-- Name: fk_dataverse_defaulttemplate_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverse + ADD CONSTRAINT fk_dataverse_defaulttemplate_id FOREIGN KEY (defaulttemplate_id) REFERENCES template(id); + + +-- +-- Name: fk_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverse + ADD CONSTRAINT fk_dataverse_id FOREIGN KEY (id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataverse_metadatablock_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverse_metadatablock + ADD CONSTRAINT fk_dataverse_metadatablock_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataverse_metadatablock_metadatablocks_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverse_metadatablock + ADD CONSTRAINT fk_dataverse_metadatablock_metadatablocks_id FOREIGN KEY (metadatablocks_id) REFERENCES metadatablock(id); + + +-- +-- Name: fk_dataversecontact_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversecontact + ADD CONSTRAINT fk_dataversecontact_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataversefacet_datasetfieldtype_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefacet + ADD CONSTRAINT fk_dataversefacet_datasetfieldtype_id FOREIGN KEY (datasetfieldtype_id) REFERENCES datasetfieldtype(id); + + +-- +-- Name: fk_dataversefacet_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefacet + ADD CONSTRAINT fk_dataversefacet_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES 
dvobject(id); + + +-- +-- Name: fk_dataversefeatureddataverse_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefeatureddataverse + ADD CONSTRAINT fk_dataversefeatureddataverse_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataversefeatureddataverse_featureddataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefeatureddataverse + ADD CONSTRAINT fk_dataversefeatureddataverse_featureddataverse_id FOREIGN KEY (featureddataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataversefieldtypeinputlevel_datasetfieldtype_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefieldtypeinputlevel + ADD CONSTRAINT fk_dataversefieldtypeinputlevel_datasetfieldtype_id FOREIGN KEY (datasetfieldtype_id) REFERENCES datasetfieldtype(id); + + +-- +-- Name: fk_dataversefieldtypeinputlevel_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefieldtypeinputlevel + ADD CONSTRAINT fk_dataversefieldtypeinputlevel_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataverselinkingdataverse_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverselinkingdataverse + ADD CONSTRAINT fk_dataverselinkingdataverse_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataverselinkingdataverse_linkingdataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverselinkingdataverse + ADD CONSTRAINT fk_dataverselinkingdataverse_linkingdataverse_id FOREIGN KEY (linkingdataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataverserole_owner_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverserole + ADD CONSTRAINT fk_dataverserole_owner_id FOREIGN KEY (owner_id) 
REFERENCES dvobject(id); + + +-- +-- Name: fk_dataversesubjects_controlledvocabularyvalue_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversesubjects + ADD CONSTRAINT fk_dataversesubjects_controlledvocabularyvalue_id FOREIGN KEY (controlledvocabularyvalue_id) REFERENCES controlledvocabularyvalue(id); + + +-- +-- Name: fk_dataversesubjects_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversesubjects + ADD CONSTRAINT fk_dataversesubjects_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataversetheme_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversetheme + ADD CONSTRAINT fk_dataversetheme_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dvobject_creator_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dvobject + ADD CONSTRAINT fk_dvobject_creator_id FOREIGN KEY (creator_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_dvobject_owner_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dvobject + ADD CONSTRAINT fk_dvobject_owner_id FOREIGN KEY (owner_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dvobject_releaseuser_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dvobject + ADD CONSTRAINT fk_dvobject_releaseuser_id FOREIGN KEY (releaseuser_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_explicitgroup_authenticateduser_explicitgroup_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY explicitgroup_authenticateduser + ADD CONSTRAINT fk_explicitgroup_authenticateduser_explicitgroup_id FOREIGN KEY (explicitgroup_id) REFERENCES explicitgroup(id); + + +-- +-- Name: fk_explicitgroup_containedroleassignees_explicitgroup_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + 
+ALTER TABLE ONLY explicitgroup_containedroleassignees + ADD CONSTRAINT fk_explicitgroup_containedroleassignees_explicitgroup_id FOREIGN KEY (explicitgroup_id) REFERENCES explicitgroup(id); + + +-- +-- Name: fk_explicitgroup_explicitgroup_containedexplicitgroups_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY explicitgroup_explicitgroup + ADD CONSTRAINT fk_explicitgroup_explicitgroup_containedexplicitgroups_id FOREIGN KEY (containedexplicitgroups_id) REFERENCES explicitgroup(id); + + +-- +-- Name: fk_explicitgroup_explicitgroup_explicitgroup_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY explicitgroup_explicitgroup + ADD CONSTRAINT fk_explicitgroup_explicitgroup_explicitgroup_id FOREIGN KEY (explicitgroup_id) REFERENCES explicitgroup(id); + + +-- +-- Name: fk_explicitgroup_owner_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY explicitgroup + ADD CONSTRAINT fk_explicitgroup_owner_id FOREIGN KEY (owner_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_fileaccessrequests_authenticated_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY fileaccessrequests + ADD CONSTRAINT fk_fileaccessrequests_authenticated_user_id FOREIGN KEY (authenticated_user_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_fileaccessrequests_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY fileaccessrequests + ADD CONSTRAINT fk_fileaccessrequests_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_filemetadata_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY filemetadata + ADD CONSTRAINT fk_filemetadata_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_filemetadata_datafilecategory_filecategories_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY 
filemetadata_datafilecategory + ADD CONSTRAINT fk_filemetadata_datafilecategory_filecategories_id FOREIGN KEY (filecategories_id) REFERENCES datafilecategory(id); + + +-- +-- Name: fk_filemetadata_datafilecategory_filemetadatas_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY filemetadata_datafilecategory + ADD CONSTRAINT fk_filemetadata_datafilecategory_filemetadatas_id FOREIGN KEY (filemetadatas_id) REFERENCES filemetadata(id); + + +-- +-- Name: fk_filemetadata_datasetversion_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY filemetadata + ADD CONSTRAINT fk_filemetadata_datasetversion_id FOREIGN KEY (datasetversion_id) REFERENCES datasetversion(id); + + +-- +-- Name: fk_foreignmetadatafieldmapping_foreignmetadataformatmapping_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY foreignmetadatafieldmapping + ADD CONSTRAINT fk_foreignmetadatafieldmapping_foreignmetadataformatmapping_id FOREIGN KEY (foreignmetadataformatmapping_id) REFERENCES foreignmetadataformatmapping(id); + + +-- +-- Name: fk_foreignmetadatafieldmapping_parentfieldmapping_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY foreignmetadatafieldmapping + ADD CONSTRAINT fk_foreignmetadatafieldmapping_parentfieldmapping_id FOREIGN KEY (parentfieldmapping_id) REFERENCES foreignmetadatafieldmapping(id); + + +-- +-- Name: fk_guestbook_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY guestbook + ADD CONSTRAINT fk_guestbook_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_guestbookresponse_authenticateduser_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY guestbookresponse + ADD CONSTRAINT fk_guestbookresponse_authenticateduser_id FOREIGN KEY (authenticateduser_id) REFERENCES authenticateduser(id); + + +-- +-- Name: 
fk_guestbookresponse_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY guestbookresponse + ADD CONSTRAINT fk_guestbookresponse_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_guestbookresponse_dataset_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY guestbookresponse + ADD CONSTRAINT fk_guestbookresponse_dataset_id FOREIGN KEY (dataset_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_guestbookresponse_datasetversion_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY guestbookresponse + ADD CONSTRAINT fk_guestbookresponse_datasetversion_id FOREIGN KEY (datasetversion_id) REFERENCES datasetversion(id); + + +-- +-- Name: fk_guestbookresponse_guestbook_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY guestbookresponse + ADD CONSTRAINT fk_guestbookresponse_guestbook_id FOREIGN KEY (guestbook_id) REFERENCES guestbook(id); + + +-- +-- Name: fk_harvestingdataverseconfig_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY harvestingdataverseconfig + ADD CONSTRAINT fk_harvestingdataverseconfig_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_ingestreport_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY ingestreport + ADD CONSTRAINT fk_ingestreport_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_ingestrequest_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY ingestrequest + ADD CONSTRAINT fk_ingestrequest_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_ipv4range_owner_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY ipv4range + ADD CONSTRAINT fk_ipv4range_owner_id FOREIGN KEY (owner_id) REFERENCES 
persistedglobalgroup(id); + + +-- +-- Name: fk_ipv6range_owner_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY ipv6range + ADD CONSTRAINT fk_ipv6range_owner_id FOREIGN KEY (owner_id) REFERENCES persistedglobalgroup(id); + + +-- +-- Name: fk_maplayermetadata_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY maplayermetadata + ADD CONSTRAINT fk_maplayermetadata_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_maplayermetadata_dataset_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY maplayermetadata + ADD CONSTRAINT fk_maplayermetadata_dataset_id FOREIGN KEY (dataset_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_metadatablock_owner_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY metadatablock + ADD CONSTRAINT fk_metadatablock_owner_id FOREIGN KEY (owner_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_passwordresetdata_builtinuser_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY passwordresetdata + ADD CONSTRAINT fk_passwordresetdata_builtinuser_id FOREIGN KEY (builtinuser_id) REFERENCES builtinuser(id); + + +-- +-- Name: fk_roleassignment_definitionpoint_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY roleassignment + ADD CONSTRAINT fk_roleassignment_definitionpoint_id FOREIGN KEY (definitionpoint_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_roleassignment_role_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY roleassignment + ADD CONSTRAINT fk_roleassignment_role_id FOREIGN KEY (role_id) REFERENCES dataverserole(id); + + +-- +-- Name: fk_savedsearch_creator_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY savedsearch + ADD CONSTRAINT fk_savedsearch_creator_id FOREIGN KEY (creator_id) REFERENCES authenticateduser(id); + + 
+-- +-- Name: fk_savedsearch_definitionpoint_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY savedsearch + ADD CONSTRAINT fk_savedsearch_definitionpoint_id FOREIGN KEY (definitionpoint_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_savedsearchfilterquery_savedsearch_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY savedsearchfilterquery + ADD CONSTRAINT fk_savedsearchfilterquery_savedsearch_id FOREIGN KEY (savedsearch_id) REFERENCES savedsearch(id); + + +-- +-- Name: fk_summarystatistic_datavariable_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY summarystatistic + ADD CONSTRAINT fk_summarystatistic_datavariable_id FOREIGN KEY (datavariable_id) REFERENCES datavariable(id); + + +-- +-- Name: fk_template_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY template + ADD CONSTRAINT fk_template_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_usernotification_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY usernotification + ADD CONSTRAINT fk_usernotification_user_id FOREIGN KEY (user_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_variablecategory_datavariable_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY variablecategory + ADD CONSTRAINT fk_variablecategory_datavariable_id FOREIGN KEY (datavariable_id) REFERENCES datavariable(id); + + +-- +-- Name: fk_variablerange_datavariable_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY variablerange + ADD CONSTRAINT fk_variablerange_datavariable_id FOREIGN KEY (datavariable_id) REFERENCES datavariable(id); + + +-- +-- Name: fk_variablerangeitem_datavariable_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY variablerangeitem + ADD CONSTRAINT 
fk_variablerangeitem_datavariable_id FOREIGN KEY (datavariable_id) REFERENCES datavariable(id); + + +-- +-- Name: fk_worldmapauth_token_application_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY worldmapauth_token + ADD CONSTRAINT fk_worldmapauth_token_application_id FOREIGN KEY (application_id) REFERENCES worldmapauth_tokentype(id); + + +-- +-- Name: fk_worldmapauth_token_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY worldmapauth_token + ADD CONSTRAINT fk_worldmapauth_token_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_worldmapauth_token_dataverseuser_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY worldmapauth_token + ADD CONSTRAINT fk_worldmapauth_token_dataverseuser_id FOREIGN KEY (dataverseuser_id) REFERENCES authenticateduser(id); + + +-- +-- Name: public; Type: ACL; Schema: -; Owner: michael +-- + +REVOKE ALL ON SCHEMA public FROM PUBLIC; +REVOKE ALL ON SCHEMA public FROM michael; +GRANT ALL ON SCHEMA public TO michael; +GRANT ALL ON SCHEMA public TO dataverse_app; + + +-- +-- PostgreSQL database dump complete +-- + diff --git a/dataversedock/testdata/scripts/issues/2102/setup.sh b/dataversedock/testdata/scripts/issues/2102/setup.sh new file mode 100644 index 0000000..32b7285 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2102/setup.sh @@ -0,0 +1,13 @@ +ENDPOINT=https://localhost:8181 +APIKEY=a65048f8-875c-4479-a91d-33cb8cd12821 +DATASET=3 + +echo Calling: +echo curl --insecure $ENDPOINT/api/datasets/$DATASET/versions/:latest?key=$APIKEY +echo +echo curl --insecure -X PUT -H "Content-Type:application/json" -d@dataset-metadata-next.json $ENDPOINT/api/datasets/$DATASET/versions/:draft?key=$APIKEY +echo + + +# get data: +# curl --insecure $ENDPOINT/api/datasets/$DATASET/versions/:latest?key=$APIKEY diff --git a/dataversedock/testdata/scripts/issues/2132/find-multiple-drafts.sql 
b/dataversedock/testdata/scripts/issues/2132/find-multiple-drafts.sql new file mode 100644 index 0000000..5af324c --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2132/find-multiple-drafts.sql @@ -0,0 +1 @@ +select dataset_id, count(*) from datasetversion where versionstate='DRAFT' group by dataset_id having count(*) >1; diff --git a/dataversedock/testdata/scripts/issues/2132/one-draft-version-per-dataset-constraint.sql b/dataversedock/testdata/scripts/issues/2132/one-draft-version-per-dataset-constraint.sql new file mode 100644 index 0000000..d945934 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2132/one-draft-version-per-dataset-constraint.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX one_draft_version_per_dataset ON datasetversion (dataset_id) WHERE versionstate='DRAFT'; diff --git a/dataversedock/testdata/scripts/issues/2438/download.R b/dataversedock/testdata/scripts/issues/2438/download.R new file mode 100644 index 0000000..eea7f18 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2438/download.R @@ -0,0 +1,26 @@ +arg <- commandArgs(trailingOnly = TRUE) + +download.dataverse.file <- function(url) { + if (length(url) == 0L) { + return( + "Please provide a URL to a file: http://guides.dataverse.org/en/latest/api/dataaccess.html" + ) + } + # Examples of URLs for tsv, original, RData, JSON, DDI/XML: + # https://groups.google.com/d/msg/dataverse-community/fFrJi7NnBus/LNpfXItbtZYJ + # + # This script assumes the tsv URL is used. File id 91 is just an example. You must + # look up the id of the file. 
As of this writing the easiest way is via SWORD: + # https://github.com/IQSS/dataverse/issues/1837#issuecomment-121736332 + # + # url.to.download = 'https://demo.dataverse.org/api/v1/access/datafile/91' + url.to.download = url + tsvfile = 'file.tsv' + download.file(url = url.to.download, destfile = + tsvfile, method = 'curl') + mydata <- read.table(tsvfile, header = TRUE, sep = "\t") + print(mydata) + unlink(tsvfile) +} + +download.dataverse.file(arg) diff --git a/dataversedock/testdata/scripts/issues/2454/anAuthUser.json b/dataversedock/testdata/scripts/issues/2454/anAuthUser.json new file mode 100644 index 0000000..621b7fe --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2454/anAuthUser.json @@ -0,0 +1,9 @@ +{ + "firstName":"Anau", + "lastName":"Thuser", + "userName":"anAuthUser", + "affiliation":"current Dataverse", + "position":"above and beyond", + "email":"anAuthUser@malinator.com", + "phone":"(888) 888-8888" +} diff --git a/dataversedock/testdata/scripts/issues/2454/anotherAuthUser.json b/dataversedock/testdata/scripts/issues/2454/anotherAuthUser.json new file mode 100644 index 0000000..94db83b --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2454/anotherAuthUser.json @@ -0,0 +1,9 @@ +{ + "firstName":"Another", + "lastName":"Authuser", + "userName":"anotherAuthUser", + "affiliation":"current Dataverse", + "position":"above and beyond", + "email":"anotherAuthUser@malinator.com", + "phone":"(888) 888-8888" +} diff --git a/dataversedock/testdata/scripts/issues/2454/assignment.json b/dataversedock/testdata/scripts/issues/2454/assignment.json new file mode 100644 index 0000000..69307d0 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2454/assignment.json @@ -0,0 +1 @@ +{"assignee":":authenticated-users", "role":"curator"} diff --git a/dataversedock/testdata/scripts/issues/2454/dataverse.json b/dataversedock/testdata/scripts/issues/2454/dataverse.json new file mode 100644 index 0000000..0d2047b --- /dev/null +++ 
b/dataversedock/testdata/scripts/issues/2454/dataverse.json @@ -0,0 +1,13 @@ +{ + "alias": "permissionsTestDv", + "name": "PermissionsTest", + "affiliation": "Affiliation value", + "permissionRoot": true, + "description": "A Dataverse where we test permissions", + "dataverseContacts": [ + { + "contactEmail": "test.script@mailinator.com" + } + ], + "dataverseSubjects": ["Arts and Humanities"] +} diff --git a/dataversedock/testdata/scripts/issues/2454/group.json b/dataversedock/testdata/scripts/issues/2454/group.json new file mode 100644 index 0000000..0457565 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2454/group.json @@ -0,0 +1,5 @@ +{ + "displayName":"Permission test group", + "description":"Group for testing permissions", + "aliasInOwner":"PTG" +} diff --git a/dataversedock/testdata/scripts/issues/2454/rollback.sh b/dataversedock/testdata/scripts/issues/2454/rollback.sh new file mode 100755 index 0000000..9a1f2fa --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2454/rollback.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +ENDPOINT=http://localhost:8080/api +DB="psql dvndb -At -c " +ROOT_USER=dataverseAdmin +ROOT_KEY=$($DB "select tokenstring \ + from authenticateduser au inner join apitoken apt \ + on au.id=apt.authenticateduser_id \ + where useridentifier='$ROOT_USER'") + +echo $ROOT_USER api key is $ROOT_KEY + + +# delete DV +curl -X DELETE $ENDPOINT/dataverses/permissionsTestDv?key=$ROOT_KEY +echo +echo dataverses deleted +echo + +# delete user +for USER_NICK in anAuthUser anotherAuthUser +do + echo deleting user $USER_NICK + QUERY="select id from authenticateduser where useridentifier='$USER_NICK'" + AUTH_USER_ID=$($DB "$QUERY") + echo Auth user id is $AUTH_USER_ID + $DB "delete from apitoken where authenticateduser_id=$AUTH_USER_ID" + $DB "delete from authenticateduserlookup where authenticateduser_id=$AUTH_USER_ID" + $DB "delete from authenticateduser where id=$AUTH_USER_ID" + $DB "delete from builtinuser where id=$AUTH_USER_ID" +done diff --git 
a/dataversedock/testdata/scripts/issues/2454/run-test.sh b/dataversedock/testdata/scripts/issues/2454/run-test.sh new file mode 100755 index 0000000..49eb45a --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2454/run-test.sh @@ -0,0 +1,108 @@ +#! /bin/bash + +# This script is an automated test to validate that issue https://github.com/IQSS/dataverse/issues/2454 +# has been properly implemented. +# The issue requires that we "Distinguish between "read" and "write" permissions, make the "write" ones apply only to AuthenticatedUsers" +# To test this, we do the following: +# 1. Create a dataverse D in root +# 2. Create a new explicit group G in D, containing :guest and @anAuthUser +# 3. Assign the Admin role to G +# 4. Validation: +# 4.1 `:guest` can view unpublished dataverse, can't manage permissions there +# 4.2 `@anAuthUser` can do both +# 4.3 `@anotherAuthUSer` can do none + +# +# /!\ This script requires jq, psql and curl. +# /!\ You can set turn off the state setup by setting SETUP_NEEDED to anything that's not "yes" +# + +##### +# Config - edit this to match your system. 
+ENDPOINT=http://localhost:8080/api +DB="psql dvndb -At -c " +ROOT_USER=dataverseAdmin +SETUP_NEEDED=yes + + +##### +# Setup = if needed +# +if [ $SETUP_NEEDED == "yes" ]; then + echo SETTING UP + + ROOT_KEY=$($DB "select tokenstring \ + from authenticateduser au inner join apitoken apt \ + on au.id=apt.authenticateduser_id \ + where useridentifier='$ROOT_USER'") + + echo $ROOT_USER api key is $ROOT_KEY + + # Create @anAuthUser + USER_CREATION_KEY=$($DB "SELECT content FROM setting WHERE name='BuiltinUsers.KEY'") + AN_AUTH_USER_KEY=$( curl -s -X POST -d@anAuthUser.json -H"Content-type:application/json" $ENDPOINT/builtin-users?password=XXX\&key=$USER_CREATION_KEY | jq .data.apiToken | tr -d \") + ANOTHER_AUTH_USER_KEY=$( curl -s -X POST -d@anotherAuthUser.json -H"Content-type:application/json" $ENDPOINT/builtin-users?password=XXX\&key=$USER_CREATION_KEY | jq .data.apiToken | tr -d \") + echo + echo user @anAuthUser created with key $AN_AUTH_USER_KEY + + # Create the test dataverses. + curl -s -X POST -d@dataverse.json -H "Content-type:application/json" $ENDPOINT/dataverses/:root/?key=$ROOT_KEY + echo + echo Dataverse created + + # Create the group and add the users + GROUP_ID=$( curl -s -X POST -d@group.json -H "Content-type:application/json" $ENDPOINT/dataverses/permissionsTestDv/groups/?key=$ROOT_KEY | jq .data.identifier | tr -d \" ) + echo Group created with id $GROUP_ID + curl -s -X POST -d'[":guest","@anAuthUser"]' -H "Content-type:application/json" $ENDPOINT/dataverses/permissionsTestDv/groups/PTG/roleAssignees?key=$ROOT_KEY + echo + echo added users to group + + # Assign the "Admin" role to the group + ASSIGNMENT="{\"assignee\":\"$GROUP_ID\", \"role\":\"admin\"}" + curl -s -X POST -d"$ASSIGNMENT" -H "Content-type:application/json" $ENDPOINT/dataverses/permissionsTestDv/assignments/?key=$ROOT_KEY + + echo + echo SETUP DONE + echo + +else + echo Skipping setup + AN_AUTH_USER_KEY=$($DB "select tokenstring \ + from authenticateduser au inner join apitoken apt \ 
+ on au.id=apt.authenticateduser_id \ + where useridentifier='anAuthUser'") + ANOTHER_AUTH_USER_KEY=$($DB "select tokenstring \ + from authenticateduser au inner join apitoken apt \ + on au.id=apt.authenticateduser_id \ + where useridentifier='anotherAuthUser'") + echo + echo Keys + echo @anAuthUser $AN_AUTH_USER_KEY + echo @anotherAuthUser $ANOTHER_AUTH_USER_KEY +fi + +# Test permissions +echo :guest viewing inner dv ... expecting 200 OK +curl -si $ENDPOINT/dataverses/permissionsTestDv | head -n 1 +echo + +echo @anAuthUser viewing inner dv ... expecting 200 OK +curl -si $ENDPOINT/dataverses/permissionsTestDv?key=$AN_AUTH_USER_KEY | head -n 1 +echo + +echo @anotherAuthUser viewing inner dv ... expecting 401 Unauthorized +curl -si $ENDPOINT/dataverses/permissionsTestDv?key=$ANOTHER_AUTH_USER_KEY | head -n 1 +echo +# Assign the "Admin" role to the group + +echo :guest setting permissions ... Expecting 401 Unauthorized +curl -si -X POST -d@assignment.json -H "Content-type:application/json" $ENDPOINT/dataverses/permissionsTestDv/assignments/ | head -n 1 +echo + +echo @anotherAuthUser setting permissions ... Expecting 401 Unauthorized +curl -si -X POST -d@assignment.json -H "Content-type:application/json" $ENDPOINT/dataverses/permissionsTestDv/assignments/?key=$ANOTHER_AUTH_USER_KEY | head -n 1 +echo + +echo @anAuthUser setting permissions ... 
Expecting 200 OK +curl -si -X POST -d@assignment.json -H "Content-type:application/json" $ENDPOINT/dataverses/permissionsTestDv/assignments/?key=$AN_AUTH_USER_KEY | head -n 1 +echo diff --git a/dataversedock/testdata/scripts/issues/2595/monitor.py b/dataversedock/testdata/scripts/issues/2595/monitor.py new file mode 100755 index 0000000..3e92dec --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2595/monitor.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python +import urllib2 +import json +import datetime +import csv +import os +from shutil import move +base_url = 'http://localhost:4848/monitoring/domain/server/resources/dvnDbPool' +request = urllib2.Request(base_url, headers = { 'Accept' : 'application/json'}) +json1 = urllib2.urlopen(request).read() +data1 = json.loads(json1) +#print json.dumps(data1, indent=2) +war_file = data1['extraProperties']['childResources'].keys()[0] +request = urllib2.Request(base_url + '/' + war_file, headers = { 'Accept' : 'application/json'}) +json2 = urllib2.urlopen(request).read() +data2 = json.loads(json2) +#print json.dumps(data2, indent=2) + +def highwater(data, metric): + columns = ['lastsampletime', 'current', 'highwatermark'] + obj = data['extraProperties']['entity'][metric] + time_readable = epoch2readable (obj, columns[0]) + current = obj[columns[1]] + highwater = obj[columns[2]] + filename = metric + '.tsv' + values = [[time_readable, current, highwater]]; + write_file(metric, columns, values) + +def count(data, metric): + columns = ['lastsampletime', 'count'] + obj = data['extraProperties']['entity'][metric] + time_readable = epoch2readable (obj, columns[0]) + count = obj['count'] + values = [[time_readable, count]]; + write_file(metric, columns, values) + +def epoch2readable(obj, key): + time_epochsec = obj[key] / 1000.0 + time_readable = datetime.datetime.fromtimestamp(time_epochsec).strftime('%Y-%m-%d %H:%M:%S.%f') + return time_readable + +def write_file(metric, columns, values): + filename = metric + '.tsv' + if not 
os.path.isfile(filename): + write_header(columns, filename) + write_values(values, filename) + uniq(filename) + +def write_header(headers, filename): + with open(filename, 'a') as fp: + a = csv.writer(fp, delimiter='\t'); + a.writerows([headers]); + +def write_values(values, filename): + with open(filename, 'a') as fp: + a = csv.writer(fp, delimiter='\t'); + a.writerows(values); + +def uniq(filename): + tmpfile = filename + '.tmp' + lines_seen = set() # holds lines already seen + outfile = open(tmpfile, 'w') + for line in open(filename, 'r'): + if line not in lines_seen: # not a duplicate + outfile.write(line) + lines_seen.add(line) + outfile.close() + move(tmpfile, filename) + +highwater(data1, 'numconnused') +highwater(data1, 'connrequestwaittime') +count(data1, 'numconnacquired') +count(data1, 'numconnreleased') diff --git a/dataversedock/testdata/scripts/issues/2595/numconnacquired.tsv b/dataversedock/testdata/scripts/issues/2595/numconnacquired.tsv new file mode 100644 index 0000000..97a0640 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2595/numconnacquired.tsv @@ -0,0 +1,3 @@ +lastsampletime count +2015-10-14 09:34:10.553000 81572 +2015-10-14 09:49:10.695000 82053 diff --git a/dataversedock/testdata/scripts/issues/2595/plot.py b/dataversedock/testdata/scripts/issues/2595/plot.py new file mode 100755 index 0000000..640dba8 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2595/plot.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python +import sys +import numpy as np +import matplotlib.pyplot as plt +import matplotlib.dates as mdates +days, impressions = np.loadtxt("numconnacquired.tsv", delimiter='\t', skiprows=1, unpack=True, + converters={ 0: mdates.strpdate2num('%Y-%m-%d %H:%M:%S.%f')}) +plt.plot_date(x=days, y=impressions, fmt="r-") +plt.title("Number of logical connections acquired from the pool") +plt.ylabel("numconnacquired") +plt.grid(True) +plt.gcf().autofmt_xdate() +plt.savefig('out.png') diff --git 
a/dataversedock/testdata/scripts/issues/2598/detect-duplicate-dataverse-aliases.sql b/dataversedock/testdata/scripts/issues/2598/detect-duplicate-dataverse-aliases.sql new file mode 100644 index 0000000..58eec81 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2598/detect-duplicate-dataverse-aliases.sql @@ -0,0 +1 @@ +select alias from dataverse where lower(alias) in (select lower(alias) from dataverse group by lower(alias) having count(*) >1); diff --git a/dataversedock/testdata/scripts/issues/2598/insert-duplicate-alias.sql b/dataversedock/testdata/scripts/issues/2598/insert-duplicate-alias.sql new file mode 100644 index 0000000..559692c --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2598/insert-duplicate-alias.sql @@ -0,0 +1,10 @@ +-- This script should fail to insert a duplicate datavers alias (different case) +-- after a constraint has been added in https://github.com/IQSS/dataverse/issues/2598 +DELETE FROM dataverse where id = 100; +DELETE FROM dataverse where id = 101; +DELETE FROM dvobject where id = 100; +DELETE FROM dvobject where id = 101; +INSERT INTO dvobject (id, createdate, modificationtime) VALUES (100, NOW(), NOW()); +INSERT INTO dataverse (id, alias, name, dataversetype, defaultcontributorrole_id) VALUES (100, 'foo', 'foo is mine', 'UNCATEGORIZED', 1); +INSERT INTO dvobject (id, createdate, modificationtime) VALUES (101, NOW(), NOW()); +INSERT INTO dataverse (id, alias, name, dataversetype, defaultcontributorrole_id) VALUES (101, 'FOO', 'uppercase foo', 'UNCATEGORIZED', 1); diff --git a/dataversedock/testdata/scripts/issues/2648/reproduce b/dataversedock/testdata/scripts/issues/2648/reproduce new file mode 100755 index 0000000..9ff8708 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2648/reproduce @@ -0,0 +1,59 @@ +#!/bin/sh +# Test scenario: User "spruce" has marked "trees.png" as restricted* +# and has given "finch" the File Downloader role on "trees.png" at the file level +# but has not yet published the dataset. 
+# +# * Marking files as restricted can only be done through the GUI: https://github.com/IQSS/dataverse/issues/2497 +# +# Here is where "trees.png" lives: +# * Root Dataverse (entityId:1) +# * Trees Dataverse (entityId:7) +# * Spruce Dataverse (entityId:8) +# * Spruce Goose (entityId:10) +# * trees.png (entityId:12) +# +# Unknowns: +# - What is the behavior if you give the File Downloader role at the dataset level? +# - What is the behavior if you give the File Downloader role at the dataverse level? + +SPRUCE_STORY="spruce uploaded the file in the first place and should be able to download it." +SPARROW_STORY="sparrow has no special access and should not be able to download the file because a) it isn't published and b) it's restricted" +FINCH_STORY="finch has the DownloadFile permission but should not be able to download the file because the dataset is unpublished" + +FORBIDDEN=403 +. scripts/search/export-keys + +API_TOKEN=$SPRUCEKEY +echo "sparrow is attempting to download the file (should be forbidden)" +# Can't use header: https://github.com/IQSS/dataverse/issues/2662 +# curl -H "X-Dataverse-key:$API_TOKEN" http://localhost:8080/api/access/datafile/12 +SPRUCE_OUT=$(curl --write-out %{http_code} --silent --output /dev/null http://localhost:8080/api/access/datafile/12?key=$API_TOKEN ) +if [ $SPRUCE_OUT -ne $FORBIDDEN ]; then + echo "Good. $SPRUCE_STORY" +else + echo "Bug. $SPRUCE_STORY" +fi + +echo "---" + +# Yes, all this could be refactored to make it DRY. +API_TOKEN=$SPARROWKEY +echo "sparrow is attempting to download the file (should be forbidden)" +SPARROW_OUT=$(curl --write-out %{http_code} --silent --output /dev/null http://localhost:8080/api/access/datafile/12?key=$API_TOKEN ) +if [ $SPARROW_OUT -eq $FORBIDDEN ]; then + echo "Good. $SPARROW_STORY" +else + echo "Bug. 
$SPARROW_STORY" +fi + +echo "---" + +API_TOKEN=$FINCHKEY +echo "finch is attempting to download the file (should be forbidden)" +FINCH_OUT=$(curl --write-out %{http_code} --silent --output /dev/null http://localhost:8080/api/access/datafile/12?key=$API_TOKEN ) +#curl -s -i http://localhost:8080/api/access/datafile/12?key=$API_TOKEN | head | grep ^Content-Type +if [ $FINCH_OUT -eq $FORBIDDEN ]; then + echo "Good. $FINCH_STORY" +else + echo "Bug. $FINCH_STORY" +fi diff --git a/dataversedock/testdata/scripts/issues/2649/reproduce b/dataversedock/testdata/scripts/issues/2649/reproduce new file mode 100755 index 0000000..25985fd --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2649/reproduce @@ -0,0 +1,6 @@ +#!/bin/sh +# "File Downloader" role has already been assigned to "finch" to one of: +# - trees.png file +# - Spruce Goose dataset +# - Spruce dataverse +curl -s 'http://localhost:8080/api/mydata/retrieve?selected_page=1&dvobject_types=DataFile&published_states=Published&published_states=Unpublished&published_states=Draft&published_states=In+Review&published_states=Deaccessioned&role_ids=1&role_ids=2&role_ids=6&mydata_search_term=&userIdentifier=finch' | jq . diff --git a/dataversedock/testdata/scripts/issues/2681/create-files b/dataversedock/testdata/scripts/issues/2681/create-files new file mode 100755 index 0000000..7d5eda3 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/2681/create-files @@ -0,0 +1,18 @@ +#!/bin/sh +NUM_FILES=10 +if [ ! 
-z "$1" ]; then + NUM_FILES=$1 +fi +TMP="/tmp" +DIR_NAME="$TMP/${NUM_FILES}files" +TMP_DIR="$DIR_NAME" +rm -rf $TMP_DIR +mkdir $TMP_DIR +cd $TMP_DIR +for i in `seq -f "%04g" $NUM_FILES`; do + echo $i > $i.txt +done +cd $TMP +ZIP=${DIR_NAME}.zip +ls $DIR_NAME/* +zip $ZIP $DIR_NAME/* diff --git a/dataversedock/testdata/scripts/issues/3354/createDatasetWithSha1Files.sh b/dataversedock/testdata/scripts/issues/3354/createDatasetWithSha1Files.sh new file mode 100755 index 0000000..1792a9e --- /dev/null +++ b/dataversedock/testdata/scripts/issues/3354/createDatasetWithSha1Files.sh @@ -0,0 +1,5 @@ +#!/bin/sh +# existing, works, no files, commenting out +#curl -s -X POST -H "Content-type:application/json" -d @scripts/search/tests/data/dataset-finch1.json "http://localhost:8080/api/dataverses/root/datasets/?key=$API_TOKEN" +# new, has files +curl -s -X POST -H "Content-type:application/json" -d @scripts/issues/3354/datasetWithSha1Files.json "http://localhost:8080/api/dataverses/root/datasets/?key=$API_TOKEN" diff --git a/dataversedock/testdata/scripts/issues/3354/datasetWithSha1Files.json b/dataversedock/testdata/scripts/issues/3354/datasetWithSha1Files.json new file mode 100644 index 0000000..95a4d3b --- /dev/null +++ b/dataversedock/testdata/scripts/issues/3354/datasetWithSha1Files.json @@ -0,0 +1,86 @@ +{ + "datasetVersion": { + "files": [ + { + "label": "foo.txt", + "dataFile": { + "filename": "foo.txt", + "contentType": "text/plain", + "storageIdentifier": "157484f9d6c-c36006fa39e5", + "originalFormatLabel": "UNKNOWN", + "checksum": { + "type": "SHA-1", + "value": "f1d2d2f924e986ac86fdf7b36c94bcdf32beec15" + } + } + } + ], + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "Dataset with SHA-1 files", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorName": { + "value": "Finch, Fiona", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + }, + "authorAffiliation": { + "value": 
"Birds Inc.", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": [ + { "datasetContactEmail" : { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactEmail", + "value" : "finch@mailinator.com" + } + }], + "typeClass": "compound", + "multiple": true, + "typeName": "datasetContact" + }, + { + "value": [ { + "dsDescriptionValue":{ + "value": "Some people prefer SHA-1 to MD5 for file fixity.", + "multiple":false, + "typeClass": "primitive", + "typeName": "dsDescriptionValue" + }}], + "typeClass": "compound", + "multiple": true, + "typeName": "dsDescription" + }, + { + "value": [ + "Other" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + } + ], + "displayName": "Citation Metadata" + } + } + } +} diff --git a/dataversedock/testdata/scripts/issues/3354/mydata b/dataversedock/testdata/scripts/issues/3354/mydata new file mode 100755 index 0000000..eb76d06 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/3354/mydata @@ -0,0 +1,3 @@ +#!/bin/sh +# FIXME: Make this into a REST Assured test. +curl -s "http://localhost:8080/api/mydata/retrieve?key=$API_TOKEN&role_ids=1&dvobject_types=DataFile&published_states=Published&published_states=Unpublished&published_states=Draft&published_states=In+Review&published_states=Deaccessioned" | jq .data.items diff --git a/dataversedock/testdata/scripts/issues/3543/dv-peteDelete1.json b/dataversedock/testdata/scripts/issues/3543/dv-peteDelete1.json new file mode 100644 index 0000000..60b6aed --- /dev/null +++ b/dataversedock/testdata/scripts/issues/3543/dv-peteDelete1.json @@ -0,0 +1,8 @@ + { + "alias":"peteDelete1", + "name":"A dataverse for testing", + "affiliation":"Affiliation value", + "contactEmail":"pete@mailinator.com", + "permissionRoot":false, + "description":"A dataverse that's added for testing purposes." 
+} diff --git a/dataversedock/testdata/scripts/issues/3543/dv-peteDelete2.json b/dataversedock/testdata/scripts/issues/3543/dv-peteDelete2.json new file mode 100644 index 0000000..6703297 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/3543/dv-peteDelete2.json @@ -0,0 +1,8 @@ + { + "alias":"peteDelete2", + "name":"A dataverse for testing", + "affiliation":"Affiliation value", + "contactEmail":"pete@mailinator.com", + "permissionRoot":false, + "description":"A dataverse that's added for testing purposes." +} diff --git a/dataversedock/testdata/scripts/issues/3543/dv-peteDelete3.json b/dataversedock/testdata/scripts/issues/3543/dv-peteDelete3.json new file mode 100644 index 0000000..66d1126 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/3543/dv-peteDelete3.json @@ -0,0 +1,8 @@ + { + "alias":"peteDelete3", + "name":"A dataverse for testing", + "affiliation":"Affiliation value", + "contactEmail":"pete@mailinator.com", + "permissionRoot":false, + "description":"A dataverse that's added for testing purposes." +} diff --git a/dataversedock/testdata/scripts/issues/3543/dv-peteDeleteTop.json b/dataversedock/testdata/scripts/issues/3543/dv-peteDeleteTop.json new file mode 100644 index 0000000..9a9d184 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/3543/dv-peteDeleteTop.json @@ -0,0 +1,8 @@ + { + "alias":"peteDeleteTop", + "name":"A dataverse for testing", + "affiliation":"Affiliation value", + "contactEmail":"pete@mailinator.com", + "permissionRoot":false, + "description":"A dataverse that's added for testing purposes." 
+} diff --git a/dataversedock/testdata/scripts/issues/3543/setup.sh b/dataversedock/testdata/scripts/issues/3543/setup.sh new file mode 100755 index 0000000..6f3483b --- /dev/null +++ b/dataversedock/testdata/scripts/issues/3543/setup.sh @@ -0,0 +1,7 @@ +#!/bin/bash +echo Setting up dataverses for deletion, as described in https://redmine.hmdc.harvard.edu/issues/3543 + +curl -H"Content-type:application/json" -d @dv-peteDeleteTop.json http://localhost:8080/api/dataverses/peteTop?key=pete +curl -H"Content-type:application/json" -d @dv-peteDelete1.json http://localhost:8080/api/dataverses/peteDeleteTop?key=pete +curl -H"Content-type:application/json" -d @dv-peteDelete2.json http://localhost:8080/api/dataverses/peteDeleteTop?key=pete +curl -H"Content-type:application/json" -d @dv-peteDelete3.json http://localhost:8080/api/dataverses/peteDeleteTop?key=pete diff --git a/dataversedock/testdata/scripts/issues/3543/test.sh b/dataversedock/testdata/scripts/issues/3543/test.sh new file mode 100755 index 0000000..14061c7 --- /dev/null +++ b/dataversedock/testdata/scripts/issues/3543/test.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +curl -X DELETE http://localhost:8080/api/dataverses/peteDelete1?key=pete +curl -X DELETE http://localhost:8080/api/dataverses/peteDelete2?key=pete +curl -X DELETE http://localhost:8080/api/dataverses/peteDelete3?key=pete +curl -X DELETE http://localhost:8080/api/dataverses/peteDeleteTop?key=pete diff --git a/dataversedock/testdata/scripts/issues/3544/delete.sh b/dataversedock/testdata/scripts/issues/3544/delete.sh new file mode 100755 index 0000000..658d11d --- /dev/null +++ b/dataversedock/testdata/scripts/issues/3544/delete.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +# deleting an unreleased dataset, with a bunch of unreleased files. +# seems to be working like a charm - ? 
+# -- Leonid + +curl -X DELETE http://localhost:8080/api/datasets/43?key=pete + diff --git a/dataversedock/testdata/scripts/issues/796/builtin2shib b/dataversedock/testdata/scripts/issues/796/builtin2shib new file mode 100755 index 0000000..0cb6d3e --- /dev/null +++ b/dataversedock/testdata/scripts/issues/796/builtin2shib @@ -0,0 +1,10 @@ +#!/bin/sh +echo args: $@ +if [ -z "$1" ]; then + echo "call with foo:bar:baz" +else + OUTPUT=`curl -s -X PUT -d "$@" http://localhost:8080/api/test/user/convert/builtin2shib` + echo $OUTPUT + echo + echo $OUTPUT | jq . +fi diff --git a/dataversedock/testdata/scripts/issues/907/batchImportDv/version1.xml b/dataversedock/testdata/scripts/issues/907/batchImportDv/version1.xml new file mode 100644 index 0000000..2965bfb --- /dev/null +++ b/dataversedock/testdata/scripts/issues/907/batchImportDv/version1.xml @@ -0,0 +1,193 @@ + + + + + Black Professional Women, 1969 + hdl:1902.1/00012 + + + IQSS Test Dataverse Network + 2014-02-12 + + + 3 + Dwayne + + + 2 + Dwayne + Replaced by version 3 + 2014-02-13 + + + 1 + sonia, IQSSdvnAdmin + Initial version + Replaced by version 2 + 2013-09-20 + + Cynthia Fuchs Epstein, 1983, "Black Professional Women, 1969 ", http://hdl.handle.net/1902.1/00012 Murray Research Archive [Distributor] V1 [Version] + + + + + + + Black Professional Women, 1969 + hdl:1902.1/00012 + 00012 + + + Cynthia Fuchs Epstein + + + Cynthia Fuchs Epstein + + + Murray Research Archive + + + + 1983 + 1983 + + + 1 + + + + + Achievement + African American + Women + Professional + mra murraydiversity + 50 or fewer + female + mixed + African American + middle + 1 + yes + Women and education + Work + Women + + The purpose of this study was to explore the special conditions which enable African American professional women to create a self-image and achievement value system, the problems attendant to traditional female roles, and the reinforcing components of the work situation. 
The study was also conducted in order to test the findings of a similar study the researcher did with White women lawyers, also archived at the Murray Center (see Related Studies below).<br /> + <br /> In 1969, the researcher interviewed 35 African American women in the following professions: law, medicine, dentistry, university teaching, journalism, business, and social service administration. Nursing, social work, and teaching at other than the university level were excluded.<br /> + <br /> A structured, open-ended interview was employed. The questions included nature of work, clients, disadvantages of being a woman, disadvantages of being African American, relationships at work, professional associations, community organizations, education, demographics, family life, income, and dual-career conflict.<br /> + <br /> The Murray Archive holds additional analogue materials for this study: original record paper data for 35 women, including interviews and other descriptive materials. The Murray Archive also holds audiotaped interviews. If you would like to access these materials, please apply to use the data. + + 1969 + 1969 + United States + individuals + field study + + + + + quota sample + interview + + + + + Henry A. Murray Research Archive, Institute for Quantitative Social Sciences, Harvard University + Available + + + Submission of the following <a href= "http://www.murray.harvard.edu/application" target="_blank">Application For The Use Of Data</a> is required to access the data from this study. + I will use these data solely for the purposes stated in my application to use data, detailed in a written research proposal. I will honor all agreements and conditions made between the Contributor of the Data and the study participants, and between the Contributor of the Data and the Henry A. Murray Research Archive, Harvard University, as specified in the Memorandum of Agreement. + <a href="mailto:mra@help.hmdc.harvard.edu">Manager of Operations</a>, the Henry A. 
Murray Research Archive, Institute for Quantitative Social Sciences, 1737 Cambridge St, Cambridge, MA 02138, USA. + I will include a bibliographic citation acknowledging the use of these data in any publication or presentation in which these data are used. Such citations will appear in footnotes or in the reference section of any such manuscript. I understand the guideline in "How to Cite This Dataset" described in the Summary of this study. + Murray Research Archive will list my publication and manuscripts on the Archive website when I submit a bibliographic citation or title of the manuscript, and indicate the Henry A. Murray Research Archive data used. Doing this will also help Henry A. Murray Research Archive to provide funding agencies with essential information about use of archival resources, to fulfill requirements of some memoranda of agreement, and to promote the broader exchange of information about research activities. + The data are available without additional conditions other than those stated in the "Restrictions" Terms of Use above. + + <div style="padding-left: 30px;"> +<ul style="list-style-type: decimal;" ><li> + The Murray Archive (the Distributor) has granted me a revocable license to use this dataset solely for the purposes of conducting research, and the Distributor may terminate this license at any time and for any reason. +</li> +<li> + I will use the dataset solely for statistical analysis and reporting of aggregated information, and not for investigation of specific individuals or organizations, except when identification is authorized in writing by the Distributor. +</li> +<li>I will produce no links among the Distributor’s datasets or among the Distributor’s data and other datasets that could identify individuals or organizations. +</li> +<li>I represent that neither I, nor anyone I know, has any prior knowledge of the possible identities of any study participants in any dataset that I am being licensed to use. 
+</li> +<li> + I will not knowingly divulge any information that could be used to identify individual participants in the study, nor will I attempt to identify or contact any study participant, and I agree to use any precautions necessary to prevent such identification. +</li> +<li> + I will make no use of the identity of any person or establishment discovered inadvertently. If I suspect that I might recognize or know a study participant, I will immediately inform the Distributor, and I will not use or retain a copy of data regarding that study participant. If these measures to resolve an identity disclosure are not sufficient, the Distributor may terminate my use of the dataset. +</li> +<li> + I will not reproduce the dataset except as is necessary for my scholarly purposes. I will destroy the dataset upon the completion of my scholarly work with it. +</li> +<li> + I will not share data from the dataset (in any form or by any means) with any third party, including other members of my research team, as I understand that all users of data must obtain the data directly from the Distributor. +</li> +<li> + I will make appropriate acknowledgement of the contributor of the dataset as well as the Distributor in any manuscript or presentation (published or unpublished) using the citation standard documented here: <a href="http://thedata.org/citation"> http://thedata.org/citation</a> +</li> +<li> + THE DISTRIBUTOR MAKES NO WARRANTIES, EXPRESS OR IMPLIED, BY OPERATION OF LAW OR OTHERWISE, REGARDING OR RELATING TO THE DATASET. 
+</li> +</ul> +</div> + <b>IQSS Dataverse Network Terms and Conditions</b> + +<p>By downloading these Materials, I agree to the following:</p> + +<ol> +<li>I will not use the Materials to +<ol type="a"> +<li>obtain information that could directly or indirectly identify subjects.</li> +<li>produce links among the Distributor's datasets or among the Distributor's data and other datasets that could identify individuals or organizations.</li> +<li>obtain information about, or further contact with, subjects known to me except where the use and/or release of such identifying information has no potential for constituting an unwarranted invasion of privacy and/or breach of confidentiality.</li> +</ol> +</li> +<li>I agree not to download any Materials where prohibited by applicable law.</li> +<li>I agree not to use the Materials in any way prohibited by applicable law.</li> +<li>I agree that any books, articles, conference papers, theses, dissertations, reports, or other publications that I create which employ data reference the bibliographic citation accompanying this data. These citations include the data authors, data identifier, and other information accord with the Recommended Standard (<strong>http://thedata.org/citation/standard</strong>) for social science data.</li> +<li>THE DISTRIBUTOR MAKES NO WARRANTIES, EXPRESS OR IMPLIED, BY OPERATION OF LAW OR OTHERWISE, REGARDING OR RELATING TO THE DATASET</li> +</ol> + + + + Cynthia Fuchs Epstein, 1983, "Women in Law, 1965-1980", hdl:1902.1/00168 Murray Research Archive [Distributor]. <a href="http://dvn.iq.harvard.edu/dvn/dv/mra/faces/study/StudyPage.jsp?studyId=441" target= "_new">study available here</a> + + + + + 00012Epstein-Professional-Measures.pdf + Collection of blank measures used in the study + 1. Documentation + + + 00012Epstein-Professional-StudyDescription.pdf + Overview: abstract, research methodology, publications, and other info. + 1. 
Documentation + + + 00012Epstein-Professional-MeasuresForm.pdf + Usage guidelines relating to measures created by study investigators + 2. Supplemental Documentation + + + 00012Epstein-Professional-BoxCoverSheets.pdf + Describes contents of each box of a paper data set + 2. Supplemental Documentation + + + 00012Epstein-Professional-MemoOfAgreement.pdf + Legal agreement between data depositor and the Murray Archive + 3. Detailed Usage Terms + + + RM2562 Epstein + Box cover sheet for DVD's of .wavs from audio originals + 5.Other Resources + restricted + + \ No newline at end of file diff --git a/dataversedock/testdata/scripts/issues/guestbook/insert-guestbook-responses.sh b/dataversedock/testdata/scripts/issues/guestbook/insert-guestbook-responses.sh new file mode 100644 index 0000000..e1888db --- /dev/null +++ b/dataversedock/testdata/scripts/issues/guestbook/insert-guestbook-responses.sh @@ -0,0 +1,5 @@ +-- select * from guestbookresponse; +-- 150K would be a better test, see https://github.com/IQSS/dataverse/issues/3609#issuecomment-322559209 +--for i in {0..2000}; do psql dataverse_db -f scripts/issues/3845/insert-guestbook-responses.sh; done +-- id | downloadtype | email | institution | name | position | responsetime | sessionid | authenticateduser_id | datafile_id | dataset_id | datasetversion_id | guestbook_id +insert into guestbookresponse values (default, 1, null, null, null, null, null, null, null, 104, 103, null, 2); diff --git a/dataversedock/testdata/scripts/migration/HarvardCustomFields.csv b/dataversedock/testdata/scripts/migration/HarvardCustomFields.csv new file mode 100644 index 0000000..3e8abe0 --- /dev/null +++ b/dataversedock/testdata/scripts/migration/HarvardCustomFields.csv @@ -0,0 +1 @@ +,,Dataverse 4.0 Template Name,field_name,field_name Alliance for Research on Corporate Sustainability,1Wereanyofthesedatasetsapurchasedbobtainedthroughlicenseddatabasesorcprovidedbyanorganizationunderanondisclosureorotheragreement,ARCS1 Alliance for Research 
on Corporate Sustainability,2IfyourespondedYestoQ1haveyouensuredthatsharingthedatadoesnotviolatetermsoftheagreementIfyourespondedNotoQ1pleaseenterNAhere,ARCS2 Alliance for Research on Corporate Sustainability,3DoanyofthesedatasetsincludeindividualleveldataeithercollectedorpreexistinginthedatasetthatmightmakethemsubjecttoUSorinternationalhumansubjectsconsiderations,ARCS3 Alliance for Research on Corporate Sustainability,4IfyourespondedYestoQ3arethesedatasetstotallydeidentifiedorwassharingapprovedbyyourinstitutionalreviewboardIRBIfyourespondedNotoQ3pleaseenterNAhere,ARCS4 Alliance for Research on Corporate Sustainability,5DothesedatasetscontainsensitiveorpersonallyidentifiableprivateinformationHarvardResearchDataSecurityPolicywwwsecurityharvardeduresearchdatasecuritypolicymayapplybecausethisDataverseishostedbyHarvardUniversity,ARCS5 Multi-donor trust fund within the Human Development Network to carry out and support research evaluating the impact of programs to alleviate poverty.,1Wereanyofthesedatasetsapurchasedbobtainedthroughlicenseddatabasesorcprovidedbyanorganizationunderanondisclosureorotheragreement,ARCS1 Multi-donor trust fund within the Human Development Network to carry out and support research evaluating the impact of programs to alleviate poverty.,2IfyourespondedYestoQ1haveyouensuredthatsharingthedatadoesnotviolatetermsoftheagreementIfyourespondedNotoQ1pleaseenterNAhere,ARCS2 Multi-donor trust fund within the Human Development Network to carry out and support research evaluating the impact of programs to alleviate poverty.,3DoanyofthesedatasetsincludeindividualleveldataeithercollectedorpreexistinginthedatasetthatmightmakethemsubjecttoUSorinternationalhumansubjectsconsiderations,ARCS3 Multi-donor trust fund within the Human Development Network to carry out and support research evaluating the impact of programs to alleviate 
poverty.,4IfyourespondedYestoQ3arethesedatasetstotallydeidentifiedorwassharingapprovedbyyourinstitutionalreviewboardIRBIfyourespondedNotoQ3pleaseenterNAhere,ARCS4 Multi-donor trust fund within the Human Development Network to carry out and support research evaluating the impact of programs to alleviate poverty.,5DothesedatasetscontainsensitiveorpersonallyidentifiableprivateinformationHarvardResearchDataSecurityPolicywwwsecurityharvardeduresearchdatasecuritypolicymayapplybecausethisDataverseishostedbyHarvardUniversity,ARCS5 Project TIER,1Wereanyofthesedatasetsapurchasedbobtainedthroughlicenseddatabasesorcprovidedbyanorganizationunderanondisclosureorotheragreement,ARCS1 Project TIER,2IfyourespondedYestoQ1haveyouensuredthatsharingthedatadoesnotviolatetermsoftheagreementIfyourespondedNotoQ1pleaseenterNAhere,ARCS2 Project TIER,3DoanyofthesedatasetsincludeindividualleveldataeithercollectedorpreexistinginthedatasetthatmightmakethemsubjecttoUSorinternationalhumansubjectsconsiderations,ARCS3 Project TIER,4IfyourespondedYestoQ3arethesedatasetstotallydeidentifiedorwassharingapprovedbyyourinstitutionalreviewboardIRBIfyourespondedNotoQ3pleaseenterNAhere,ARCS4 Project TIER,5DothesedatasetscontainsensitiveorpersonallyidentifiableprivateinformationHarvardResearchDataSecurityPolicywwwsecurityharvardeduresearchdatasecuritypolicymayapplybecausethisDataverseishostedbyHarvardUniversity,ARCS5 GSD Studio Template,Accreditation,gsdAccreditation GSD Studio Template,City,city GSD Studio Template,CoreStudioCoordinator,gsdCoordinator GSD Studio Template,CountryNation,country GSD Studio Template,CourseName,gsdCourseName GSD Studio Template,DataUploadedBy,depositor GSD Studio Template,FacultyName,gsdFacultyName GSD Studio Template,FacultyRecommendation,gsdRecommendation GSD Studio Template,Notes,notesText GSD Studio Template,ProgramBrief,gsdProgramBrief GSD Studio Template,SemesterYear,gsdSemester GSD Studio Template,SiteType,gsdSiteType GSD Studio Template,StateProvince,state GSD Studio 
Template,StudentName,gsdStudentName GSD Studio Template,StudentNameFirstName,#IGNORE GSD Studio Template,StudentNameLastName,#IGNORE GSD Studio Template,StudentsProgramofStudy,gsdStudentProgram GSD Studio Template,Tags,gsdTags GSD Studio Template,TypesofRepresentationMediumFormat,gsdTypes GSD Studio Template Fall 2013 - OLD_OLD_OLD,City,city GSD Studio Template Fall 2013 - OLD_OLD_OLD,CoreStudioCoordinator,gsdCoordinator GSD Studio Template Fall 2013 - OLD_OLD_OLD,CountryNation,country GSD Studio Template Fall 2013 - OLD_OLD_OLD,CourseName,gsdCourseName GSD Studio Template Fall 2013 - OLD_OLD_OLD,DataUploadedBy,depositor GSD Studio Template Fall 2013 - OLD_OLD_OLD,FacultyName,gsdFacultyName GSD Studio Template Fall 2013 - OLD_OLD_OLD,FacultyRecommendation,gsdRecommendation GSD Studio Template Fall 2013 - OLD_OLD_OLD,Notes,notesText GSD Studio Template Fall 2013 - OLD_OLD_OLD,ProgramBrief,gsdProgramBrief GSD Studio Template Fall 2013 - OLD_OLD_OLD,SemesterYear,gsdSemester GSD Studio Template Fall 2013 - OLD_OLD_OLD,SiteType,gsdSiteType GSD Studio Template Fall 2013 - OLD_OLD_OLD,StateProvince,state GSD Studio Template Fall 2013 - OLD_OLD_OLD,StudentName,gsdStudentName GSD Studio Template Fall 2013 - OLD_OLD_OLD,StudentsProgramofStudy,gsdStudentProgram GSD Studio Template Fall 2013 - OLD_OLD_OLD,Tags,gsdTags GSD Studio Template Fall 2013 - OLD_OLD_OLD,TypesofRepresentationMediumFormat,gsdTypes GSD Research Fall 2013,City,city GSD Research Fall 2013,CoreStudioCoordinator,gsdCoordinator GSD Research Fall 2013,CountryNation,country GSD Research Fall 2013,CourseName,gsdCourseName GSD Research Fall 2013,DataUploadedBy,depositor GSD Research Fall 2013,FacultyName,gsdFacultyName GSD Research Fall 2013,FacultyRecommendation,gsdRecommendation GSD Research Fall 2013,Notes,notesText GSD Research Fall 2013,ProgramBrief,gsdProgramBrief GSD Research Fall 2013,SemesterYear,gsdSemester GSD Research Fall 2013,SiteType,gsdSiteType GSD Research Fall 2013,StateProvince,state GSD 
Research Fall 2013,StudentName,gsdStudentName GSD Research Fall 2013,StudentsProgramofStudy,gsdStudentProgram GSD Research Fall 2013,Tags,gsdTags GSD Research Fall 2013,TypesofRepresentationMediumFormat,gsdTypes GSD Studio Template Fall 2013-OLD_OLD,City,city GSD Studio Template Fall 2013-OLD_OLD,CoreStudioCoordinator,gsdCoordinator GSD Studio Template Fall 2013-OLD_OLD,CountryNation,country GSD Studio Template Fall 2013-OLD_OLD,CourseName,gsdCourseName GSD Studio Template Fall 2013-OLD_OLD,DataUploadedBy,depositor GSD Studio Template Fall 2013-OLD_OLD,FacultyName,gsdFacultyName GSD Studio Template Fall 2013-OLD_OLD,FacultyRecommendation,gsdRecommendation GSD Studio Template Fall 2013-OLD_OLD,Notes,notesText GSD Studio Template Fall 2013-OLD_OLD,ProgramBrief,gsdProgramBrief GSD Studio Template Fall 2013-OLD_OLD,SemesterYear,gsdSemester GSD Studio Template Fall 2013-OLD_OLD,SiteType,gsdSiteType GSD Studio Template Fall 2013-OLD_OLD,StateProvince,state GSD Studio Template Fall 2013-OLD_OLD,StudentName,gsdStudentName GSD Studio Template Fall 2013-OLD_OLD,StudentNameFirstName,#IGNORE GSD Studio Template Fall 2013-OLD_OLD,StudentNameLastName,#IGNORE GSD Studio Template Fall 2013-OLD_OLD,StudentsProgramofStudy,gsdStudentProgram GSD Studio Template Fall 2013-OLD_OLD,Tags,gsdTags GSD Studio Template Fall 2013-OLD_OLD,TypesofRepresentationMediumFormat,gsdTypes GSD Studio Template Fall 2013_OLD,City,city GSD Studio Template Fall 2013_OLD,CoreStudioCoordinator,gsdCoordinator GSD Studio Template Fall 2013_OLD,CountryNation,country GSD Studio Template Fall 2013_OLD,CourseName,gsdCourseName GSD Studio Template Fall 2013_OLD,DataUploadedBy,depositor GSD Studio Template Fall 2013_OLD,FacultyName,gsdFacultyName GSD Studio Template Fall 2013_OLD,FacultyRecommendation,gsdRecommendation GSD Studio Template Fall 2013_OLD,Notes,notesText GSD Studio Template Fall 2013_OLD,ProgramBrief,gsdProgramBrief GSD Studio Template Fall 2013_OLD,SemesterYear,gsdSemester GSD Studio Template Fall 
2013_OLD,SiteType,gsdSiteType GSD Studio Template Fall 2013_OLD,StateProvince,state GSD Studio Template Fall 2013_OLD,StudentName,gsdStudentName GSD Studio Template Fall 2013_OLD,StudentNameFirstName,#IGNORE GSD Studio Template Fall 2013_OLD,StudentNameLastName,#IGNORE GSD Studio Template Fall 2013_OLD,StudentsProgramofStudy,gsdStudentProgram GSD Studio Template Fall 2013_OLD,Tags,gsdTags GSD Studio Template Fall 2013_OLD,TypesofRepresentationMediumFormat,gsdTypes Syllabus Template,City,city Syllabus Template,CoreStudioCoordinator,gsdCoordinator Syllabus Template,CountryNation,country Syllabus Template,CourseName,gsdCourseName Syllabus Template,DataUploadedBy,depositor Syllabus Template,FacultyName,gsdFacultyName Syllabus Template,FacultyRecommendation,gsdRecommendation Syllabus Template,Notes,notesText Syllabus Template,ProgramBrief,gsdProgramBrief Syllabus Template,SemesterYear,gsdSemester Syllabus Template,SiteType,gsdSiteType Syllabus Template,StateProvince,state Syllabus Template,StudentName,gsdStudentName Syllabus Template,StudentNameFirstName,#IGNORE Syllabus Template,StudentNameLastName,#IGNORE Syllabus Template,StudentsProgramofStudy,gsdStudentProgram Syllabus Template,Tags,gsdTags Syllabus Template,TypesofRepresentationMediumFormat,gsdTypes GSD Studio Template Fall 2013_OLD_Dec12,Accreditation,gsdAccreditation GSD Studio Template Fall 2013_OLD_Dec12,City,city GSD Studio Template Fall 2013_OLD_Dec12,CoreStudioCoordinator,gsdCoordinator GSD Studio Template Fall 2013_OLD_Dec12,CountryNation,country GSD Studio Template Fall 2013_OLD_Dec12,CourseName,gsdCourseName GSD Studio Template Fall 2013_OLD_Dec12,DataUploadedBy,depositor GSD Studio Template Fall 2013_OLD_Dec12,FacultyName,gsdFacultyName GSD Studio Template Fall 2013_OLD_Dec12,FacultyRecommendation,gsdRecommendation GSD Studio Template Fall 2013_OLD_Dec12,Notes,notesText GSD Studio Template Fall 2013_OLD_Dec12,ProgramBrief,gsdProgramBrief GSD Studio Template Fall 2013_OLD_Dec12,SemesterYear,gsdSemester 
GSD Studio Template Fall 2013_OLD_Dec12,SiteType,gsdSiteType GSD Studio Template Fall 2013_OLD_Dec12,StateProvince,state GSD Studio Template Fall 2013_OLD_Dec12,StudentName,gsdStudentName GSD Studio Template Fall 2013_OLD_Dec12,StudentNameFirstName,#IGNORE GSD Studio Template Fall 2013_OLD_Dec12,StudentNameLastName,#IGNORE GSD Studio Template Fall 2013_OLD_Dec12,StudentsProgramofStudy,gsdStudentProgram GSD Studio Template Fall 2013_OLD_Dec12,Tags,gsdTags GSD Studio Template Fall 2013_OLD_Dec12,TypesofRepresentationMediumFormat,gsdTypes GSD Studio Template Fall 2013,Accreditation,gsdAccreditation GSD Studio Template Fall 2013,City,city GSD Studio Template Fall 2013,CoreStudioCoordinator,gsdCoordinator GSD Studio Template Fall 2013,CountryNation,country GSD Studio Template Fall 2013,CourseName,gsdCourseName GSD Studio Template Fall 2013,DataUploadedBy,depositor GSD Studio Template Fall 2013,FacultyName,gsdFacultyName GSD Studio Template Fall 2013,FacultyRecommendation,gsdRecommendation GSD Studio Template Fall 2013,Notes,notesText GSD Studio Template Fall 2013,ProgramBrief,gsdProgramBrief GSD Studio Template Fall 2013,SemesterYear,gsdSemester GSD Studio Template Fall 2013,SiteType,gsdSiteType GSD Studio Template Fall 2013,StateProvince,state GSD Studio Template Fall 2013,StudentName,gsdStudentName GSD Studio Template Fall 2013,StudentNameFirstName,#IGNORE GSD Studio Template Fall 2013,StudentNameLastName,#IGNORE GSD Studio Template Fall 2013,StudentsProgramofStudy,gsdStudentProgram GSD Studio Template Fall 2013,Tags,gsdTags GSD Studio Template Fall 2013,TypesofRepresentationMediumFormat,gsdTypes GSSP Student Data,Accreditation,gsdAccreditation GSSP Student Data,City,city GSSP Student Data,CoreStudioCoordinator,gsdCoordinator GSSP Student Data,CountryNation,country GSSP Student Data,CourseName,gsdCourseName GSSP Student Data,DataUploadedBy,depositor GSSP Student Data,FacultyName,gsdFacultyName GSSP Student Data,FacultyRecommendation,gsdRecommendation GSSP Student 
Data,Notes,notesText GSSP Student Data,ProgramBrief,gsdProgramBrief GSSP Student Data,SemesterYear,gsdSemester GSSP Student Data,SiteType,gsdSiteType GSSP Student Data,StateProvince,state GSSP Student Data,StudentName,gsdStudentName GSSP Student Data,StudentNameFirstName,#IGNORE GSSP Student Data,StudentNameLastName,#IGNORE GSSP Student Data,StudentsProgramofStudy,gsdStudentProgram GSSP Student Data,Tags,gsdTags GSSP Student Data,TypesofRepresentationMediumFormat,gsdTypes Student Data,Accreditation,gsdAccreditation Student Data,City,city Student Data,CoreStudioCoordinator,gsdCoordinator Student Data,CountryNation,country Student Data,CourseName,gsdCourseName Student Data,DataUploadedBy,depositor Student Data,FacultyName,gsdFacultyName Student Data,FacultyRecommendation,gsdRecommendation Student Data,Notes,notesText Student Data,ProgramBrief,gsdProgramBrief Student Data,SemesterYear,gsdSemester Student Data,SiteType,gsdSiteType Student Data,StateProvince,state Student Data,StudentName,gsdStudentName Student Data,StudentNameFirstName,#IGNORE Student Data,StudentNameLastName,#IGNORE Student Data,StudentsProgramofStudy,gsdStudentProgram Student Data,Tags,gsdTags Student Data,TypesofRepresentationMediumFormat,gsdTypes Political Science Replication Initiative Draft Template,Aretheoriginaldatapubliclyavailable,PSRI1 Political Science Replication Initiative Draft Template,Didanotherstudentattempttoreplicatethereplicationintheclass,PSRI9 Political Science Replication Initiative Draft Template,Didanotherstudentreplicatethisreplicationsuccessfully,PSRI10 Political Science Replication Initiative Draft Template,Didaprofessorreadreviewadraftbeforethefinalversion,PSRI11 Political Science Replication Initiative Draft Template,DidyousendthereplicationmaterialstotheoriginalauthorsandnotifythemthatyoudbepostingyourreplicationonPSRI,PSRI7 Political Science Replication Initiative Draft Template,Istheoriginalcodeavailable,PSRI2 Political Science Replication Initiative Draft 
Template,WasthereplicationdoneinacourseIfsopleasecontinuetoanswerthesubsequentquestionsandifnotselectNA,PSRI8 Political Science Replication Initiative Draft Template,Wherearetheoriginaldataarchivednameandurl,PSRI3 Political Science Replication Initiative Draft Template,Whereistheoriginalcodepubliclyarchivednameandurl,PSRI4 Political Science Replication Initiative Draft Template,WillyousubmityourreplicationcodetothisDataverseThisisaPSRIrequirement,PSRI5 Political Science Replication Initiative Draft Template,WillyousubmityourreplicationwriteuptothisDataverseThisisaPSRIrequirement,PSRI6 PSRI dataverse,Aretheoriginaldatapubliclyavailable,PSRI1 PSRI dataverse,Didanotherstudentattempttoreplicatethereplication,PSRI9 PSRI dataverse,Didanotherstudentreplicatethisreplicationsuccessfully,PSRI10 PSRI dataverse,Didaprofessorreadreviewadraftbeforethefinalversion,PSRI11 PSRI dataverse,DidyousendthereplicationmaterialstotheoriginalauthorsandnotifythemthatyoudbepostingyourreplicationonPSRI,PSRI7 PSRI dataverse,Istheoriginalcodeavailable,PSRI2 PSRI dataverse,WasthereplicationdoneinacourseIfsopleasecontinuetoanswerthesubsequentquestionsandifnotselectNA,PSRI8 PSRI dataverse,Wherearetheoriginaldataarchivednameandurl,PSRI3 PSRI dataverse,Whereistheoriginalcodepubliclyarchivednameandurl,PSRI4 PSRI dataverse,WillyousubmityourreplicationcodetothisDataverseThisisaPSRIrequirement,PSRI5 PSRI dataverse,WillyousubmityourreplicationwriteuptothisDataverseThisisaPSRIrequirement,PSRI6 GSD 2013 2014,Accreditation,gsdAccreditation GSD 2013 2014,City,city GSD 2013 2014,CoreStudioCoordinator,gsdCoordinator GSD 2013 2014,CountryNation,country GSD 2013 2014,CourseName,gsdCourseName GSD 2013 2014,DataUploadedBy,depositor GSD 2013 2014,FacultyName,gsdFacultyName GSD 2013 2014,FacultyRecommendation,gsdRecommendation GSD 2013 2014,Notes,notesText GSD 2013 2014,ProgramBrief,gsdProgramBrief GSD 2013 2014,SemesterYear,gsdSemester GSD 2013 2014,SiteType,gsdSiteType GSD 2013 2014,StateProvince,state GSD 2013 
2014,StudentName,gsdStudentName GSD 2013 2014,StudentNameFirstName,#IGNORE GSD 2013 2014,StudentNameLastName,#IGNORE GSD 2013 2014,StudentsProgramofStudy,gsdStudentProgram GSD 2013 2014,Tags,gsdTags GSD 2013 2014,TypesofRepresentationMediumFormat,gsdTypes Muthyamfirst,1Wereanyofthesedatasetsapurchasedbobtainedthroughlicenseddatabasesorcprovidedbyanorganizationunderanondisclosureorotheragreement,ARCS1 Muthyamfirst,2IfyourespondedYestoQ1haveyouensuredthatsharingthedatadoesnotviolatetermsoftheagreementIfyourespondedNotoQ1pleaseenterNAhere,ARCS2 Muthyamfirst,3DoanyofthesedatasetsincludeindividualleveldataeithercollectedorpreexistinginthedatasetthatmightmakethemsubjecttoUSorinternationalhumansubjectsconsiderations,ARCS3 Muthyamfirst,4IfyourespondedYestoQ3arethesedatasetstotallydeidentifiedorwassharingapprovedbyyourinstitutionalreviewboardIRBIfyourespondedNotoQ3pleaseenterNAhere,ARCS4 Muthyamfirst,5DothesedatasetscontainsensitiveorpersonallyidentifiableprivateinformationHarvardResearchDataSecurityPolicywwwsecurityharvardeduresearchdatasecuritypolicymayapplybecausethisDataverseishostedbyHarvardUniversity,ARCS5 PSRI Dataverse Template v2,Aretheoriginaldatapubliclyavailable,PSRI1 PSRI Dataverse Template v2,Didanotherstudentattempttoreplicatethereplicationintheclass,PSRI9 PSRI Dataverse Template v2,Didanotherstudentreplicatethisreplicationsuccessfully,PSRI10 PSRI Dataverse Template v2,Didaprofessorreadreviewadraftbeforethefinalversion,PSRI11 PSRI Dataverse Template v2,DidyousendthereplicationmaterialstotheoriginalauthorsandnotifythemthatyoudbepostingyourreplicationonPSRI,PSRI7 PSRI Dataverse Template v2,Istheoriginalcodeavailable,PSRI2 PSRI Dataverse Template v2,WasthereplicationdoneinacourseIfsopleasecontinuetoanswerthesubsequentquestionsandifnotselectNA,PSRI8 PSRI Dataverse Template v2,Wherearetheoriginaldataarchivednameandurl,PSRI3 PSRI Dataverse Template v2,Whereistheoriginalcodepubliclyarchivednameandurl,PSRI4 PSRI Dataverse Template 
v2,WillyousubmityourreplicationcodetothisDataverseThisisaPSRIrequirement,PSRI5 PSRI Dataverse Template v2,WillyousubmityourreplicationwriteuptothisDataverseThisisaPSRIrequirement,PSRI6 Local Monograph Template,Aretheoriginaldatapubliclyavailable,PSRI1 Local Monograph Template,Didanotherstudentattempttoreplicatethereplicationintheclass,PSRI9 Local Monograph Template,Didanotherstudentreplicatethisreplicationsuccessfully,PSRI10 Local Monograph Template,Didaprofessorreadreviewadraftbeforethefinalversion,PSRI11 Local Monograph Template,DidyousendthereplicationmaterialstotheoriginalauthorsandnotifythemthatyoudbepostingyourreplicationonPSRI,PSRI7 Local Monograph Template,Istheoriginalcodeavailable,PSRI2 Local Monograph Template,WasthereplicationdoneinacourseIfsopleasecontinuetoanswerthesubsequentquestionsandifnotselectNA,PSRI8 Local Monograph Template,Wherearetheoriginaldataarchivednameandurl,PSRI3 Local Monograph Template,Whereistheoriginalcodepubliclyarchivednameandurl,PSRI4 Local Monograph Template,WillyousubmityourreplicationcodetothisDataverseThisisaPSRIrequirement,PSRI5 Local Monograph Template,WillyousubmityourreplicationwriteuptothisDataverseThisisaPSRIrequirement,PSRI6 Journal of Human Rights (JHR),Aretheoriginaldatapubliclyavailable,PSRI1 Journal of Human Rights (JHR),Didanotherstudentattempttoreplicatethereplicationintheclass,PSRI9 Journal of Human Rights (JHR),Didanotherstudentreplicatethisreplicationsuccessfully,PSRI10 Journal of Human Rights (JHR),Didaprofessorreadreviewadraftbeforethefinalversion,PSRI11 Journal of Human Rights (JHR),DidyousendthereplicationmaterialstotheoriginalauthorsandnotifythemthatyoudbepostingyourreplicationonPSRI,PSRI7 Journal of Human Rights (JHR),Istheoriginalcodeavailable,PSRI2 Journal of Human Rights (JHR),WasthereplicationdoneinacourseIfsopleasecontinuetoanswerthesubsequentquestionsandifnotselectNA,PSRI8 Journal of Human Rights (JHR),Wherearetheoriginaldataarchivednameandurl,PSRI3 Journal of Human Rights 
(JHR),Whereistheoriginalcodepubliclyarchivednameandurl,PSRI4 Journal of Human Rights (JHR),WillyousubmityourreplicationcodetothisDataverseThisisaPSRIrequirement,PSRI5 Journal of Human Rights (JHR),WillyousubmityourreplicationwriteuptothisDataverseThisisaPSRIrequirement,PSRI6 GSD Fall 2014,Accreditation,gsdAccreditation GSD Fall 2014,City,city GSD Fall 2014,CoreStudioCoordinator,gsdCoordinator GSD Fall 2014,CountryNation,country GSD Fall 2014,CourseName,gsdCourseName GSD Fall 2014,DataUploadedBy,depositor GSD Fall 2014,FacultyName,gsdFacultyName GSD Fall 2014,FacultyRecommendation,gsdRecommendation GSD Fall 2014,Notes,notesText GSD Fall 2014,ProgramBrief,gsdProgramBrief GSD Fall 2014,SemesterYear,gsdSemester GSD Fall 2014,SiteType,gsdSiteType GSD Fall 2014,StateProvince,state GSD Fall 2014,StudentName,gsdStudentName GSD Fall 2014,StudentNameFirstName,#IGNORE GSD Fall 2014,StudentNameLastName,#IGNORE GSD Fall 2014,StudentsProgramofStudy,gsdStudentProgram GSD Fall 2014,Tags,gsdTags GSD Fall 2014,TypesofRepresentationMediumFormat,gsdTypes GSD_Fall2014_Platform,Accreditation,gsdAccreditation GSD_Fall2014_Platform,City,city GSD_Fall2014_Platform,CoreStudioCoordinator,gsdCoordinator GSD_Fall2014_Platform,CountryNation,country GSD_Fall2014_Platform,CourseName,gsdCourseName GSD_Fall2014_Platform,DataUploadedBy,depositor GSD_Fall2014_Platform,FacultyName,gsdFacultyName GSD_Fall2014_Platform,FacultyRecommendation,gsdRecommendation GSD_Fall2014_Platform,Notes,notesText GSD_Fall2014_Platform,ProgramBrief,gsdProgramBrief GSD_Fall2014_Platform,SemesterYear,gsdSemester GSD_Fall2014_Platform,SiteType,gsdSiteType GSD_Fall2014_Platform,StateProvince,state GSD_Fall2014_Platform,StudentName,gsdStudentName GSD_Fall2014_Platform,StudentNameFirstName,#IGNORE GSD_Fall2014_Platform,StudentNameLastName,#IGNORE GSD_Fall2014_Platform,StudentsProgramofStudy,gsdStudentProgram GSD_Fall2014_Platform,Tags,gsdTags GSD_Fall2014_Platform,TypesofRepresentationMediumFormat,gsdTypes Digaai Dataverse 
Jornais e Revistas,DatadePublicao,datadePublicao Digaai Dataverse Jornais e Revistas,LocaldePublicao,localdePublicao Digaai Dataverse Jornais e Revistas,Nmero,numero Digaai Dataverse Jornais e Revistas,Proprietrio,proprietrio Digaai Dataverse Jornais e Revistas,Ttulo,titulo FaceBrasil,DatadePublicao,datadePublicao FaceBrasil,LocaldePublicao,localdePublicao FaceBrasil,Nmero,numero FaceBrasil,Proprietrio,proprietrio FaceBrasil,Ttulo,titulo FaceBrasil 32,DatadePublicao,datadePublicao FaceBrasil 32,LocaldePublicao,localdePublicao FaceBrasil 32,Nmero,numero FaceBrasil 32,Proprietrio,proprietrio FaceBrasil 32,Ttulo,titulo CHIA World Historical Default Template,ClassificationSchema,classificationSchemaCHIA CHIA World Historical Default Template,Contributor,contributorName CHIA World Historical Default Template,DatesAdditionalInformation,datesAdditionalInformationCHIA CHIA World Historical Default Template,GeographicCoverageAdditionalInformation,otherGeographicCoverage CHIA World Historical Default Template,Language,language CHIA World Historical Default Template,Provenance,provenanceCHIA CHIA World Historical Default Template,RightsAvailability,rightsAvailabilityCHIA CHIA World Historical Default Template,Source,sourceCHIA CHIA World Historical Default Template,Variables,variablesCHIA Italian Political Science Review,Aretheoriginaldatapubliclyavailable,PSRI1 Italian Political Science Review,Didanotherstudentattempttoreplicatethereplicationintheclass,PSRI9 Italian Political Science Review,Didanotherstudentreplicatethisreplicationsuccessfully,PSRI10 Italian Political Science Review,Didaprofessorreadreviewadraftbeforethefinalversion,PSRI11 Italian Political Science Review,DidyousendthereplicationmaterialstotheoriginalauthorsandnotifythemthatyoudbepostingyourreplicationonPSRI,PSRI7 Italian Political Science Review,Istheoriginalcodeavailable,PSRI2 Italian Political Science 
Review,WasthereplicationdoneinacourseIfsopleasecontinuetoanswerthesubsequentquestionsandifnotselectNA,PSRI8 Italian Political Science Review,Wherearetheoriginaldataarchivednameandurl,PSRI3 Italian Political Science Review,Whereistheoriginalcodepubliclyarchivednameandurl,PSRI4 Italian Political Science Review,WillyousubmityourreplicationcodetothisDataverseThisisaPSRIrequirement,PSRI5 Italian Political Science Review,WillyousubmityourreplicationwriteuptothisDataverseThisisaPSRIrequirement,PSRI6 \ No newline at end of file diff --git a/dataversedock/testdata/scripts/migration/HarvardPreMigrationDataScrub.sql b/dataversedock/testdata/scripts/migration/HarvardPreMigrationDataScrub.sql new file mode 100644 index 0000000..af6b31b --- /dev/null +++ b/dataversedock/testdata/scripts/migration/HarvardPreMigrationDataScrub.sql @@ -0,0 +1,32 @@ +select m.id, m.TimePeriodCoveredEnd, v.study_id from metadata m, studyversion v where v.study_id = 121855 and m.id = v.metadata_id and TimePeriodCoveredEnd = '[17820000]'; +select m.id, m.DistributionDate, v.study_id from metadata m, studyversion v where v.study_id = 117326 and m.id = v.metadata_id and DistributionDate = '2O14'; +select m.id, a.date, v.study_id from metadata m, studyversion v, studyabstract a where v.study_id=47799 and m.id=v.metadata_id and m.id=a.metadata_id and a.date='201-'; +select m.id, m.TimePeriodCoveredEnd, v.study_id from metadata m, studyversion v where v.study_id = 88283 and m.id = v.metadata_id and TimePeriodCoveredEnd = '198x'; +select m.id, m.TimePeriodCoveredStart, v.study_id from metadata m, studyversion v where v.study_id = 215 and m.id = v.metadata_id and TimePeriodCoveredStart = '70s'; --should return 3 records +select m.id, a.date, v.study_id from metadata m, studyversion v, studyabstract a where v.study_id=91709 and m.id=v.metadata_id and m.id=a.metadata_id and a.date='2-13'; --should return 3 records +select m.id, a.date, v.study_id from metadata m, studyversion v, studyabstract a where 
v.study_id=114372 and m.id=v.metadata_id and m.id=a.metadata_id and a.date='2-14'; +select m.id, m.DateOfCollectionStart, m.DateOfCollectionEnd, v.study_id from metadata m, studyversion v where v.study_id = 155 and m.id = v.metadata_id and DateOfCollectionStart = '2004-01-01 to 2004-12-31' and m.DateOfCollectionEnd = '' ; -- should return 10 records + +update metadata set TimePeriodCoveredEnd = '1782' from studyversion v where v.study_id = 121855 and metadata.id = v.metadata_id and TimePeriodCoveredEnd = '[17820000]'; +update metadata set DistributionDate = '2014' from studyversion v where v.study_id = 117326 and metadata.id = v.metadata_id and DistributionDate = '2O14'; +update studyabstract set date = '2010' from metadata m, studyversion v where v.study_id=47799 and m.id=v.metadata_id and m.id=studyabstract.metadata_id and studyabstract.date='201-'; +update metadata set TimePeriodCoveredEnd = '198?' from studyversion v where v.study_id = 88283 and metadata.id = v.metadata_id and TimePeriodCoveredEnd = '198x'; +update metadata set TimePeriodCoveredStart = '197?' 
from studyversion v where v.study_id = 215 and metadata.id = v.metadata_id and TimePeriodCoveredStart = '70s'; --should update 3 records +update studyabstract set date = '2014' from metadata m, studyversion v where v.study_id=114372 and m.id=v.metadata_id and m.id=studyabstract.metadata_id and studyabstract.date='2-14'; +update studyabstract set date = '2013' from metadata m, studyversion v where v.study_id=91709 and m.id=v.metadata_id and m.id=studyabstract.metadata_id and studyabstract.date='2-13'; --should update 3 records +update metadata set DateOfCollectionStart = '2004-01-01', DateOfCollectionEnd = '2004-12-31' from studyversion v where v.study_id = 155 and metadata.id = v.metadata_id and DateOfCollectionStart = '2004-01-01 to 2004-12-31' and DateOfCollectionEnd = ''; -- should update 10 records + + +update studyfieldvalue set strvalue='English' where metadata_id=273999 and studyfield_id=218 and strValue='English and Dutch'; +insert into studyfieldvalue (strvalue, metadata_id, studyfield_id, displayorder) values ('Dutch', 273999,218,1); + +--Added for datasets with multiple failues 3/30 +select m.id, m.TimePeriodCoveredStart, v.study_id from metadata m, studyversion v where v.study_id = 88283 and m.id = v.metadata_id and TimePeriodCoveredStart = '198x'; +select m.id, m.TimePeriodCoveredStart, v.study_id from metadata m, studyversion v where v.study_id = 121855 and m.id = v.metadata_id and TimePeriodCoveredStart = '[17820000]'; +select m.id, m.ProductionDate, v.study_id from metadata m, studyversion v where v.study_id = 121855 and m.id = v.metadata_id and ProductionDate = '[17820000]'; +select m.id, m.dateofdeposit, v.study_id from metadata m, studyversion v where v.study_id = 74738 and m.id = v.metadata_id and dateofdeposit = '\'; + +update metadata set TimePeriodCoveredStart = '198?' 
from studyversion v where v.study_id = 88283 and metadata.id = v.metadata_id and TimePeriodCoveredStart = '198x'; +update metadata set ProductionDate = '1782' from studyversion v where v.study_id = 121855 and metadata.id = v.metadata_id and ProductionDate = '[17820000]'; +update metadata set TimePeriodCoveredStart = '1782' from studyversion v where v.study_id = 121855 and metadata.id = v.metadata_id and TimePeriodCoveredStart = '[17820000]'; +update metadata set dateofdeposit = '' from studyversion v where v.study_id = 74738 and metadata.id = v.metadata_id and dateofdeposit = '\'; diff --git a/dataversedock/testdata/scripts/migration/custom_field_map.sql b/dataversedock/testdata/scripts/migration/custom_field_map.sql new file mode 100644 index 0000000..515c035 --- /dev/null +++ b/dataversedock/testdata/scripts/migration/custom_field_map.sql @@ -0,0 +1,6 @@ + +delete from customfieldmap; + +COPY customfieldmap( sourcetemplate, sourcedatasetfield, targetdatasetfield) FROM '/scripts/migration/HarvardCustomFields.csv' DELIMITER ',' CSV HEADER; + + diff --git a/dataversedock/testdata/scripts/migration/datafile_pub_date.sql b/dataversedock/testdata/scripts/migration/datafile_pub_date.sql new file mode 100644 index 0000000..eb669a9 --- /dev/null +++ b/dataversedock/testdata/scripts/migration/datafile_pub_date.sql @@ -0,0 +1,12 @@ +UPDATE dvobject +SET publicationdate = x.releasetime +FROM (SELECT f.id, f.filesystemname, min(v.releasetime) as releasetime +FROM datafile f, dvobject d, datasetversion v, filemetadata m +WHERE f.id = d.id +AND d.publicationdate IS null +AND m.datafile_id = f.id +AND m.datasetversion_id = v.id +AND v.versionstate = 'RELEASED' +-- AND (NOT f.filesystemname IS null AND NOT f.filesystemname LIKE 'http%') +GROUP BY f.id, f.filesystemname) x WHERE x.id = dvobject.id; + diff --git a/dataversedock/testdata/scripts/migration/files_destination_step1_ b/dataversedock/testdata/scripts/migration/files_destination_step1_ new file mode 100755 index 
0000000..d8147f6 --- /dev/null +++ b/dataversedock/testdata/scripts/migration/files_destination_step1_ @@ -0,0 +1,36 @@ +#!/usr/bin/perl + +use DBI; + +my $host = "localhost"; +my $username = "xxxxx"; +my $password = 'xxxxx'; +my $database = "xxxxx"; + +my $dbh = DBI->connect("DBI:Pg:dbname=$database;host=$host",$username,$password); +my $sth = $dbh->prepare(qq {SELECT d.protocol, d.authority, d.identifier, d.id, v.id, v.versionnumber FROM dataset d, datasetversion v WHERE v.dataset_id = d.id ORDER BY d.id, v.versionnumber}); +$sth->execute(); + +my $offset= 0; + +while ( @_ = $sth->fetchrow() ) +{ + $protocol = $_[0]; + $authority = $_[1]; + $identifier = $_[2]; + $id = $_[3]; + $vid = $_[4]; + $vnum = $_[5]; + + print $protocol . ":" . $authority . "/" . $identifier . "\t" . $id . "\t" . $vid . "\t" . $vnum . "\n"; + + $offset = $id; +} + +$sth->finish; +$dbh->disconnect; + +print STDERR "last ID in DVOBJECT table: " . $offset . "\n"; + +exit 0; + diff --git a/dataversedock/testdata/scripts/migration/files_source_ b/dataversedock/testdata/scripts/migration/files_source_ new file mode 100755 index 0000000..ad8b7ce --- /dev/null +++ b/dataversedock/testdata/scripts/migration/files_source_ @@ -0,0 +1,624 @@ +#!/usr/bin/perl + +my $host = "localhost"; +my $username = "xxxxx"; +my $database = "xxxxx"; +my $password = "xxxxx"; + +my $dvobjectoffset = shift @ARGV; +my $filecatoffset = shift @ARGV; + +unless ($dvobjectoffset > 0) +{ + print STDERR "Usage: ./files_source_ \n"; + exit 1; +} + +unless ($filecatoffset) +{ + print STDERR "WARNING! 
file category offset is set to ZERO.\n"; +} + +my $filecatid = $filecatoffset; # file categories (this is a new object in 4.0, so there are no 3.6 IDs to reuse) + +use DBI; + +my $dbh = DBI->connect("DBI:Pg:dbname=$database;host=$host",$username,$password); + +open PL, ">packlist.txt"; + +%STUDYMAP = {}; +%STUDYFILEMAP = {}; +%VERSIONMAP = {}; + +while ( <> ) +{ + chop; + my ($globalid, $dsid, $dsvid, $dsvnum) = split("\t", $_); + $STUDYMAP{$globalid} = $dsid; + $VERSIONMAP{$globalid . "+++" . $dsvnum} = $dsid . "-" . $dsvid; + + %FILECATEGORIES = {}; # file categories for this dataset. + + if ($globalid =~/^([a-z]*):(.*)\/([^\/]*)$/) + { + $protocol = $1; + $authority = $2; + $identifier = $3; + +# print $protocol . " " . $authority . " " . $identifier . "\n"; + } + else + { + print STDERR "WARNING! illegal global id: " . $globalid . "\n"; + next; + } + + my $sth; + + $sth = $dbh->prepare(qq {SELECT s.id, v.id FROM study s, studyversion v WHERE v.study_id = s.id AND s.protocol = '$protocol' AND s.authority='$authority' AND s.studyid = '$identifier' AND v.versionnumber = $dsvnum}); + $sth->execute(); + + my $vercount = 0; + + my $sid; + my $svid; + + while ( @foo = $sth->fetchrow() ) + { + $sid = $foo[0]; + $svid = $foo[1]; + + $vercount++; + } + + $sth->finish; + + unless ($vercount == 1) + { + print STDERR "WARNING: invalid number of versions for study " . $globalid . ", with version number " . $dsvnum . " (" . $vercount . 
")!\n"; + next; + } + + $sth = $dbh->prepare(qq {SELECT fm.label, fm.category, fm.description, sf.filetype, sf.filesystemlocation, sf.md5, sf.restricted, sf.originalfiletype, sf.unf, sf.id, sf.fileclass, fm.id FROM filemetadata fm, studyfile sf WHERE fm.studyfile_id = sf.id AND fm.studyversion_id = $svid}); + + $sth->execute(); + + my $newfile = 0; + + while ( @foo = $sth->fetchrow() ) + { + # new filemetadata fields: + $label = $foo[0]; + $description = $foo[2]; + $description =~s/\n/ /g; + $description = $dbh->quote($description); + # category: + $category = $foo[1]; + # new datafile fields: + $type = $foo[3]; + unless ($type =~m:/:) + { + $type = "application/octet-stream"; + } + $md5 = $foo[5]; + $restricted = $foo[6]; + + # "restricted" is a boolean: + + $restricted = 'TRUE' if $restricted; + $restricted = 'FALSE' unless $restricted; + + # location of the file, on the old filesystem: + $fslocation = $foo[4]; + $fslocation = "" unless $fslocation; + + # additional info for subsettable files: + # (will go into the new datatable) + $originalfiletype = $foo[7]; + $unf = $foo[8]; + # id of the existing studyfile: + $sfid = $foo[9]; + # "class" of the existing studyfile: + # (tabular, "other", etc.) + $fileclass = $foo[10]; + $fmid = $foo[11]; + + if ($label =~/[\\\/:\*\?\"\<\>\|;\#]/) + { + $preservedlabel = $label; + $label=~s/[\\\/:\*\?\"\<\>\|;\#]//g; + + print STDERR "LABEL REPLACED: (FILEMETA: " . $fmid . ", FILE: " . $sfid . ", STUDY: " . $sid . ", VERSION: " . $svid . ", GLOBALID: " . $globalid . ") OLD: \"" . $preservedlabel . "\", NEW: \"" . $label . "\"\n"; + } + + if ($label eq '') + { + $label = "UNKNOWN"; + } + + $label = $dbh->quote($label); + + + unless ($STUDYFILEMAP{$sfid}) + { + $newfile = 1; + # Certain things only need to be done once per file - + # namely, each file needs one dvobject and datafile each; + # same for the datatables and variables. + # Other things, like filemetadatas, need to be created one + # per version. 
+ + $newdatafileid = ($dvobjectoffset+$sfid); + $STUDYFILEMAP{$sfid} = $newdatafileid; + + $fsname = $fslocation; + + if ($fslocation =~/^http/ ) + { + $fsize = 0; + $fmtime = &formatTimeStamp(time); + } + else + { + if ( -f $fslocation ) + { + @fstats = stat($fslocation); + $fsize = $fstats[7]; + $mtime = $fstats[9]; + + $fmtime = &formatTimeStamp($mtime); + $packlistentry = $fslocation; + $packlistentry =~s/.*\/DVN\/data\///; + print PL $packlistentry . "\n"; + } + else + { + print STDERR "WARNING: file " . $fslocation . " not found!\n"; + $fsize = 0; + $fmtime = &formatTimeStamp(time); + } + + $fsname =~s/^.*\///g; + } + + # dvobject: + + print qq {INSERT INTO dvobject (id, dtype, owner_id, createdate, modificationtime) VALUES ($newdatafileid, 'DataFile', $dsid, '$fmtime', '$fmtime');} . "\n"; + + # datafile object: + $fsname = $dbh->quote($fsname); + print qq {INSERT INTO datafile (id, contenttype, filesystemname, filesize, md5, restricted) VALUES ($newdatafileid, '$type', $fsname, $fsize, '$md5', $restricted);} . "\n"; + # Use the below line instead of the above if you are using 4.6 or above + # print qq {INSERT INTO datafile (id, contenttype, filesystemname, filesize, checksumtype, restricted,checksumvalue,rootdatafileid) VALUES ($newdatafileid, '$type', $fsname, $fsize, 'MD5', $restricted,'',-1);} . "\n"; + } + else + { + $newdatafileid = $STUDYFILEMAP{$sfid}; + $newfile = 0; + } + + # file metadata object: + print qq {INSERT INTO filemetadata (id, description, label, restricted, version, datasetversion_id, datafile_id) VALUES ($fmid, $description, $label, $restricted, 1, $dsvid, $newdatafileid);} . 
"\n"; + + # and the category, if exists: + + if ($category && $category ne "") + { + $category = $dbh->quote($category); + unless ($FILECATEGORIES{$category}) + { + # this is a new category (for this dataset), + # so it needs to be created: + + $filecatid++; + + print qq{INSERT INTO datafilecategory (id, name, dataset_id) VALUES ($filecatid, $category, $newdatafileid);} . "\n"; + + $FILECATEGORIES{$category} = $filecatid; + } + + my $fcid = $FILECATEGORIES{$category}; + print qq{INSERT INTO filemetadata_datafilecategory (filecategories_id, filemetadatas_id) VALUES ($fcid, $fmid);} . "\n"; + + } + + + # subsettable files: + # (again, this only needs to be done once per file!) + + + if ($newfile && ($fileclass eq "TabularDataFile")) + { + #print STDERR "this is a subsettable file.\n"; + + # NOTE: + # there's only one datatable per file - make sure to only run this once! + # (i.e., not for every version!) + + $sth1 = $dbh->prepare(qq {SELECT id, varquantity, casequantity, unf, recordspercase FROM datatable WHERE studyfile_id = $sfid}); + + $sth1->execute(); + + $count = 0; + + while ( @dt = $sth1->fetchrow() ) + { + $dtid = $dt[0]; + $varquantity = $dt[1]; + $casequantity = $dt[2]; + $dtunf = $dt[3]; + $recordspercase = $dt[4]; + + $count++; + + unless ($unf eq $dtunf) + { + print STDERR "WARNING: unf mismatch, between studyfile and datatable: " + $unf + ":" + $dtunf + "\n"; + } + + # datatable object: + + + if ($recordspercase) + { + print qq {INSERT INTO datatable (id, varquantity, casequantity, unf, originalfileformat, recordspercase, datafile_id) VALUES ($dtid, $varquantity, $casequantity, '$unf', '$originalfiletype', $recordspercase, $newdatafileid);} . "\n"; + } + else + { + print qq {INSERT INTO datatable (id, varquantity, casequantity, unf, originalfileformat, datafile_id) VALUES ($dtid, $varquantity, $casequantity, '$unf', '$originalfiletype', $newdatafileid);} . 
"\n"; + } + } + + $sth1->finish; + + unless ($count == 1) + { + print STDERR "WARNING: invalid numbe of datatables: " + $count +".\n"; + } + else + { + # variables: + $sth1 = $dbh->prepare(qq {SELECT name, label, variableformattype_id, variableintervaltype_id, formatcategory, formatschema, formatschemaname, unf, fileorder, weighted, orderedfactor, numberofdecimalpoints, universe, filestartposition, fileendposition, recordsegmentnumber, id FROM datavariable WHERE datatable_id = $dtid}); + + + $sth1->execute(); + + while ( @dv = $sth1->fetchrow() ) + { + $varname = $dv[0]; + $varname = $dbh->quote($varname); + $varlabel = $dv[1]; + $varlabel = $dbh->quote($varlabel); + $variableformattype_id = $dv[2]; + # the old school formattype_id and + # intervaltype_id need to be adjusted by 1, + # to match the new enum values used in the + # 4.0 datavariables: + $variableformattype_id--; + $variableintervaltype_id = $dv[3]; + $variableintervaltype_id--; + $varformatcategory = $dv[4]; + $varformatschema = $dv[5]; + $varformatschemaname = $dv[6]; + $varunf = $dv[7]; + $varfileorder = $dv[8]; + $varweighted = $dv[9]; + if ($varweighted) + { + $varweighted = "TRUE"; + } + else + { + $varweighted = "FALSE"; + } + $varorderedfactor = $dv[10]; + if ($varorderedfactor) + { + $varorderedfactor = "TRUE"; + } + else + { + $varorderedfactor = "FALSE"; + } + + $varnumberofdecimalpoints = $dv[11]; + $varuniverse = $dv[12]; + $varfilestartposition = $dv[13]; + $varfileendposition = $dv[14]; + $varrecordsegmentnumber = $dv[15]; + $varid = $dv[16]; + + + + # new datavariable object: + + $newdvfields = "id, name, label, interval, type, unf, fileorder, orderedfactor, weighted, datatable_id"; + $newdvvalues = qq {$varid, $varname, $varlabel, $variableintervaltype_id, $variableformattype_id, '$varunf', $varfileorder, $varorderedfactor, $varweighted, $dtid}; + + if ($varformatschemaname) + { + # becomes "format": + $newdvfields = $newdvfields . 
", format"; + $newdvvalues = qq{$newdvvalues, '$varformatschemaname'}; + } + + if ($varformatcategory) + { + $newdvfields = $newdvfields . ", formatcategory"; + $newdvvalues = qq{$newdvvalues, '$varformatcategory'}; + } + + if ($varfilestartposition) + { + $newdvfields = $newdvfields . ", filestartposition"; + $newdvvalues = qq{$newdvvalues, $varfilestartposition}; + } + + if ($varfileendposition) + { + $newdvfields = $newdvfields . ", fileendposition"; + $newdvvalues = qq{$newdvvalues, $varfileendposition}; + } + + if ($varrecordsegmentnumber) + { + $newdvfields = $newdvfields . ", recordsegmentnumber"; + $newdvvalues = qq{$newdvvalues, $varrecordsegmentnumber}; + } + + if ($varuniverse) + { + $newdvfields = $newdvfields . ", universe"; + $newdvvalues = qq{$newdvvalues, '$varuniverse'}; + } + + if ($varnumberofdecimalpoints) + { + $newdvfields = $newdvfields . ", numberofdecimalpoints"; + $newdvvalues = qq{$newdvvalues, $numberofdecimalpoints}; + } + + + print qq {INSERT INTO datavariable ($newdvfields) VALUES ($newdvvalues);} . 
"\n"; + + # variable categories: + $sth2 = $dbh->prepare(qq {SELECT id, label, value, missing, catorder, frequency FROM variablecategory WHERE datavariable_id = $varid}); + $sth2->execute(); + + while ( @vc = $sth2->fetchrow() ) + { + $varcatid = $vc[0]; + $varcatlabel = $vc[1]; + $varcatvalue = $vc[2]; + $varcatmissing = $vc[3]; + if ($varcatmissing) + { + $varcatmissing = "true"; + } + else + { + $varcatmissing = "false"; + } + $varcatorder = $vc[4]; + unless ($varcatorder) + { + if ($varcatorder eq "" || $varcatorder != 0) + { + $varcatorder = "null"; + } + } + $varcatfreq = $vc[5]; + unless ($varcatfreq) + { + if ($varcatfreq eq "" || $varcatfreq != 0) + { + $varcatfreq = "null"; + } + } + + + # only migrate the *real* categories: + if ($varcatlabel) + { + $varcatlabel = $dbh->quote($varcatlabel); + unless ($varcatvalue || ($varcatvalue eq "") || ($varcatvalue == 0)) + { + print STDERR qq {INSERT INTO variablecategory (id, label, value, missing, catorder, frequency, datavariable_id) VALUES ($varcatid, $varcatlabel, $varcatvalue, $varcatmissing, $varcatorder, $varcatfreq, $varid);} . "\n"; + } + else + { + $varcatvalue = $dbh->quote($varcatvalue); + print qq {INSERT INTO variablecategory (id, label, value, missing, catorder, frequency, datavariable_id) VALUES ($varcatid, $varcatlabel, $varcatvalue, $varcatmissing, $varcatorder, $varcatfreq, $varid);} . 
"\n"; + } + } + else + { + #print STDERR "empty var cat label.\n"; + } + } + + $sth2->finish; + } + + $sth1->finish; + } + + + + } + } + + $sth->finish; + +} + +# Now, the guestbooks/download activity etc.: + +# guest books from the old "studyfile activity" entries: + +$sth = $dbh->prepare(qq {SELECT a.downloadcount, a.lastdownloadtime, a.studyfile_id, s.authority, s.studyid, s.protocol FROM studyfileactivity a, study s WHERE a.study_id = s.id AND a.downloadcount > 0}); +$sth->execute(); + +$id = 1000; + +while ( @foo = $sth->fetchrow() ) +{ + my $acount = $foo[0]; + my $adownloadtime = $foo[1]; + $adownloadtime = "TIMESTAMP " . $dbh->quote($adownloadtime) if $adownloadtime; + $adownloadtime = "NULL" unless $adownloadtime; + + my $astudyfile_id = $foo[2]; + my $sauthority = $foo[3]; + my $sidentifier = $foo[4]; + my $sprotocol = $foo[5]; + + next unless $astudyfile_id; + $astudyfile_id += $dvobjectoffset; + + my $globalid = $sprotocol . ":" . $sauthority . "/" . $sidentifier; + + unless ($STUDYMAP{$globalid}) + { + next; + } + + ($dataset_id) = $STUDYMAP{$globalid}; + + for ($i = 0; $i < $acount; $i++) + { + $id++; + + if ($i == $acount - 1) + { + $downloadtime = $adownloadtime; + } + else + { + $downloadtime = "NULL"; + } + + print qq {INSERT INTO guestbookresponse (id, email, name, institution, position, responsetime, guestbook_id, datafile_id, authenticateduser_id, downloadtype, sessionid, dataset_id, datasetversion_id) VALUES ($id, NULL, 'unknown', NULL, NULL, $downloadtime, 1, $astudyfile_id, NULL, 'download', NULL, $dataset_id, NULL);} . 
"\n"; + } + +} + +$guestbook_response_id_offset = $id; + +# Migrating guestbooks: + +$sth = $dbh->prepare(qq {SELECT id, emailrequired, enabled, firstnamerequired, lastnamerequired, institutionrequired, positionrequired, vdc_id FROM guestbookquestionnaire}); +$sth->execute(); + +while ( @foo = $sth->fetchrow() ) +{ + my $gid = $foo[0]; + + if ($gid == 1) + { + # print STDERR "found guestbook with id=1!\n"; + # This is the default guestbook; we don't need to migrate it, as the + # new Dataverse 4.0 will have its own default guestbook. + next; + } + + my $gemailrequired = $foo[1] ? "TRUE" : "FALSE"; + my $genabled = $foo[2] ? "TRUE" : "FALSE"; + + my $gnamerequired = ($foo[3] || $foo[4]) ? "TRUE" : "FALSE"; + + my $ginstitutionrequired = $foo[5] ? "TRUE" : "FALSE"; + my $gpositionrequired = $foo[6] ? "TRUE" : "FALSE"; + + my $gdataverse_id = $foo[7] + 9; + + print qq {INSERT INTO guestbook (id, createtime, emailrequired, enabled, institutionrequired, name, namerequired, positionrequired, dataverse_id) VALUES ($gid, TIMESTAMP '1970-01-01 00:00:00', $gemailrequired, $genabled, $ginstitutionrequired, '', $gnamerequired, $gpositionrequired, $gdataverse_id);} . "\n"; + +} + +# Finally, migrating guestbook responses: + +$sth = $dbh->prepare(qq {SELECT r.id, r.email, r.firstname, r.institution, r.lastname, r.position, r.responsetime, r.guestbookquestionnaire_id, r.studyfile_id, r.vdcuser_id, r.downloadtype, r.sessionid, s.authority, s.studyid, s.protocol, v.versionnumber FROM guestbookresponse r, studyversion v, study s WHERE r.study_id = s.id AND r.studyversion_id = v.id}); +$sth->execute(); + + +while ( @foo = $sth->fetchrow() ) +{ + my $rid = $foo[0]; + + $rid += $guestbook_response_id_offset; + + my $remail = $dbh->quote($foo[1]); + my $rfirstname = $foo[2]; + my $rinstitution = $dbh->quote($foo[3]); + my $rlastname = $foo[4]; + my $rposition = $dbh->quote($foo[5]); + my $rresponsetime = $foo[6]; + $rresponsetime = "TIMESTAMP " . 
$dbh->quote($rresponsetime) if $rresponsetime; + $rresponsetime = "NULL" unless $rresponsetime; + my $rgbqid = $foo[7]; + my $rstudyfileid = $foo[8]; + $rstudyfileid+=$dvobjectoffset; + my $rvdcuserid = $foo[9] ? $foo[9] : "NULL"; + my $rdownloadtype = $dbh->quote($foo[10]); + my $rsessionid = $dbh->quote($foo[11]); + + my $sauthority = $foo[12]; + my $sidentifier = $foo[13]; + my $sprotocol = $foo[14]; + my $vversionnumber = $foo[15]; + + + my $globalid = $sprotocol . ":" . $sauthority . "/" . $sidentifier; + + unless ($VERSIONMAP{$globalid . "+++" . $vversionnumber}) + { + print STDERR "WARNING: No entry for " . $globalid . "+++" . $vversionnumber. "!\n"; + next; + } + + ($dataset_id, $datasetversion_id) = split ("\-", $VERSIONMAP{$globalid . "+++" . $vversionnumber}); + + unless ($dataset_id > 0 && $datasetversion_id > 0) + { + print STDERR "Invalid entry for " . $globalid . "+++" . $vversionnumber. ": " . $VERSIONMAP{$globalid . "+++" . $vversionnumber} . "!\n"; + next; + } + + my $name = ""; + $name = $rfirstname . " " if $rfirstname; + $name .= $rlastname if $rlastname; + $name = $dbh->quote($name) if $name; + $name = "NULL" unless $name; + + print qq {INSERT INTO guestbookresponse (id, email, name, institution, position, responsetime, guestbook_id, datafile_id, authenticateduser_id, downloadtype, sessionid, dataset_id, datasetversion_id) VALUES ($rid, $remail, $name, $rinstitution, $rposition, $rresponsetime, $rgbqid, $rstudyfileid, $rvdcuserid, $rdownloadtype, $rsessionid, $dataset_id, $datasetversion_id);} . "\n"; + +} + + + +$dbh->disconnect; + +close PL; + +exit 0; + +sub formatTimeStamp () { + my ($mtime) = (@_); + my ($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) = localtime($mtime); + + $year+=1900; + $mon++; + + $fmt = $year . "-" . sprintf("%02d",$mon) . "-" . sprintf("%02d",$mday) . " " . + sprintf("%02d", $hour) . ":" . sprintf("%02d",$min) . ":" . 
sprintf("%02d",$sec); + + return $fmt; +} + + + + + + diff --git a/dataversedock/testdata/scripts/migration/migrate_datasets.sql b/dataversedock/testdata/scripts/migration/migrate_datasets.sql new file mode 100644 index 0000000..e5b265e --- /dev/null +++ b/dataversedock/testdata/scripts/migration/migrate_datasets.sql @@ -0,0 +1,74 @@ +--copy studyversion fields to datasetversion +update datasetversion + set createtime = sv.createtime, + lastupdatetime = sv.lastupdatetime, + archivetime= sv.archivetime, + archivenote = sv.archivenote, + deaccessionlink = sv.deaccessionlink, + versionnote = sv.versionnote +from _dvn3_studyversion sv, dataset d, _dvn3_study s +where d.authority = s.authority +and d.protocol = s.protocol +and d.identifier = s.studyid +and datasetversion.dataset_id = d.id +and datasetversion.versionnumber = sv.versionnumber +and sv.study_id = s.id; + +-- set dataset.publication date to the releasetime of the earliest released studyversion +update dvobject +set publicationdate = m.releasetime +from (select dvobject.id, sv.study_id, min(sv.releasetime) as releasetime +from _dvn3_studyversion sv, dataset d, _dvn3_study s, dvobject +where d.authority = s.authority +and d.protocol = s.protocol +and d.identifier = s.studyid +and dvobject.id = d.id +and sv.study_id = s.id +and sv.versionstate!='DRAFT' group by sv.study_id, dvobject.id) m where m.id = dvobject.id; + +-- set dvobject creator_id for each dataset to study.creator_id +update dvobject +set creator_id = s.creator_id, createdate = s.createtime +from _dvn3_study s, dataset d +where d.authority = s.authority +and d.protocol = s.protocol +and d.identifier = s.studyid +and dvobject.id = d.id; + +-- migrate data from _dvn3_versioncontributor to datasetversionuser +insert into datasetversionuser ( lastupdatedate, authenticateduser_id, datasetversion_id ) ( +select vc.lastupdatetime, vc.contributor_id, dv.id +from _dvn3_versioncontributor vc, +_dvn3_studyversion sv, +_dvn3_study s, +dataset d, 
+datasetversion dv, +authenticateduser au +where vc.studyversion_id = sv.id +and sv.study_id = s.id +and d.authority = s.authority +and d.protocol = s.protocol +and d.identifier = s.studyid +and dv.dataset_id = d.id +and dv.versionnumber = sv.versionnumber +and au.id = vc.contributor_id); + +-- modify versionstate for older versions of deaccessioned studies +update datasetversion +set versionstate = 'DEACCESSIONED' +where id in ( +select dv1.id from datasetversion dv1, datasetversion dv2 +where dv1.dataset_id = dv2.dataset_id +and dv1.versionnumber < dv2.versionnumber +and dv2.versionstate = 'DEACCESSIONED'); + +-- update the globalidcreatetime to be equal to the createdate, +-- as it should have been registered when the draft was created in 3.6 +update dataset set globalidcreatetime = createdate +from dvobject dvo +where dataset.id = dvo.id; + +-- set the license for all versions to be NONE by default +-- TODO: once create commands are done, this can be done in the code. +update termsofuseandaccess set license = 'NONE'; + diff --git a/dataversedock/testdata/scripts/migration/migrate_dataverses.sql b/dataversedock/testdata/scripts/migration/migrate_dataverses.sql new file mode 100644 index 0000000..cc415d9 --- /dev/null +++ b/dataversedock/testdata/scripts/migration/migrate_dataverses.sql @@ -0,0 +1,47 @@ +---------------------- +-- subnetworks +----------------------- + +insert into dvobject ( id, owner_id, dtype, createdate, publicationdate, modificationtime, permissionmodificationtime, creator_id, releaseuser_id) + select id, 1, 'Dataverse', networkcreated, networkcreated, now(), now(), creator_id, creator_id + from _dvn3_vdcnetwork; + +insert into dataverse ( id, affiliation, alias, dataversetype, description, name, defaultcontributorrole_id, + facetroot, metadatablockroot, templateroot, guestbookroot, permissionroot, themeroot ) + select vdcn.id, affiliation, urlalias, 'UNCATEGORIZED', announcements, vdcn.name, dr.id, + false, false, false, false, true, true + 
from _dvn3_vdcnetwork vdcn, dataverserole dr + where dr.alias = 'editor'; + +-- subnetworks use the same contact e-mails as the Dataverse 4.0 root +insert into dataversecontact ( contactemail, displayorder, dataverse_id) + select dc.contactemail, dc.displayorder, _dvn3_vdcnetwork.id from dataversecontact dc, _dvn3_vdcnetwork + where dc.dataverse_id=1; + +----------------------- +-- dataverses +----------------------- + + +insert into dvobject ( id, owner_id, dtype, createdate, publicationdate, modificationtime, permissionmodificationtime, creator_id, releaseuser_id) + select id, vdcnetwork_id + 1, 'Dataverse', createddate, releasedate, now(), now(), creator_id, creator_id + from _dvn3_vdc; + +insert into dataverse ( id, affiliation, alias, dataversetype, description, name, defaultcontributorrole_id, + facetroot, metadatablockroot, templateroot, guestbookroot, permissionroot, themeroot ) + select vdc.id, affiliation, vdc.alias, 'UNCATEGORIZED', announcements, vdc.name, dr.id, + false, false, false, false, true, true + from _dvn3_vdc vdc, dataverserole dr + where dr.alias = 'editor'; + +-- this query splits the contact e-mail by , and trims both sides +insert into dataversecontact ( contactemail, displayorder, dataverse_id) + select trim(unnest(string_to_array(contactemail, ','))), 0, id from _dvn3_vdc; + + + +----------------------- +-- reset sequences +----------------------- + +SELECT setval('dvobject_id_seq', (SELECT MAX(id) FROM dvobject)); \ No newline at end of file diff --git a/dataversedock/testdata/scripts/migration/migrate_links.sql b/dataversedock/testdata/scripts/migration/migrate_links.sql new file mode 100644 index 0000000..9685b81 --- /dev/null +++ b/dataversedock/testdata/scripts/migration/migrate_links.sql @@ -0,0 +1,44 @@ + +-- links to datasets +insert into datasetlinkingdataverse (linkingdataverse_id, dataset_id, linkcreatetime) +select c.owner_id, ds.id, now() +from _dvn3_coll_studies link, _dvn3_vdccollection c, _dvn3_study s, dataset ds +where 
link.vdc_collection_id=c.id +and link.study_id=s.id +and s.owner_id != c.owner_id --don't include if already part of this dataverse +and ds.authority = s.authority +and ds.protocol = s.protocol +and ds.identifier = s.studyid; + + +-- links to root collections (now linked to dataverses) +insert into dataverselinkingdataverse (linkingdataverse_id, dataverse_id, linkcreatetime) +select vdc_id, owner_id, now() +from _dvn3_vdc_linked_collections link, _dvn3_vdccollection c +where link.linked_collection_id=c.id +and c.parentcollection_id is null; + +-- links to other, static collections (now linked to just the studies from them) +insert into datasetlinkingdataverse (linkingdataverse_id, dataset_id, linkcreatetime) +select vdc_id, ds.id, now() +from _dvn3_vdc_linked_collections link, _dvn3_coll_studies contents, _dvn3_vdccollection c, _dvn3_study s, dataset ds +where link.linked_collection_id=c.id +and c.parentcollection_id is not null +and c.type='static' +and c.id = contents.vdc_collection_id +and contents.study_id=s.id +and s.owner_id != vdc_id -- don't include if already part of this dataverse +and ds.authority = s.authority +and ds.protocol = s.protocol +and ds.identifier = s.studyid; + + + +----------------------- +-- reset sequences +----------------------- + +SELECT setval('datasetlinkingdataverse_id_seq', (SELECT MAX(id) FROM datasetlinkingdataverse)); +SELECT setval('dataverselinkingdataverse_id_seq', (SELECT MAX(id) FROM dataverselinkingdataverse)); + + diff --git a/dataversedock/testdata/scripts/migration/migrate_passwords.sql b/dataversedock/testdata/scripts/migration/migrate_passwords.sql new file mode 100644 index 0000000..ec2176e --- /dev/null +++ b/dataversedock/testdata/scripts/migration/migrate_passwords.sql @@ -0,0 +1,5 @@ +update builtinuser +set passwordencryptionversion = 0, +encryptedpassword= _dvn3_vdcuser.encryptedpassword +from _dvn3_vdcuser +where _dvn3_vdcuser.username=builtinuser.username; diff --git 
a/dataversedock/testdata/scripts/migration/migrate_permissions.sql b/dataversedock/testdata/scripts/migration/migrate_permissions.sql new file mode 100644 index 0000000..4a98d87 --- /dev/null +++ b/dataversedock/testdata/scripts/migration/migrate_permissions.sql @@ -0,0 +1,122 @@ +-- reference queries for duplicate roles in vdc_role +-- (created when user accounts where merged) +-- NOTE: may need to run multiple times + +/* +select * from _dvn3_vdcrole +where vdcuser_id || '|' || vdc_id || '|' || role_id in +( +select vdcuser_id || '|' || vdc_id || '|' || role_id from _dvn3_vdcrole +group by vdcuser_id, vdc_id, role_id +having count(*) > 1 +) +order by vdcuser_id, vdc_id, role_id + + +delete from _dvn3_vdcrole where id in +( +select max(id) from _dvn3_vdcrole +group by vdcuser_id, vdc_id, role_id +having count(*) >1 +order by max(id) +) +*/ + +----------------------- +-- dataverses role assignments +----------------------- + +-- admin (from the vdcnetwork creator) +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '@'|| useridentifier, vdcn.id, dr.id + from _dvn3_vdcnetwork vdcn, authenticateduser, dataverserole dr + where vdcn.creator_id = authenticateduser.id + and dr.alias='admin'; + +-- admin +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '@'|| useridentifier, vdc_id, dr.id + from _dvn3_vdcrole, authenticateduser, dataverserole dr + where _dvn3_vdcrole.vdcuser_id = authenticateduser.id + and role_id=3 and dr.alias='admin'; +-- curator +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '@'|| useridentifier, vdc_id, dr.id + from _dvn3_vdcrole, authenticateduser, dataverserole dr + where _dvn3_vdcrole.vdcuser_id = authenticateduser.id + and role_id=2 and dr.alias='curator'; +-- contributor +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '@'|| useridentifier, vdc_id, dr.id + from _dvn3_vdcrole, authenticateduser, 
dataverserole dr + where _dvn3_vdcrole.vdcuser_id = authenticateduser.id + and role_id=1 and dr.alias='dsContributor'; +-- member +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '@'|| useridentifier, vdc_id, dr.id + from _dvn3_vdcrole, authenticateduser, dataverserole dr + where _dvn3_vdcrole.vdcuser_id = authenticateduser.id + and role_id=4 and dr.alias='member'; + +-- groups (as members) +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '&'|| groupalias, vdcs_id, dr.id + from _dvn3_vdc_usergroup, explicitgroup, dataverserole dr + where _dvn3_vdc_usergroup.allowedgroups_id = explicitgroup.id + and dr.alias='member'; + +----------------------- +-- dataset role assignments +----------------------- + +-- contributor (from the study creator) +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '@'|| useridentifier, ds.id, dr.id + from _dvn3_study s, authenticateduser, dataverserole dr, dataset ds + where s.creator_id = authenticateduser.id + and ds.authority = s.authority + and ds.protocol = s.protocol + and ds.identifier = s.studyid + and dr.alias='editor'; + +-- member +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '@'|| useridentifier, ds.id, dr.id + from _dvn3_study_vdcuser, _dvn3_study s, authenticateduser, dataverserole dr, dataset ds + where _dvn3_study_vdcuser.allowedusers_id = authenticateduser.id + and _dvn3_study_vdcuser.studies_id = s.id + and ds.authority = s.authority + and ds.protocol = s.protocol + and ds.identifier = s.studyid + and dr.alias='member'; + +-- groups (as members) +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '&'|| groupalias, ds.id, dr.id + from _dvn3_study_usergroup, _dvn3_study s, explicitgroup, dataverserole dr, dataset ds + where _dvn3_study_usergroup.allowedgroups_id = explicitgroup.id + and _dvn3_study_usergroup.studies_id = s.id + and 
ds.authority = s.authority + and ds.protocol = s.protocol + and ds.identifier = s.studyid + and dr.alias='member'; + +----------------------- +-- file role assignments +----------------------- + +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '@'|| useridentifier, studyfiles_id, dr.id + from _dvn3_studyfile_vdcuser, authenticateduser, dataverserole dr + where _dvn3_studyfile_vdcuser.allowedusers_id = authenticateduser.id + and _dvn3_studyfile_vdcuser.studyfiles_id in (select id from datafile) + and dr.alias='fileDownloader'; + +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '&'|| groupalias, studyfiles_id, dr.id + from _dvn3_studyfile_usergroup, explicitgroup, dataverserole dr + where _dvn3_studyfile_usergroup.allowedgroups_id = explicitgroup.id + and _dvn3_studyfile_usergroup.studyfiles_id in (select id from datafile) + and dr.alias='fileDownloader'; + + diff --git a/dataversedock/testdata/scripts/migration/migrate_to_workflows.sql b/dataversedock/testdata/scripts/migration/migrate_to_workflows.sql new file mode 100644 index 0000000..e1590f3 --- /dev/null +++ b/dataversedock/testdata/scripts/migration/migrate_to_workflows.sql @@ -0,0 +1,76 @@ +------------ +-- Migrate the database to the workflow-enabled version +------------ + +------------ +-- Add new workflows-related tables +------------ + +CREATE TABLE WORKFLOW (ID SERIAL NOT NULL, NAME VARCHAR(255), PRIMARY KEY (ID)); + +CREATE TABLE WORKFLOWSTEPDATA (ID SERIAL NOT NULL, + PROVIDERID VARCHAR(255), + STEPTYPE VARCHAR(255), + PARENT_ID BIGINT, + index INTEGER, PRIMARY KEY (ID)); + +CREATE TABLE PENDINGWORKFLOWINVOCATION ( INVOCATIONID VARCHAR(255) NOT NULL, + DOIPROVIDER VARCHAR(255), + IPADDRESS VARCHAR(255), + NEXTMINORVERSIONNUMBER BIGINT, + NEXTVERSIONNUMBER BIGINT, + PENDINGSTEPIDX INTEGER, + TYPEORDINAL INTEGER, + USERID VARCHAR(255), + WORKFLOW_ID BIGINT, + DATASET_ID BIGINT, + PRIMARY KEY (INVOCATIONID)); + +CREATE TABLE 
WorkflowStepData_STEPPARAMETERS (WorkflowStepData_ID BIGINT, + STEPPARAMETERS VARCHAR(2048), + STEPPARAMETERS_KEY VARCHAR(255)); + +CREATE TABLE PendingWorkflowInvocation_LOCALDATA (PendingWorkflowInvocation_INVOCATIONID VARCHAR(255), + LOCALDATA VARCHAR(255), + LOCALDATA_KEY VARCHAR(255)); + +ALTER TABLE WORKFLOWSTEPDATA + ADD CONSTRAINT FK_WORKFLOWSTEPDATA_PARENT_ID + FOREIGN KEY (PARENT_ID) REFERENCES WORKFLOW (ID); + +ALTER TABLE PENDINGWORKFLOWINVOCATION + ADD CONSTRAINT FK_PENDINGWORKFLOWINVOCATION_WORKFLOW_ID + FOREIGN KEY (WORKFLOW_ID) REFERENCES WORKFLOW (ID); + +ALTER TABLE PENDINGWORKFLOWINVOCATION + ADD CONSTRAINT FK_PENDINGWORKFLOWINVOCATION_DATASET_ID + FOREIGN KEY (DATASET_ID) REFERENCES DVOBJECT (ID); + +ALTER TABLE WorkflowStepData_STEPPARAMETERS + ADD CONSTRAINT FK_WorkflowStepData_STEPPARAMETERS_WorkflowStepData_ID + FOREIGN KEY (WorkflowStepData_ID) REFERENCES WORKFLOWSTEPDATA (ID); + +ALTER TABLE PendingWorkflowInvocation_LOCALDATA + ADD CONSTRAINT PndngWrkflwInvocationLOCALDATAPndngWrkflwInvocationINVOCATIONID + FOREIGN KEY (PendingWorkflowInvocation_INVOCATIONID) REFERENCES PENDINGWORKFLOWINVOCATION (INVOCATIONID); + + +------------ +-- Add lockReason field to Dataset/DatasetVersion +------------ +TBC + +------------ +-- Validate there are no double-reason locks (??) 
+------------ +TBC + +------------ +-- Convert from boolean lock reasons to the enum-based one +------------ +TBC + +------------ +-- Delete lock reasons columns +------------ +TBC diff --git a/dataversedock/testdata/scripts/migration/migrate_users.sql b/dataversedock/testdata/scripts/migration/migrate_users.sql new file mode 100644 index 0000000..965675f --- /dev/null +++ b/dataversedock/testdata/scripts/migration/migrate_users.sql @@ -0,0 +1,44 @@ +----------------------- +-- users +----------------------- + +insert into builtinuser( id, affiliation, email, firstname, lastname, position, username) + select id, institution, email, firstname, lastname, position, username + from _dvn3_vdcuser; + +insert into authenticateduser( id, affiliation, email, firstname, lastname, position, useridentifier, superuser) + select id, institution, email, firstname, lastname, position, username, false + from _dvn3_vdcuser; + +---------------------- +--use the below instead of the above query for migrating to 4.7.1 and above +--------------------- +--insert into authenticateduser(id, affiliation, email, firstname, lastname, position, useridentifier, superuser,createdtime) +-- select id, institution, email, firstname, lastname, position, username, false, '01-01-2000 00:00:00' +-- from _dvn3_vdcuser; + +insert into authenticateduserlookup( authenticationproviderid, persistentuserid, authenticateduser_id) + select 'builtin', username, id + from _dvn3_vdcuser; + +----------------------- +-- groups +----------------------- + +-- only copy over groups that have users +insert into explicitgroup( id, description, displayname, groupalias, groupaliasinowner, owner_id) + select id, friendlyname, friendlyname, '1-'||name, name, 1 + from _dvn3_usergroup + where id in (select usergroups_id from _dvn3_vdcuser_usergroup); + +insert into explicitgroup_authenticateduser( explicitgroup_id, containedauthenticatedusers_id) + select usergroups_id, users_id + from _dvn3_vdcuser_usergroup; + 
+----------------------- +-- reset sequences +----------------------- + +SELECT setval('builtinuser_id_seq', (SELECT MAX(id) FROM builtinuser)); +SELECT setval('authenticateduser_id_seq', (SELECT MAX(id) FROM authenticateduser)); +SELECT setval('explicitgroup_id_seq', (SELECT MAX(id) FROM explicitgroup)); diff --git a/dataversedock/testdata/scripts/migration/migration_instructions.txt b/dataversedock/testdata/scripts/migration/migration_instructions.txt new file mode 100644 index 0000000..e352020 --- /dev/null +++ b/dataversedock/testdata/scripts/migration/migration_instructions.txt @@ -0,0 +1,167 @@ +Migration steps: + +Assumptions: + +- DVN 3.6 networkAdmin has id = 1 +- Dataverse 4.0 admin has id = 1 (created by setup-all.sh script) + + +Pre steps (contained in the migration_presteps document): + +-7. Make a copy of the production db, and point an app server to it +-6. (if there is any data that will fail validation, run scrubbing script - this will need to be custom per installation) +-5.9 run duplicate user scrubbing scripts +-5.8 run users as emails scripts +-5. Export DDI files from 3.6 copy for all datasets to be migrated + (this now includes exporting non-released versions - presteps doc. updated) +-4. Create copies of tables in 3.6 database for migrated data +-3. Run pg dump to extract tables copies +-2. Import copied tables into 4.0 database +-1. Run offsets on _dvn3_tables in the 4.0 DB + +Migration: + +1. run migrate_users.sql script + If you are migrating to 4.7.1 or above check the comment around line 14 of the migrate_users.sql script +2. run migrate_dataverses.sql script +2a. migrate preloaded customizations +3. run custom_field_map.sql script (this must be updated to contain the custom field mappings specific to + the migration source installation.) +4. 
run dataset APIs: execute the following HTTP request on the Dataverse 4.0 application to initiate dataset migration: + + http:///api/batch/migrate?path=&key= + + This will return a success message and begin an asynchronous migration job - the status of the job is viewable in the import-log file + in the Glassfish logs directory. + +5. run migrate_datasets.sql script (post migration scrubbing) + +6. Run file migration scripts: + +Before you can run these scripts, edit the files +files_destination_step1_ and files_source, and modify the following +lines at the top to be able to access your new (4.*) and old (3.*) +databases, respectively: + +my $host = "localhost"; +my $username = "xxxxx"; +my $database = "xxxxx"; +my $password = "xxxxx"; + +a. On the *destination* (4.0) server, step 1 +run the script, and save the output: + +./files_destination_step1_ > migrated_datasets.txt + +The script will also print the following message on +the stderr output (for example): + +last ID in DVOBJECT table: 12345 + +- you will need to use this number as a parameter in the +next step, below. + +b. On the *source* (3.6) server - +run the script on the input produced in a., +save the sql output: + +cat migrated_datasets.txt | ./files_source_ > files_import.sql + +where is the "last ID ..." from step a. + +If you are migrating to 4.6 or above check the comment around line 192 of files_source_ script +This script may produce a lot of stderr output, that you may want to save. +You can do that by running it as + +cat migrated_datasets.txt | ./files_source_ > files_import.sql 2>datafiles.sql.out + +(bash shell assumed) + +The script will also produce the output file packlist.txt, +that you *may* need to use in step d., below. + +c. On the destination server, import the sql produced in b.: + +psql -d -U -f files_import.sql + +d. [OPTIONAL] You can continue using your existing, DVN 3* files +driectory. In this case, this step can be omitted. 
But if you want to +preserve the DVN 3* directory and copy the files to the new Dataverse +4 server, you'll need to package the files on the source server, using +the files packlist.txt created in the step b.: + +tar cvzf packedfiles.tgz `cat packlist.txt` + +e. [OPTIONAL] If you are moving the files, unpack the files packaged +in the step d. on the destination server: + +cd +tar xvzf packedfiles.tgz + +7. run migrate_permissions.sql script (may need to delete some duplicates) + +8. run migrate_links.sql script + +10. reset sequences: + +sequence_script.sql + +11. Add publication dates to the migrated datafiles: + +datafile_pub_date.sql + +12. (when ready for users to log in) add user passwords + +migrate_passwords.sql + +__________________________________________________ + +Not being migrated (verify?): +-- Study Comments +-- File Access requests +-- Classifications +-- Study locks +-- VDCNetworkStats (generated data) + + +Post-migration tasks. +==================== + +If you have global IDs (handles or DOIs) registered, you may need to +re-register them. (Even if your Dataverse 4.0 is staying on the same +server as your old DVN 3* installation, the URLs of the study pages +have changed: what used to be /dvn/study?globalId=... is now +/dataset.xhtml?persistentId=...; this can be taken care of with URL +rewrite rules, but it may be cleaner to just update the registered +URLs for all your global identifiers). + +To update your registered handles: + +Generate the list of the database IDs of all your *released* datasets +with Handle global ids, and/or *all* your datasets with DOI ids. +(exercise for the reader). + +Use the modifyRegistration API call to update the registration for these datasets. +You can do something like + +cat list_of_db_ids.txt | while read dbid +do + curl 'http://localhost:8080/api/datasets/'$dbid'/modifyRegistration?key=' + echo +done + +TODO: + +script the above; make it less of an exercise for the reader. 
+ +TODO: + +explain how to transfer the Handles configuration from DVN 3 to Dataverse 4. + +TODO: + +check with Steve and Raman if the above is actually going to work for DOIs. +(or if anything special needs to be done first...) + + + diff --git a/dataversedock/testdata/scripts/migration/migration_presteps.txt b/dataversedock/testdata/scripts/migration/migration_presteps.txt new file mode 100644 index 0000000..df2be49 --- /dev/null +++ b/dataversedock/testdata/scripts/migration/migration_presteps.txt @@ -0,0 +1,144 @@ +---------------------------------------------- +-- Preparing the DDIs of production studies: +---------------------------------------------- + +All the existing DVN studies need to be re-exported *using the +specially modified version of the DVN3 DDI Export Servlet*. (This +version of the servlet exports not just the published, but all the +versions of each study). This servlet is NOT available in the DVN +v3.6.2, the last officially-released version. So please download the +specially-patched version of the DVN 3.6 war file: + +http://sourceforge.net/projects/dvn/files/dvn/3.6.2/DVN-web_v3_6_3_MIGRATION.war/download + +and deploy it on your DVN server (instead of the version 3.6.2 you are currently running). + +IMPORTANT: +---------- + +Remote access to the DDI Export servlet is restricted by default. +Access on the localhost interface is open however. So the easiest way +to perform the export is to run the script in the next step on the +same host where the DVN 3.* application is running. (And use +"http://localhost/dvn/ddi" for the export servlet URL parameter +there). + +If you must run the script on a different system, you can grant that +host access to the servlet by setting the following JVM option in +Glassfish 3 where the DVN app is running: + +-Dvdc.dsb.host= + +and restart glassfish. + + + +2. Run the script ./versions_source_. + +It will go through the list of the studies in the prod. db and issue a +call to the export servlet. 
The resulting DDIs will be saved in the +directory ./ddi. You will need to give the complete path of this +directory to the Dataverse 4 import process. + +Before you run the script, modify the following 3 lines at the top: + +my $host = "xxxxx"; +my $database = "xxxxx"; +my $username = "xxxxx"; +my $password = 'xxxxx'; + +To reflect your DVN 3 database location and credentials. Make sure you +can access the database from the host on which you'll be running this +script. + +Run it as follows: + +./versions_source_ "http:///dvn/ddi" "" + +The 2 arguments the script takes: + + - the URL of the DVN 3 Export Servlet; + - your local name space. + +For example: + +./versions_source_ http://localhost/dvn/ddi 1902.1 + +---------------------------------------------- +-- On 3.6 database, run the following to create copies of needed tables +---------------------------------------------- + +-- users / groups +-- ignore network admin (assumes id of 1) +create table _dvn3_vdcuser as select * from vdcuser where id != 1; +create table _dvn3_usergroup as select * from usergroup; +create table _dvn3_vdcuser_usergroup as select * from vdcuser_usergroup; + +-- dataverse networks / dataverses +-- ignore the root network +create table _dvn3_vdcnetwork as select * from vdcnetwork where id != 0; +create table _dvn3_vdc as select * from vdc; + +-- studies (for reference) +create table _dvn3_study as select * from study +-- where owner_id in (select id from _dvn3_vdc) +; + +create table _dvn3_studyversion as select * from studyversion +-- where study_id in (select id from _dvn3_study) +; + +create table _dvn3_versioncontributor as select * from versioncontributor +-- where studyversion_id in (select id from _dvn3_studyversion) +; + +-- collections (for reference) +create table _dvn3_vdccollection as select * from vdccollection; + +-- permissions +create table _dvn3_vdcrole as select * from vdcrole; +create table _dvn3_vdc_usergroup as select * from vdc_usergroup; + +create table 
_dvn3_study_vdcuser as select * from study_vdcuser; +create table _dvn3_study_usergroup as select * from study_usergroup; + +create table _dvn3_studyfile_vdcuser as select * from studyfile_vdcuser; +create table _dvn3_studyfile_usergroup as select * from studyfile_usergroup; + +-- links +create table _dvn3_coll_studies as select * from coll_studies; +create table _dvn3_vdc_linked_collections as select * from vdc_linked_collections; + + +---------------------------------------------- +-- run pg_dump to extract temp tables +---------------------------------------------- + +pg_dump -h localhost -U postgres <3.6 database name> -t _dvn3_* -f /tmp/dvn3_data.sql + +---------------------------------------------- +-- import temp tables into 4.0 db +---------------------------------------------- + +psql -h localhost -U postgres <4.0 database name> -f /tmp/dvn3_data.sql + +---------------------------------------------- +-- Run offsets on _dvn3_tables in the 4.0 DB +---------------------------------------------- + +-- offsets +update _dvn3_vdcnetwork set id = id + (select coalesce(max(id), 0) from dvobject); +update _dvn3_vdc set id = id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork); +update _dvn3_vdcrole set vdc_id = vdc_id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork); +update _dvn3_vdc_usergroup set vdcs_id = vdcs_id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork); +update _dvn3_vdc_linked_collections set vdc_id = vdc_id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork); +update _dvn3_study set owner_id = owner_id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork); +update _dvn3_vdccollection set owner_id = owner_id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork); + +-- note: need to determine what offset to use, based on the file scripts +--update _dvn3_studyfile_vdcuser set studyfiles_id = studyfiles_id +100000; +--update _dvn3_studyfile_usergroup set studyfiles_id = studyfiles_id + 100000; + + + + diff --git 
a/dataversedock/testdata/scripts/migration/scrub_duplicate_emails.sql b/dataversedock/testdata/scripts/migration/scrub_duplicate_emails.sql new file mode 100644 index 0000000..0599c20 --- /dev/null +++ b/dataversedock/testdata/scripts/migration/scrub_duplicate_emails.sql @@ -0,0 +1,497 @@ +-------------------- +--REFERENCE QUERIES +-------------------- +/* +-- Query to list all user acocunts with duplicate e-mails +select id, username, lower(email) from vdcuser +where lower(email) in ( +select lower(email) from vdcuser +group by lower(email) +having count(*) > 1 +) +order by email + +-- Query to list all e-mails that have are duplicated (total = # of actual users, without duplicates) +select lower(email), count(*) from vdcuser +group by lower(email) +having count(*) > 1 +order by count(*) desc + +-- Query to list all e-mails that have are duplicated and reference to original account (account with lowest id) +select u1.id, u1.username, u1.active,u1.email, u2.id, u2.username, u2.active +from vdcuser u1, vdcuser u2 +where 1=1 +and u1.id != u2.id +and lower(u1.email) = lower(u2.email) +and lower(u1.email) in ( +select lower(email) from vdcuser +group by lower(email) +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by lower(email) +having count(*) > 1 +) +order by lower(u1.email) + +-- Delete query, to be run after all the updates +delete from vdcuser where id in ( +select u1.id +from vdcuser u1, vdcuser u2 +where 1=1 +and u1.id != u2.id +and lower(u1.email) = lower(u2.email) +and lower(u1.email) in ( +select lower(email) from vdcuser +group by lower(email) +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by lower(email) +having count(*) > 1 +) +) + +*/ +-------------------- +--UPDATE QUERIES +-------------------- +-- these queries will update the foreign key references in all (relevant) tables to the orignal account +-- +-- Generated by +/* +SELECT tc.table_schema, tc.constraint_name, tc.table_name, kcu.column_name, 
ccu.table_name AS foreign_table_name,ccu.column_name AS foreign_column_name, +'update ' || tc.table_name || ' ref set ' || kcu.column_name || ' = u2.id +from vdcuser u1, vdcuser u2 +where ref.' || kcu.column_name || ' = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +);' as query +FROM information_schema.table_constraints tc +JOIN information_schema.key_column_usage kcu ON tc.constraint_name = kcu.constraint_name +JOIN information_schema.constraint_column_usage ccu ON ccu.constraint_name = tc.constraint_name +WHERE constraint_type = 'FOREIGN KEY' AND ccu.table_name='vdcuser'; +*/ +-- +-- +-- if any of the below fail because of duplicate constraints, you will need to first delete the duplicates +-- here is a sample query for deleting the duplicate entries from studyfile_vdcuser (the most likey to fail)) +/* +delete from studyfile_vdcuser +where allowedusers_id || '_' || studyfiles_id in ( +select u1.id || '_' || fu1.studyfiles_id +from vdcuser u1, vdcuser u2, studyfile_vdcuser fu1, studyfile_vdcuser fu2 +where 1=1 +and fu1.studyfiles_id = fu2.studyfiles_id +and fu1.allowedusers_id = u1.id +and fu2.allowedusers_id = u2.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser -- also may need to run with max(id) or some other combinations! 
+group by email +having count(*) > 1 +) +) +*/ + + +update flagged_study_comments ref set user_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.user_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update guestbookresponse ref set vdcuser_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.vdcuser_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update harvestingdataverse_vdcuser ref set allowedfileusers_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.allowedfileusers_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update networkrolerequest ref set vdcuser_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.vdcuser_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update rolerequest ref set vdcuser_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.vdcuser_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update studyaccessrequest ref set vdcuser_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.vdcuser_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select 
min(id) from vdcuser +group by email +having count(*) > 1 +); + +update studycomment ref set commentcreator_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.commentcreator_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update study ref set creator_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.creator_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update studyfile_vdcuser ref set allowedusers_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.allowedusers_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update study ref set lastupdater_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.lastupdater_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update studylock ref set user_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.user_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update studyrequest ref set vdcuser_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.vdcuser_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group 
by email +having count(*) > 1 +); + +update study ref set reviewer_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.reviewer_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update study_vdcuser ref set allowedusers_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.allowedusers_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update vdc ref set creator_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.creator_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update vdc_fileuser ref set allowedfileusers_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.allowedfileusers_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update vdcnetwork ref set creator_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.creator_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update vdcnetwork ref set defaultnetworkadmin_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.defaultnetworkadmin_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email 
+having count(*) > 1 +); + +update vdc ref set reviewer_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.reviewer_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update vdcrole ref set vdcuser_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.vdcuser_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update vdcuser_studycomment ref set vdcuser_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.vdcuser_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update vdcuser_usergroup ref set users_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.users_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update vdc_fileuser ref set allowedfileusers_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.allowedfileusers_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update versioncontributor ref set contributor_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.contributor_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having 
count(*) > 1 +); diff --git a/dataversedock/testdata/scripts/migration/scrub_email_usernames.sql b/dataversedock/testdata/scripts/migration/scrub_email_usernames.sql new file mode 100644 index 0000000..04ed83d --- /dev/null +++ b/dataversedock/testdata/scripts/migration/scrub_email_usernames.sql @@ -0,0 +1,28 @@ +-- first, find users with e-mails as usernames +select id, username, email from vdcuser where username like '%@%' +--and username != email; + +-- then find which those which would create duplicates after truncating +-- (verify that adding 1 would be OK; if not, you may need to update some individually) +select u1.id, u1.username, u2.id, u2.username from vdcuser u1, vdcuser u2 +where u1.id != u2.id +and u1.username like '%@%' +and split_part (u1.username, '@', 1) = u2.username + +-- for those usernames, truncate and add 1, so no duplicates +update vdcuser set username = split_part (username, '@', 1) ||'1' +where id in ( +select u1.id from vdcuser u1, vdcuser u2 +where u1.id != u2.id +and u1.username like '%@%' +and split_part (u1.username, '@', 1) = u2.username +) + +-- now truncate the rest +update vdcuser set username = split_part (username, '@', 1) where username like '%@%' + +-- confirm no duplicates +select id, username, email from vdcuser where username in ( +select username from vdcuser +group by username having count(*) > 1 +) \ No newline at end of file diff --git a/dataversedock/testdata/scripts/migration/sequence_script.sql b/dataversedock/testdata/scripts/migration/sequence_script.sql new file mode 100644 index 0000000..73f7661 --- /dev/null +++ b/dataversedock/testdata/scripts/migration/sequence_script.sql @@ -0,0 +1,9 @@ +SELECT setval('datafile_id_seq', (SELECT MAX(id) FROM datafile)); +SELECT setval('datafilecategory_id_seq', (SELECT MAX(id) FROM datafilecategory)); +SELECT setval('datatable_id_seq', (SELECT MAX(id) FROM datatable)); +SELECT setval('datavariable_id_seq', (SELECT MAX(id) FROM datavariable)); +SELECT setval('dvobject_id_seq', 
(SELECT MAX(id) FROM dvobject)); +SELECT setval('filemetadata_id_seq', (SELECT MAX(id) FROM filemetadata)); +SELECT setval('variablecategory_id_seq', (SELECT MAX(id) FROM variablecategory)); +SELECT setval('guestbook_id_seq', (SELECT MAX(id) FROM guestbook)); +SELECT setval('guestbookresponse_id_seq', (SELECT MAX(id) FROM guestbookresponse)); diff --git a/dataversedock/testdata/scripts/migration/versions_source_ b/dataversedock/testdata/scripts/migration/versions_source_ new file mode 100755 index 0000000..872c7e5 --- /dev/null +++ b/dataversedock/testdata/scripts/migration/versions_source_ @@ -0,0 +1,115 @@ +#!/usr/bin/perl + +my $url = shift @ARGV; +my $local_namespace = shift @ARGV; + +unless ( $url =~ /^http:/ && $local_namespace ne "" ) +{ + print STDERR "Usage: ./versions_source_ \n"; + exit 1; +} + +use DBI; + +my $host = "xxxxx"; +my $database = "xxxxx"; +my $username = "xxxxx"; +my $password = 'xxxxx'; + +my $dbh = DBI->connect("DBI:Pg:dbname=$database;host=$host",$username,$password); + +unless ( -d "ddi" ) +{ + mkdir "ddi"; +} + +my $sth; + +$sth = $dbh->prepare(qq {SELECT s.id, s.authority, s.studyid, v.alias FROM study s, vdc v WHERE s.owner_id = v.id}); + +$sth->execute(); + +my $sid; + +$studycounter = 0; +$releasedcounter = 0; +$extraversioncounter = 0; +$versioncounter = 0; + +while ( @foo = $sth->fetchrow() ) +{ + $sid = $foo[0]; + $authority = $foo[1]; + $studyid = $foo[2]; + $dvalias = $foo[3]; + + + unless ($authority eq $local_namespace ) { + next; + } + + unless ( -d "ddi/$dvalias" ) + { + mkdir "ddi/$dvalias"; + } + + + my $sth1; + + $sth1 = $dbh->prepare(qq {SELECT versionnumber,versionstate FROM studyversion v WHERE study_id=$sid}); + $sth1->execute(); + + my $vn; + + $localcounter = 0; + + while ( @bar = $sth1->fetchrow() ) + { + $vn = $bar[0]; + $vstate = $bar[1]; + + if ($vstate eq "RELEASED") + { + print STDERR "executing: wget -O ddi/" . $dvalias . "/" . $sid . ".xml '" . $url . "?studyId=" . $sid . "&versionNumber=" . $vn . 
"'\n"; + system "wget -O ddi/" . $dvalias . "/" . $sid . ".xml '" . $url . "?studyId=" . $sid . "&versionNumber=" . $vn . "'\n"; + $versioncounter++; + $releasedcounter++; + + } + else + { + print STDERR "executing: wget -O ddi/" . $dvalias . "/" . $sid . "-" . $vn . ".xml '" . $url . "?studyId=" . $sid . "&versionNumber=" . $vn . "'\n"; + system "wget -O ddi/" . $dvalias . "/" . $sid . "-" . $vn . ".xml '" . $url . "?studyId=" . $sid . "&versionNumber=" . $vn . "'\n"; + $versioncounter++; + $localcounter++; + } + } + $sth1->finish; + $studycounter++; + $extraversioncounter++ if $localcounter; + + unless ($studycounter % 1000) + { + print STDERR "\n" . $studycounter . " studies processed.\n"; + print STDERR $releasedcounter . " released versions;\n"; + print STDERR $extraversioncounter . " had versions other than released;\n"; + print STDERR "Total " . $versioncounter . " versions processed.\n"; + } +} + +print STDERR "\n" . $studycounter . " studies processed.\n"; +print STDERR $releasedcounter . " released versions;\n"; +print STDERR $extraversioncounter . " had versions other than released;\n"; +print STDERR "Total " . $versioncounter . " versions processed.\n"; + + +$sth->finish; + +$dbh->disconnect; + +exit 0; + + + + + diff --git a/dataversedock/testdata/scripts/migration/versions_source_step2_ b/dataversedock/testdata/scripts/migration/versions_source_step2_ new file mode 100755 index 0000000..9e15243 --- /dev/null +++ b/dataversedock/testdata/scripts/migration/versions_source_step2_ @@ -0,0 +1,40 @@ +#!/usr/bin/perl + + +unless ( -d "/tmp/ddi" ) +{ + mkdir "/tmp/ddi"; +} + +while (<>) +{ + chop; + @_ = split ("\t"); + $alias = $_[0]; + $studyid = $_[1]; + $ddifile = $_[2]; + + $ddifile = "/nfs/iqss/DVN/data/" . $ddifile; + + if ( -f $ddifile ) + { + $total += (stat($study))[7]; + $tmpdir = "/tmp/ddi/" . $alias; + unless ( -d $tmpdir ) + { + mkdir $tmpdir; + } + + $tmpfile = $tmpdir . "/" . $studyid . 
"\.xml"; + system "cp $ddifile $tmpfile"; + + } + else + { + print STDERR "warning: missing ddi file! (" . $ddifile . ")\n"; + } +} + +print "Total of " . $total . " bytes copied.\n"; + + diff --git a/dataversedock/testdata/scripts/rapache/build.sh b/dataversedock/testdata/scripts/rapache/build.sh new file mode 100755 index 0000000..fc48237 --- /dev/null +++ b/dataversedock/testdata/scripts/rapache/build.sh @@ -0,0 +1,10 @@ +#!/bin/sh +mkdir -p ~/rpmbuild/SOURCES +mkdir -p ~/rpmbuild/SPECS +wget https://github.com/jeffreyhorner/rapache/archive/v1.2.7.tar.gz -O rapache-1.2.7.tar.gz +tar xzvf rapache-1.2.7.tar.gz rapache-1.2.7/rpm/rapache.spec --strip-components 2 +# Move to build dirs +cp -f rapache-1.2.7.tar.gz ~/rpmbuild/SOURCES/ +cp -f rapache.spec ~/rpmbuild/SPECS/ +cd ~ +rpmbuild -ba ~/rpmbuild/SPECS/rapache.spec diff --git a/dataversedock/testdata/scripts/search/.gitignore b/dataversedock/testdata/scripts/search/.gitignore new file mode 100644 index 0000000..2360214 --- /dev/null +++ b/dataversedock/testdata/scripts/search/.gitignore @@ -0,0 +1,4 @@ +data/in/users +data/in/dv-birds1 +data/in/dv-trees1 +data/in/dv-psi diff --git a/dataversedock/testdata/scripts/search/add b/dataversedock/testdata/scripts/search/add new file mode 100755 index 0000000..ec6355a --- /dev/null +++ b/dataversedock/testdata/scripts/search/add @@ -0,0 +1,23 @@ +#!/bin/sh +# need the following in solr/collection1/conf/schema.xml +# +# +mkdir -p data +#echo "adding to solr..." +curl -s http://localhost:8080/api/dataverses > data/dataverses.json +#curl http://localhost:8983/solr/update/json?commit=true -H 'Content-type:application/json' --data-binary @data/dataverses.json + +curl -s http://localhost:8080/api/datasets > data/datasets.json +#curl http://localhost:8983/solr/update/json?commit=true -H 'Content-type:application/json' --data-binary @data/datasets.json + +echo "adding to elasticsearch..." 
+#curl -XPOST http://localhost:9200/dataverse/datasets/1 --data-binary @data/datasets/1.dump +for type in dataverses datasets; do + mkdir -p data/$type + for i in `./json2ids data/$type.json`; do + #echo "adding $i from $type..." + curl -s http://localhost:8080/api/$type/$i/dump > data/$type/$i.dump + curl -XPOST "http://localhost:9200/dataverse/$type/$i" --data-binary @data/$type/$i.dump + echo + done; +done diff --git a/dataversedock/testdata/scripts/search/assumptions b/dataversedock/testdata/scripts/search/assumptions new file mode 100755 index 0000000..7ac655c --- /dev/null +++ b/dataversedock/testdata/scripts/search/assumptions @@ -0,0 +1,24 @@ +#!/bin/bash +# `source path/to/this/file` to get the same assumptions :) +export ADMIN_ROLE=1 +export ROOT_DATAVERSE=1 +export DV_CONTRIBUTOR_ROLE=4 +export BIRDS_DATAVERSE=`grep '"alias":"birds"' /tmp/bird-dvs1 | jq .data.id` +export SPRUCE_DATAVERSE=`grep '"alias":"spruce"' /tmp/tree-dvs1 | jq .data.id` +export SPRUCE_USERNAME='@spruce' +export FINCH_USERNAME='@finch' + +export SPRUCE_ADMIN_ON_BIRDS=`curl -s "http://localhost:8080/api/dataverses/$BIRDS_DATAVERSE/assignments?key=$FINCHKEY" | jq ".data[] | select(.assignee==\"$SPRUCE_USERNAME\") | .id"` + +export FINCH_ADMIN_ON_SPRUCE=`curl -s "http://localhost:8080/api/dataverses/$SPRUCE_DATAVERSE/assignments?key=$SPRUCEKEY" | jq .data[1].id` + +export FIRST_SPRUCE_DOI=`curl --insecure -s -u spruce:spruce https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/collection/dataverse/spruce | xmllint -format - | xmlstarlet sel -t -v '//_:id' 2>/dev/null | cut -d'/' -f11,12,13` + +export FIRST_SPRUCE_DATASET_ID=`curl -s "http://localhost:8080/api/dataverses/spruce/contents?key=$SPRUCEKEY" | jq '.data[0].id'` + +export FIRST_SPRUCE_DOI=doi:10.5072/FK2/`curl -s "http://localhost:8080/api/datasets/$FIRST_SPRUCE_DATASET_ID?key=$SPRUCEKEY" | jq .data.identifier | tr -d \"` + +export FIRST_SPRUCE_FILE=`scripts/api/data-deposit/show-statement $FIRST_SPRUCE_DOI 2>/dev/null | 
xmlstarlet sel -t -v '//_:feed/_:entry/_:id' 2>/dev/null | cut -d '/' -f11` + +export FIRST_FINCH_DOI=`curl --insecure -s -u finch:finch https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/collection/dataverse/finches | xmllint -format - | xmlstarlet sel -t -v '//_:id' 2>/dev/null | cut -d'/' -f11,12,13` +export FIRST_FINCH_DATASET_ID=`curl -s "http://localhost:8080/api/dataverses/finches/contents?key=$FINCHKEY" | jq '.data[0].id'` diff --git a/dataversedock/testdata/scripts/search/clear b/dataversedock/testdata/scripts/search/clear new file mode 100755 index 0000000..5908e9f --- /dev/null +++ b/dataversedock/testdata/scripts/search/clear @@ -0,0 +1,5 @@ +#!/bin/sh +echo "deleting all data from Solr" +curl http://localhost:8983/solr/update/json?commit=true -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"*:*\"}}" +# this was for elasticsearch +#curl -XDELETE http://localhost:9200/dataverse/ diff --git a/dataversedock/testdata/scripts/search/compare b/dataversedock/testdata/scripts/search/compare new file mode 100755 index 0000000..9dc8cc6 --- /dev/null +++ b/dataversedock/testdata/scripts/search/compare @@ -0,0 +1,19 @@ +#!/bin/bash +DIR=/tmp/searchusers +FINCHKEY=`cat $DIR/1 | jq .data.apiToken | tr -d \"` +SPRUCKEY=`cat $DIR/4 | jq .data.apiToken | tr -d \"` +echo "Search API:" +curl -s "http://localhost:8080/api/search?q=*&key=$FINCHKEY" | jq '.data.fq_actual' +echo "Database:" +scripts/search/dbperms $1 | grep '|' +echo "Solr per group docs (old):" +curl -s "http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q=entityid%3A$1" | jq '.response.docs[] | {id, name_sort, perms_ss}' +echo "Solr permission docs (new):" +curl -s "http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q=definition_point_dvobject_id_s%3A$1" | jq '.response.docs[] | {definition_point_s,discoverable_by_ss}' +echo "Java:" +curl -s "http://localhost:8080/api/search/perms?q=*&key=$FINCHKEY&id=$1" | jq '.data[]' 
2>/dev/null +exit +echo "Search API perms, items (finch):" +curl -s "http://localhost:8080/api/search?q=*&key=$FINCHKEY" | jq '.data | {fq_actual, items}' +echo "Search API perms, items (spruce):" +curl -s "http://localhost:8080/api/search?q=*&key=$SPRUCKEY" | jq '.data | {fq_actual, items}' diff --git a/dataversedock/testdata/scripts/search/create b/dataversedock/testdata/scripts/search/create new file mode 100755 index 0000000..1ee9648 --- /dev/null +++ b/dataversedock/testdata/scripts/search/create @@ -0,0 +1,31 @@ +#!/bin/sh +DVDIR_ROOT='data/in/dataverses.root' +DVDIR_BIRDS='data/in/dataverses.birds' +DVDIR_TREES='data/in/dataverses.trees' +DSDIR='data/in/datasets' +FILESDIR='data/in/files' + +#rm data/in/dataverses/1 +for i in `ls $DVDIR_ROOT`; do + curl -s -H "Content-type:application/json" -X POST -d @$DVDIR_ROOT/$i "http://localhost:8080/api/dataverses/root?key=$PETEKEY" +done + +for i in `ls $DVDIR_BIRDS`; do + curl -s -H "Content-type:application/json" -X POST -d @$DVDIR_BIRDS/$i "http://localhost:8080/api/dataverses/birds?key=$PETEKEY" +done + +for i in `ls $DVDIR_TREES`; do + curl -s -H "Content-type:application/json" -X POST -d @$DVDIR_TREES/$i "http://localhost:8080/api/dataverses/trees?key=$PETEKEY" +done + +# 9 is "sparrows" +#curl -H "Content-type:application/json" -X POST -d @data/in/dataverses.misc/8 "http://localhost:8080/api/dataverses/9?key=pete" +#curl -H "Content-type:application/json" -X POST -d @data/in/dataverses.misc/9 "http://localhost:8080/api/dataverses/trees?key=pete" + +#for i in `ls $DSDIR`; do +# curl http://localhost:8080/api/datasets -H 'Content-type:application/json' --data-binary @$DSDIR/$i +#done + +#for i in `ls $FILESDIR`; do +# curl http://localhost:8080/api/files -H 'Content-type:application/json' --data-binary @$FILESDIR/$i +#done diff --git a/dataversedock/testdata/scripts/search/create-bird-dvs1 b/dataversedock/testdata/scripts/search/create-bird-dvs1 new file mode 100755 index 0000000..ba71e01 --- /dev/null +++ 
b/dataversedock/testdata/scripts/search/create-bird-dvs1 @@ -0,0 +1,21 @@ +#!/bin/sh +DIR='scripts/search/data/in/dv-birds1' +USERDIR=/tmp/searchusers +ROOT_DV=root +FINCHKEY=`cat $USERDIR/1 | jq .data.apiToken | tr -d \"` +curl -s -H "Content-type:application/json" -X POST -d @$DIR/1 "http://localhost:8080/api/dataverses/$ROOT_DV?key=$FINCHKEY" +echo + +#PARENT=`xsltproc scripts/search/data/mkpaths.xsl scripts/search/data/nodes.xml | grep '/sparrows$' | tr / " " | awk '{print $(NF-1)}'` +PARENT=birds +curl -s -H "Content-type:application/json" -X POST -d @$DIR/2 "http://localhost:8080/api/dataverses/$PARENT?key=$FINCHKEY" +echo + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/3 "http://localhost:8080/api/dataverses/birds?key=$FINCHKEY" +echo + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/4 "http://localhost:8080/api/dataverses/birds?key=$FINCHKEY" +echo + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/5 "http://localhost:8080/api/dataverses/sparrows?key=$FINCHKEY" +echo diff --git a/dataversedock/testdata/scripts/search/create-psi-dvs b/dataversedock/testdata/scripts/search/create-psi-dvs new file mode 100755 index 0000000..150f51c --- /dev/null +++ b/dataversedock/testdata/scripts/search/create-psi-dvs @@ -0,0 +1,24 @@ +#!/bin/sh +. 
scripts/search/export-keys +DIR='scripts/search/data/in/dv-psi' +USERDIR=/tmp/searchusers +curl -s -H "Content-type:application/json" -X POST -d @$DIR/1 "http://localhost:8080/api/dataverses/root?key=$PSIADMINKEY" +echo + +PARENT=psi +for i in {2..9}; do + curl -s -H "Content-type:application/json" -X POST -d @$DIR/$i "http://localhost:8080/api/dataverses/$PARENT?key=$PSIADMINKEY" + echo +done + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/10 "http://localhost:8080/api/dataverses/psimali?key=$PSIADMINKEY" +echo + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/11 "http://localhost:8080/api/dataverses/psimali?key=$PSIADMINKEY" +echo + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/12 "http://localhost:8080/api/dataverses/psimalihealth?key=$PSIADMINKEY" +echo + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/13 "http://localhost:8080/api/dataverses/psimalihealthchild?key=$PSIADMINKEY" +echo diff --git a/dataversedock/testdata/scripts/search/create-tree-dvs1 b/dataversedock/testdata/scripts/search/create-tree-dvs1 new file mode 100755 index 0000000..b5ba864 --- /dev/null +++ b/dataversedock/testdata/scripts/search/create-tree-dvs1 @@ -0,0 +1,13 @@ +#!/bin/sh +DIR='scripts/search/data/in/dv-trees1' +USERDIR=/tmp/searchusers +ROOT_DV=root +SPRUCEKEY=`cat $USERDIR/4 | jq .data.apiToken | tr -d \"` +curl -s -H "Content-type:application/json" -X POST -d @$DIR/1 "http://localhost:8080/api/dataverses/$ROOT_DV?key=$SPRUCEKEY" +echo + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/2 "http://localhost:8080/api/dataverses/trees?key=$SPRUCEKEY" +echo + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/3 "http://localhost:8080/api/dataverses/trees?key=$SPRUCEKEY" +echo diff --git a/dataversedock/testdata/scripts/search/create-users b/dataversedock/testdata/scripts/search/create-users new file mode 100755 index 0000000..bdee0c6 --- /dev/null +++ b/dataversedock/testdata/scripts/search/create-users @@ 
-0,0 +1,19 @@ +#!/bin/bash +SERVER='http://localhost:8080/api' +BURRITO='burrito' +USERDIR='scripts/search/data/in/users' +OUTDIR='/tmp/searchusers' +rm -rf $OUTDIR +mkdir -p $OUTDIR + +create () { + pass=`cat $1 | jq .userName | tr -d \"` + echo $pass + resp=$(curl -s -H "Content-type:application/json" -X POST -d @$1 "$SERVER/builtin-users?password=$pass&key=$BURRITO") + echo $resp | jq . > $OUTDIR/$1 +} + +cd $USERDIR +for i in `ls`; do + create $i +done diff --git a/dataversedock/testdata/scripts/search/data/binary/1000files.zip b/dataversedock/testdata/scripts/search/data/binary/1000files.zip new file mode 100644 index 0000000..64d4cdd Binary files /dev/null and b/dataversedock/testdata/scripts/search/data/binary/1000files.zip differ diff --git a/dataversedock/testdata/scripts/search/data/binary/100files.zip b/dataversedock/testdata/scripts/search/data/binary/100files.zip new file mode 100644 index 0000000..11dd6a1 Binary files /dev/null and b/dataversedock/testdata/scripts/search/data/binary/100files.zip differ diff --git a/dataversedock/testdata/scripts/search/data/binary/3files.zip b/dataversedock/testdata/scripts/search/data/binary/3files.zip new file mode 100644 index 0000000..a4cd394 Binary files /dev/null and b/dataversedock/testdata/scripts/search/data/binary/3files.zip differ diff --git a/dataversedock/testdata/scripts/search/data/binary/health.zip b/dataversedock/testdata/scripts/search/data/binary/health.zip new file mode 100644 index 0000000..2a81248 Binary files /dev/null and b/dataversedock/testdata/scripts/search/data/binary/health.zip differ diff --git a/dataversedock/testdata/scripts/search/data/binary/trees.png b/dataversedock/testdata/scripts/search/data/binary/trees.png new file mode 100644 index 0000000..7aa5223 Binary files /dev/null and b/dataversedock/testdata/scripts/search/data/binary/trees.png differ diff --git a/dataversedock/testdata/scripts/search/data/binary/trees.zip b/dataversedock/testdata/scripts/search/data/binary/trees.zip 
new file mode 100644 index 0000000..170c2d3 Binary files /dev/null and b/dataversedock/testdata/scripts/search/data/binary/trees.zip differ diff --git a/dataversedock/testdata/scripts/search/data/dv-birds1.tsv b/dataversedock/testdata/scripts/search/data/dv-birds1.tsv new file mode 100755 index 0000000..e9af3e5 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/dv-birds1.tsv @@ -0,0 +1,6 @@ +name alias permissionRoot subject contactEmail description affiliation +Birds birds true Arts and Humanities birds@birds.com A bird dataverse with some trees Birds Inc. +Finches finches false Chemistry finches@birds.com A dataverse with finches Birds Inc. +Sparrows sparrows false Law sparrows@birds.com A dataverse featuring sparrows Birds Inc. +Wrens wrens false Medicine, Health and Life Sciences wrens@birds.com A dataverse full of wrens Birds Inc. +Chestnut Sparrows chestnutsparrows false Other chestnutsparrows@birds.com A dataverse with chestnut sparrows Birds Inc. diff --git a/dataversedock/testdata/scripts/search/data/dv-psi.tsv b/dataversedock/testdata/scripts/search/data/dv-psi.tsv new file mode 100755 index 0000000..b3d39c0 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/dv-psi.tsv @@ -0,0 +1,14 @@ +name alias permissionRoot subject contactEmail description affiliation +PSI psi true Social Science psi@mailinator.com PSI PSI +China psichina true Social Science psi@mailinator.com PSI PSI +Russia psirussia true Social Science psi@mailinator.com PSI PSI +India psiindia true Social Science psi@mailinator.com PSI PSI +Haiti psihaiti true Social Science psi@mailinator.com PSI PSI +Laos psilaos true Social Science psi@mailinator.com PSI PSI +Nepal psinepal true Social Science psi@mailinator.com PSI PSI +Togo psitogo true Social Science psi@mailinator.com PSI PSI +Mali psimali true Social Science psi@mailinator.com PSI PSI +Mali Health psimalihealth true Social Science psi@mailinator.com PSI PSI +Women in Mali psimaliwomen true Social Science 
psi@mailinator.com PSI PSI +Child of Mali Health psimalihealthchild true Social Science psi@mailinator.com PSI PSI +Grandchild of Mali Health psimalihealthgrandchild true Social Science psi@mailinator.com PSI PSI diff --git a/dataversedock/testdata/scripts/search/data/dv-trees1.tsv b/dataversedock/testdata/scripts/search/data/dv-trees1.tsv new file mode 100755 index 0000000..b0ac7e2 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/dv-trees1.tsv @@ -0,0 +1,4 @@ +name alias permissionRoot subject contactEmail description affiliation +Trees trees true Other trees@trees.com A tree dataverse with some birds Trees Inc. +Spruce spruce false Other spruce@trees.com A spruce with some birds Trees Inc. +Chestnut Trees chestnuttrees false Other chestnuttrees@trees.com A dataverse with chestnut trees and an oriole Trees Inc. diff --git a/dataversedock/testdata/scripts/search/data/group-explicit-trees.json b/dataversedock/testdata/scripts/search/data/group-explicit-trees.json new file mode 100644 index 0000000..b518edc --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/group-explicit-trees.json @@ -0,0 +1,5 @@ +{ + "aliasInOwner": "trees", + "displayName": "Trees Dataverse Contributors", + "description": "Contributors to the Trees Dataverse." 
+} diff --git a/dataversedock/testdata/scripts/search/data/in/dataverses.birds/4 b/dataversedock/testdata/scripts/search/data/in/dataverses.birds/4 new file mode 100644 index 0000000..a54ea08 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/in/dataverses.birds/4 @@ -0,0 +1,8 @@ +{ + "affiliation": "Birds Inc.", + "alias": "finches", + "contactEmail": "finches@birds.com", + "description": "A dataverse with finches", + "name": "Finches", + "permissionRoot": "false" +} diff --git a/dataversedock/testdata/scripts/search/data/in/dataverses.birds/5 b/dataversedock/testdata/scripts/search/data/in/dataverses.birds/5 new file mode 100644 index 0000000..2207109 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/in/dataverses.birds/5 @@ -0,0 +1,8 @@ +{ + "affiliation": "Birds Inc.", + "alias": "sparrows", + "contactEmail": "sparrows@birds.com", + "description": "A dataverse featuring sparrows", + "name": "Sparrows", + "permissionRoot": "false" +} diff --git a/dataversedock/testdata/scripts/search/data/in/dataverses.birds/6 b/dataversedock/testdata/scripts/search/data/in/dataverses.birds/6 new file mode 100644 index 0000000..37a8627 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/in/dataverses.birds/6 @@ -0,0 +1,8 @@ +{ + "affiliation": "Birds Inc.", + "alias": "wrens", + "contactEmail": "wrens@birds.com", + "description": "A dataverse full of wrens", + "name": "Wrens", + "permissionRoot": "false" +} diff --git a/dataversedock/testdata/scripts/search/data/in/dataverses.root/2 b/dataversedock/testdata/scripts/search/data/in/dataverses.root/2 new file mode 100644 index 0000000..c2b1ac0 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/in/dataverses.root/2 @@ -0,0 +1,8 @@ +{ + "affiliation": "Birds Inc.", + "alias": "birds", + "contactEmail": "birds@birds.com", + "description": "A bird dataverse with some trees", + "name": "Birds", + "permissionRoot": "false" +} diff --git 
a/dataversedock/testdata/scripts/search/data/in/dataverses.root/3 b/dataversedock/testdata/scripts/search/data/in/dataverses.root/3 new file mode 100644 index 0000000..eef8f99 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/in/dataverses.root/3 @@ -0,0 +1,8 @@ +{ + "affiliation": "Trees Inc.", + "alias": "trees", + "contactEmail": "trees@trees.com", + "description": "A tree dataverse with some birds", + "name": "Trees", + "permissionRoot": "false" +} diff --git a/dataversedock/testdata/scripts/search/data/in/dataverses.trees/7 b/dataversedock/testdata/scripts/search/data/in/dataverses.trees/7 new file mode 100644 index 0000000..7e8026f --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/in/dataverses.trees/7 @@ -0,0 +1,8 @@ +{ + "affiliation": "Trees Inc.", + "alias": "spruce", + "contactEmail": "spruce@trees.com", + "description": "A spruce with some birds", + "name": "Spruce", + "permissionRoot": "false" +} diff --git a/dataversedock/testdata/scripts/search/data/in/dataverses.trees/9 b/dataversedock/testdata/scripts/search/data/in/dataverses.trees/9 new file mode 100644 index 0000000..2410260 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/in/dataverses.trees/9 @@ -0,0 +1,8 @@ +{ + "affiliation": "Trees Inc.", + "alias": "chestnuttrees", + "contactEmail": "chestnuttrees@trees.com", + "description": "A dataverse with chestnut trees and an oriole", + "name": "Chestnut Trees", + "permissionRoot": "false" +} diff --git a/dataversedock/testdata/scripts/search/data/mkpaths.xsl b/dataversedock/testdata/scripts/search/data/mkpaths.xsl new file mode 100644 index 0000000..c14d9f7 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/mkpaths.xsl @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/dataversedock/testdata/scripts/search/data/nodes.xml b/dataversedock/testdata/scripts/search/data/nodes.xml new file mode 100644 index 0000000..a635b2b --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/nodes.xml 
@@ -0,0 +1,12 @@ + + + + + + + + + + + + diff --git a/dataversedock/testdata/scripts/search/data/replace_test/003.txt b/dataversedock/testdata/scripts/search/data/replace_test/003.txt new file mode 100644 index 0000000..e440e5c --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/replace_test/003.txt @@ -0,0 +1 @@ +3 \ No newline at end of file diff --git a/dataversedock/testdata/scripts/search/data/replace_test/004.txt b/dataversedock/testdata/scripts/search/data/replace_test/004.txt new file mode 100644 index 0000000..bf0d87a --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/replace_test/004.txt @@ -0,0 +1 @@ +4 \ No newline at end of file diff --git a/dataversedock/testdata/scripts/search/data/replace_test/005.txt b/dataversedock/testdata/scripts/search/data/replace_test/005.txt new file mode 100644 index 0000000..7813681 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/replace_test/005.txt @@ -0,0 +1 @@ +5 \ No newline at end of file diff --git a/dataversedock/testdata/scripts/search/data/replace_test/growing_file/2016-01/data.tsv b/dataversedock/testdata/scripts/search/data/replace_test/growing_file/2016-01/data.tsv new file mode 100644 index 0000000..4d75a0a --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/replace_test/growing_file/2016-01/data.tsv @@ -0,0 +1 @@ +2016-01 7 diff --git a/dataversedock/testdata/scripts/search/data/replace_test/growing_file/2016-02/data.tsv b/dataversedock/testdata/scripts/search/data/replace_test/growing_file/2016-02/data.tsv new file mode 100644 index 0000000..7a1f0a8 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/replace_test/growing_file/2016-02/data.tsv @@ -0,0 +1,2 @@ +2016-01 7 +2016-02 9 diff --git a/dataversedock/testdata/scripts/search/data/replace_test/growing_file/2016-03/data.tsv b/dataversedock/testdata/scripts/search/data/replace_test/growing_file/2016-03/data.tsv new file mode 100644 index 0000000..7d7619a --- /dev/null +++ 
b/dataversedock/testdata/scripts/search/data/replace_test/growing_file/2016-03/data.tsv @@ -0,0 +1,3 @@ +2016-01 7 +2016-02 9 +2016-03 8 diff --git a/dataversedock/testdata/scripts/search/data/savedSearchAdvanced.json b/dataversedock/testdata/scripts/search/data/savedSearchAdvanced.json new file mode 100644 index 0000000..00b8244 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/savedSearchAdvanced.json @@ -0,0 +1,7 @@ +{ + "query": "*", + "definitionPointId": 2, + "filterQueries": [ + "date:2015" + ] +} diff --git a/dataversedock/testdata/scripts/search/data/savedSearchBasic.json b/dataversedock/testdata/scripts/search/data/savedSearchBasic.json new file mode 100644 index 0000000..26d04d8 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/savedSearchBasic.json @@ -0,0 +1,4 @@ +{ + "query": "png", + "definitionPointId": 2 +} diff --git a/dataversedock/testdata/scripts/search/data/savedSearchInvalidJson.json b/dataversedock/testdata/scripts/search/data/savedSearchInvalidJson.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/savedSearchInvalidJson.json @@ -0,0 +1 @@ +[] diff --git a/dataversedock/testdata/scripts/search/data/savedSearchInvalidJsonNoQuery.json b/dataversedock/testdata/scripts/search/data/savedSearchInvalidJsonNoQuery.json new file mode 100644 index 0000000..7d8433a --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/savedSearchInvalidJsonNoQuery.json @@ -0,0 +1,3 @@ +{ + "quarry": "can't spell" +} diff --git a/dataversedock/testdata/scripts/search/data/savedSearchMaliBasicHealth.json b/dataversedock/testdata/scripts/search/data/savedSearchMaliBasicHealth.json new file mode 100644 index 0000000..ca217d3 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/savedSearchMaliBasicHealth.json @@ -0,0 +1,9 @@ +{ + "definitionPointId": 22, + "query": "health", + "filterQueries": [ + "dvObjectType:(dataverses OR datasets OR files)", + 
"subtreePaths:\"/13/21\"" + ], + "creatorId": 1 +} diff --git a/dataversedock/testdata/scripts/search/data/tabular/120745.dta b/dataversedock/testdata/scripts/search/data/tabular/120745.dta new file mode 100644 index 0000000..279cbfa Binary files /dev/null and b/dataversedock/testdata/scripts/search/data/tabular/120745.dta differ diff --git a/dataversedock/testdata/scripts/search/data/tabular/1char b/dataversedock/testdata/scripts/search/data/tabular/1char new file mode 100644 index 0000000..7898192 --- /dev/null +++ b/dataversedock/testdata/scripts/search/data/tabular/1char @@ -0,0 +1 @@ +a diff --git a/dataversedock/testdata/scripts/search/data/tabular/50by1000.dta b/dataversedock/testdata/scripts/search/data/tabular/50by1000.dta new file mode 100644 index 0000000..2cbadda Binary files /dev/null and b/dataversedock/testdata/scripts/search/data/tabular/50by1000.dta differ diff --git a/dataversedock/testdata/scripts/search/data/tabular/50by1000.dta.zip b/dataversedock/testdata/scripts/search/data/tabular/50by1000.dta.zip new file mode 100644 index 0000000..4280a06 Binary files /dev/null and b/dataversedock/testdata/scripts/search/data/tabular/50by1000.dta.zip differ diff --git a/dataversedock/testdata/scripts/search/dataset-add b/dataversedock/testdata/scripts/search/dataset-add new file mode 100755 index 0000000..2b222dc --- /dev/null +++ b/dataversedock/testdata/scripts/search/dataset-add @@ -0,0 +1,2 @@ +#!/bin/sh +curl http://localhost:8080/api/datasets?owner=birds -H 'Content-type:application/json' --data-binary @data/in/datasets/1 diff --git a/dataversedock/testdata/scripts/search/dbbuiltin2shib b/dataversedock/testdata/scripts/search/dbbuiltin2shib new file mode 100755 index 0000000..1b548f4 --- /dev/null +++ b/dataversedock/testdata/scripts/search/dbbuiltin2shib @@ -0,0 +1,11 @@ +#!/bin/bash -x +#psql -c "select id,name,useridentifier from authenticateduser order by id;" dataverse_db +psql -c "select * from authenticateduser order by id;" dataverse_db +psql 
-c "select * from authenticateduserlookup order by id;" dataverse_db +psql -c "select * from builtinuser order by id;" dataverse_db +#psql -c "select id,encryptedpassword,firstname,lastname,username from builtinuser order by id;" dataverse_db +exit +psql -c "select * from roleassignment;" dataverse_db +psql -c "select datasetversionid,useridentifier from datasetversion_dataverseuser;" dataverse_db +exit +psql -c "select * from explicitgroup;" dataverse_db diff --git a/dataversedock/testdata/scripts/search/dbdatasetversion b/dataversedock/testdata/scripts/search/dbdatasetversion new file mode 100755 index 0000000..eb0f04a --- /dev/null +++ b/dataversedock/testdata/scripts/search/dbdatasetversion @@ -0,0 +1,5 @@ +#!/bin/sh +~/.homebrew/bin/psql -c " +select id,dataset_id,versionstate,license,termsofuse from datasetversion; +---select * from datasetversion; +" dataverse_db diff --git a/dataversedock/testdata/scripts/search/dbdbobject b/dataversedock/testdata/scripts/search/dbdbobject new file mode 100755 index 0000000..d69604c --- /dev/null +++ b/dataversedock/testdata/scripts/search/dbdbobject @@ -0,0 +1,2 @@ +#!/bin/bash -x +~/.homebrew/bin/psql -c "select id, dtype, modificationtime, indextime, permissionmodificationtime, permissionindextime from dvobject order by id;" dataverse_db diff --git a/dataversedock/testdata/scripts/search/dblinks b/dataversedock/testdata/scripts/search/dblinks new file mode 100755 index 0000000..7d7c089 --- /dev/null +++ b/dataversedock/testdata/scripts/search/dblinks @@ -0,0 +1,5 @@ +#!/bin/bash -x +~/.homebrew/bin/psql -c "select * from dataverselinkingdataverse order by id;" dataverse_db +~/.homebrew/bin/psql -c "select * from datasetlinkingdataverse order by id;" dataverse_db +exit +~/.homebrew/bin/psql -c "select id, alias from dataverse order by id;" dataverse_db diff --git a/dataversedock/testdata/scripts/search/dblinks-delete b/dataversedock/testdata/scripts/search/dblinks-delete new file mode 100755 index 0000000..1d9cd3e --- 
/dev/null +++ b/dataversedock/testdata/scripts/search/dblinks-delete @@ -0,0 +1,3 @@ +#!/bin/bash -x +~/.homebrew/bin/psql -c "delete from dataverselinkingdataverse;" dataverse_db +~/.homebrew/bin/psql -c "delete from datasetlinkingdataverse;" dataverse_db diff --git a/dataversedock/testdata/scripts/search/dbperms b/dataversedock/testdata/scripts/search/dbperms new file mode 100755 index 0000000..c54a133 --- /dev/null +++ b/dataversedock/testdata/scripts/search/dbperms @@ -0,0 +1,9 @@ +#!/bin/sh +~/.homebrew/bin/psql -c " +select dv.id as dvObject, au.id as user +from dvobject dv, roleassignment ra, authenticateduser au +where 1=1 +and dv.id = $1 +and dv.id = ra.definitionpoint_id +and '@'|| au.useridentifier = ra.assigneeidentifier; +" dataverse_db diff --git a/dataversedock/testdata/scripts/search/dbsavedsearch b/dataversedock/testdata/scripts/search/dbsavedsearch new file mode 100755 index 0000000..2ffb977 --- /dev/null +++ b/dataversedock/testdata/scripts/search/dbsavedsearch @@ -0,0 +1,6 @@ +#!/bin/bash -x +~/.homebrew/bin/psql -c "select * from savedsearch order by id;" dataverse_db +~/.homebrew/bin/psql -c "select * from savedsearchfilterquery order by id;" dataverse_db +exit +~/.homebrew/bin/psql -c "drop table savedsearch cascade;" dataverse_db +~/.homebrew/bin/psql -c "drop table savedsearchfilterquery cascade;" dataverse_db diff --git a/dataversedock/testdata/scripts/search/dbsavedsearch-delete b/dataversedock/testdata/scripts/search/dbsavedsearch-delete new file mode 100755 index 0000000..6d0642f --- /dev/null +++ b/dataversedock/testdata/scripts/search/dbsavedsearch-delete @@ -0,0 +1,6 @@ +#!/bin/bash -x +~/.homebrew/bin/psql -c "delete from savedsearchfilterquery;" dataverse_db +~/.homebrew/bin/psql -c "delete from savedsearch cascade;" dataverse_db +exit +~/.homebrew/bin/psql -c "drop table savedsearch cascade;" dataverse_db +~/.homebrew/bin/psql -c "drop table savedsearchfilterquery cascade;" dataverse_db diff --git 
a/dataversedock/testdata/scripts/search/dbshibgroups b/dataversedock/testdata/scripts/search/dbshibgroups new file mode 100755 index 0000000..93c93cc --- /dev/null +++ b/dataversedock/testdata/scripts/search/dbshibgroups @@ -0,0 +1,5 @@ +#!/bin/bash -x +psql -c "select * from shibgroup;" dataverse_db +psql -c "select * from authenticateduser;" dataverse_db +psql -c "select * from persistedglobalgroup;" dataverse_db +psql -c "select * from roleassignment;" dataverse_db diff --git a/dataversedock/testdata/scripts/search/dbusers b/dataversedock/testdata/scripts/search/dbusers new file mode 100755 index 0000000..283aa99 --- /dev/null +++ b/dataversedock/testdata/scripts/search/dbusers @@ -0,0 +1,10 @@ +#!/bin/sh +~/.homebrew/bin/psql -c " +select * from builtinuser; +" dataverse_db +~/.homebrew/bin/psql -c " +select * from authenticateduser; +" dataverse_db +~/.homebrew/bin/psql -c " +select * from authenticateduserlookup; +" dataverse_db diff --git a/dataversedock/testdata/scripts/search/ds.tsv b/dataversedock/testdata/scripts/search/ds.tsv new file mode 100644 index 0000000..de48427 --- /dev/null +++ b/dataversedock/testdata/scripts/search/ds.tsv @@ -0,0 +1,8 @@ +id title author owner description citationDate distributor +1 general dataset Dr. Doctor 1 About birds 2013-12-11 For All +2 bird dataset Dr. Bird 2 bird study 1 2003-12-11 For the Birds +3 bird dataset Dr. Bird 2 bird study 2 2003-12-11 For the Birds +4 finch dataset Dr. Bird 3 bird study 2 2003-12-11 For the Birds +5 goldfinch dataset Dr. Bird 5 bird study 2 2003-12-11 For the Birds +6 tree dataset Dr. Tree 4 tree study 2 2003-12-11 For the Trees +7 chestnut dataset Dr. 
Tree 6 tree study 2003-12-11 For the Trees diff --git a/dataversedock/testdata/scripts/search/dv.tsv b/dataversedock/testdata/scripts/search/dv.tsv new file mode 100755 index 0000000..3480924 --- /dev/null +++ b/dataversedock/testdata/scripts/search/dv.tsv @@ -0,0 +1,10 @@ +id name alias owner contactEmail description affiliation +1 Nature nature root@nature.com (not used) Earth Inc. +2 Birds birds 1 birds@birds.com A bird dataverse with some trees Birds Inc. +3 Trees trees 1 trees@trees.com A tree dataverse with some birds Trees Inc. +4 Finches finches 2 finches@birds.com A dataverse with finches Birds Inc. +5 Sparrows sparrows 2 sparrows@birds.com A dataverse featuring sparrows Birds Inc. +6 Wrens wrens 2 wrens@birds.com A dataverse full of wrens Birds Inc. +7 Spruce spruce 3 spruce@trees.com A spruce with some birds Trees Inc. +8 Chestnut Sparrows chestnutsparrows 5 chestnutsparrows@birds.com A dataverse with chestnut sparrows Birds Inc. +9 Chestnut Trees chestnuttrees 3 chestnuttrees@trees.com A dataverse with chestnut trees and an oriole Trees Inc. 
diff --git a/dataversedock/testdata/scripts/search/empty-entityid-check b/dataversedock/testdata/scripts/search/empty-entityid-check new file mode 100755 index 0000000..e9ea02b --- /dev/null +++ b/dataversedock/testdata/scripts/search/empty-entityid-check @@ -0,0 +1,3 @@ +#!/bin/sh +# see also https://redmine.hmdc.harvard.edu/issues/3809 +curl 'http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q=-entityid:*' diff --git a/dataversedock/testdata/scripts/search/export-keys b/dataversedock/testdata/scripts/search/export-keys new file mode 100755 index 0000000..b568879 --- /dev/null +++ b/dataversedock/testdata/scripts/search/export-keys @@ -0,0 +1,10 @@ +#!/bin/bash +# `source path/to/this/file` so you can use these keys elsewhere +export ADMINKEY=`cat /tmp/setup-all.sh.out | grep apiToken| jq .data.apiToken | tr -d \"` +export SEARCH_USER_DIR=/tmp/searchusers +export FINCHKEY=`cat $SEARCH_USER_DIR/1 | jq .data.apiToken | tr -d \"` +export SPARROWKEY=`cat $SEARCH_USER_DIR/2 | jq .data.apiToken | tr -d \"` +export WRENKEY=`cat $SEARCH_USER_DIR/3 | jq .data.apiToken | tr -d \"` +export SPRUCEKEY=`cat $SEARCH_USER_DIR/4 | jq .data.apiToken | tr -d \"` +export CHESTNUTKEY=`cat $SEARCH_USER_DIR/5 | jq .data.apiToken | tr -d \"` +export PSIADMINKEY=`cat $SEARCH_USER_DIR/6 | jq .data.apiToken | tr -d \"` diff --git a/dataversedock/testdata/scripts/search/files b/dataversedock/testdata/scripts/search/files new file mode 100755 index 0000000..361c984 --- /dev/null +++ b/dataversedock/testdata/scripts/search/files @@ -0,0 +1,3 @@ +#!/bin/sh +curl http://localhost:8080/api/index +curl -s 'http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q=*&fq=dvtype:files' | jq '.response.docs[] | {name_sort, id, parentid}' diff --git a/dataversedock/testdata/scripts/search/go b/dataversedock/testdata/scripts/search/go new file mode 100755 index 0000000..e13a5cf --- /dev/null +++ b/dataversedock/testdata/scripts/search/go @@ -0,0 +1,10 @@ 
+#!/bin/bash -x +./clear +sleep .5 +#./populate +#./create +./add +# elasticsearch might need more time before query +sleep 1 +./query +./search diff --git a/dataversedock/testdata/scripts/search/index b/dataversedock/testdata/scripts/search/index new file mode 100755 index 0000000..448c7a3 --- /dev/null +++ b/dataversedock/testdata/scripts/search/index @@ -0,0 +1,6 @@ +#!/bin/bash +# curl -s "http://localhost:8080/api/admin/index?numPartitions=$1&partitionIdToProcess=$2&previewOnly=$3" +scripts/search/clear +curl -s -X DELETE http://localhost:8080/api/admin/index/timestamps +curl -s "http://localhost:8080/api/admin/index/continue?numPartitions=1&partitionIdToProcess=0&previewOnly=true" | jq .data.previewOfPartitionWorkload.dvContainerIds.dataverses[] | while read j; do curl http://localhost:8080/api/admin/index/dataverses/$j; done +curl -s "http://localhost:8080/api/admin/index/continue?numPartitions=1&partitionIdToProcess=0&previewOnly=true" | jq .data.previewOfPartitionWorkload.dvContainerIds.datasets[] | while read i; do curl http://localhost:8080/api/admin/index/datasets/$i; done diff --git a/dataversedock/testdata/scripts/search/index-status b/dataversedock/testdata/scripts/search/index-status new file mode 100755 index 0000000..8575223 --- /dev/null +++ b/dataversedock/testdata/scripts/search/index-status @@ -0,0 +1,2 @@ +#!/bin/sh +curl -s http://localhost:8080/api/admin/index/status | jq . 
diff --git a/dataversedock/testdata/scripts/search/json2ids b/dataversedock/testdata/scripts/search/json2ids new file mode 100755 index 0000000..7afa9a5 --- /dev/null +++ b/dataversedock/testdata/scripts/search/json2ids @@ -0,0 +1,23 @@ +#!/usr/bin/python +"""Find ids in JSON document""" +import sys +try: + import json +except ImportError: + import simplejson as json +import optparse +parser = optparse.OptionParser(description=__doc__) +options, args = parser.parse_args() + +if not args: + print "Please supply a filename to process" + sys.exit(1) + +json_data=open(args[0]) +data = json.load(json_data) +ids=[] +for i in data: + id = i["entityid_l"] + ids.append(str(id)) +print ' '.join(ids) +json_data.close() diff --git a/dataversedock/testdata/scripts/search/populate b/dataversedock/testdata/scripts/search/populate new file mode 100755 index 0000000..77b0a0f --- /dev/null +++ b/dataversedock/testdata/scripts/search/populate @@ -0,0 +1,27 @@ +#!/bin/bash +DVDIR='data/in/dataverses' +DVDIR_ROOT='data/in/dataverses.root' +DVDIR_BIRDS='data/in/dataverses.birds' +DVDIR_TREES='data/in/dataverses.trees' +#DSDIR='data/in/datasets' +#FILESDIR='data/in/files' +#mkdir -p $DSDIR +#mkdir -p $FILESDIR +rm -rf data/in +mkdir -p $DVDIR +mkdir -p $DVDIR_ROOT +mkdir -p $DVDIR_BIRDS +mkdir -p $DVDIR_TREES +count=1; ./tab2json dv.tsv | while read i; do echo $i | python -m json.tool > $DVDIR/$count; let count++; done +rm $DVDIR/1 +mv $DVDIR/2 $DVDIR_ROOT/2 +mv $DVDIR/3 $DVDIR_ROOT/3 +mv $DVDIR/4 $DVDIR_BIRDS/4 +mv $DVDIR/5 $DVDIR_BIRDS/5 +mv $DVDIR/6 $DVDIR_BIRDS/6 +mv $DVDIR/7 $DVDIR_TREES/7 +rm $DVDIR/8 +mv $DVDIR/9 $DVDIR_TREES/9 +rmdir $DVDIR +#count=1; ./tab2json ds.tsv | while read i; do echo $i | python -m json.tool > $DSDIR/$count; let count++; done +#count=1; ./tab2json files.tsv | while read i; do echo $i | python -m json.tool > $FILESDIR/$count; let count++; done diff --git a/dataversedock/testdata/scripts/search/populate-bird-dvs1 
b/dataversedock/testdata/scripts/search/populate-bird-dvs1 new file mode 100755 index 0000000..b7a9d6a --- /dev/null +++ b/dataversedock/testdata/scripts/search/populate-bird-dvs1 @@ -0,0 +1,8 @@ +#!/bin/bash +BASEDIR='scripts/search' +OUTDIR='data/in/dv-birds1' +FULL_OUTDIR="$BASEDIR/$OUTDIR" +rm -rf $FULL_OUTDIR +mkdir -p $FULL_OUTDIR +cd $BASEDIR +count=1; ./tab2json-dvs data/dv-birds1.tsv | while read i; do echo $i | python -m json.tool > $OUTDIR/$count; let count++; done diff --git a/dataversedock/testdata/scripts/search/populate-psi-dvs b/dataversedock/testdata/scripts/search/populate-psi-dvs new file mode 100755 index 0000000..ec966cc --- /dev/null +++ b/dataversedock/testdata/scripts/search/populate-psi-dvs @@ -0,0 +1,8 @@ +#!/bin/bash +BASEDIR='scripts/search' +OUTDIR='data/in/dv-psi' +FULL_OUTDIR="$BASEDIR/$OUTDIR" +rm -rf $FULL_OUTDIR +mkdir -p $FULL_OUTDIR +cd $BASEDIR +count=1; ./tab2json-dvs data/dv-psi.tsv | while read i; do echo $i | python -m json.tool > $OUTDIR/$count; let count++; done diff --git a/dataversedock/testdata/scripts/search/populate-tree-dvs1 b/dataversedock/testdata/scripts/search/populate-tree-dvs1 new file mode 100755 index 0000000..27473bf --- /dev/null +++ b/dataversedock/testdata/scripts/search/populate-tree-dvs1 @@ -0,0 +1,8 @@ +#!/bin/bash +BASEDIR='scripts/search' +OUTDIR='data/in/dv-trees1' +FULL_OUTDIR="$BASEDIR/$OUTDIR" +rm -rf $FULL_OUTDIR +mkdir -p $FULL_OUTDIR +cd $BASEDIR +count=1; ./tab2json-dvs data/dv-trees1.tsv | while read i; do echo $i | python -m json.tool > $OUTDIR/$count; let count++; done diff --git a/dataversedock/testdata/scripts/search/populate-users b/dataversedock/testdata/scripts/search/populate-users new file mode 100755 index 0000000..c24ef96 --- /dev/null +++ b/dataversedock/testdata/scripts/search/populate-users @@ -0,0 +1,8 @@ +#!/bin/bash +BASEDIR='scripts/search' +USERDIR='data/in/users' +FULL_USERDIR="$BASEDIR/$USERDIR" +rm -rf $FULL_USERDIR +mkdir -p $FULL_USERDIR +cd $BASEDIR +count=1; 
./tab2json-users users.tsv | while read i; do echo $i | python -m json.tool > $USERDIR/$count; let count++; done diff --git a/dataversedock/testdata/scripts/search/query b/dataversedock/testdata/scripts/search/query new file mode 100755 index 0000000..bd13c3e --- /dev/null +++ b/dataversedock/testdata/scripts/search/query @@ -0,0 +1,8 @@ +#!/bin/sh +curl -s 'http://localhost:8983/solr/collection1/select?rows=1000000&wt=json&indent=true&q=*%3A*' +# show combination of public stuff OR pete's private stuff +# curl -s --globoff 'http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q=*&fq=({!join+from=groups_s+to=perms_ss}id:group_public+OR+{!join+from=groups_s+to=perms_ss}id:group_user2)' | jq '.response.docs[] | {name_sort}' +# https://github.com/IQSS/dataverse/issues/1262 +# curl 'http://localhost:8983/solr/collection1/select?rows=1000000&wt=json&indent=true&hl=true&hl.fl=*&q=wright&hl.snippets=10' +# remember elasticsearch? :) +#curl 'http://localhost:9200/_search?pretty=true&q=*' diff --git a/dataversedock/testdata/scripts/search/saved-search b/dataversedock/testdata/scripts/search/saved-search new file mode 100755 index 0000000..a483930 --- /dev/null +++ b/dataversedock/testdata/scripts/search/saved-search @@ -0,0 +1,15 @@ +#!/bin/bash +. scripts/search/export-keys +# 2015-03-26 11:48:50.43 +curl -s http://localhost:8080/api/admin/savedsearches/list?key=$ADMINKEY | jq . +if [ ! -z "$1" ]; then + curl -s http://localhost:8080/api/dataverses/$1/links?key=$ADMINKEY | jq . +fi +if [ ! -z "$2" ]; then + curl -s http://localhost:8080/api/datasets/$2/links?key=$ADMINKEY | jq . +fi +exit +curl -s http://localhost:8080/api/admin/savedsearches -X POST -H 'Content-type:application/json' --upload-file scripts/search/data/savedSearchBasic.json | jq . +curl -s http://localhost:8080/api/admin/savedsearches -X POST -H 'Content-type:application/json' --upload-file scripts/search/data/savedSearchAdvanced.json | jq . 
+# curl -s -X DELETE http://localhost:8080/api/admin/savedsearches/999 +scripts/search/dbsavedsearch diff --git a/dataversedock/testdata/scripts/search/saved-search-setup b/dataversedock/testdata/scripts/search/saved-search-setup new file mode 100755 index 0000000..d99c2c2 --- /dev/null +++ b/dataversedock/testdata/scripts/search/saved-search-setup @@ -0,0 +1,4 @@ +#!/bin/bash +curl -X PUT -d true http://localhost:8080/api/admin/settings/:SearchApiNonPublicAllowed +echo +curl -s http://localhost:8080/api/admin/savedsearches -X POST -H 'Content-type:application/json' --upload-file scripts/search/data/savedSearchMaliBasicHealth.json | jq . diff --git a/dataversedock/testdata/scripts/search/saved-search-test b/dataversedock/testdata/scripts/search/saved-search-test new file mode 100755 index 0000000..96def1b --- /dev/null +++ b/dataversedock/testdata/scripts/search/saved-search-test @@ -0,0 +1,5 @@ +#!/bin/bash +. scripts/search/export-keys +#curl -s -X PUT http://localhost:8080/api/admin/savedsearches/makelinks/all | jq . +diff -u scripts/search/tests/expected/saved-search <(curl -s "http://localhost:8080/api/search?key=$ADMINKEY&sort=name&subtree=psimalihealth&q=*" | jq '.data.items[] | {name,type}') +diff -u scripts/search/tests/expected/saved-search-links <(curl -s http://localhost:8080/api/dataverses/psimalihealth/links?key=$ADMINKEY | jq .data) diff --git a/dataversedock/testdata/scripts/search/search b/dataversedock/testdata/scripts/search/search new file mode 100755 index 0000000..ac14596 --- /dev/null +++ b/dataversedock/testdata/scripts/search/search @@ -0,0 +1,11 @@ +#!/bin/sh +if [ -z "$1" ]; then + curl -H "X-Dataverse-key: $API_TOKEN" -s 'http://localhost:8080/api/search?q=*' + #curl -s 'http://localhost:8080/api/search?q=*&key=pete' +else + # i.e. ./search 'q=*&fq=filetype_s:"image"&fq=dvtype:files' + # i.e. ./search 'q=*&start=10' + # i.e. ./search 'q=*&sort=name_sort&order=asc' + # i.e. 
./search 'q=*&sort=name_sort&order=asc' | jq '.itemsJson[] | {name_sort}' + curl -H "X-Dataverse-key: $API_TOKEN" -s "http://localhost:8080/api/search?$1" +fi diff --git a/dataversedock/testdata/scripts/search/solr-delete-id b/dataversedock/testdata/scripts/search/solr-delete-id new file mode 100755 index 0000000..302a84e --- /dev/null +++ b/dataversedock/testdata/scripts/search/solr-delete-id @@ -0,0 +1,12 @@ +#!/bin/bash +if [ -z "$1" ]; then + echo "No Solr ID provided." + exit 1 +else + echo "Deleting Solr id $1" + OUTPUT=`curl -s http://localhost:8983/solr/update/json?commit=true -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"id:$1\"}}"` + # exit code 7 is expected when Solr is down + EXIT_CODE=$? + #echo $EXIT_CODE + #echo $OUTPUT +fi diff --git a/dataversedock/testdata/scripts/search/spellcheck b/dataversedock/testdata/scripts/search/spellcheck new file mode 100755 index 0000000..6ae8fee --- /dev/null +++ b/dataversedock/testdata/scripts/search/spellcheck @@ -0,0 +1,5 @@ +#!/bin/sh +# output: +# "hits",1, +# "misspellingsAndCorrections",["datvrse","dataverse"] +curl -s 'http://localhost:8983/solr/spell?spellcheck=true&wt=json&indent=true&q=datvrse' diff --git a/dataversedock/testdata/scripts/search/tab2json b/dataversedock/testdata/scripts/search/tab2json new file mode 100755 index 0000000..a4cdc3d --- /dev/null +++ b/dataversedock/testdata/scripts/search/tab2json @@ -0,0 +1,53 @@ +#!/usr/bin/env python +import sys +from optparse import OptionParser +import csv +try: + import json +except ImportError: + import simplejson as json + +parser = OptionParser() +options, args = parser.parse_args() + +if args: + csv_file = open(args[0]) +else: + csv_file = sys.stdin + +reader = csv.DictReader(csv_file, delimiter="\t") +rows = [row for row in reader] +for row in rows: + if "name" in row and "alias" in row and row["id"] == "1": + del row["id"] + del row["owner"] + row["permissionRoot"] = "false" + elif "title" in row: + row["@type"] = 
"dataset" + row["files"] = [] + row["versions"] = [] + del row["id"] + del row["owner"] + del row["title"] + del row["author"] + del row["citationDate"] + del row["distributor"] + elif "contentType" in row: + del row["id"] + row["permissionRoot"] = "false" + #print "must be a file..." + dataset_id = row["dataset"] + #row["dataset"] = {"id": dataset_id} + else: + del row["id"] + row["permissionRoot"] = "false" + del row["owner"] + #if row["id"] == "1": + print json.dumps(row) +csv_file.close() +# sample dataverse file: +#id name alias owner contactEmail description affiliation +#2 Birds birds 1 birds@birds.com A birds dataverse Birds Inc. +# sample dataset file: +#id title author owner description citationDate distributor +#1 birdstudy1 Dr. Finch 1 About birds 2013-12-11 For the Birds diff --git a/dataversedock/testdata/scripts/search/tab2json-dvs b/dataversedock/testdata/scripts/search/tab2json-dvs new file mode 100755 index 0000000..1864532 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tab2json-dvs @@ -0,0 +1,34 @@ +#!/usr/bin/env python +import sys +from optparse import OptionParser +import csv +try: + import json +except ImportError: + import simplejson as json + +parser = OptionParser() +options, args = parser.parse_args() + +if args: + csv_file = open(args[0]) +else: + csv_file = sys.stdin + +reader = csv.DictReader(csv_file, delimiter="\t") +rows = [row for row in reader] +for row in rows: + if "contactEmail" in row: + contactArray = [] + contactHash = {} + contactHash["contactEmail"] = row["contactEmail"] + contactArray.append(contactHash) + row["dataverseContacts"] = contactArray + del row["contactEmail"] + if "subject" in row: + subjectsArray = [] + subjectsArray.append(row["subject"]) + row["dataverseSubjects"] = subjectsArray + del row["subject"] + print json.dumps(row) +csv_file.close() diff --git a/dataversedock/testdata/scripts/search/tab2json-users b/dataversedock/testdata/scripts/search/tab2json-users new file mode 100755 index 
0000000..388d54d --- /dev/null +++ b/dataversedock/testdata/scripts/search/tab2json-users @@ -0,0 +1,22 @@ +#!/usr/bin/env python +import sys +from optparse import OptionParser +import csv +try: + import json +except ImportError: + import simplejson as json + +parser = OptionParser() +options, args = parser.parse_args() + +if args: + csv_file = open(args[0]) +else: + csv_file = sys.stdin + +reader = csv.DictReader(csv_file, delimiter="\t") +rows = [row for row in reader] +for row in rows: + print json.dumps(row) +csv_file.close() diff --git a/dataversedock/testdata/scripts/search/tests/add-members-to-trees-group b/dataversedock/testdata/scripts/search/tests/add-members-to-trees-group new file mode 100755 index 0000000..20d9e95 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/add-members-to-trees-group @@ -0,0 +1,3 @@ +#!/bin/sh +curl -X PUT "http://localhost:8080/api/dataverses/root/groups/trees/roleAssignees/@chestnut?key=$ADMINKEY" +curl -X PUT "http://localhost:8080/api/dataverses/root/groups/trees/roleAssignees/@spruce?key=$ADMINKEY" diff --git a/dataversedock/testdata/scripts/search/tests/create-all-and-test b/dataversedock/testdata/scripts/search/tests/create-all-and-test new file mode 100755 index 0000000..a49b0e2 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/create-all-and-test @@ -0,0 +1,38 @@ +#!/bin/sh +. 
scripts/search/export-keys +echo "Creating bird and tree dataverses" +scripts/search/populate-bird-dvs1 +scripts/search/create-bird-dvs1 > /tmp/bird-dvs1 +scripts/search/populate-tree-dvs1 +scripts/search/create-tree-dvs1 > /tmp/tree-dvs1 +echo "Creating some datasets" +curl -s --insecure --data-binary @scripts/search/tests/data/dataset-trees1.xml -H 'Content-Type: application/atom+xml' -u $SPRUCEKEY: https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/collection/dataverse/spruce | xmllint -format - >/dev/null +curl -s -X POST -H "Content-type:application/json" -d @scripts/search/tests/data/dataset-finch1.json "http://localhost:8080/api/dataverses/finches/datasets/?key=$FINCHKEY" >/dev/null +echo "Uploading a file via the SWORD API" +. scripts/search/assumptions +curl -s --insecure --data-binary @scripts/search/data/binary/trees.zip -H 'Content-Disposition: filename=trees.zip' -H 'Content-Type: application/zip' -H 'Packaging: http://purl.org/net/sword/package/SimpleZip' -u $SPRUCEKEY: https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit-media/study/$FIRST_SPRUCE_DOI >/dev/null +echo "Uploading a file via the native API" +# echo $FIRST_FINCH_DOI # FIXME: Why is this empty? +STATUS_CODE_FROM_UPLOADING_FILE_VIA_NATIVE=$(curl -H "X-Dataverse-key:$FINCHKEY" --insecure --write-out %{http_code} --silent --output /dev/null -X POST -F "file=@scripts/search/data/replace_test/growing_file/2016-01/data.tsv" -F 'jsonData={"description":"My description.","categories":["Data"]}' "http://localhost:8080/api/v1/datasets/$FIRST_FINCH_DATASET_ID/add") +if [[ "$STATUS_CODE_FROM_UPLOADING_FILE_VIA_NATIVE" != 200 ]]; then + echo "Couldn't upload file to dataset $FIRST_FINCH_DATASET_ID via native API!" + exit 1 +fi +# give the file a little time to ingest +sleep 2 +echo "Everything in draft, checking permissions. Silence is golden." +scripts/search/tests/permissions1 +echo "Done." +. 
scripts/search/assumptions +echo "Giving $SPRUCE_USERNAME "admin" on Birds dataverse" +scripts/search/tests/grant-spruce-admin-on-birds +echo Re-testing permissions. Silence is golden +scripts/search/tests/permissions2 +echo Done +. scripts/search/assumptions +echo "Revoking that role" +#curl -s -X DELETE "http://localhost:8080/api/dataverses/$BIRDS_DATAVERSE/assignments/$SPRUCE_ADMIN_ON_BIRDS?key=$FINCHKEY" >/dev/null +scripts/search/tests/revoke-spruce-admin-on-birds +echo "Making sure original permissions are back. Silence is golden." +scripts/search/tests/permissions1 +echo "Done" diff --git a/dataversedock/testdata/scripts/search/tests/create-saved-search-and-test b/dataversedock/testdata/scripts/search/tests/create-saved-search-and-test new file mode 100755 index 0000000..ac54092 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/create-saved-search-and-test @@ -0,0 +1,15 @@ +#!/bin/sh +. scripts/search/export-keys +curl -X PUT -d true http://localhost:8080/api/admin/settings/:SearchApiNonPublicAllowed +echo +scripts/search/populate-psi-dvs +scripts/search/create-psi-dvs > /tmp/psi-dvs1 +curl -s -X POST -H "Content-type:application/json" -d @scripts/search/tests/data/dataset-mali1.json "http://localhost:8080/api/dataverses/psimali/datasets/?key=$PSIADMINKEY" >/dev/null +curl -s -X POST -H "Content-type:application/json" -d @scripts/search/tests/data/dataset-mali2.json "http://localhost:8080/api/dataverses/psimali/datasets/?key=$PSIADMINKEY" >/dev/null +WOMEN_IN_MALI_DOI=`curl -s --globoff "http://localhost:8080/api/search?key=$ADMINKEY&q=title:\"Women+in+Mali+dataset+1\"" | jq '.data.items[].global_id' | sed 's/"//g'` +curl -s --insecure --data-binary @scripts/search/data/binary/health.zip -H 'Content-Disposition: filename=health.zip' -H 'Content-Type: application/zip' -H 'Packaging: http://purl.org/net/sword/package/SimpleZip' -u $PSIADMINKEY: https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit-media/study/$WOMEN_IN_MALI_DOI >/dev/null 
+scripts/search/saved-search-setup +curl -s -X PUT http://localhost:8080/api/admin/savedsearches/makelinks/all | jq . +echo "Running verification tests (silence is golden)" +scripts/search/saved-search-test +echo "Done" diff --git a/dataversedock/testdata/scripts/search/tests/data/dataset-finch1.json b/dataversedock/testdata/scripts/search/tests/data/dataset-finch1.json new file mode 100644 index 0000000..ec0856a --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/data/dataset-finch1.json @@ -0,0 +1,77 @@ +{ + "datasetVersion": { + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "Darwin's Finches", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorName": { + "value": "Finch, Fiona", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + }, + "authorAffiliation": { + "value": "Birds Inc.", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": [ + { "datasetContactEmail" : { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactEmail", + "value" : "finch@mailinator.com" + }, + "datasetContactName" : { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactName", + "value": "Finch, Fiona" + } + }], + "typeClass": "compound", + "multiple": true, + "typeName": "datasetContact" + }, + { + "value": [ { + "dsDescriptionValue":{ + "value": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds.", + "multiple":false, + "typeClass": "primitive", + "typeName": "dsDescriptionValue" + }}], + "typeClass": "compound", + "multiple": true, + "typeName": "dsDescription" + }, + { + "value": [ + "Medicine, Health and Life Sciences" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + } + ], + "displayName": 
"Citation Metadata" + } + } + } +} diff --git a/dataversedock/testdata/scripts/search/tests/data/dataset-finch2.json b/dataversedock/testdata/scripts/search/tests/data/dataset-finch2.json new file mode 100644 index 0000000..d20f835 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/data/dataset-finch2.json @@ -0,0 +1,82 @@ +{ + "datasetVersion": { + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "HTML & More", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorName": { + "value": "Markup, Marty", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + }, + "authorAffiliation": { + "value": "W4C", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": [ + { + "datasetContactEmail": { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactEmail", + "value": "markup@mailinator.com" + }, + "datasetContactName": { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactName", + "value": "Markup, Marty" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "datasetContact" + }, + { + "value": [ + { + "dsDescriptionValue": { + "value": "BEGIN

                            END", + "multiple": false, + "typeClass": "primitive", + "typeName": "dsDescriptionValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "dsDescription" + }, + { + "value": [ + "Medicine, Health and Life Sciences" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + } + ], + "displayName": "Citation Metadata" + } + } + } +} diff --git a/dataversedock/testdata/scripts/search/tests/data/dataset-mali1.json b/dataversedock/testdata/scripts/search/tests/data/dataset-mali1.json new file mode 100644 index 0000000..372a4a9 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/data/dataset-mali1.json @@ -0,0 +1,71 @@ +{ + "datasetVersion": { + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "Mali health dataset 1", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorName": { + "value": "Admin, PSI", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + }, + "authorAffiliation": { + "value": "PSI", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": [ + { "datasetContactEmail" : { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactEmail", + "value" : "psiadmin@mailinator.com" + } + }], + "typeClass": "compound", + "multiple": true, + "typeName": "datasetContact" + }, + { + "value": [ { + "dsDescriptionValue":{ + "value": "Sample dataset about health in Mali used for saved search testing.", + "multiple":false, + "typeClass": "primitive", + "typeName": "dsDescriptionValue" + }}], + "typeClass": "compound", + "multiple": true, + "typeName": "dsDescription" + }, + { + "value": [ + "Social Sciences" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + } + ], + "displayName": "Citation 
Metadata" + } + } + } +} diff --git a/dataversedock/testdata/scripts/search/tests/data/dataset-mali2.json b/dataversedock/testdata/scripts/search/tests/data/dataset-mali2.json new file mode 100644 index 0000000..e9c3286 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/data/dataset-mali2.json @@ -0,0 +1,71 @@ +{ + "datasetVersion": { + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "Women in Mali dataset 1", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorName": { + "value": "Admin, PSI", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + }, + "authorAffiliation": { + "value": "PSI", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": [ + { "datasetContactEmail" : { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactEmail", + "value" : "psiadmin@mailinator.com" + } + }], + "typeClass": "compound", + "multiple": true, + "typeName": "datasetContact" + }, + { + "value": [ { + "dsDescriptionValue":{ + "value": "Sample dataset about women in Mali used for saved search testing.", + "multiple":false, + "typeClass": "primitive", + "typeName": "dsDescriptionValue" + }}], + "typeClass": "compound", + "multiple": true, + "typeName": "dsDescription" + }, + { + "value": [ + "Social Sciences" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + } + ], + "displayName": "Citation Metadata" + } + } + } +} diff --git a/dataversedock/testdata/scripts/search/tests/data/dataset-trees1-edit-subject.xml b/dataversedock/testdata/scripts/search/tests/data/dataset-trees1-edit-subject.xml new file mode 100644 index 0000000..d5db66c --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/data/dataset-trees1-edit-subject.xml @@ -0,0 +1,14 @@ + + + Spruce Goose + Spruce, 
Sabrina + What the Spruce Goose was really made of. + Creative Commons CC-BY 3.0 (unported) http://creativecommons.org/licenses/by/3.0/ + + + Engineering + diff --git a/dataversedock/testdata/scripts/search/tests/data/dataset-trees1-edit.xml b/dataversedock/testdata/scripts/search/tests/data/dataset-trees1-edit.xml new file mode 100644 index 0000000..98cfa40 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/data/dataset-trees1-edit.xml @@ -0,0 +1,12 @@ + + + Spruce Goose + Spruce, Sabrina + What the Spruce Goose was *really* made of. + NONE + + diff --git a/dataversedock/testdata/scripts/search/tests/data/dataset-trees1.xml b/dataversedock/testdata/scripts/search/tests/data/dataset-trees1.xml new file mode 100644 index 0000000..ab2a610 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/data/dataset-trees1.xml @@ -0,0 +1,18 @@ + + + Spruce Goose + Spruce, Sabrina + What the Spruce Goose was really made of. + Downloader will not use the Materials in any way prohibited by applicable laws. + + + diff --git a/dataversedock/testdata/scripts/search/tests/data/dv-dash.json b/dataversedock/testdata/scripts/search/tests/data/dv-dash.json new file mode 100644 index 0000000..4d97418 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/data/dv-dash.json @@ -0,0 +1,8 @@ + { + "alias":"dash", + "name":"Titanic - 1999", + "affiliation":"Affiliation value", + "contactEmail":"pete@mailinator.com", + "permissionRoot":false, + "description":"A dataverse with a - (a dash) in the description" +} diff --git a/dataversedock/testdata/scripts/search/tests/delete-all-and-test b/dataversedock/testdata/scripts/search/tests/delete-all-and-test new file mode 100755 index 0000000..8b3c5a6 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/delete-all-and-test @@ -0,0 +1,27 @@ +#!/bin/bash +. scripts/search/export-keys +. 
scripts/search/assumptions + +# delete spruce file +curl -s --insecure -X DELETE -u $SPRUCEKEY: https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit-media/file/$FIRST_SPRUCE_FILE >/dev/null +# delete spruce dataset +curl -s --insecure -X DELETE -u $SPRUCEKEY: https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/study/$FIRST_SPRUCE_DOI >/dev/null +# delete finch dataset +curl -s -X DELETE "http://localhost:8080/api/datasets/$FIRST_FINCH_DATASET_ID?key=$FINCHKEY" >/dev/null + +# delete all dataverses +curl -s -X DELETE "http://localhost:8080/api/dataverses/chestnutsparrows?key=$FINCHKEY" >/dev/null +curl -s -X DELETE "http://localhost:8080/api/dataverses/sparrows?key=$FINCHKEY" >/dev/null +curl -s -X DELETE "http://localhost:8080/api/dataverses/finches?key=$FINCHKEY" >/dev/null +curl -s -X DELETE "http://localhost:8080/api/dataverses/wrens?key=$FINCHKEY" >/dev/null +curl -s -X DELETE "http://localhost:8080/api/dataverses/birds?key=$FINCHKEY" >/dev/null +curl -s -X DELETE "http://localhost:8080/api/dataverses/spruce?key=$SPRUCEKEY" >/dev/null +curl -s -X DELETE "http://localhost:8080/api/dataverses/chestnuttrees?key=$SPRUCEKEY" >/dev/null +curl -s -X DELETE "http://localhost:8080/api/dataverses/trees?key=$SPRUCEKEY" >/dev/null + +echo "Making sure finch can't see anything (silence is golden)" +diff <(curl -s "http://localhost:8080/api/search?q=*&key=$FINCHKEY" | jq '.data.total_count') scripts/search/tests/expected/zero +echo Done +echo "Making sure spruce can't see anything (silence is golden)" +diff <(curl -s "http://localhost:8080/api/search?q=*&key=$SPRUCEKEY" | jq '.data.total_count') scripts/search/tests/expected/zero +echo Done diff --git a/dataversedock/testdata/scripts/search/tests/destroy-dataset-finch1 b/dataversedock/testdata/scripts/search/tests/destroy-dataset-finch1 new file mode 100755 index 0000000..21c5574 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/destroy-dataset-finch1 @@ -0,0 +1,7 @@ +#!/bin/bash +. 
scripts/search/export-keys +. scripts/search/assumptions +echo $FIRST_FINCH_DATASET_ID +OUTPUT=`curl -s -X DELETE http://localhost:8080/api/datasets/$FIRST_FINCH_DATASET_ID/destroy?key=$FINCHKEY` +echo $OUTPUT +echo $OUTPUT | jq . diff --git a/dataversedock/testdata/scripts/search/tests/destroy-dataset-spruce1 b/dataversedock/testdata/scripts/search/tests/destroy-dataset-spruce1 new file mode 100755 index 0000000..55b72a4 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/destroy-dataset-spruce1 @@ -0,0 +1,9 @@ +#!/bin/bash +# destroying requires publishing so uncomment this if need be +# scripts/search/tests/publish-spruce1-and-test +sleep 2 +. scripts/search/export-keys +. scripts/search/assumptions +OUTPUT=`curl -s -X DELETE http://localhost:8080/api/datasets/$FIRST_SPRUCE_DATASET_ID/destroy?key=$ADMINKEY` +echo $OUTPUT +echo $OUTPUT | jq . diff --git a/dataversedock/testdata/scripts/search/tests/edit-dataset-finch1 b/dataversedock/testdata/scripts/search/tests/edit-dataset-finch1 new file mode 100755 index 0000000..3b794c9 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/edit-dataset-finch1 @@ -0,0 +1,10 @@ +#!/bin/bash +. scripts/search/export-keys +. scripts/search/assumptions +GET_VERSION_OUTPUT=`curl -s GET http://localhost:8080/api/datasets/$FIRST_FINCH_DATASET_ID/versions/:latest?key=$FINCHKEY` +echo $GET_VERSION_OUTPUT | jq .data > /tmp/old +cp /tmp/old /tmp/new +sed -i -e "s/Darwin's Finches/Darwin's Galápagos Finches/" /tmp/new +EDIT_OUTPUT=`curl -s -H "Content-type:application/json" -X PUT -d @/tmp/new http://localhost:8080/api/datasets/$FIRST_FINCH_DATASET_ID/versions/:draft?key=$FINCHKEY` +echo $EDIT_OUTPUT +echo $EDIT_OUTPUT | jq . 
diff --git a/dataversedock/testdata/scripts/search/tests/expected/anon b/dataversedock/testdata/scripts/search/tests/expected/anon new file mode 100644 index 0000000..9832b0d --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/expected/anon @@ -0,0 +1 @@ +"Please provide a key query parameter (?key=XXX) or via the HTTP header X-Dataverse-key" diff --git a/dataversedock/testdata/scripts/search/tests/expected/anon-empty b/dataversedock/testdata/scripts/search/tests/expected/anon-empty new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/expected/anon-empty @@ -0,0 +1 @@ +[] diff --git a/dataversedock/testdata/scripts/search/tests/expected/anon3 b/dataversedock/testdata/scripts/search/tests/expected/anon3 new file mode 100644 index 0000000..b8626c4 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/expected/anon3 @@ -0,0 +1 @@ +4 diff --git a/dataversedock/testdata/scripts/search/tests/expected/anon3-full b/dataversedock/testdata/scripts/search/tests/expected/anon3-full new file mode 100644 index 0000000..945a89c --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/expected/anon3-full @@ -0,0 +1,50 @@ +{ + "data": { + "count_in_response": 4, + "fq_provided": "[]", + "items": [ + { + "alias": "trees", + "description": "A tree dataverse with some birds", + "name": "Trees", + "published_at": "2015-01-08T03:27Z", + "type": "dataverses", + "url": "https://murphy.local/dataverse/trees" + }, + { + "authors": [ + "Spruce, Sabrina" + ], + "citation": "Spruce, Sabrina, 2015, \"Spruce Goose\", http://dx.doi.org/10.5072/FK2/I4VPEZ, Root Dataverse, V0", + "global_id": "doi:10.5072/FK2/I4VPEZ", + "name": "Spruce Goose", + "persistent_url": "http://dx.doi.org/10.5072/FK2/I4VPEZ", + "published_at": "2015-01-08T03:27Z", + "type": "datasets", + "url": "https://murphy.local/dataset.xhtml?globalId=doi:10.5072/FK2/I4VPEZ" + }, + { + "description": "", + "file_id": "12", + "file_type": "PNG Image", + 
"name": "trees.png", + "persistent_url": "http://dx.doi.org/10.5072/FK2/I4VPEZ", + "published_at": "2015-01-08T03:27Z", + "type": "files", + "url": "https://murphy.local/dataset.xhtml?globalId=doi:10.5072/FK2/I4VPEZ" + }, + { + "alias": "spruce", + "description": "A spruce with some birds", + "name": "Spruce", + "published_at": "2015-01-08T03:27Z", + "type": "dataverses", + "url": "https://murphy.local/dataverse/spruce" + } + ], + "q": "*", + "start": 0, + "total_count": 4 + }, + "status": "OK" +} diff --git a/dataversedock/testdata/scripts/search/tests/expected/anontest3 b/dataversedock/testdata/scripts/search/tests/expected/anontest3 new file mode 100644 index 0000000..325d80c --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/expected/anontest3 @@ -0,0 +1,6 @@ +[ + "files:trees.png", + "datasets:Spruce Goose", + "dataverses:Trees", + "dataverses:Spruce" +] diff --git a/dataversedock/testdata/scripts/search/tests/expected/finch1 b/dataversedock/testdata/scripts/search/tests/expected/finch1 new file mode 100644 index 0000000..f9fa17c --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/expected/finch1 @@ -0,0 +1,9 @@ +[ + "files:data.tsv", + "datasets:Darwin's Finches", + "dataverses:Birds", + "dataverses:Finches", + "dataverses:Sparrows", + "dataverses:Wrens", + "dataverses:Chestnut Sparrows" +] diff --git a/dataversedock/testdata/scripts/search/tests/expected/finch3 b/dataversedock/testdata/scripts/search/tests/expected/finch3 new file mode 100644 index 0000000..1d82dc5 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/expected/finch3 @@ -0,0 +1,12 @@ +[ + "files:trees.png", + "datasets:Spruce Goose", + "datasets:Darwin's Finches", + "dataverses:Birds", + "dataverses:Finches", + "dataverses:Sparrows", + "dataverses:Wrens", + "dataverses:Chestnut Sparrows", + "dataverses:Trees", + "dataverses:Spruce" +] diff --git a/dataversedock/testdata/scripts/search/tests/expected/nosuchuser 
b/dataversedock/testdata/scripts/search/tests/expected/nosuchuser new file mode 100644 index 0000000..450d91a --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/expected/nosuchuser @@ -0,0 +1 @@ +"Bad api key 'nosuchuser'" diff --git a/dataversedock/testdata/scripts/search/tests/expected/saved-search b/dataversedock/testdata/scripts/search/tests/expected/saved-search new file mode 100644 index 0000000..ff494dc --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/expected/saved-search @@ -0,0 +1,12 @@ +{ + "type": "dataverse", + "name": "Child of Mali Health" +} +{ + "type": "dataverse", + "name": "Grandchild of Mali Health" +} +{ + "type": "dataset", + "name": "Mali health dataset 1" +} diff --git a/dataversedock/testdata/scripts/search/tests/expected/saved-search-links b/dataversedock/testdata/scripts/search/tests/expected/saved-search-links new file mode 100644 index 0000000..c80800d --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/expected/saved-search-links @@ -0,0 +1,7 @@ +{ + "datasets that the psimalihealth has linked to": [ + "Mali health dataset 1" + ], + "dataverses that link to the psimalihealth": [], + "dataverses that the psimalihealth dataverse has linked to": [] +} diff --git a/dataversedock/testdata/scripts/search/tests/expected/solr-down b/dataversedock/testdata/scripts/search/tests/expected/solr-down new file mode 100644 index 0000000..93ff7bd --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/expected/solr-down @@ -0,0 +1,4 @@ +{ + "message": "Exception running search for [*] with filterQueries [] and paginationStart [0]: edu.harvard.iq.dataverse.search.SearchException: Internal Dataverse Search Engine Error org.apache.solr.client.solrj.SolrServerException org.apache.solr.client.solrj.SolrServerException: Server refused connection at: http://localhost:8983/solr org.apache.http.conn.HttpHostConnectException org.apache.http.conn.HttpHostConnectException: Connection to http://localhost:8983 refused 
java.net.ConnectException java.net.ConnectException: Connection refused ", + "status": "ERROR" +} diff --git a/dataversedock/testdata/scripts/search/tests/expected/spruce1 b/dataversedock/testdata/scripts/search/tests/expected/spruce1 new file mode 100644 index 0000000..f11cd12 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/expected/spruce1 @@ -0,0 +1,7 @@ +[ + "files:trees.png", + "datasets:Spruce Goose", + "dataverses:Trees", + "dataverses:Spruce", + "dataverses:Chestnut Trees" +] diff --git a/dataversedock/testdata/scripts/search/tests/expected/spruce2 b/dataversedock/testdata/scripts/search/tests/expected/spruce2 new file mode 100644 index 0000000..89ecebe --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/expected/spruce2 @@ -0,0 +1,8 @@ +[ + "files:trees.png", + "datasets:Spruce Goose", + "dataverses:Birds", + "dataverses:Trees", + "dataverses:Spruce", + "dataverses:Chestnut Trees" +] diff --git a/dataversedock/testdata/scripts/search/tests/expected/zero b/dataversedock/testdata/scripts/search/tests/expected/zero new file mode 100644 index 0000000..573541a --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/expected/zero @@ -0,0 +1 @@ +0 diff --git a/dataversedock/testdata/scripts/search/tests/explicit-group-add b/dataversedock/testdata/scripts/search/tests/explicit-group-add new file mode 100755 index 0000000..d872d55 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/explicit-group-add @@ -0,0 +1,2 @@ +#!/bin/sh +curl -X POST http://localhost:8080/api/dataverses/root/groups?key=$ADMINKEY -H "Content-type: application/json" --upload-file scripts/search/data/group-explicit-trees.json diff --git a/dataversedock/testdata/scripts/search/tests/files b/dataversedock/testdata/scripts/search/tests/files new file mode 100755 index 0000000..8874c83 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/files @@ -0,0 +1,3 @@ +#!/bin/sh +OUT=`curl -s 
"http://localhost:8080/api/admin/index/filesearch?persistentId=$1&q=$2"` +echo $OUT | jq . diff --git a/dataversedock/testdata/scripts/search/tests/grant-authusers-add-on-root b/dataversedock/testdata/scripts/search/tests/grant-authusers-add-on-root new file mode 100755 index 0000000..08b245f --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/grant-authusers-add-on-root @@ -0,0 +1,5 @@ +#!/bin/sh +. scripts/search/export-keys +OUTPUT=`curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \":authenticated-users\",\"role\": \"fullContributor\"}" "http://localhost:8080/api/dataverses/root/assignments?key=$ADMINKEY"` +echo $OUTPUT +echo $OUTPUT | jq ' .data | {assignee,_roleAlias}' diff --git a/dataversedock/testdata/scripts/search/tests/grant-finch-admin-on-spruce b/dataversedock/testdata/scripts/search/tests/grant-finch-admin-on-spruce new file mode 100755 index 0000000..f564033 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/grant-finch-admin-on-spruce @@ -0,0 +1,3 @@ +#!/bin/bash +. scripts/search/assumptions +curl -s -X POST -H 'Content-Type: application/x-www-form-urlencoded' "http://localhost:8080/api/roles/assignments?username=$FINCH_USERNAME&roleId=$ADMIN_ROLE&definitionPointId=$SPRUCE_DATAVERSE&key=$SPRUCEKEY" | jq ' .data | {assignee,_roleAlias}' diff --git a/dataversedock/testdata/scripts/search/tests/grant-ipgroup3-add-on-root b/dataversedock/testdata/scripts/search/tests/grant-ipgroup3-add-on-root new file mode 100755 index 0000000..cd58cea --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/grant-ipgroup3-add-on-root @@ -0,0 +1,5 @@ +#!/bin/sh +. 
scripts/search/export-keys +OUTPUT=`curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \"&ip/ipGroup3\",\"role\": \"dvContributor\"}" "http://localhost:8080/api/dataverses/root/assignments?key=$ADMINKEY"` +echo $OUTPUT +echo $OUTPUT | jq ' .data | {assignee,_roleAlias}' diff --git a/dataversedock/testdata/scripts/search/tests/grant-shibgroup1-add-on-root b/dataversedock/testdata/scripts/search/tests/grant-shibgroup1-add-on-root new file mode 100755 index 0000000..f016c8a --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/grant-shibgroup1-add-on-root @@ -0,0 +1,6 @@ +#!/bin/sh +. scripts/search/export-keys +OUTPUT=`curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \"&shib/1\",\"role\": \"dvContributor\"}" "http://localhost:8080/api/dataverses/root/assignments?key=$ADMINKEY"` +echo $OUTPUT +echo $OUTPUT | jq . +#echo $OUTPUT | jq ' .data | {assignee,_roleAlias}' diff --git a/dataversedock/testdata/scripts/search/tests/grant-spruce-admin-on-birds b/dataversedock/testdata/scripts/search/tests/grant-spruce-admin-on-birds new file mode 100755 index 0000000..70515ad --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/grant-spruce-admin-on-birds @@ -0,0 +1,6 @@ +#!/bin/sh +. scripts/search/assumptions +OUTPUT=`curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \"@spruce\",\"role\": \"admin\"}" "http://localhost:8080/api/dataverses/birds/assignments?key=$ADMINKEY"` +echo $OUTPUT +echo +echo $OUTPUT | jq ' .data | {assignee,_roleAlias}' diff --git a/dataversedock/testdata/scripts/search/tests/ipgroup-add b/dataversedock/testdata/scripts/search/tests/ipgroup-add new file mode 100755 index 0000000..d41679f --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/ipgroup-add @@ -0,0 +1,5 @@ +#!/bin/sh +. 
scripts/search/export-keys +OUTPUT=`curl -s -X POST -d @scripts/api/data/ipGroup-all.json http://localhost:8080/api/admin/groups/ip -H "Content-type:application/json"` +echo $OUTPUT +echo $OUTPUT | jq . diff --git a/dataversedock/testdata/scripts/search/tests/permissions1 b/dataversedock/testdata/scripts/search/tests/permissions1 new file mode 100755 index 0000000..dfb9648 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/permissions1 @@ -0,0 +1,18 @@ +#!/bin/bash +# After dropping your datbase and getting set up again per the dev guide, +# You should see no output from this script. Silence is golden. +# If you start creating dataverses and datasets, you should expect to see output. +# we plan to support API keys/tokens in https://github.com/IQSS/dataverse/issues/1299 +diff <(curl -s 'http://localhost:8080/api/search?q=*&key=nosuchuser' | jq .message) scripts/search/tests/expected/nosuchuser + +diff <(curl -s 'http://localhost:8080/api/search?q=*' | jq .message) scripts/search/tests/expected/anon + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$FINCHKEY" | jq .data) scripts/search/tests/expected/finch1 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$SPRUCEKEY" | jq .data) scripts/search/tests/expected/spruce1 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$SPARROWKEY" | jq .data) scripts/search/tests/expected/anon-empty + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$WRENKEY" | jq .data) scripts/search/tests/expected/anon-empty + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$CHESTNUTKEY" | jq .data) scripts/search/tests/expected/anon-empty diff --git a/dataversedock/testdata/scripts/search/tests/permissions2 b/dataversedock/testdata/scripts/search/tests/permissions2 new file mode 100755 index 0000000..2f650f0 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/permissions2 @@ -0,0 +1,10 @@ +#!/bin/bash +diff <(curl -s 
"http://localhost:8080/api/admin/index/test?q=*&key=$FINCHKEY" | jq .data) scripts/search/tests/expected/finch1 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$SPRUCEKEY" | jq .data) scripts/search/tests/expected/spruce2 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$SPARROWKEY" | jq .data) scripts/search/tests/expected/anon-empty + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$WRENKEY" | jq .data) scripts/search/tests/expected/anon-empty + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$CHESTNUTKEY" | jq .data) scripts/search/tests/expected/anon-empty diff --git a/dataversedock/testdata/scripts/search/tests/permissions3 b/dataversedock/testdata/scripts/search/tests/permissions3 new file mode 100755 index 0000000..8c105e7 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/permissions3 @@ -0,0 +1,12 @@ +#!/bin/bash +diff <(curl -s "http://localhost:8080/api/search?q=*" | jq .data.count_in_response) scripts/search/tests/expected/anon3 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$FINCHKEY" | jq .data) scripts/search/tests/expected/finch3 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$SPRUCEKEY" | jq .data) scripts/search/tests/expected/spruce2 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$SPARROWKEY" | jq .data) scripts/search/tests/expected/anontest3 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$WRENKEY" | jq .data) scripts/search/tests/expected/anontest3 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$CHESTNUTKEY" | jq .data) scripts/search/tests/expected/anontest3 diff --git a/dataversedock/testdata/scripts/search/tests/permissions3-full-anon b/dataversedock/testdata/scripts/search/tests/permissions3-full-anon new file mode 100755 index 0000000..4dc24b7 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/permissions3-full-anon @@ 
-0,0 +1,3 @@ +#!/bin/bash +#curl -s "http://localhost:8080/api/search?q=*" | python -m json.tool > scripts/search/tests/expected/anon3-full +diff <(curl -s "http://localhost:8080/api/search?q=*" | python -m json.tool) scripts/search/tests/expected/anon3-full diff --git a/dataversedock/testdata/scripts/search/tests/publish-dataset-spruce1 b/dataversedock/testdata/scripts/search/tests/publish-dataset-spruce1 new file mode 100755 index 0000000..14b0f80 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/publish-dataset-spruce1 @@ -0,0 +1,7 @@ +#!/bin/sh +. scripts/search/export-keys +. scripts/search/assumptions +OUTPUT=`cat /dev/null | curl -s --insecure -u $SPRUCEKEY: -X POST -H 'In-Progress: false' --data-binary @- "https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/study/$FIRST_SPRUCE_DOI"` +echo $OUTPUT +echo +echo $OUTPUT | xmllint -format - diff --git a/dataversedock/testdata/scripts/search/tests/publish-dataverse-birds b/dataversedock/testdata/scripts/search/tests/publish-dataverse-birds new file mode 100755 index 0000000..5e8af36 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/publish-dataverse-birds @@ -0,0 +1,5 @@ +#!/bin/sh +OUTPUT=`cat /dev/null | curl -s --insecure -X POST -H 'In-Progress: false' --data-binary @- https://admin:admin@localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/dataverse/birds` +echo $OUTPUT +echo +echo $OUTPUT | xmllint -format - diff --git a/dataversedock/testdata/scripts/search/tests/publish-dataverse-finches b/dataversedock/testdata/scripts/search/tests/publish-dataverse-finches new file mode 100755 index 0000000..0632bc5 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/publish-dataverse-finches @@ -0,0 +1,5 @@ +#!/bin/sh +OUTPUT=`cat /dev/null | curl -s --insecure -X POST -H 'In-Progress: false' --data-binary @- https://finch:finch@localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/dataverse/finches` +echo $OUTPUT +echo +echo $OUTPUT | xmllint -format - diff --git 
a/dataversedock/testdata/scripts/search/tests/publish-dataverse-root b/dataversedock/testdata/scripts/search/tests/publish-dataverse-root new file mode 100755 index 0000000..a4c1585 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/publish-dataverse-root @@ -0,0 +1,6 @@ +#!/bin/sh +. scripts/search/export-keys +OUTPUT=`cat /dev/null | curl -s --insecure -u $ADMINKEY: -X POST -H 'In-Progress: false' --data-binary @- https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/dataverse/root` +echo $OUTPUT +echo +echo $OUTPUT | xmllint -format - diff --git a/dataversedock/testdata/scripts/search/tests/publish-dataverse-spruce b/dataversedock/testdata/scripts/search/tests/publish-dataverse-spruce new file mode 100755 index 0000000..bf2746b --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/publish-dataverse-spruce @@ -0,0 +1,6 @@ +#!/bin/sh +. scripts/search/export-keys +OUTPUT=`cat /dev/null | curl -s --insecure -u $SPRUCEKEY: -X POST -H 'In-Progress: false' --data-binary @- https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/dataverse/spruce` +echo $OUTPUT +echo +echo $OUTPUT | xmllint -format - diff --git a/dataversedock/testdata/scripts/search/tests/publish-dataverse-trees b/dataversedock/testdata/scripts/search/tests/publish-dataverse-trees new file mode 100755 index 0000000..65c58de --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/publish-dataverse-trees @@ -0,0 +1,6 @@ +#!/bin/sh +. 
scripts/search/export-keys +OUTPUT=`cat /dev/null | curl -s --insecure -u $SPRUCEKEY: -X POST -H 'In-Progress: false' --data-binary @- https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/dataverse/trees` +echo $OUTPUT +echo +echo $OUTPUT | xmllint -format - diff --git a/dataversedock/testdata/scripts/search/tests/publish-spruce1-and-test b/dataversedock/testdata/scripts/search/tests/publish-spruce1-and-test new file mode 100755 index 0000000..cc363fe --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/publish-spruce1-and-test @@ -0,0 +1,7 @@ +#!/bin/sh +scripts/search/tests/publish-dataverse-root +scripts/search/tests/publish-dataverse-trees +scripts/search/tests/publish-dataverse-spruce +scripts/search/tests/publish-dataset-spruce1 +#scripts/search/tests/permissions3 +#scripts/search/tests/permissions3-full-anon diff --git a/dataversedock/testdata/scripts/search/tests/revoke-finch-admin-on-spruce b/dataversedock/testdata/scripts/search/tests/revoke-finch-admin-on-spruce new file mode 100755 index 0000000..dfe6c7e --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/revoke-finch-admin-on-spruce @@ -0,0 +1,3 @@ +#!/bin/sh +. scripts/search/assumptions +curl -s -X DELETE "http://localhost:8080/api/dataverses/$SPRUCE_DATAVERSE/assignments/$FINCH_ADMIN_ON_SPRUCE?key=$SPRUCEKEY" | jq .data.message diff --git a/dataversedock/testdata/scripts/search/tests/revoke-spruce-admin-on-birds b/dataversedock/testdata/scripts/search/tests/revoke-spruce-admin-on-birds new file mode 100755 index 0000000..b1bfff3 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/revoke-spruce-admin-on-birds @@ -0,0 +1,6 @@ +#!/bin/sh +. 
scripts/search/assumptions +OUTPUT=`curl -s -X DELETE "http://localhost:8080/api/dataverses/$BIRDS_DATAVERSE/assignments/$SPRUCE_ADMIN_ON_BIRDS?key=$FINCHKEY"` +echo $OUTPUT +echo +echo $OUTPUT | jq .data.message diff --git a/dataversedock/testdata/scripts/search/tests/solr-down b/dataversedock/testdata/scripts/search/tests/solr-down new file mode 100755 index 0000000..534380a --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/solr-down @@ -0,0 +1,2 @@ +#!/bin/bash +diff <(curl -s 'http://localhost:8080/api/search?q=*' | jq .) scripts/search/tests/expected/solr-down diff --git a/dataversedock/testdata/scripts/search/tests/special-characters b/dataversedock/testdata/scripts/search/tests/special-characters new file mode 100755 index 0000000..812c638 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/special-characters @@ -0,0 +1,9 @@ +#!/bin/bash +# curl -H "Content-type:application/json" -X POST -d @scripts/search/tests/data/dv-colon.json "http://localhost:8080/api/dataverses/peteTop?key=pete" +# curl 'http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q="description:\:"' +diff <(curl -s 'http://localhost:8080/api/search?q=:') scripts/search/tests/expected/colon + +# http://stackoverflow.com/questions/18277609/search-in-solr-with-special-characters +# curl -H "Content-type:application/json" -X POST -d @scripts/search/tests/data/dv-dash.json "http://localhost:8080/api/dataverses/peteTop?key=pete" +# curl 'http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q=name:\-' +# diff <(curl -s 'http://localhost:8080/api/search?q=name:"Titanic - 1999"') scripts/search/tests/expected/dash diff --git a/dataversedock/testdata/scripts/search/tests/upload-1000-files b/dataversedock/testdata/scripts/search/tests/upload-1000-files new file mode 100755 index 0000000..a4c1d46 --- /dev/null +++ b/dataversedock/testdata/scripts/search/tests/upload-1000-files @@ -0,0 +1,5 @@ +#!/bin/sh +. 
scripts/search/export-keys +. scripts/search/assumptions +echo "Uploading 1000 files" +curl -s --insecure --data-binary @scripts/search/data/binary/1000files.zip -H 'Content-Disposition: filename=1000files.zip' -H 'Content-Type: application/zip' -H 'Packaging: http://purl.org/net/sword/package/SimpleZip' -u spruce:spruce https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit-media/study/$FIRST_SPRUCE_DOI diff --git a/dataversedock/testdata/scripts/search/users.tsv b/dataversedock/testdata/scripts/search/users.tsv new file mode 100755 index 0000000..4422bea --- /dev/null +++ b/dataversedock/testdata/scripts/search/users.tsv @@ -0,0 +1,7 @@ +userName firstName lastName email +finch Fiona Finch finch@mailinator.com +sparrow Sammy Sparrow sparrow@mailinator.com +wren Wilbur Wren wren@mailinator.com +spruce Sabrina Spruce spruce@mailinator.com +chestnut Caleb Chestnut chestnut@mailinator.com +psiadmin PSI Admin psi@mailinator.com diff --git a/dataversedock/testdata/scripts/setup/asadmin-setup.sh b/dataversedock/testdata/scripts/setup/asadmin-setup.sh new file mode 100755 index 0000000..8b50d2f --- /dev/null +++ b/dataversedock/testdata/scripts/setup/asadmin-setup.sh @@ -0,0 +1,212 @@ +#!/bin/bash +# STOP! +# DO NOT ADD MORE ASADMIN COMMANDS TO THIS SCRIPT! +# IF YOU NEED TO ADD MORE GLASSFISH CONFIG SETTINGS, ADD THEM +# TO THE ../installer/glassfish-setup.sh SCRIPT. +# I'M ASSUMING THAT WE'LL WANT TO CONTINUE MAINTAINING THIS SCRIPT, +# (FOR VAGRANT SETUPS, etc.?); IT SHOULD STILL BE WORKING, BY +# CALLING THE NEW SCRIPT ABOVE - SO NO NEED TO DUPLICATE THE ASADMIN +# COMMANDS HERE. +# FROM NOW ON, ONLY NON-ASADMIN CONFIGURATION SHOULD GO INTO THIS +# SCRIPT. (which makes the name especially misleading - but I didn't +# want to change it, in case other scripts are calling it by name!) 
+# -Leonid 4.0 beta + +# This is a setup script for setting up Glassfish 4 to run Dataverse +# The script was tested on Mac OS X.9 +# ASSUMPTIONS +# * Script has to run locally (i.e. on the machine that hosts the server) +# * Internet connectivity is assumed, in order to get the postgresql driver. + +## +# Default values - Change to suit your machine. +DEFAULT_GLASSFISH_ROOT=/Applications/NetBeans/glassfish-4.0 +DEFAULT_DOMAIN=domain1 +DEFAULT_ASADMIN_OPTS=" " + +### +# Database values. Update as needed. +# Note: DB_USER "dvnApp" is case-sensitive and later used in "scripts/database/reference_data.sql" +# +DB_PORT=5432; export DB_PORT +DB_HOST=localhost; export DB_HOST +DB_NAME=dvndb; export DB_NAME +DB_USER=dvnApp; export DB_USER +DB_PASS=dvnAppPass; export DB_PASS + +### +# Rserve configuration: +RSERVE_HOST=localhost; export RSERVE_HOST +RSERVE_PORT=6311; export RSERVE_PORT +RSERVE_USER=rserve; export RSERVE_USER +RSERVE_PASS=rserve; export RSERVE_PASS + +### +# Other configuration values: +MEM_HEAP_SIZE=1024; export MEM_HEAP_SIZE +HOST_ADDRESS=localhost; export HOST_ADDRESS +SMTP_SERVER=mail.hmdc.harvard.edu; export SMTP_SERVER +FILES_DIR=${HOME}/dataverse/files; export FILES_DIR + +### End of default configuration values. + +# "${VAR+xxx}" for unset vs. 
empty per http://stackoverflow.com/questions/228544/how-to-tell-if-a-string-is-not-defined-in-a-bash-shell-script/230593#230593 + +if [ "${DB_NAME_CUSTOM+xxx}" ] + then + echo "Default DB_NAME ($DB_NAME) overridden: $DB_NAME_CUSTOM" + DB_NAME=$DB_NAME_CUSTOM +fi + +if [ "${DB_USER_CUSTOM+xxx}" ] + then + echo "Default DB_USER ($DB_USER) overridden: $DB_USER_CUSTOM" + DB_USER=$DB_USER_CUSTOM +fi + +if [ "${DB_PASS_CUSTOM+xxx}" ] + then + echo "Default DB_PASS ($DB_PASS) overridden: $DB_PASS_CUSTOM" + DB_PASS=$DB_PASS_CUSTOM +fi + +#echo "end" +#exit + +## +# External dependencies +PGSQL_DRIVER_URL=http://jdbc.postgresql.org/download/postgresql-9.3-1100.jdbc41.jar + +if [ "$SUDO_USER" = "vagrant" ] + then + echo "We are running in a Vagrant environment." + cat /etc/redhat-release + # Choosing all lower case indentifiers for DB_NAME and DB_USER for this reason: + # + # Quoting an identifier also makes it case-sensitive, whereas unquoted names + # are always folded to lower case. For example, the identifiers FOO, foo, and + # "foo" are considered the same by PostgreSQL, but "Foo" and "FOO" are + # different from these three and each other. (The folding of unquoted names + # to lower case in PostgreSQL is incompatible with the SQL standard, which + # says that unquoted names should be folded to upper case. Thus, foo should + # be equivalent to "FOO" not "foo" according to the standard. If you want to + # write portable applications you are advised to always quote a particular + # name or never quote it.) 
-- + # http://www.postgresql.org/docs/9.3/static/sql-syntax-lexical.html + DB_NAME=dataverse_db + DB_USER=dataverse_app + DB_PASS=secret + echo "Configuring EPEL Maven repo " + cd /etc/yum.repos.d + wget http://repos.fedorapeople.org/repos/dchen/apache-maven/epel-apache-maven.repo + cd + echo "Installing dependencies via yum" + yum install -y -q java-1.7.0-openjdk-devel postgresql-server apache-maven httpd mod_ssl + rpm -q postgresql-server + echo "Starting PostgreSQL" + chkconfig postgresql on + /sbin/service postgresql initdb + cp -a /var/lib/pgsql/data/pg_hba.conf /var/lib/pgsql/data/pg_hba.conf.orig + sed -i -e 's/ident$/trust/' /var/lib/pgsql/data/pg_hba.conf + /sbin/service postgresql start + POSTGRES_USER=postgres + echo "Creating database user $DB_USER" + su $POSTGRES_USER -s /bin/sh -c "psql -c \"CREATE ROLE \"$DB_USER\" UNENCRYPTED PASSWORD '$DB_PASS' NOSUPERUSER CREATEDB CREATEROLE NOINHERIT LOGIN\"" + #su $POSTGRES_USER -s /bin/sh -c "psql -c '\du'" + echo "Creating database $DB_NAME" + su $POSTGRES_USER -s /bin/sh -c "psql -c 'CREATE DATABASE \"$DB_NAME\" WITH OWNER = \"$DB_USER\"'" + GLASSFISH_USER=glassfish + echo "Ensuring Unix user '$GLASSFISH_USER' exists" + useradd $GLASSFISH_USER || : + GLASSFISH_ZIP=`ls /downloads/glassfish*zip` + GLASSFISH_USER_HOME=~glassfish + echo "Copying $GLASSFISH_ZIP to $GLASSFISH_USER_HOME and unzipping" + su $GLASSFISH_USER -s /bin/sh -c "cp $GLASSFISH_ZIP $GLASSFISH_USER_HOME" + su $GLASSFISH_USER -s /bin/sh -c "cd $GLASSFISH_USER_HOME && unzip -q $GLASSFISH_ZIP" + DEFAULT_GLASSFISH_ROOT=$GLASSFISH_USER_HOME/glassfish4 + su $GLASSFISH_USER -s /bin/sh -c "/scripts/installer/glassfish-setup.sh" +fi + + +# Set the scripts parameters (if needed) +if [ -z "${GLASSFISH_ROOT+xxx}" ] + then + echo setting GLASSFISH_ROOT to $DEFAULT_GLASSFISH_ROOT + GLASSFISH_ROOT=$DEFAULT_GLASSFISH_ROOT; export GLASSFISH_ROOT +fi +if [ ! 
-d "$GLASSFISH_ROOT" ] + then + echo Glassfish root '$GLASSFISH_ROOT' does not exist + exit 1 +fi +GLASSFISH_BIN_DIR=$GLASSFISH_ROOT/bin + +if [ -z "${DOMAIN+xxx}" ] + then + echo setting DOMAIN to $DEFAULT_DOMAIN + DOMAIN=$DEFAULT_DOMAIN + # setting the environmental variable GLASSFISH_DOMAIN, + # for the ../installer/glassfish-setup.sh script, that runs + # all the required asadmin comands + GLASSFISH_DOMAIN=$DOMAIN; export GLASSFISH_DOMAIN +fi +DOMAIN_DIR=$GLASSFISH_ROOT/glassfish/domains/$DOMAIN +if [ ! -d "$DOMAIN_DIR" ] + then + echo Domain directory '$DOMAIN_DIR' does not exist + exit 2 +fi +if [ -z "$ASADMIN_OPTS" ] + then + ASADMIN_OPTS=$DEFAULT_ASADMIN_OPTS; export ASADMIN_OPTS +fi + +echo "Setting up your glassfish4 to support Dataverse" +echo "Glassfish directory: "$GLASSFISH_ROOT +echo "Domain directory: "$DOMAIN_DIR + +### +# getting the postgres driver +DOMAIN_LIB=$DOMAIN_DIR/lib +if ! grep -qs postgres $DOMAIN_LIB/* + then + DRIVER_NAME=$(echo $PGSQL_DRIVER_URL | tr / \\n | tail -n1) + echo Downloading postgresql driver '$DRIVER_NAME' + wget $PGSQL_DRIVER_URL -O $DOMAIN_LIB/$DRIVER_NAME + else + echo postgresql driver already installed. +fi + +if [ "$SUDO_USER" = "vagrant" ] + then + /scripts/installer/glassfish-setup.sh + echo "Done configuring Vagrant environment" + exit 0 +fi + +### +# Move to the glassfish dir +pushd $GLASSFISH_BIN_DIR + +### +# take the domain up, if needed. +DOMAIN_DOWN=$(./asadmin list-domains | grep "$DOMAIN " | grep "not running") +if [ $(echo $DOMAIN_DOWN|wc -c) -ne 1 ]; + then + echo Trying to start domain $DOMAIN up... 
+ ./asadmin $ASADMIN_OPTS start-domain $DOMAIN + else + echo domain running +fi + +# ONCE AGAIN, ASADMIN COMMANDS BELOW HAVE ALL BEEN MOVED INTO scripts/installer/glassfish-setup.sh + +# TODO: diagnostics + +### +# Clean up +popd + +echo "Glassfish setup complete" +date + diff --git a/dataversedock/testdata/scripts/trello/trello b/dataversedock/testdata/scripts/trello/trello new file mode 100755 index 0000000..6b1b7cb --- /dev/null +++ b/dataversedock/testdata/scripts/trello/trello @@ -0,0 +1,4 @@ +curl -s https://api.trello.com/1/boards/527d1605c7b30060420027b0 | python -m json.tool +#curl -s https://api.trello.com/1/lists/527d1605c7b30060420027b0?fields=name&cards=open&card_fields=name +# https://api.trello.com/1/lists/4eea4ffc91e31d174600004a?fields=name&cards=open&card_fields=name&key=[application_key]&token=[optional_auth_token] + diff --git a/dataversedock/testdata/scripts/vagrant/install-dataverse.sh b/dataversedock/testdata/scripts/vagrant/install-dataverse.sh new file mode 100644 index 0000000..ac48217 --- /dev/null +++ b/dataversedock/testdata/scripts/vagrant/install-dataverse.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +if [ ! -z "$1" ]; then + MAILSERVER=$1 + MAILSERVER_ARG="--mailserver $MAILSERVER" +fi +WAR=/dataverse/target/dataverse*.war +if [ ! -f $WAR ]; then + echo "no war file found... building" + echo "Installing nss on CentOS 6 to avoid java.security.KeyException while building war file: https://github.com/IQSS/dataverse/issues/2744" + yum install -y nss + su $SUDO_USER -s /bin/sh -c "cd /dataverse && mvn package" +fi +cd /dataverse/scripts/installer + +# move any pre-existing `default.config` file out of the way to avoid overwriting +pid=$$ +if [ -e default.config ]; then + mv default.config tmp-${pid}-default.config +fi + +echo "HOST_DNS_ADDRESS localhost" > default.config +echo "GLASSFISH_DIRECTORY /home/glassfish/glassfish4" >> default.config + +if [ ! 
-z "$MAILSERVER" ]; then + echo "MAIL_SERVER $MAILSERVER" >> default.config +fi + +./install -y -f + +if [ -e tmp-${pid}-default.config ]; then # if we moved it out, move it back + mv -f tmp-${pid}-default.config default.config +fi + +echo "If "vagrant up" was successful (check output above) Dataverse is running on port 8080 of the Linux machine running within Vagrant, but this port has been forwarded to port 8888 of the computer you ran "vagrant up" on. For this reason you should go to http://localhost:8888 to see the Dataverse app running." diff --git a/dataversedock/testdata/scripts/vagrant/install-tworavens.sh b/dataversedock/testdata/scripts/vagrant/install-tworavens.sh new file mode 100755 index 0000000..3e1fb1b --- /dev/null +++ b/dataversedock/testdata/scripts/vagrant/install-tworavens.sh @@ -0,0 +1,35 @@ +#!/bin/bash +echo "This script is highly experimental and makes many assumptions about how Dataverse is running in Vagrant. Please consult the TwoRavens section of the Dataverse Installation Guide instead." +exit 1 +cd /root +yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm +yum install -y R R-devel +# FIXME: /dataverse is mounted in Vagrant but not other places +yum install -y /dataverse/doc/sphinx-guides/source/_static/installation/files/home/rpmbuild/rpmbuild/RPMS/x86_64/rapache-1.2.6-rpm0.x86_64.rpm +yum install -y gcc-gfortran # to build R packages +COMMIT=a6869eb28693d6df529e7cb3888c40de5f302b66 +UNZIPPED=TwoRavens-$COMMIT +if [ ! -f $COMMIT ]; then + wget https://github.com/IQSS/TwoRavens/archive/$COMMIT.zip + unzip $COMMIT + cd $UNZIPPED/r-setup + ./r-setup.sh # This is expected to take a while. Look for lines like "Package Zelig successfully installed" and "Successfully installed Dataverse R framework". +fi +# FIXME: copy preprocess.R into Glassfish while running and overwrite it +curl -X PUT -d true http://localhost:8080/api/admin/settings/:TwoRavensTabularView +# Port 8888 because we're running in Vagrant. 
On the dev1 server we use https://dev1.dataverse.org/dataexplore/gui.html +curl -X PUT -d http://localhost:8888/dataexplore/gui.html http://localhost:8080/api/admin/settings/:TwoRavensUrl +cd /root +DIR=/var/www/html/dataexplore +if [ ! -d $DIR ]; then + cp -r $UNZIPPED $DIR +fi +cd $DIR +# The plan is to remove this hack of dropping preprocess.R into a deployed war file directory. See https://github.com/IQSS/dataverse/issues/3372 +# FIXME: don't assume version 4.6.1 +#diff /var/www/html/dataexplore/rook/preprocess/preprocess.R /usr/local/glassfish4/glassfish/domains/domain1/applications/dataverse-4.6.1/WEB-INF/classes/edu/harvard/iq/dataverse/rserve/scripts/preprocess.R +# FIXME: If `diff` shows a difference, which is likely, copy the version from TwoRavens to the Glassfish directory. +#cp /var/www/html/dataexplore/rook/preprocess/preprocess.R /usr/local/glassfish4/glassfish/domains/domain1/applications/dataverse-4.6.1/WEB-INF/classes/edu/harvard/iq/dataverse/rserve/scripts/preprocess.R +# FIXME: restart Glassfish if you had to update preprocess.R above. +# FIXME: Vagrant with it's weird 8888 port forwarding isn't working. On the dev1 server, TwoRavens works fine if you supply "https://dev1.dataverse.org" for both URLs. 
+echo "Next, run ./install.pl after you cd to $DIR" diff --git a/dataversedock/testdata/scripts/vagrant/rpmbuild.sh b/dataversedock/testdata/scripts/vagrant/rpmbuild.sh new file mode 100755 index 0000000..f10830a --- /dev/null +++ b/dataversedock/testdata/scripts/vagrant/rpmbuild.sh @@ -0,0 +1,3 @@ +#!/bin/sh +rpm -Uvh http://dl.fedoraproject.org/pub/epel/7/x86_64/e/epel-release-7-7.noarch.rpm +yum install -y rpm-build httpd-devel libapreq2-devel R-devel diff --git a/dataversedock/testdata/scripts/vagrant/setup-solr.sh b/dataversedock/testdata/scripts/vagrant/setup-solr.sh new file mode 100755 index 0000000..f4a5bd2 --- /dev/null +++ b/dataversedock/testdata/scripts/vagrant/setup-solr.sh @@ -0,0 +1,10 @@ +#!/bin/bash +echo "Setting up Solr" +GLASSFISH_USER=glassfish +GLASSFISH_USER_HOME=~glassfish +SOLR_HOME=$GLASSFISH_USER_HOME/solr +su $GLASSFISH_USER -s /bin/sh -c "mkdir $SOLR_HOME" +su $GLASSFISH_USER -s /bin/sh -c "cp /downloads/solr-4.6.0.tgz $SOLR_HOME" +su $GLASSFISH_USER -s /bin/sh -c "cd $SOLR_HOME && tar xfz solr-4.6.0.tgz" +su $GLASSFISH_USER -s /bin/sh -c "cp /conf/solr/4.6.0/schema.xml $SOLR_HOME/solr-4.6.0/example/solr/collection1/conf/schema.xml" +su $GLASSFISH_USER -s /bin/sh -c "cd $SOLR_HOME/solr-4.6.0/example && java -jar start.jar &" diff --git a/dataversedock/testdata/scripts/vagrant/setup.sh b/dataversedock/testdata/scripts/vagrant/setup.sh new file mode 100644 index 0000000..0ab2daf --- /dev/null +++ b/dataversedock/testdata/scripts/vagrant/setup.sh @@ -0,0 +1,72 @@ +#!/bin/bash +echo "Installing dependencies for Dataverse" + +# Add JQ +echo "Installing jq for the setup scripts" +wget http://stedolan.github.io/jq/download/linux64/jq +chmod +x jq +# this is where EPEL puts it +sudo mv jq /usr/bin/jq + +echo "Adding Shibboleth yum repo" +cp /dataverse/conf/vagrant/etc/yum.repos.d/shibboleth.repo /etc/yum.repos.d +cp /dataverse/conf/vagrant/etc/yum.repos.d/epel-apache-maven.repo /etc/yum.repos.d +# Uncomment this (and other shib stuff below) if 
you want +# to use Vagrant (and maybe PageKite) to test Shibboleth. +#yum install -y shibboleth shibboleth-embedded-ds +yum install -y java-1.8.0-openjdk-devel postgresql-server apache-maven httpd mod_ssl unzip +alternatives --set java /usr/lib/jvm/jre-1.8.0-openjdk.x86_64/bin/java +alternatives --set javac /usr/lib/jvm/java-1.8.0-openjdk.x86_64/bin/javac +java -version +javac -version +service postgresql initdb +service postgresql stop +cp /dataverse/conf/vagrant/var/lib/pgsql/data/pg_hba.conf /var/lib/pgsql/data/pg_hba.conf +service postgresql start +chkconfig postgresql on +GLASSFISH_USER=glassfish +echo "Ensuring Unix user '$GLASSFISH_USER' exists" +useradd $GLASSFISH_USER || : +DOWNLOAD_DIR='/dataverse/downloads' +GLASSFISH_ZIP="$DOWNLOAD_DIR/glassfish-4.1.zip" +SOLR_TGZ="$DOWNLOAD_DIR/solr-4.6.0.tgz" +WELD_PATCH="$DOWNLOAD_DIR/weld-osgi-bundle-2.2.10.Final-glassfish4.jar" +if [ ! -f $GLASSFISH_ZIP ] || [ ! -f $SOLR_TGZ ]; then + echo "Couldn't find $GLASSFISH_ZIP or $SOLR_TGZ! Running download script...." + cd $DOWNLOAD_DIR && ./download.sh && cd + echo "Done running download script." +fi +GLASSFISH_USER_HOME=~glassfish +GLASSFISH_ROOT=$GLASSFISH_USER_HOME/glassfish4 +if [ ! 
-d $GLASSFISH_ROOT ]; then + echo "Copying $GLASSFISH_ZIP to $GLASSFISH_USER_HOME and unzipping" + su $GLASSFISH_USER -s /bin/sh -c "cp $GLASSFISH_ZIP $GLASSFISH_USER_HOME" + su $GLASSFISH_USER -s /bin/sh -c "cd $GLASSFISH_USER_HOME && unzip -q $GLASSFISH_ZIP" + su $GLASSFISH_USER -s /bin/sh -c "mv $GLASSFISH_ROOT/glassfish/modules/weld-osgi-bundle.jar /tmp" + su $GLASSFISH_USER -s /bin/sh -c "cp $WELD_PATCH $GLASSFISH_ROOT/glassfish/modules" +else + echo "$GLASSFISH_ROOT already exists" +fi +#service shibd start +service httpd stop +cp /dataverse/conf/httpd/conf.d/dataverse.conf /etc/httpd/conf.d/dataverse.conf +mkdir -p /var/www/dataverse/error-documents +cp /dataverse/conf/vagrant/var/www/dataverse/error-documents/503.html /var/www/dataverse/error-documents +service httpd start +#curl -k --sslv3 https://pdurbin.pagekite.me/Shibboleth.sso/Metadata > /tmp/pdurbin.pagekite.me +#cp -a /etc/shibboleth/shibboleth2.xml /etc/shibboleth/shibboleth2.xml.orig +#cp -a /etc/shibboleth/attribute-map.xml /etc/shibboleth/attribute-map.xml.orig +# need more attributes, such as sn, givenName, mail +#cp /dataverse/conf/vagrant/etc/shibboleth/attribute-map.xml /etc/shibboleth/attribute-map.xml +# FIXME: automate this? +#curl 'https://www.testshib.org/cgi-bin/sp2config.cgi?dist=Others&hostname=pdurbin.pagekite.me' > /etc/shibboleth/shibboleth2.xml +#cp /dataverse/conf/vagrant/etc/shibboleth/shibboleth2.xml /etc/shibboleth/shibboleth2.xml +#service shibd restart +#curl -k --sslv3 https://pdurbin.pagekite.me/Shibboleth.sso/Metadata > /downloads/pdurbin.pagekite.me +#service httpd restart +echo "#########################################################################################" +echo "# This is a Vagrant test box, so we're disabling firewalld. 
#" +echo "# Re-enable it with $ sudo systemctl enable firewalld && sudo systemctl start firewalld #" +echo "#########################################################################################" +systemctl disable firewalld +systemctl stop firewalld diff --git a/dataversedock/testdata/scripts/vagrant/test.sh b/dataversedock/testdata/scripts/vagrant/test.sh new file mode 100755 index 0000000..3c5b835 --- /dev/null +++ b/dataversedock/testdata/scripts/vagrant/test.sh @@ -0,0 +1,6 @@ +#!/bin/sh +echo "running tests..." +echo "running search tests..." +cd / +scripts/search/tests/permissions +echo "done running tests. no output is good. silence is golden" diff --git a/dataversedock/testscripts/db.sh b/dataversedock/testscripts/db.sh new file mode 100755 index 0000000..aeb09f0 --- /dev/null +++ b/dataversedock/testscripts/db.sh @@ -0,0 +1,3 @@ +#!/bin/sh +psql -U postgres -c "CREATE ROLE dvnapp UNENCRYPTED PASSWORD 'secret' SUPERUSER CREATEDB CREATEROLE INHERIT LOGIN" template1 +psql -U dvnapp -c 'CREATE DATABASE "dvndb" WITH OWNER = "dvnapp"' template1 diff --git a/dataversedock/testscripts/install b/dataversedock/testscripts/install new file mode 100755 index 0000000..32f3a39 --- /dev/null +++ b/dataversedock/testscripts/install @@ -0,0 +1,21 @@ +#!/bin/sh +export HOST_ADDRESS=localhost +export GLASSFISH_ROOT=/usr/local/glassfish4 +export FILES_DIR=/usr/local/glassfish4/glassfish/domains/domain1/files +export DB_NAME=dvndb +export DB_PORT=5432 +export DB_HOST=localhost +export DB_USER=dvnapp +export DB_PASS=secret +export RSERVE_HOST=localhost +export RSERVE_PORT=6311 +export RSERVE_USER=rserve +export RSERVE_PASS=rserve +export SMTP_SERVER=localhost +export MEM_HEAP_SIZE=2048 +export GLASSFISH_DOMAIN=domain1 +cd scripts/installer +cp pgdriver/postgresql-8.4-703.jdbc4.jar $GLASSFISH_ROOT/glassfish/lib +#cp ../../conf/jhove/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf +cp /opt/dv/testdata/jhove.conf 
$GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf +./glassfish-setup.sh diff --git a/dataversedock/testscripts/post b/dataversedock/testscripts/post new file mode 100755 index 0000000..03eaf59 --- /dev/null +++ b/dataversedock/testscripts/post @@ -0,0 +1,15 @@ +#!/bin/sh +cd scripts/api +./setup-all.sh --insecure | tee /tmp/setup-all.sh.out +cd ../.. +psql -U dvnapp dvndb -f scripts/database/reference_data.sql +psql -U dvnapp dvndb -f doc/sphinx-guides/source/_static/util/pg8-createsequence-prep.sql +psql -U dvnapp dvndb -f doc/sphinx-guides/source/_static/util/createsequence.sql +scripts/search/tests/publish-dataverse-root +#git checkout scripts/api/data/dv-root.json +scripts/search/tests/grant-authusers-add-on-root +scripts/search/populate-users +scripts/search/create-users +scripts/search/tests/create-all-and-test +scripts/search/tests/publish-spruce1-and-test +#java -jar downloads/schemaSpy_5.0.0.jar -t pgsql -host localhost -db dvndb -u postgres -p secret -s public -dp scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar -o /var/www/html/schemaspy/latest diff --git a/dataversehub/Dockerfile b/dataversehub/Dockerfile new file mode 100644 index 0000000..ab48c84 --- /dev/null +++ b/dataversehub/Dockerfile @@ -0,0 +1,8 @@ +FROM vtycloud/multidataverse +#FROM ndslabs/dataverse:latest + +# glassfish port +EXPOSE 8080 + +RUN echo "Dataverse is running..." 
+CMD ["/opt/entrypoint.bash"] diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..e297f4d --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,54 @@ +version: '2' + +services: + postgres: + build: postgresql/ + container_name: db + ports: + - "5435:5432" + volumes: + - ./data/db:/var/lib/postgresql/data + + solr: + build: solr/ + container_name: solr + ports: + - "8985:8983" + environment: + - "SOLR_HOST=solr" + volumes: + - ./data/solr:/usr/local/solr-4.6.0/example/solr/collection1/data + + dataverse: + build: dataversedock/ + container_name: dataverse + ports: + - "443:443" + - "8085:8080" + environment: + - "HOST_DNS_ADDRESS=0.0.0.0" + - "LANG=en" + - "BUNDLEPROPERTIES=Bundle.properties" + - "GLASSFISH_DIRECTORY=/opt/glassfish4" + - "ADMIN_EMAIL=vyacheslav.tykhonov@dans.knaw.nl" + - "MAIL_SERVER=mail.hmdc.harvard.edu" + - "POSTGRES_ADMIN_PASSWORD=secret" + - "POSTGRES_SERVER=postgres" + - "POSTGRES_PORT=5432" + - "POSTGRES_DATABASE=dvndb" + - "POSTGRES_USER=dvnapp" + - "POSTGRES_PASSWORD=secret" + - "SOLR_LOCATION=solr:8983" + - "TWORAVENS_LOCATION=NOT INSTALLED" + - "RSERVE_HOST=localhost" + - "RSERVE_PORT=6311" + - "RSERVE_USER=rserve" + - "RSERVE_PASSWORD=rserve" + depends_on: + - postgres + - solr + volumes: + - ./data/dataverse:/usr/local/glassfish4/glassfish/domains/domain1/files +networks: + dvn: + driver: bridge diff --git a/docker-multilingual.yml b/docker-multilingual.yml new file mode 100644 index 0000000..d01fd96 --- /dev/null +++ b/docker-multilingual.yml @@ -0,0 +1,170 @@ +version: '2' + +services: + postgres: + build: postgresql/ + container_name: db + ports: + - "5435:5432" + volumes: + - ./data/db:/var/lib/db + + solr: + build: solr/ + container_name: solr + ports: + - "8985:8983" + environment: + - "SOLR_HOST=solr" + volumes: + - ./data/solr:/var/lib/solr + + dataverse: + build: dataversedock/ + container_name: dataverse + ports: + - "443:443" + - "8085:8080" + environment: + - "HOST_DNS_ADDRESS=0.0.0.0" + - 
"LANG=en" + - "GLASSFISH_DIRECTORY=/opt/glassfish4" + - "ADMIN_EMAIL=vty@iisg.nl" + - "MAIL_SERVER=mail.hmdc.harvard.edu" + - "POSTGRES_ADMIN_PASSWORD=secret" + - "POSTGRES_SERVER=db" + - "POSTGRES_PORT=5432" + - "POSTGRES_DATABASE=dvndb" + - "POSTGRES_USER=dvnapp" + - "POSTGRES_PASSWORD=secret" + - "SOLR_LOCATION=solr:8983" + - "TWORAVENS_LOCATION=NOT INSTALLED" + - "RSERVE_HOST=localhost" + - "RSERVE_PORT=6311" + - "RSERVE_USER=rserve" + - "RSERVE_PASSWORD=rserve" + depends_on: + - postgres + - solr + dataverse_ua: + build: dataversedock/ + container_name: dataverse_ua + ports: + - "453:443" + - "8089:8080" + environment: + - "HOST_DNS_ADDRESS=0.0.0.0" + - "LANG=ua" + - "BUNDLEPROPERTIES=Bundle_ua.properties_utf" + - "GLASSFISH_DIRECTORY=/opt/glassfish4" + - "ADMIN_EMAIL=vty@iisg.nl" + - "MAIL_SERVER=mail.hmdc.harvard.edu" + - "POSTGRES_ADMIN_PASSWORD=secret" + - "POSTGRES_SERVER=db" + - "POSTGRES_PORT=5432" + - "POSTGRES_DATABASE=dvndb" + - "POSTGRES_USER=dvnapp" + - "POSTGRES_PASSWORD=secret" + - "SOLR_LOCATION=solr:8983" + - "TWORAVENS_LOCATION=NOT INSTALLED" + - "RSERVE_HOST=localhost" + - "RSERVE_PORT=6311" + - "RSERVE_USER=rserve" + - "RSERVE_PASSWORD=rserve" + - "BUILD=false" + depends_on: + - postgres + - solr + dataverse_de: + build: dataversedock/ + container_name: dataverse_de + ports: + - "447:443" + - "8086:8080" + environment: + - "HOST_DNS_ADDRESS=0.0.0.0" + - "LANG=de" + - "BUNDLEPROPERTIES=Bundle_de.properties" + - "GLASSFISH_DIRECTORY=/opt/glassfish4" + - "ADMIN_EMAIL=vty@iisg.nl" + - "MAIL_SERVER=mail.hmdc.harvard.edu" + - "POSTGRES_ADMIN_PASSWORD=secret" + - "POSTGRES_SERVER=db" + - "POSTGRES_PORT=5432" + - "POSTGRES_DATABASE=dvndb" + - "POSTGRES_USER=dvnapp" + - "POSTGRES_PASSWORD=secret" + - "SOLR_LOCATION=solr:8983" + - "TWORAVENS_LOCATION=NOT INSTALLED" + - "RSERVE_HOST=localhost" + - "RSERVE_PORT=6311" + - "RSERVE_USER=rserve" + - "RSERVE_PASSWORD=rserve" + - "BUILD=false" + depends_on: + - postgres + - solr + dataverse_fr: + build: 
dataversedock/ + container_name: dataverse_fr + ports: + - "446:443" + - "8088:8080" + environment: + - "HOST_DNS_ADDRESS=0.0.0.0" + - "LANG=fr" + - "BUNDLEPROPERTIES=Bundle_fr.properties_utf" + - "GLASSFISH_DIRECTORY=/opt/glassfish4" + - "ADMIN_EMAIL=vty@iisg.nl" + - "MAIL_SERVER=mail.hmdc.harvard.edu" + - "POSTGRES_ADMIN_PASSWORD=secret" + - "POSTGRES_SERVER=db" + - "POSTGRES_PORT=5432" + - "POSTGRES_DATABASE=dvndb" + - "POSTGRES_USER=dvnapp" + - "POSTGRES_PASSWORD=secret" + - "SOLR_LOCATION=solr:8983" + - "TWORAVENS_LOCATION=NOT INSTALLED" + - "RSERVE_HOST=localhost" + - "RSERVE_PORT=6311" + - "RSERVE_USER=rserve" + - "RSERVE_PASSWORD=rserve" + - "BUILD=false" + depends_on: + - postgres + - solr + dataverse_es: + build: dataversedock/ + container_name: dataverse_es + ports: + - "440:443" + - "8090:8080" + environment: + - "HOST_DNS_ADDRESS=0.0.0.0" + - "LANG=es" + - "BUNDLEPROPERTIES=Bundle_es_ES.properties_utf" + - "GLASSFISH_DIRECTORY=/opt/glassfish4" + - "ADMIN_EMAIL=vty@iisg.nl" + - "MAIL_SERVER=mail.hmdc.harvard.edu" + - "POSTGRES_ADMIN_PASSWORD=secret" + - "POSTGRES_SERVER=db" + - "POSTGRES_PORT=5432" + - "POSTGRES_DATABASE=dvndb" + - "POSTGRES_USER=dvnapp" + - "POSTGRES_PASSWORD=secret" + - "SOLR_LOCATION=solr:8983" + - "TWORAVENS_LOCATION=NOT INSTALLED" + - "RSERVE_HOST=localhost" + - "RSERVE_PORT=6311" + - "RSERVE_USER=rserve" + - "RSERVE_PASSWORD=rserve" + - "BUILD=false" + depends_on: + - postgres + - solr +networks: + dvn: + driver: bridge +#volumes: +# - dbstorage:"/data/db" +# - solrstorage:"/data/solr" diff --git a/initial.bash b/initial.bash new file mode 100755 index 0000000..d0e192f --- /dev/null +++ b/initial.bash @@ -0,0 +1,18 @@ +#!/bin/bash + +git clone https://github.com/IQSS/dataverse +wdir=`pwd` +echo "Downloading all software required to run Dataverse from path ".$wdir +cd dataversedock +./step1.sh $wdir +./step2.sh $wdir +cd ../ + +if [ ! 
-e ./postgresql/dvinstall.zip ]; then + cp -R ./dataversedock/dv/deps/dvinstall.zip ./postgresql/dvinstall.zip +fi + +#docker-compose build postgres +#docker-compose start postgres +#docker-compose build solr +#docker-compose start solr diff --git a/postgresql/Dockerfile b/postgresql/Dockerfile new file mode 100644 index 0000000..bd8114a --- /dev/null +++ b/postgresql/Dockerfile @@ -0,0 +1,26 @@ +from postgres:9.3 +COPY pg_hba.conf /tmp +ENV HOST_DNS_ADDRESS "localhost" +ENV POSTGRES_DB "dvndb" +ENV POSTGRES_USER "dvnapp" + +RUN cp /tmp/pg_hba.conf /var/lib/postgresql/data/ +RUN apt-get update +RUN apt-get install -y unzip sudo +RUN sudo -u postgres /usr/lib/postgresql/9.3/bin/initdb -D /var/lib/postgresql/data +COPY pg_hba.conf /etc/postgresql/9.3/data/pg_hba.conf +RUN sudo -u postgres /usr/lib/postgresql/9.3/bin/pg_ctl -D /var/lib/postgresql/data start + +#RUN pg_createcluster 9.3 data --start +#RUN sudo -u postgres /usr/lib/postgresql/9.3/bin/postgres -C /etc/postgresql/9.3/data/postgresql.conf -D /var/lib/postgresql/9.3 +RUN sudo /etc/init.d/postgresql start + +COPY testdata /opt +COPY init-postgres /opt +COPY dvinstall.zip /opt +WORKDIR /opt +RUN unzip dvinstall.zip +WORKDIR /opt/dvinstall +#RUN /opt/init-postgres +#RUN sleep 10 +#RUN sudo -u postgres /usr/lib/postgresql/9.3/bin/createuser dvnapp diff --git a/postgresql/init-postgres b/postgresql/init-postgres new file mode 100755 index 0000000..a427501 --- /dev/null +++ b/postgresql/init-postgres @@ -0,0 +1,317 @@ +#!/usr/bin/perl + +use strict; +use warnings; + +my @CONFIG_VARIABLES = ( + 'POSTGRES_HOST', + 'POSTGRES_PORT', + 'POSTGRES_DB', + 'POSTGRES_USER', + 'POSTGRES_PASSWORD', + 'GLASSFISH_DIRECTORY', + 'SOLR_HOST', + 'SOLR_PORT', +); + +my %CONFIG_DEFAULTS = ( + 'POSTGRES_HOST', 'localhost', + 'POSTGRES_PORT', 5432, + 'POSTGRES_DB', 'dvndb', + 'POSTGRES_USER', 'dvnapp', + 'POSTGRES_PASSWORD', 'secret', + 'GLASSFISH_DIRECTORY', '/usr/local/glassfish4' +); + + + +# Config always from ENV +for my $ENTRY 
(@CONFIG_VARIABLES) { + if (!$ENV{$ENTRY}) { + $ENV{$ENTRY} = $CONFIG_DEFAULTS{$ENTRY}; + } else { + $CONFIG_DEFAULTS{$ENTRY} = $ENV{$ENTRY}; + } +} + +# Supported Postgres JDBC drivers: +my %POSTGRES_DRIVERS = ( + "9_3", "postgresql-9.1-902.jdbc4.jar", + "9_2", "postgresql-9.1-902.jdbc4.jar" +); + +$ENV{"PGPASSWORD"} = $ENV{"POSTGRES_PASSWORD"}; + +# Create SQL reference data +my $SQL_REFERENCE_DATA = "reference_data_filtered.sql"; +my $SQL_REFERENCE_TEMPLATE = "../database/reference_data.sql"; + +unless ( -f $SQL_REFERENCE_TEMPLATE ) { + $SQL_REFERENCE_TEMPLATE = "reference_data.sql"; +} + +unless ( -f $SQL_REFERENCE_TEMPLATE ) { + print "\nWARNING: Can't find .sql data template!\n"; + print "(are you running the installer in the right directory?)\n"; + + exit 0; +} + +open DATATEMPLATEIN, $SQL_REFERENCE_TEMPLATE || die $@; +open SQLDATAOUT, '>' . $SQL_REFERENCE_DATA || die $@; + +while (<DATATEMPLATEIN>) { + s/dvnapp/$CONFIG_DEFAULTS{'POSTGRES_USER'}/g; + print SQLDATAOUT $_; +} + +close DATATEMPLATEIN; +close SQLDATAOUT; + + +# Check Postgres and jq availability +my $psql_exec; +my $pg_major_version = 0; +my $pg_minor_version = 0; + +my $sys_path = $ENV{'PATH'}; +my @sys_path_dirs = split( ":", $sys_path ); + +$psql_exec = ""; + +for my $sys_path_dir (@sys_path_dirs) { + if ( -x $sys_path_dir . "/psql" ) { + $psql_exec = $sys_path_dir; + last; + } +} + +my $psql_major_version = 0; +my $psql_minor_version = 0; + +unless ( $psql_exec eq "" ) { + open( PSQLOUT, $psql_exec . 
"/psql --version|" ); + + my $psql_version_line = <PSQLOUT>; + chop $psql_version_line; + close PSQLOUT; + + my ( $postgresName, $postgresNameLong, $postgresVersion ) = split( " ", $psql_version_line ); + + unless ( $postgresName eq "psql" && $postgresVersion =~ /^[0-9][0-9\.]*$/ ) { + print STDERR "\nWARNING: Unexpected output from psql command!\n"; + } + else { + my (@psql_version_tokens) = split( '\.', $postgresVersion ); + + $psql_major_version = $psql_version_tokens[0]; + $psql_minor_version = $psql_version_tokens[1]; + + $pg_major_version = $psql_major_version; + $pg_minor_version = $psql_minor_version; + } +} + +if ( $psql_exec eq "" ) { + print STDERR "\nERROR: I haven't been able to find the psql command in your PATH!\n"; + print STDERR "Please make sure PostgresQL is properly installed; if necessary, add\n"; + print STDERR "the location of psql to the PATH, then try again.\n\n"; + + exit 1; +} + + +print "Using psql version " . $pg_major_version . "." . $pg_minor_version . "\n"; + + +# Check if we can connect +if ( system( $psql_exec . "/psql -q -U postgres -h " . $CONFIG_DEFAULTS{'POSTGRES_HOST'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -c 'SELECT * FROM pg_roles' > /dev/null 2>&1" ) == 0 ) { + print "Connected to postgres on " . $CONFIG_DEFAULTS{'POSTGRES_HOST'} . ":" . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . "\n"; +} + + +# 4d. CHECK IF THIS USER ALREADY EXISTS: +my $psql_command_rolecheck = + $psql_exec . "/psql -q -h " . $CONFIG_DEFAULTS{'POSTGRES_HOST'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -c \"\" -d postgres " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " >/dev/null 2>&1"; +print "$psql_command_rolecheck\n"; +if ( ( my $exitcode = system($psql_command_rolecheck) ) == 0 ) { + print "User (role) " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " already exists;\n"; +} +else { + # 4e. CREATE DVN DB USER: + + print "\nCreating Postgres user (role) for the DVN: " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . 
"\n"; + + open TMPCMD, ">/tmp/pgcmd.$$.tmp"; + + # with md5-encrypted password: + my $pg_password_md5 = + &create_pg_hash( $CONFIG_DEFAULTS{'POSTGRES_USER'}, $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'} ); + my $sql_command = + "CREATE ROLE \"" + . $CONFIG_DEFAULTS{'POSTGRES_USER'} + . "\" PASSWORD 'md5" + . $pg_password_md5 + . "' NOSUPERUSER CREATEDB CREATEROLE INHERIT LOGIN"; + + print TMPCMD $sql_command; + close TMPCMD; + + my $psql_commandline = $psql_exec . "/psql -U postgres -h " . $CONFIG_DEFAULTS{'POSTGRES_HOST'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -f /tmp/pgcmd.$$.tmp"; + + my $out = qx($psql_commandline 2>&1); + my $exitcode = $?; + unless ( $exitcode == 0 ) { + print STDERR "Could not create the DVN Postgres user role!\n"; + print STDERR "(SQL: " . $psql_commandline . ")\n"; + print STDERR "(psql exit code: " . $exitcode . ")\n"; + print STDERR "(STDERR and STDOUT was: " . $out . ")\n"; + exit 1; + } + + unlink "/tmp/pgcmd.$$.tmp"; + +} + +## 4f. CREATE DVN DB: +# +print "Creating Postgres database: " . $CONFIG_DEFAULTS{'POSTGRES_DB'} . "\n"; + + +my $psql_command_dbcheck = + $psql_exec . "/psql -q -h " . $CONFIG_DEFAULTS{'POSTGRES_HOST'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -c \"\" -d " . $CONFIG_DEFAULTS{'POSTGRES_DB'} . " " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " >/dev/null 2>&1"; + +print "$psql_command_dbcheck\n"; + +if ( ( my $exitcode = system($psql_command_dbcheck) ) == 0 ) { + print "Database " . $CONFIG_DEFAULTS{'POSTGRES_DB'} . " already exists;\n"; +} else { + my $psql_command = + $psql_exec + . "/createdb " + . " -U postgres" + . " -h " . $CONFIG_DEFAULTS{'POSTGRES_HOST'} + . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} + . " " . $CONFIG_DEFAULTS{'POSTGRES_DB'} + . " --owner=" + . $CONFIG_DEFAULTS{'POSTGRES_USER'}; + + my $out = qx($psql_command 2>&1); + my $exitcode = $?; + unless ( $exitcode == 0 ) { + print STDERR "Could not create Postgres database for the DVN app!\n"; + print STDERR "(command: " . $psql_command . 
")\n"; + print STDERR "(psql exit code: " . $exitcode . ")\n"; + print STDERR "(STDOUT and STDERR: " . $out . ")\n"; + exit 1; + } +} + +my $psql_command_tablecheck = + $psql_exec . "/psql -q -h " . $CONFIG_DEFAULTS{'POSTGRES_HOST'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -c \"select count(*) from dataverse\" -d " . $CONFIG_DEFAULTS{'POSTGRES_DB'} . " " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " >/dev/null 2>&1"; + +print "$psql_command_tablecheck\n"; + +my $newdb = 0; +if ( ( my $exitcode = system($psql_command_tablecheck) ) == 0 ) { + print "Database tables already exist;\n"; +} else { + $newdb = 1; +} + + +if ($newdb) { + print "Initializing postgres database\n"; + # Initialize postgres database + + print "Executing DDL\n"; + my $psql_ddl_command = $psql_exec + . "/psql -q " + . " -h " . $CONFIG_DEFAULTS{'POSTGRES_HOST'} + . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} + . " -d " . $CONFIG_DEFAULTS{'POSTGRES_DB'} + . " " . $CONFIG_DEFAULTS{'POSTGRES_USER'} + . " -f " . "createDDL.sql"; + + print "$psql_ddl_command\n"; + + if ( my $exitcode = system($psql_ddl_command) == 0 ) { + print "Executed DDL\n"; + } else { + print "Failed to execute DDL.\n"; + } + + print "Loading reference data\n"; + my $psql_init_command = + $psql_exec + . "/psql -q " + . " -h " . $CONFIG_DEFAULTS{'POSTGRES_HOST'} + . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} + . " -d " . $CONFIG_DEFAULTS{'POSTGRES_DB'} + . " " . $CONFIG_DEFAULTS{'POSTGRES_USER'} + . " -f " . $SQL_REFERENCE_DATA; + + print "$psql_init_command\n"; + + if ( my $exitcode = system($psql_init_command ) == 0 ) { + print "Loaded reference data\n"; + } else { + print "Failed to load reference data.\n"; + } + + print "Granting privileges\n"; + my $psql_grant_command = + $psql_exec + . "/psql -q " + . " -h " . $CONFIG_DEFAULTS{'POSTGRES_HOST'} + . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} + . " -d " . $CONFIG_DEFAULTS{'POSTGRES_DB'} + . " " . $CONFIG_DEFAULTS{'POSTGRES_USER'} + . 
" -c \"GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO " . $CONFIG_DEFAULTS{'POSTGRES_USER'} ."; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . "\""; + + if ( my $exitcode = system($psql_grant_command ) == 0 ) { + print "Grant succeeded\n"; + } else { + print "Grant failed.\n"; + } +} + +print "\nInstalling the Glassfish PostgresQL driver\n "; + +my $install_driver_jar = ""; + +$install_driver_jar = $POSTGRES_DRIVERS{ $pg_major_version . "_" . $pg_minor_version }; + +unless ( $install_driver_jar && -e "pgdriver/" . $install_driver_jar ) { + die "Installer could not find POSTGRES JDBC driver for your version of PostgresQL!\n(" + . $pg_major_version . "." + . $pg_minor_version . ")"; + +} + +my $glassfish_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}; +system( "/bin/cp", "pgdriver/" . $install_driver_jar, $glassfish_dir . "/glassfish/lib" ); + + +sub create_pg_hash { + my $pg_username = shift @_; + my $pg_password = shift @_; + + my $encode_line = $pg_password . 
$pg_username; + + # for Redhat: + + + my $hash = `/bin/echo -n $encode_line | md5sum`; + + chop $hash; + + $hash =~ s/ \-$//; + + if ( ( length($hash) != 32 ) || ( $hash !~ /^[0-9a-f]*$/ ) ) { + print STDERR "Failed to generate a MD5-encrypted password hash for the Postgres database.\n"; + exit 1; + } + + return $hash; +} diff --git a/postgresql/install.sh b/postgresql/install.sh new file mode 100644 index 0000000..ae09cea --- /dev/null +++ b/postgresql/install.sh @@ -0,0 +1,4 @@ +cd /opt/testdata +./scripts/deploy/phoenix.dataverse.org/prep +./db.sh +./install # modified from phoenix diff --git a/postgresql/pg_hba.conf b/postgresql/pg_hba.conf new file mode 100644 index 0000000..77feba5 --- /dev/null +++ b/postgresql/pg_hba.conf @@ -0,0 +1,91 @@ +# PostgreSQL Client Authentication Configuration File +# =================================================== +# +# Refer to the "Client Authentication" section in the PostgreSQL +# documentation for a complete description of this file. A short +# synopsis follows. +# +# This file controls: which hosts are allowed to connect, how clients +# are authenticated, which PostgreSQL user names they can use, which +# databases they can access. Records take one of these forms: +# +# local DATABASE USER METHOD [OPTIONS] +# host DATABASE USER ADDRESS METHOD [OPTIONS] +# hostssl DATABASE USER ADDRESS METHOD [OPTIONS] +# hostnossl DATABASE USER ADDRESS METHOD [OPTIONS] +# +# (The uppercase items must be replaced by actual values.) +# +# The first field is the connection type: "local" is a Unix-domain +# socket, "host" is either a plain or SSL-encrypted TCP/IP socket, +# "hostssl" is an SSL-encrypted TCP/IP socket, and "hostnossl" is a +# plain TCP/IP socket. +# +# DATABASE can be "all", "sameuser", "samerole", "replication", a +# database name, or a comma-separated list thereof. The "all" +# keyword does not match "replication". Access to replication +# must be enabled in a separate record (see example below). 
+# +# USER can be "all", a user name, a group name prefixed with "+", or a +# comma-separated list thereof. In both the DATABASE and USER fields +# you can also write a file name prefixed with "@" to include names +# from a separate file. +# +# ADDRESS specifies the set of hosts the record matches. It can be a +# host name, or it is made up of an IP address and a CIDR mask that is +# an integer (between 0 and 32 (IPv4) or 128 (IPv6) inclusive) that +# specifies the number of significant bits in the mask. A host name +# that starts with a dot (.) matches a suffix of the actual host name. +# Alternatively, you can write an IP address and netmask in separate +# columns to specify the set of hosts. Instead of a CIDR-address, you +# can write "samehost" to match any of the server's own IP addresses, +# or "samenet" to match any address in any subnet that the server is +# directly connected to. +# +# METHOD can be "trust", "reject", "md5", "password", "gss", "sspi", +# "krb5", "ident", "peer", "pam", "ldap", "radius" or "cert". Note that +# "password" sends passwords in clear text; "md5" is preferred since +# it sends encrypted passwords. +# +# OPTIONS are a set of options for the authentication in the format +# NAME=VALUE. The available options depend on the different +# authentication methods -- refer to the "Client Authentication" +# section in the documentation for a list of which options are +# available for which authentication methods. +# +# Database and user names containing spaces, commas, quotes and other +# special characters must be quoted. Quoting one of the keywords +# "all", "sameuser", "samerole" or "replication" makes the name lose +# its special character, and just match a database or username with +# that name. +# +# This file is read on server startup and when the postmaster receives +# a SIGHUP signal. If you edit the file on a running system, you have +# to SIGHUP the postmaster for the changes to take effect. 
You can +# use "pg_ctl reload" to do that. + +# Put your actual configuration here +# ---------------------------------- +# +# If you want to allow non-local connections, you need to add more +# "host" records. In that case you will also need to make PostgreSQL +# listen on a non-local interface via the listen_addresses +# configuration parameter, or via the -i or -h command line switches. + + + +# TYPE DATABASE USER ADDRESS METHOD + +# "local" is for Unix domain socket connections only +#local all all peer +local all all trust +# IPv4 local connections: +#host all all 127.0.0.1/32 trust +host all all 0.0.0.0/0 trust +# IPv6 local connections: +host all all ::1/128 trust +# Allow replication connections from localhost, by a user with the +# replication privilege. +#local replication postgres peer +#host replication postgres 127.0.0.1/32 ident +#host replication postgres ::1/128 ident diff --git a/postgresql/testdata/doc/sphinx-guides/source/_static/util/createsequence.sql b/postgresql/testdata/doc/sphinx-guides/source/_static/util/createsequence.sql new file mode 100644 index 0000000..2677832 --- /dev/null +++ b/postgresql/testdata/doc/sphinx-guides/source/_static/util/createsequence.sql @@ -0,0 +1,33 @@ +-- A script for creating a numeric identifier sequence, and an external +-- stored procedure, for accessing the sequence from inside the application, +-- in a non-hacky, JPA way. + +-- NOTE: + +-- 1. The database user name "dvnapp" is hard-coded here - it may +-- need to be changed to match your database user name; + +-- 2. In the code below, the sequence starts with 1, but it can be adjusted by +-- changing the MINVALUE as needed. 
+ +CREATE SEQUENCE datasetidentifier_seq + INCREMENT 1 + MINVALUE 1 + MAXVALUE 9223372036854775807 + START 1 +CACHE 1; + +ALTER TABLE datasetidentifier_seq OWNER TO "dvnapp"; + +-- And now create a PostgreSQL FUNCTION, for JPA to +-- access as a NamedStoredProcedure: + +CREATE OR REPLACE FUNCTION generateIdentifierAsSequentialNumber( + OUT identifier int) + RETURNS int AS +$BODY$ +BEGIN + select nextval('datasetidentifier_seq') into identifier; +END; +$BODY$ + LANGUAGE plpgsql; diff --git a/postgresql/testdata/doc/sphinx-guides/source/_static/util/pg8-createsequence-prep.sql b/postgresql/testdata/doc/sphinx-guides/source/_static/util/pg8-createsequence-prep.sql new file mode 100644 index 0000000..740ba6c --- /dev/null +++ b/postgresql/testdata/doc/sphinx-guides/source/_static/util/pg8-createsequence-prep.sql @@ -0,0 +1,21 @@ +-- handle absence of CREATE OR REPLACE LANGUAGE for postgresql 8.4 or older +-- courtesy of the postgres wiki: https://wiki.postgresql.org/wiki/CREATE_OR_REPLACE_LANGUAGE +CREATE OR REPLACE FUNCTION make_plpgsql() +RETURNS VOID +LANGUAGE SQL +AS $$ +CREATE LANGUAGE plpgsql; +$$; + +SELECT + CASE + WHEN EXISTS( + SELECT 1 + FROM pg_catalog.pg_language + WHERE lanname='plpgsql' + ) + THEN NULL + ELSE make_plpgsql() END; + +DROP FUNCTION make_plpgsql(); + diff --git a/postgresql/testdata/jhove.conf b/postgresql/testdata/jhove.conf new file mode 100644 index 0000000..261a2e1 --- /dev/null +++ b/postgresql/testdata/jhove.conf @@ -0,0 +1,43 @@ + + + /usr/local/src/jhove + utf-8 + /tmp + 131072 + 1.0 + 1024 + + edu.harvard.hul.ois.jhove.module.AiffModule + + + edu.harvard.hul.ois.jhove.module.WaveModule + + + edu.harvard.hul.ois.jhove.module.PdfModule + + + edu.harvard.hul.ois.jhove.module.Jpeg2000Module + + + edu.harvard.hul.ois.jhove.module.JpegModule + + + edu.harvard.hul.ois.jhove.module.GifModule + + + edu.harvard.hul.ois.jhove.module.TiffModule + + + edu.harvard.hul.ois.jhove.module.HtmlModule + + + edu.harvard.hul.ois.jhove.module.AsciiModule 
+ + + edu.harvard.hul.ois.jhove.module.Utf8Module + + diff --git a/postgresql/testdata/schema.xml b/postgresql/testdata/schema.xml new file mode 100644 index 0000000..323429b --- /dev/null +++ b/postgresql/testdata/schema.xml @@ -0,0 +1,1692 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/postgresql/testdata/scripts/api/bin/list-dvs b/postgresql/testdata/scripts/api/bin/list-dvs new file mode 100755 index 0000000..6daa07b --- /dev/null +++ b/postgresql/testdata/scripts/api/bin/list-dvs @@ -0,0 +1,2 @@ +curl http://localhost:8080/api/dvs +echo diff --git a/postgresql/testdata/scripts/api/data-deposit/create-dataset b/postgresql/testdata/scripts/api/data-deposit/create-dataset new file mode 100755 index 0000000..437f05f --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/create-dataset @@ -0,0 +1,6 @@ +#!/bin/bash +. 
scripts/search/export-keys +SERVER=localhost:8181 +DATAVERSE_ALIAS=trees +curl -s --insecure --data-binary "@doc/sphinx-guides/source/api/sword-atom-entry.xml" -H "Content-Type: application/atom+xml" -u $SPRUCEKEY: https://$SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ +| xmllint -format - diff --git a/postgresql/testdata/scripts/api/data-deposit/create-dataset-805-rights-license b/postgresql/testdata/scripts/api/data-deposit/create-dataset-805-rights-license new file mode 100755 index 0000000..0ac7462 --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/create-dataset-805-rights-license @@ -0,0 +1,7 @@ +#!/bin/bash -x +USERNAME=spruce +PASSWORD=spruce +SERVER=localhost:8181 +DATAVERSE_ALIAS=spruce +curl -s --insecure --data-binary "@scripts/search/tests/data/dataset-trees1.xml" -H "Content-Type: application/atom+xml" -u $USERNAME:$PASSWORD https://$SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ +| xmllint -format - diff --git a/postgresql/testdata/scripts/api/data-deposit/create-dataset-894-invisible-character b/postgresql/testdata/scripts/api/data-deposit/create-dataset-894-invisible-character new file mode 100755 index 0000000..72d9d6f --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/create-dataset-894-invisible-character @@ -0,0 +1,7 @@ +#!/bin/bash +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +DATAVERSE_ALIAS=peteTop +curl -s --insecure --data-binary "@scripts/api/data-deposit/data/atom-entry-study-894-invisible-character.xml" -H "Content-Type: application/atom+xml" -u $USERNAME:$PASSWORD https://$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ +| xmllint -format - diff --git a/postgresql/testdata/scripts/api/data-deposit/create-dataset-899-expansion b/postgresql/testdata/scripts/api/data-deposit/create-dataset-899-expansion new file mode 100755 index 0000000..513f6e6 --- /dev/null +++ 
b/postgresql/testdata/scripts/api/data-deposit/create-dataset-899-expansion @@ -0,0 +1,7 @@ +#!/bin/bash +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +DATAVERSE_ALIAS=peteTop +curl -s --insecure --data-binary "@scripts/api/data-deposit/data/atom-entry-study-899-expansion.xml" -H "Content-Type: application/atom+xml" -u $USERNAME:$PASSWORD https://$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ +| xmllint -format - diff --git a/postgresql/testdata/scripts/api/data-deposit/data/atom-entry-study-894-invisible-character.xml b/postgresql/testdata/scripts/api/data-deposit/data/atom-entry-study-894-invisible-character.xml new file mode 100644 index 0000000..1264f9f --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/data/atom-entry-study-894-invisible-character.xml @@ -0,0 +1,37 @@ + + + + + smoke36_study + last, first + Sotiri, elda + +  + + Peets, J., & Stumptown, J. (2013). Roasting at Home. New England Journal of Coffee, 3(1), 22-34. + + 2013-07-11 + + + Considerations before you start roasting your own coffee at home. + + drink + beverage + caffeine + + United States + World + + aggregate data + + . something something something something + + Creative Commons CC-BY 3.0 (unported) http://creativecommons.org/licenses/by/3.0/ + + Peets, John. 2010. Roasting Coffee at the Coffee Shop. 
Coffeemill Press + diff --git a/postgresql/testdata/scripts/api/data-deposit/data/atom-entry-study-899-expansion.xml b/postgresql/testdata/scripts/api/data-deposit/data/atom-entry-study-899-expansion.xml new file mode 100644 index 0000000..c06f5ad --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/data/atom-entry-study-899-expansion.xml @@ -0,0 +1,26 @@ + + + “Changes in test-taking patterns over time” concerning the Flynn Effect in Estonia + + The dataset from our previous Intelligence paper consists of data collected with the National Intelligence Tests (NIT, Estonian adaptation) in two historical time points: in 1934/36 (N=890) and 2006 (N=913) for students with an average age of 13. The data-file consists of information about cohort, age, and gender and test results at the item level for nine of the ten NIT subtests and subtest scores for the 10th subtest. Three answer types are separated: right answer, wrong answer and missing answer. Data can be used for psychometric research of cohort and sex differences at the scale and item level. + + Must, Olev + Must, Aasa + Estonian Scientific Foundation: grant no 2387 and 5856. European Social Fund: a Primus grant (#3-8.2/60) to Anu Realo. Baylor University financial support for data quality control in archive. + + Insert Dataset publisher + Journal copyright, license or terms of use notice + + + 2014-09-22 + + Must, O., & Must, A. (2014). Sample submission. Journal Of Plugin Testing, 1(2). 
+ + Academic discipline + Subject classification + Article keywords + Geographic coverage + + Keyword 1, keyword 2, keyword 3 + Data Set + diff --git a/postgresql/testdata/scripts/api/data-deposit/data/example.zip b/postgresql/testdata/scripts/api/data-deposit/data/example.zip new file mode 100644 index 0000000..8870dd7 Binary files /dev/null and b/postgresql/testdata/scripts/api/data-deposit/data/example.zip differ diff --git a/postgresql/testdata/scripts/api/data-deposit/dataset-field b/postgresql/testdata/scripts/api/data-deposit/dataset-field new file mode 100755 index 0000000..5d01d43 --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/dataset-field @@ -0,0 +1,6 @@ +#!/bin/sh +if [ -z "$1" ]; then + curl -s http://localhost:8080/api/datasetfield +else + curl -s http://localhost:8080/api/datasetfield/$1 +fi diff --git a/postgresql/testdata/scripts/api/data-deposit/delete-dataset b/postgresql/testdata/scripts/api/data-deposit/delete-dataset new file mode 100755 index 0000000..68f35c2 --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/delete-dataset @@ -0,0 +1,13 @@ +#!/bin/bash -x +USERNAME=spruce +PASSWORD=spruce +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/17 +fi +#curl --insecure -X DELETE https://$DVN_SERVER/api/datasets/$DATABASE_ID?key=$USERNAME +curl --insecure -i -X DELETE -u $USERNAME:$PASSWORD https://$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID +#| xmllint -format - diff --git a/postgresql/testdata/scripts/api/data-deposit/delete-file b/postgresql/testdata/scripts/api/data-deposit/delete-file new file mode 100755 index 0000000..1e2f50d --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/delete-file @@ -0,0 +1,14 @@ +#!/bin/bash -x +USERNAME=spruce +PASSWORD=spruce +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + 
FILE_ID=`scripts/api/data-deposit/show-files | cut -d'/' -f1` + #echo $FILE_ID + #exit +else + FILE_ID=$1 +fi +#curl --insecure -X DELETE https://$DVN_SERVER/api/datasets/$DATABASE_ID?key=$USERNAME +curl --insecure -i -X DELETE https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/file/$FILE_ID +#| xmllint -format - diff --git a/postgresql/testdata/scripts/api/data-deposit/edit-dataset-1430-edit-subject b/postgresql/testdata/scripts/api/data-deposit/edit-dataset-1430-edit-subject new file mode 100755 index 0000000..b7fb606 --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/edit-dataset-1430-edit-subject @@ -0,0 +1,13 @@ +#!/bin/sh +# not working right now: SWORD: "Replacing metadata for a dataset" broken, throws exception - https://github.com/IQSS/dataverse/issues/1554 +USERNAME=spruce +PASSWORD=spruce +SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi +curl --insecure --upload-file "scripts/search/tests/data/dataset-trees1-edit.xml" -H "Content-Type: application/atom+xml" https://$USERNAME:$PASSWORD@$SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ +| xmllint -format - \ diff --git a/postgresql/testdata/scripts/api/data-deposit/edit-dataset-805-rights-license b/postgresql/testdata/scripts/api/data-deposit/edit-dataset-805-rights-license new file mode 100755 index 0000000..cbc7fbb --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/edit-dataset-805-rights-license @@ -0,0 +1,12 @@ +#!/bin/sh +USERNAME=spruce +PASSWORD=spruce +SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi +curl --insecure --upload-file "scripts/search/tests/data/dataset-trees1-edit.xml" -H "Content-Type: application/atom+xml" 
https://$USERNAME:$PASSWORD@$SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ +| xmllint -format - \ diff --git a/postgresql/testdata/scripts/api/data-deposit/get b/postgresql/testdata/scripts/api/data-deposit/get new file mode 100755 index 0000000..c7361a2 --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/get @@ -0,0 +1,9 @@ +#!/bin/sh +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + echo "Please provide a URL to GET" + exit 1 +fi +curl --insecure -s -u $USERNAME:$PASSWORD $1 | xmllint -format - diff --git a/postgresql/testdata/scripts/api/data-deposit/list-datasets b/postgresql/testdata/scripts/api/data-deposit/list-datasets new file mode 100755 index 0000000..20a4681 --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/list-datasets @@ -0,0 +1,12 @@ +#!/bin/bash -x +USERNAME=spruce +PASSWORD=spruce +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + DATAVERSE_ALIAS=spruce + #DATAVERSE_ALIAS=root +else + DATAVERSE_ALIAS=$1 +fi +curl --insecure -s -u $USERNAME:$PASSWORD https://$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ +| xmllint -format - diff --git a/postgresql/testdata/scripts/api/data-deposit/pipeline b/postgresql/testdata/scripts/api/data-deposit/pipeline new file mode 100755 index 0000000..52e110d --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/pipeline @@ -0,0 +1,55 @@ +#!/usr/bin/env ruby +require "rexml/document" +include REXML + +def pp (ugly) + formatter = REXML::Formatters::Pretty.new(2) + formatter.compact = true + formatter.write(ugly, $stdout) + puts +end + +service_document = Document.new `scripts/api/data-deposit/service-document` +regex = 'peteTop' +col1 = nil +XPath.each(service_document, "//collection/@href") { |href| + #if href.to_s.match(/col1/) + if href.to_s.match(/#{regex}/) + col1 = href + end +} + +if (!col1) + puts "Hmm. 
We expected to find #{regex}" + exit 1 +end + +puts "GET of #{col1}" +feed_of_studies = Document.new `scripts/api/data-deposit/get #{col1}` +#pp(feed_of_studies) + +first = XPath.first(feed_of_studies, "//entry") +if (!first) + puts "Have you created any datasets in #{col1} ?" + exit 1 +end +#pp(first) +id = XPath.first(first, "//id").text +puts "GET of #{id}" +entry = Document.new `scripts/api/data-deposit/get #{id}` +permalink = XPath.first(entry, "//link[@rel='alternate']/@href").to_s +permalink_last = permalink.split('/')[-1] +id_last = id.split('/')[-1] +if (id_last.to_s != permalink_last.to_s) + puts "WARNING: mismatch between dataset id (#{id_last}) and permalink (#{permalink_last}): https://github.com/IQSS/dataverse/issues/758" +end +# +#pp(entry) +#edit = XPath.first(entry, "//[@rel='edit']") +#puts edit +statement_link = XPath.first(entry, "//link[@rel='http://purl.org/net/sword/terms/statement']/@href") +puts "GET of #{statement_link}" +statement = Document.new `scripts/api/data-deposit/get #{statement_link}` +#pp(statement) +state = XPath.first(statement, "//category[@term='latestVersionState']").text +puts state diff --git a/postgresql/testdata/scripts/api/data-deposit/publish-dataset b/postgresql/testdata/scripts/api/data-deposit/publish-dataset new file mode 100755 index 0000000..5325a5c --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/publish-dataset @@ -0,0 +1,14 @@ +#!/bin/sh +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi +# We cat /dev/null so that contentLength is zero. 
This makes headersOnly true:: https://github.com/swordapp/JavaServer2.0/blob/sword2-server-1.0/src/main/java/org/swordapp/server/ContainerAPI.java#L338 +# 'to tell curl to read the format from stdin you write "@-"' -- http://curl.haxx.se/docs/manpage.html +cat /dev/null | curl -s --insecure -X POST -H "In-Progress: false" --data-binary @- https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ +| xmllint --format - diff --git a/postgresql/testdata/scripts/api/data-deposit/publish-dataverse b/postgresql/testdata/scripts/api/data-deposit/publish-dataverse new file mode 100755 index 0000000..fa22c4b --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/publish-dataverse @@ -0,0 +1,13 @@ +#!/bin/sh +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + echo "Please supply a dataverse alias" + exit 1 +else + DATAVERSE_ALIAS=$1 + #DATAVERSE_ALIAS=peteTop +fi +cat /dev/null | curl -s --insecure -X POST -H "In-Progress: false" --data-binary @- https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/dataverse/$DATAVERSE_ALIAS \ +| xmllint --format - diff --git a/postgresql/testdata/scripts/api/data-deposit/replace-dataset-metadata b/postgresql/testdata/scripts/api/data-deposit/replace-dataset-metadata new file mode 100755 index 0000000..35cdf09 --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/replace-dataset-metadata @@ -0,0 +1,12 @@ +#!/bin/sh +USERNAME=pete +PASSWORD=pete +SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi +curl --insecure --upload-file "scripts/search/tests/data/dataset-versioning03-setup.xml" -H "Content-Type: application/atom+xml" https://$USERNAME:$PASSWORD@$SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ +| xmllint -format - \ diff --git 
a/postgresql/testdata/scripts/api/data-deposit/service-document b/postgresql/testdata/scripts/api/data-deposit/service-document new file mode 100755 index 0000000..f59b5dc --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/service-document @@ -0,0 +1,12 @@ +#!/bin/bash +. scripts/search/export-keys +if [ -z "$1" ]; then + HOSTNAME=localhost:8181 +else + HOSTNAME=$1 +fi +URL=https://$HOSTNAME/dvn/api/data-deposit/v1/swordv2/service-document +echo Retrieving service document from $URL >&2 +OUTPUT=`curl -s --insecure -u $ADMINKEY: $URL` +echo $OUTPUT +echo $OUTPUT | xmllint -format - diff --git a/postgresql/testdata/scripts/api/data-deposit/show-atom-entry b/postgresql/testdata/scripts/api/data-deposit/show-atom-entry new file mode 100755 index 0000000..fee29cf --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/show-atom-entry @@ -0,0 +1,12 @@ +#!/bin/bash +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi +curl --insecure -s -u $USERNAME:$PASSWORD https://$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ +| xmllint -format - diff --git a/postgresql/testdata/scripts/api/data-deposit/show-files b/postgresql/testdata/scripts/api/data-deposit/show-files new file mode 100755 index 0000000..9cf93fe --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/show-files @@ -0,0 +1,3 @@ +#!/bin/sh +#scripts/api/data-deposit/show-statement | xpath "//entry/content/@*[name()='type' or name()='src']" +scripts/api/data-deposit/show-statement | xpath '//entry/id/text()' | cut -d'/' -f11,12,13 diff --git a/postgresql/testdata/scripts/api/data-deposit/show-statement b/postgresql/testdata/scripts/api/data-deposit/show-statement new file mode 100755 index 0000000..7170ab7 --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/show-statement @@ 
-0,0 +1,13 @@ +#!/bin/sh +USERNAME=spruce +PASSWORD=spruce +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi +curl --insecure -s https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/statement/study/$GLOBAL_ID \ +| xmllint -format - \ +#| xpath '//entry/title' diff --git a/postgresql/testdata/scripts/api/data-deposit/unsupported-download-files b/postgresql/testdata/scripts/api/data-deposit/unsupported-download-files new file mode 100755 index 0000000..cefe963 --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/unsupported-download-files @@ -0,0 +1,12 @@ +#!/bin/sh +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi +curl -s --insecure https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/study/$GLOBAL_ID \ +| xmllint -format - diff --git a/postgresql/testdata/scripts/api/data-deposit/upload-file b/postgresql/testdata/scripts/api/data-deposit/upload-file new file mode 100755 index 0000000..576603d --- /dev/null +++ b/postgresql/testdata/scripts/api/data-deposit/upload-file @@ -0,0 +1,13 @@ +#!/bin/bash -x +USERNAME=spruce +PASSWORD=spruce +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + EDIT_MEDIA_URL=`scripts/api/data-deposit/list-datasets | xpath 'string(//link/@href)' 2>/dev/null` +else + EDIT_MEDIA_URL=$1 +fi +curl -s --insecure --data-binary @scripts/search/data/binary/trees.zip -H "Content-Disposition: filename=trees.zip" -H "Content-Type: application/zip" -H "Packaging: http://purl.org/net/sword/package/SimpleZip" -u $USERNAME:$PASSWORD $EDIT_MEDIA_URL \ +| xmllint -format - +#curl -s --insecure --data-binary @scripts/search/data/binary/trees.zip -H 
"Content-Disposition: filename=trees.zip" -H "Content-Type: application/zip" -H "Packaging: http://purl.org/net/sword/package/SimpleZip" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/study/doi:10.5072/FK2/19 \ +#| xmllint -format - diff --git a/postgresql/testdata/scripts/api/data/authentication-providers/base-oauth.json b/postgresql/testdata/scripts/api/data/authentication-providers/base-oauth.json new file mode 100644 index 0000000..3d01cac --- /dev/null +++ b/postgresql/testdata/scripts/api/data/authentication-providers/base-oauth.json @@ -0,0 +1,8 @@ +{ + "id":"base-oauth", + "factoryAlias":"oauth2", + "title":"sample base definition file for oauth2 providers.", + "subtitle":"A base file, though - do not run this.", + "factoryData":"type:idOfOAuthService | name1: value1|name2: value2 value2.1 value 2.1.1 | name: value42", + "enabled":true +} diff --git a/postgresql/testdata/scripts/api/data/authentication-providers/builtin.json b/postgresql/testdata/scripts/api/data/authentication-providers/builtin.json new file mode 100644 index 0000000..1c98e6c --- /dev/null +++ b/postgresql/testdata/scripts/api/data/authentication-providers/builtin.json @@ -0,0 +1,8 @@ +{ + "id":"builtin", + "factoryAlias":"BuiltinAuthenticationProvider", + "title":"Dataverse Local", + "subtitle":"Datavers' Internal Authentication provider", + "factoryData":"", + "enabled":true +} diff --git a/postgresql/testdata/scripts/api/data/authentication-providers/echo-dignified.json b/postgresql/testdata/scripts/api/data/authentication-providers/echo-dignified.json new file mode 100644 index 0000000..177fd12 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/authentication-providers/echo-dignified.json @@ -0,0 +1,8 @@ +{ + "id":"echo-dignified", + "factoryAlias":"Echo", + "title":"Dignified Echo provider", + "subtitle":"Approves everyone, based on their credentials, and adds some flair", + "factoryData":"Sir,Esq.", + "enabled":true +} diff --git 
a/postgresql/testdata/scripts/api/data/authentication-providers/orcid-sandbox.json b/postgresql/testdata/scripts/api/data/authentication-providers/orcid-sandbox.json new file mode 100644 index 0000000..3a1c311 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/authentication-providers/orcid-sandbox.json @@ -0,0 +1,8 @@ +{ + "id":"orcid-v2-sandbox", + "factoryAlias":"oauth2", + "title":"ORCID Sandbox", + "subtitle":"ORCiD - sandbox (v2)", + "factoryData":"type: orcid | userEndpoint: https://api.sandbox.orcid.org/v2.0/{ORCID}/person | clientId: APP-HIV99BRM37FSWPH6 | clientSecret: ee844b70-f223-4f15-9b6f-4991bf8ed7f0", + "enabled":true +} diff --git a/postgresql/testdata/scripts/api/data/dataset-bad-missingInitialVersion.json b/postgresql/testdata/scripts/api/data/dataset-bad-missingInitialVersion.json new file mode 100644 index 0000000..8557020 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/dataset-bad-missingInitialVersion.json @@ -0,0 +1,6 @@ +{ + "authority": "anAuthority", + "identifier": "dataset-one", + "persistentUrl": "http://dx.doi.org/10.5072/FK2/9", + "protocol": "chadham-house-rule" +} \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/dataset-create-new.json b/postgresql/testdata/scripts/api/data/dataset-create-new.json new file mode 100644 index 0000000..0017da1 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/dataset-create-new.json @@ -0,0 +1,124 @@ +{ + "authority": "anAuthority", + "identifier": "dataset-one", + "persistentUrl": "http://dx.doi.org/10.5072/FK2/9", + "protocol": "chadham-house-rule", + "datasetVersion": { + "metadataBlocks": { + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "typeName": "title", + "multiple": false, + "typeClass": "primitive", + "value": "Dataset One" + }, + { + "typeName": "author", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "authorName": { + "typeName": "authorName", + "multiple": false, + "typeClass": "primitive", + 
"value": "Smith, Robert" + }, + "authorAffiliation": { + "typeName": "authorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "The Smiths" + } + }, + { + "authorName": { + "typeName": "authorName", + "multiple": false, + "typeClass": "primitive", + "value": "Kew, Susie" + }, + "authorAffiliation": { + "typeName": "authorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Creedence Clearwater Revival" + } + } + ] + }, + { + "typeName": "datasetContact", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "datasetContactName": { + "typeName": "datasetContactName", + "multiple": false, + "typeClass": "primitive", + "value": "Dataverse, Admin" + }, + "datasetContactAffiliation": { + "typeName": "datasetContactAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Sample Datasets, inc." + }, + "datasetContactEmail": { + "typeName": "datasetContactEmail", + "multiple": false, + "typeClass": "primitive", + "value": "sammi@sample.com" + } + } + ] + }, + { + "typeName": "dsDescription", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "multiple": false, + "typeClass": "primitive", + "value": "This is a short text blurb describing the dataset. It is very informative and somewhat self-describing." 
+ }, + "dsDescriptionDate": { + "typeName": "dsDescriptionDate", + "multiple": false, + "typeClass": "primitive", + "value": "2015-01-14" + } + } + ] + }, + { + "typeName": "subject", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "Chemistry" + ] + }, + { + "typeName": "depositor", + "multiple": false, + "typeClass": "primitive", + "value": "Admin Dataverse" + }, + { + "typeName": "dateOfDeposit", + "multiple": false, + "typeClass": "primitive", + "value": "2015-01-14" + } + ] + } + } + } +} \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/dataset-create-new2.json b/postgresql/testdata/scripts/api/data/dataset-create-new2.json new file mode 100644 index 0000000..14fabee --- /dev/null +++ b/postgresql/testdata/scripts/api/data/dataset-create-new2.json @@ -0,0 +1,110 @@ +{ + "authority": "anAuthority", + "identifier": "dataset-two", + "persistentUrl": "http://dx.doi.org/10.5072/FK2/9", + "protocol": "chadham-house-rule", + "datasetVersion": { + "metadataBlocks": { + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "typeName": "title", + "multiple": false, + "typeClass": "primitive", + "value": "Dataset Two" + }, + { + "typeName": "author", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "authorName": { + "typeName": "authorName", + "multiple": false, + "typeClass": "primitive", + "value": "Gironi, Moe" + }, + "authorAffiliation": { + "typeName": "authorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Ciao Bella Ristorante" + } + } + ] + }, + { + "typeName": "datasetContact", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "datasetContactName": { + "typeName": "datasetContactName", + "multiple": false, + "typeClass": "primitive", + "value": "Dataverse, Admin" + }, + "datasetContactAffiliation": { + "typeName": "datasetContactAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Sample Datasets, ltd." 
+ }, + "datasetContactEmail": { + "typeName": "datasetContactEmail", + "multiple": false, + "typeClass": "primitive", + "value": "susie@sample.com" + } + } + ] + }, + { + "typeName": "dsDescription", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "multiple": false, + "typeClass": "primitive", + "value": "Description field filled with descriptions. " + }, + "dsDescriptionDate": { + "typeName": "dsDescriptionDate", + "multiple": false, + "typeClass": "primitive", + "value": "2015-01-19" + } + } + ] + }, + { + "typeName": "subject", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "Chemistry" + ] + }, + { + "typeName": "depositor", + "multiple": false, + "typeClass": "primitive", + "value": "D. Positor" + }, + { + "typeName": "dateOfDeposit", + "multiple": false, + "typeClass": "primitive", + "value": "2015-01-19" + } + ] + } + } + } +} \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/dataset-create-new3.json b/postgresql/testdata/scripts/api/data/dataset-create-new3.json new file mode 100644 index 0000000..d643ead --- /dev/null +++ b/postgresql/testdata/scripts/api/data/dataset-create-new3.json @@ -0,0 +1,258 @@ +{ + "authority": "anAuthority", + "identifier": "dataset-three", + "persistentUrl": "http://dx.doi.org/10.5072/FK2/9", + "protocol": "chadham-house-rule", + "datasetVersion": { + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "SampleTitle", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorAffiliation": { + "value": "Top", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "Privileged, Pete", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + "authorAffiliation": { + "value": "UMASS, Amherst", + "typeClass": "primitive", + "multiple": false, + 
"typeName": "authorAffiliation" + }, + "authorName": { + "value": "Borrator, Colla", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "typeName": "datasetContact", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "datasetContactName": { + "typeName": "datasetContactName", + "multiple": false, + "typeClass": "primitive", + "value": "Dataverse, Admin" + }, + "datasetContactAffiliation": { + "typeName": "datasetContactAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Sample Datasets, ltd." + }, + "datasetContactEmail": { + "typeName": "datasetContactEmail", + "multiple": false, + "typeClass": "primitive", + "value": "susie@sample.com" + } + } + ] + }, + { + "typeName": "dsDescription", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "multiple": false, + "typeClass": "primitive", + "value": "Description field filled with descriptions. 
" + }, + "dsDescriptionDate": { + "typeName": "dsDescriptionDate", + "multiple": false, + "typeClass": "primitive", + "value": "2015-01-19" + } + } + ] + }, + { + "value": [ + "Arts and Humanities", + "Astronomy and Astrophysics" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + }, + { + "value": "Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note.\r\nNOTANOTANOTANOTANnot.e\r\n", + "typeClass": "primitive", + "multiple": false, + "typeName": "notesText" + }, + { + "typeName": "distributor", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "distributorName": { + "typeName": "distributorName", + "multiple": false, + "typeClass": "primitive", + "value": "Ibutor, Dist r." 
+ }, + "distributorAffiliation": { + "typeName": "distributorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Science Mag" + }, + "distributorAbbreviation": { + "typeName": "distributorAbbreviation", + "multiple": false, + "typeClass": "primitive", + "value": "dst" + } + } + ] + }, + { + "value": [ + { + "otherIdAgency": { + "value": "NSF", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdAgency" + }, + "otherIdValue": { + "value": "NSF1234", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "otherId" + }, + { + "value": "2014-02-03", + "typeClass": "primitive", + "multiple": false, + "typeName": "productionDate" + }, + { + "value": "Cambridge, MA", + "typeClass": "primitive", + "multiple": false, + "typeName": "productionPlace" + }, + { + "value": [ + { + "grantNumberAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberAgency" + }, + "grantNumberValue": { + "value": "NIH1231245154", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberValue" + } + }, + { + "grantNumberAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberAgency" + }, + "grantNumberValue": { + "value": "NIH99999999", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "grantNumber" + }, + { + "value": "Privileged, Pete", + "typeClass": "primitive", + "multiple": false, + "typeName": "depositor" + }, + { + "value": "2014-05-20", + "typeClass": "primitive", + "multiple": false, + "typeName": "dateOfDeposit" + }, + { + "value": [ + "Bananas" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "relatedMaterial" + }, + { + "value": [ + "Data about bananas" + ], + "typeClass": "primitive", + "multiple": true, + 
"typeName": "relatedDatasets" + }, + { + "value": [ + "other ref other ref other ref ef ef" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "otherReferences" + } + ], + "displayName": "Citation Metadata" + } + }, + "createTime": "2014-05-20 11:52:55 -04", + "UNF": "UNF", + "id": 1, + "versionNumber": 1, + "versionMinorNumber": 0, + "versionState": "DRAFT", + "distributionDate": "Distribution Date", + "productionDate": "Production Date" + } +} \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/dataset-updated-version.json b/postgresql/testdata/scripts/api/data/dataset-updated-version.json new file mode 100644 index 0000000..b39956a --- /dev/null +++ b/postgresql/testdata/scripts/api/data/dataset-updated-version.json @@ -0,0 +1,241 @@ +{ + "createTime": "2014-05-20 11:52:55 -04", + "UNF": "UNF", + "versionNumber": 1, + "versionMinorNumber": 0, + "versionState": "DRAFT", + "distributionDate": "Distribution Date", + "productionDate": "Production Date", + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "UpdatedTitle", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorAffiliation": { + "value": "Tippie Top", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "McPrivileged, Pete", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + "authorAffiliation": { + "value": "UNC", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "Borrator, Colla", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + "authorAffiliation": { + "value": "NASA", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "Naut, Astro", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + } + ], 
+ "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": "Lorem ipsum dolor sit amet, consectetur adipisicing elit. Quos, eos, natus soluta porro harum beatae voluptatem unde rerum eius quaerat officiis maxime autem asperiores facere.", + "typeClass": "primitive", + "multiple": false, + "typeName": "dsDescription" + }, + { + "value": [ + "kw10", + "kw20", + "kw30" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "keyword" + }, + { + "value": [ + "Arts and Humanities", + "Astronomy and Astrophysics" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + }, + { + "value": "Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note.\r\nNOTANOTANOTANOTANnot.e\r\n", + "typeClass": "primitive", + "multiple": false, + "typeName": "notesText" + }, + { + "value": [ + { + "otherIdAgency": { + "value": "NSF", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdAgency" + }, + "otherIdValue": { + "value": "NSF1234", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdValue" + } + }, + { + "otherIdAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdAgency" + }, + "otherIdValue": { + "value": "NIH98765", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "otherId" + }, + { + "value": [ + { + "contributorName": { + "value": "Dennis", + "typeClass": "primitive", + "multiple": false, + "typeName": "contributorName" + }, + "contributorType": { + "value": "Funder", + "typeClass": "controlledVocabulary", + "multiple": 
false, + "typeName": "contributorType" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "contributor" + }, + { + "value": "2014-02-03", + "typeClass": "primitive", + "multiple": false, + "typeName": "productionDate" + }, + { + "value": "Cambridge, UK", + "typeClass": "primitive", + "multiple": false, + "typeName": "productionPlace" + }, + { + "value": [ + { + "grantNumberAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberAgency" + }, + "grantNumberValue": { + "value": "NIH1231245154", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberValue" + } + }, + { + "grantNumberAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberAgency" + }, + "grantNumberValue": { + "value": "NIH99999999", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "grantNumber" + }, + { + "value": "Privileged, Pete", + "typeClass": "primitive", + "multiple": false, + "typeName": "depositor" + }, + { + "value": "2014-05-20", + "typeClass": "primitive", + "multiple": false, + "typeName": "dateOfDeposit" + }, + { + "value": [ + "Bananas" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "relatedMaterial" + }, + { + "value": [ + "Data about bananas" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "relatedDatasets" + }, + { + "value": [ + "other ref other ref other ref ef ef" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "otherReferences" + } + ], + "displayName": "Citation Metadata" + } + } +} \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/dataset-updated-version2.json b/postgresql/testdata/scripts/api/data/dataset-updated-version2.json new file mode 100644 index 0000000..f173eef --- /dev/null +++ 
b/postgresql/testdata/scripts/api/data/dataset-updated-version2.json @@ -0,0 +1,218 @@ +{ + "UNF": "UNF", + "versionNumber": 1, + "versionMinorNumber": 0, + "versionState": "DRAFT", + "distributionDate": "Distribution Date", + "productionDate": "Production Date", + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "This is another title", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorAffiliation": { + "value": "Tippie Top", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "McPrivileged, Pete", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + "authorAffiliation": { + "value": "Uber Under", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "McNew, Oldie", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + "authorAffiliation": { + "value": "UNC", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "Borrator, Colla", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + "authorAffiliation": { + "value": "NASA", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "Naut, Astro", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + + { + "value": [ + "Arts and Humanities", + "Astronomy and Astrophysics" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + }, + { + "value": "Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note 
note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note.\r\nNOTANOTANOTANOTANnot.e\r\n", + "typeClass": "primitive", + "multiple": false, + "typeName": "notesText" + }, + { + "value": [ + { + "otherIdAgency": { + "value": "NSF", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdAgency" + }, + "otherIdValue": { + "value": "NSF1234", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdValue" + } + }, + { + "otherIdAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdAgency" + }, + "otherIdValue": { + "value": "NIH98765", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "otherId" + }, + { + "value": "2014-02-03", + "typeClass": "primitive", + "multiple": false, + "typeName": "productionDate" + }, + { + "value": "Cambridge, UK", + "typeClass": "primitive", + "multiple": false, + "typeName": "productionPlace" + }, + { + "value": [ + { + "grantNumberAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberAgency" + }, + "grantNumberValue": { + "value": "NIH1231245154", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberValue" + } + }, + { + "grantNumberAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberAgency" + }, + "grantNumberValue": { + "value": "NIH99999999", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "grantNumber" + }, + { + "value": "Privileged, Pete", + "typeClass": "primitive", + "multiple": false, + "typeName": "depositor" + }, + { + "value": "2014-05-20", + "typeClass": "primitive", + "multiple": false, + "typeName": "dateOfDeposit" + }, + { + 
"value": [ + "Bananas" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "relatedMaterial" + }, + { + "value": [ + "Data about bananas" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "relatedDatasets" + }, + { + "value": [ + "other ref other ref other ref ef ef" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "otherReferences" + } + ], + "displayName": "Citation Metadata" + } + } +} \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/dataset-version.json b/postgresql/testdata/scripts/api/data/dataset-version.json new file mode 100644 index 0000000..7037082 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/dataset-version.json @@ -0,0 +1,110 @@ +{ + "id": 2, + "versionState": "DRAFT", + "distributionDate": "Distribution Date", + "productionDate": "Production Date", + "lastUpdateTime": "2015-01-14 05:58:27 +02", + "createTime": "2015-01-14 05:48:30 +02", + "metadataBlocks": { + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "typeName": "title", + "multiple": false, + "typeClass": "primitive", + "value": "Sample-published-dataset (updated)" + }, + { + "typeName": "author", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "authorName": { + "typeName": "authorName", + "multiple": false, + "typeClass": "primitive", + "value": "Kew, Susie" + }, + "authorAffiliation": { + "typeName": "authorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Creedence Clearwater Revival" + } + } + ] + }, + { + "typeName": "datasetContact", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "datasetContactName": { + "typeName": "datasetContactName", + "multiple": false, + "typeClass": "primitive", + "value": "Dataverse, Admin" + }, + "datasetContactAffiliation": { + "typeName": "datasetContactAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Dataverse" + }, + "datasetContactEmail": { + "typeName": 
"datasetContactEmail", + "multiple": false, + "typeClass": "primitive", + "value": "admin@malinator.com" + } + } + ] + }, + { + "typeName": "dsDescription", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "multiple": false, + "typeClass": "primitive", + "value": "This is a public dataset" + }, + "dsDescriptionDate": { + "typeName": "dsDescriptionDate", + "multiple": false, + "typeClass": "primitive", + "value": "2015-01-14" + } + } + ] + }, + { + "typeName": "subject", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "Chemistry" + ] + }, + { + "typeName": "depositor", + "multiple": false, + "typeClass": "primitive", + "value": "Admin Dataverse" + }, + { + "typeName": "dateOfDeposit", + "multiple": false, + "typeClass": "primitive", + "value": "2015-01-14" + } + ] + } + } +} diff --git a/postgresql/testdata/scripts/api/data/dv-pete-sub-normal.json b/postgresql/testdata/scripts/api/data/dv-pete-sub-normal.json new file mode 100644 index 0000000..769eb66 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/dv-pete-sub-normal.json @@ -0,0 +1,13 @@ +{ + "alias": "peteSubNormal", + "name": "Pete's public place", + "affiliation": "Affiliation value", + "permissionRoot": false, + "description": "Where Pete stores normal data", + "dataverseContacts": [ + { + "contactEmail": "pete@mailinator.com" + } + ], + "dataverseSubjects": ["Law"] +} diff --git a/postgresql/testdata/scripts/api/data/dv-pete-sub-restricted.json b/postgresql/testdata/scripts/api/data/dv-pete-sub-restricted.json new file mode 100644 index 0000000..b76686a --- /dev/null +++ b/postgresql/testdata/scripts/api/data/dv-pete-sub-restricted.json @@ -0,0 +1,13 @@ +{ + "alias": "peteSubRestricted", + "name": "Pete's restricted data", + "affiliation": "Affiliation value", + "permissionRoot": false, + "description": "Where Pete stores restricted data, to be shared in moderation", + "dataverseContacts": [ + { + 
"contactEmail": "pete@mailinator.com" + } + ], + "dataverseSubjects": ["Chemistry"] +} diff --git a/postgresql/testdata/scripts/api/data/dv-pete-sub-secret.json b/postgresql/testdata/scripts/api/data/dv-pete-sub-secret.json new file mode 100644 index 0000000..6a25d45 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/dv-pete-sub-secret.json @@ -0,0 +1,13 @@ +{ + "alias": "peteSubSecret", + "name": "Pete's secrets", + "affiliation": "Affiliation value", + "permissionRoot": true, + "description": "Where Pete stores secret data", + "dataverseContacts": [ + { + "contactEmail": "pete@mailinator.com" + } + ], + "dataverseSubjects": ["Astronomy and Astrophysics"] +} diff --git a/postgresql/testdata/scripts/api/data/dv-pete-top.json b/postgresql/testdata/scripts/api/data/dv-pete-top.json new file mode 100644 index 0000000..dfb949f --- /dev/null +++ b/postgresql/testdata/scripts/api/data/dv-pete-top.json @@ -0,0 +1,13 @@ +{ + "alias": "peteTop", + "name": "Top dataverse of Pete", + "affiliation": "Affiliation value", + "permissionRoot": false, + "description": "Pete's top level dataverse", + "dataverseContacts": [ + { + "contactEmail": "pete@mailinator.com" + } + ], + "dataverseSubjects": ["Arts and Humanities"] +} diff --git a/postgresql/testdata/scripts/api/data/dv-root.json b/postgresql/testdata/scripts/api/data/dv-root.json new file mode 100644 index 0000000..bfbfedd --- /dev/null +++ b/postgresql/testdata/scripts/api/data/dv-root.json @@ -0,0 +1,13 @@ +{ + "alias": "root", + "name": "Root", + "permissionRoot": false, + "facetRoot": true, + "description": "The root dataverse.", + "dataverseContacts": [ + { + "contactEmail": "root@mailinator.com" + } + ], + "dataverseSubjects": ["ALL"] +} diff --git a/postgresql/testdata/scripts/api/data/dv-uma-deletable.json b/postgresql/testdata/scripts/api/data/dv-uma-deletable.json new file mode 100644 index 0000000..03381dd --- /dev/null +++ b/postgresql/testdata/scripts/api/data/dv-uma-deletable.json @@ -0,0 +1,13 @@ +{ + 
"alias": "umaDeletable", + "name": "Uma's deletable", + "affiliation": "Affiliation value", + "permissionRoot": true, + "description": "Forgettable, deletable, temporary.", + "dataverseContacts": [ + { + "contactEmail": "Uma@mailinator.com" + } + ], + "dataverseSubjects": ["Business and Management"] +} diff --git a/postgresql/testdata/scripts/api/data/dv-uma-sub1.json b/postgresql/testdata/scripts/api/data/dv-uma-sub1.json new file mode 100644 index 0000000..1f9ece2 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/dv-uma-sub1.json @@ -0,0 +1,13 @@ +{ + "alias": "umaSub1", + "name": "Uma's first", + "affiliation": "Affiliation value", + "permissionRoot": false, + "description": "Some data of Uma", + "dataverseContacts": [ + { + "contactEmail": "Uma@mailinator.com" + } + ], + "dataverseSubjects": ["Medicine, Health & Life Sciences"] +} diff --git a/postgresql/testdata/scripts/api/data/dv-uma-sub2.json b/postgresql/testdata/scripts/api/data/dv-uma-sub2.json new file mode 100644 index 0000000..590d144 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/dv-uma-sub2.json @@ -0,0 +1,13 @@ +{ + "alias": "umaSub2", + "name": "Uma's restricted", + "affiliation": "Affiliation value", + "permissionRoot": true, + "description": "Pete can't get here", + "dataverseContacts": [ + { + "contactEmail": "Uma@mailinator.com" + } + ], + "dataverseSubjects": ["Engineering"] +} diff --git a/postgresql/testdata/scripts/api/data/dv-uma-top.json b/postgresql/testdata/scripts/api/data/dv-uma-top.json new file mode 100644 index 0000000..d138619 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/dv-uma-top.json @@ -0,0 +1,13 @@ +{ + "alias": "umaTop", + "name": "Top dataverse of Uma", + "affiliation": "Affiliation value", + "permissionRoot": false, + "description": "Uma's top level dataverse", + "dataverseContacts": [ + { + "contactEmail": "Uma@mailinator.com" + } + ], + "dataverseSubjects": ["Mathematical Sciences"] +} diff --git 
a/postgresql/testdata/scripts/api/data/explicit-group-2nd.json b/postgresql/testdata/scripts/api/data/explicit-group-2nd.json new file mode 100644 index 0000000..9f3fac5 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/explicit-group-2nd.json @@ -0,0 +1,5 @@ +{ + "description":"The second explicit group", + "displayName":"Explicit Group number two", + "aliasInOwner":"EG:II" +} diff --git a/postgresql/testdata/scripts/api/data/explicit-group-first-edit.json b/postgresql/testdata/scripts/api/data/explicit-group-first-edit.json new file mode 100644 index 0000000..e1c9339 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/explicit-group-first-edit.json @@ -0,0 +1,5 @@ +{ + "description":"This is the description field", + "displayName":"Explicit Group number one (edited)", + "aliasInOwner":"EG-1" +} diff --git a/postgresql/testdata/scripts/api/data/explicit-group-first.json b/postgresql/testdata/scripts/api/data/explicit-group-first.json new file mode 100644 index 0000000..85b74ae --- /dev/null +++ b/postgresql/testdata/scripts/api/data/explicit-group-first.json @@ -0,0 +1,5 @@ +{ + "description":"This is the description field", + "displayName":"Explicit Group number one", + "aliasInOwner":"EG-1" +} diff --git a/postgresql/testdata/scripts/api/data/ipGroup-all-ipv4.json b/postgresql/testdata/scripts/api/data/ipGroup-all-ipv4.json new file mode 100644 index 0000000..c5ff32d --- /dev/null +++ b/postgresql/testdata/scripts/api/data/ipGroup-all-ipv4.json @@ -0,0 +1,5 @@ +{ + "alias":"all-ipv4", + "name":"IP group to match all IPv4 addresses", + "ranges" : [["0.0.0.0", "255.255.255.255"]] +} diff --git a/postgresql/testdata/scripts/api/data/ipGroup-all.json b/postgresql/testdata/scripts/api/data/ipGroup-all.json new file mode 100644 index 0000000..b9fc163 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/ipGroup-all.json @@ -0,0 +1,6 @@ +{ + "alias":"ipGroup3", + "name":"IP group to match all IPv4 and IPv6 addresses", + "ranges" : [["0.0.0.0", 
"255.255.255.255"], + ["::", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"]] +} diff --git a/postgresql/testdata/scripts/api/data/ipGroup-localhost.json b/postgresql/testdata/scripts/api/data/ipGroup-localhost.json new file mode 100644 index 0000000..4f8d2f7 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/ipGroup-localhost.json @@ -0,0 +1,5 @@ +{ + "alias":"localhost", + "name":"Localhost connections", + "addresses": [ "::1", "127.0.0.1" ] +} diff --git a/postgresql/testdata/scripts/api/data/ipGroup-single-IPv4.json b/postgresql/testdata/scripts/api/data/ipGroup-single-IPv4.json new file mode 100644 index 0000000..515c512 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/ipGroup-single-IPv4.json @@ -0,0 +1,5 @@ +{ + "alias":"singleIPv4", + "name":"Single IPv4", + "addresses" : ["128.0.0.7"] +} diff --git a/postgresql/testdata/scripts/api/data/ipGroup-single-IPv6.json b/postgresql/testdata/scripts/api/data/ipGroup-single-IPv6.json new file mode 100644 index 0000000..73eaa8e --- /dev/null +++ b/postgresql/testdata/scripts/api/data/ipGroup-single-IPv6.json @@ -0,0 +1,5 @@ +{ + "alias":"singleIPv6", + "name":"Single IPv6", + "addresses" : ["aa:bb:cc:dd:ee:ff::1"] +} diff --git a/postgresql/testdata/scripts/api/data/ipGroup1.json b/postgresql/testdata/scripts/api/data/ipGroup1.json new file mode 100644 index 0000000..bf4b114 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/ipGroup1.json @@ -0,0 +1,7 @@ +{ + "alias":"ipGroup1", + "name":"The first IP Group", + "ranges" : [["60.0.0.0", "60.0.0.255"], + ["128.0.0.0", "129.0.255.255"], + ["ff:abcd:eff::ffff", "ff:abcd:eff::0"]] +} \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/ipGroup2.json b/postgresql/testdata/scripts/api/data/ipGroup2.json new file mode 100644 index 0000000..52c5e8c --- /dev/null +++ b/postgresql/testdata/scripts/api/data/ipGroup2.json @@ -0,0 +1,8 @@ +{ + "alias":"ipGroup2", + "name":"The second IP Group", + "ranges" : [["207.0.0.0", "207.0.0.255"], + 
["128.0.0.0", "129.0.255.255"], + ["dd:2:2:2:2:2:2:2","dd:a:a:a:a:a:a:a"] + ] +} diff --git a/postgresql/testdata/scripts/api/data/ipGroupDuplicate-v1.json b/postgresql/testdata/scripts/api/data/ipGroupDuplicate-v1.json new file mode 100644 index 0000000..eda0c8e --- /dev/null +++ b/postgresql/testdata/scripts/api/data/ipGroupDuplicate-v1.json @@ -0,0 +1,7 @@ +{ + "alias":"ipGroup-dup", + "name":"IP Group with duplicate files (1)", + "description":"This is the FIRST version of the group", + "ranges" : [["60.0.0.0", "60.0.0.255"], + ["60::1", "60::ffff"]] +} diff --git a/postgresql/testdata/scripts/api/data/ipGroupDuplicate-v2.json b/postgresql/testdata/scripts/api/data/ipGroupDuplicate-v2.json new file mode 100644 index 0000000..8db88e9 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/ipGroupDuplicate-v2.json @@ -0,0 +1,7 @@ +{ + "alias":"ipGroup-dup", + "name":"IP Group with duplicate files-v2", + "description":"This is the second version of the group", + "ranges" : [["70.0.0.0", "70.0.0.255"], + ["70::1", "70::ffff"]] +} diff --git a/postgresql/testdata/scripts/api/data/metadatablocks/astrophysics.tsv b/postgresql/testdata/scripts/api/data/metadatablocks/astrophysics.tsv new file mode 100644 index 0000000..d6266d2 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/metadatablocks/astrophysics.tsv @@ -0,0 +1,54 @@ +#metadataBlock name dataverseAlias displayName + astrophysics Astronomy and Astrophysics Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + astroType Type The nature or genre of the content of the files in the dataset. text 0 TRUE TRUE TRUE TRUE FALSE FALSE astrophysics + astroFacility Facility The observatory or facility where the data was obtained. text 1 TRUE TRUE TRUE TRUE FALSE FALSE astrophysics + astroInstrument Instrument The instrument used to collect the data. 
text 2 TRUE TRUE TRUE TRUE FALSE FALSE astrophysics + astroObject Object Astronomical Objects represented in the data (Given as SIMBAD recognizable names preferred). text 3 TRUE FALSE TRUE TRUE FALSE FALSE astrophysics + resolution.Spatial Spatial Resolution The spatial (angular) resolution that is typical of the observations, in decimal degrees. text 4 TRUE FALSE FALSE TRUE FALSE FALSE astrophysics + resolution.Spectral Spectral Resolution The spectral resolution that is typical of the observations, given as the ratio λ/Δλ. text 5 TRUE FALSE FALSE TRUE FALSE FALSE astrophysics + resolution.Temporal Time Resolution The temporal resolution that is typical of the observations, given in seconds. text 6 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics + coverage.Spectral.Bandpass Bandpass Conventional bandpass name text 7 TRUE TRUE TRUE TRUE FALSE FALSE astrophysics + coverage.Spectral.CentralWavelength Central Wavelength (m) The central wavelength of the spectral bandpass, in meters. Enter a floating-point number. float 8 TRUE FALSE TRUE TRUE FALSE FALSE astrophysics + coverage.Spectral.Wavelength Wavelength Range The minimum and maximum wavelength of the spectral bandpass. Enter a floating-point number. none 9 FALSE FALSE TRUE FALSE FALSE FALSE astrophysics + coverage.Spectral.MinimumWavelength Minimum (m) The minimum wavelength of the spectral bandpass, in meters. Enter a floating-point number. float 10 TRUE FALSE FALSE TRUE FALSE FALSE coverage.Spectral.Wavelength astrophysics + coverage.Spectral.MaximumWavelength Maximum (m) The maximum wavelength of the spectral bandpass, in meters. Enter a floating-point number. float 11 TRUE FALSE FALSE TRUE FALSE FALSE coverage.Spectral.Wavelength astrophysics + coverage.Temporal Dataset Date Range Time period covered by the data. 
none 12 FALSE FALSE TRUE FALSE FALSE FALSE astrophysics + coverage.Temporal.StartTime Start Dataset Start Date YYYY-MM-DD date 13 TRUE FALSE FALSE TRUE FALSE FALSE coverage.Temporal astrophysics + coverage.Temporal.StopTime End Dataset End Date YYYY-MM-DD date 14 TRUE FALSE FALSE TRUE FALSE FALSE coverage.Temporal astrophysics + coverage.Spatial Sky Coverage The sky coverage of the data object. text 15 FALSE FALSE TRUE FALSE FALSE FALSE astrophysics + coverage.Depth Depth Coverage The (typical) depth coverage, or sensitivity, of the data object in Jy. Enter a floating-point number. float 16 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics + coverage.ObjectDensity Object Density The (typical) density of objects, catalog entries, telescope pointings, etc., on the sky, in number per square degree. Enter a floating-point number. float 17 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics + coverage.ObjectCount Object Count The total number of objects, catalog entries, etc., in the data object. Enter an integer. int 18 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics + coverage.SkyFraction Fraction of Sky The fraction of the sky represented in the observations, ranging from 0 to 1. Enter a floating-point number. float 19 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics + coverage.Polarization Polarization The polarization coverage text 20 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics + redshiftType RedshiftType RedshiftType string C "Redshift"; or "Optical" or "Radio" definitions of Doppler velocity used in the data object. text 21 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics + resolution.Redshift Redshift Resolution The resolution in redshift (unitless) or Doppler velocity (km/s) in the data object. Enter a floating-point number. float 22 FALSE FALSE FALSE FALSE FALSE FALSE astrophysics + coverage.RedshiftValue Redshift Value The value of the redshift (unitless) or Doppler velocity (km/s in the data object. Enter a floating-point number. 
float 23 FALSE FALSE TRUE FALSE FALSE FALSE astrophysics + coverage.Redshift.MinimumValue Minimum The minimum value of the redshift (unitless) or Doppler velocity (km/s in the data object. Enter a floating-point number. float 24 FALSE FALSE FALSE FALSE FALSE FALSE coverage.RedshiftValue astrophysics + coverage.Redshift.MaximumValue Maximum The maximum value of the redshift (unitless) or Doppler velocity (km/s in the data object. Enter a floating-point number. float 25 FALSE FALSE FALSE FALSE FALSE FALSE coverage.RedshiftValue astrophysics +#controlledVocabulary DatasetField Value identifier displayOrder + astroType Image 0 + astroType Mosaic 1 + astroType EventList 2 + astroType Spectrum 3 + astroType Cube 4 + astroType Table 5 + astroType Catalog 6 + astroType LightCurve 7 + astroType Simulation 8 + astroType Figure 9 + astroType Artwork 10 + astroType Animation 11 + astroType PrettyPicture 12 + astroType Documentation 13 + astroType Other 14 + astroType Library 15 + astroType Press Release 16 + astroType Facsimile 17 + astroType Historical 18 + astroType Observation 19 + astroType Object 20 + astroType Value 21 + astroType ValuePair 22 + astroType Survey 23 \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/metadatablocks/biomedical.tsv b/postgresql/testdata/scripts/api/data/metadatablocks/biomedical.tsv new file mode 100644 index 0000000..f45c584 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/metadatablocks/biomedical.tsv @@ -0,0 +1,295 @@ +#metadataBlock name dataverseAlias displayName + biomedical Life Sciences Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + studyDesignType Design Type Design types that are based on the overall experimental design. text 0 TRUE TRUE TRUE TRUE FALSE FALSE biomedical + studyFactorType Factor Type Factors used in the Dataset. 
text 1 TRUE TRUE TRUE TRUE FALSE FALSE biomedical + studyAssayOrganism Organism The taxonomic name of the organism used in the Dataset or from which the starting biological material derives. text 2 TRUE TRUE TRUE TRUE FALSE FALSE biomedical + studyAssayOtherOrganism Other Organism If Other was selected in Organism, list any other organisms that were used in this Dataset. Terms from the NCBI Taxonomy are recommended. text 3 TRUE FALSE TRUE TRUE FALSE FALSE biomedical + studyAssayMeasurementType Measurement Type A term to qualify the endpoint, or what is being measured (e.g. gene expression profiling; protein identification). text 4 TRUE TRUE TRUE TRUE FALSE FALSE biomedical + studyAssayOtherMeasurmentType Other Measurement Type If Other was selected in Measurement Type, list any other measurement types that were used. Terms from NCBO Bioportal are recommended. text 5 TRUE FALSE TRUE TRUE FALSE FALSE biomedical + studyAssayTechnologyType Technology Type A term to identify the technology used to perform the measurement (e.g. DNA microarray; mass spectrometry). text 6 TRUE TRUE TRUE TRUE FALSE FALSE biomedical + studyAssayPlatform Technology Platform The manufacturer and name of the technology platform used in the assay (e.g. Bruker AVANCE). text 7 TRUE TRUE TRUE TRUE FALSE FALSE biomedical + studyAssayCellType Cell Type The name of the cell line from which the source or sample derives. 
text 8 TRUE TRUE TRUE TRUE FALSE FALSE biomedical +#controlledVocabulary DatasetField Value identifier displayOrder + studyDesignType Case Control EFO_0001427 0 + studyDesignType Cross Sectional EFO_0001428 1 + studyDesignType Cohort Study OCRE100078 2 + studyDesignType Nested Case Control Design NCI_C48202 3 + studyDesignType Not Specified OTHER_DESIGN 4 + studyDesignType Parallel Group Design OBI_0500006 5 + studyDesignType Perturbation Design OBI_0001033 6 + studyDesignType Randomized Controlled Trial MESH_D016449 7 + studyDesignType Technological Design TECH_DESIGN 8 + studyFactorType Age EFO_0000246 0 + studyFactorType Biomarkers BIOMARKERS 1 + studyFactorType Cell Surface Markers CELL_SURFACE_M 2 + studyFactorType Cell Type/Cell Line EFO_0000324;EFO_0000322 3 + studyFactorType Developmental Stage EFO_0000399 4 + studyFactorType Disease State OBI_0001293 5 + studyFactorType Drug Susceptibility IDO_0000469 6 + studyFactorType Extract Molecule FBcv_0010001 7 + studyFactorType Genetic Characteristics OBI_0001404 8 + studyFactorType Immunoprecipitation Antibody OBI_0000690 9 + studyFactorType Organism OBI_0100026 10 + studyFactorType Other OTHER_FACTOR 11 + studyFactorType Passages PASSAGES_FACTOR 12 + studyFactorType Platform OBI_0000050 13 + studyFactorType Sex EFO_0000695 14 + studyFactorType Strain EFO_0005135 15 + studyFactorType Time Point EFO_0000724 16 + studyFactorType Tissue Type BTO_0001384 17 + studyFactorType Treatment Compound EFO_0000369 18 + studyFactorType Treatment Type EFO_0000727 19 + studyAssayMeasurementType cell counting ERO_0001899 0 + studyAssayMeasurementType cell sorting CHMO_0001085 1 + studyAssayMeasurementType clinical chemistry analysis OBI_0000520 2 + studyAssayMeasurementType copy number variation profiling OBI_0000537 3 + studyAssayMeasurementType DNA methylation profiling OBI_0000634 4 + studyAssayMeasurementType DNA methylation profiling (Bisulfite-Seq) OBI_0000748 5 + studyAssayMeasurementType DNA methylation profiling 
(MeDIP-Seq) _OBI_0000634 6 + studyAssayMeasurementType drug susceptibility _IDO_0000469 7 + studyAssayMeasurementType environmental gene survey ENV_GENE_SURVEY 8 + studyAssayMeasurementType genome sequencing ERO_0001183 9 + studyAssayMeasurementType hematology OBI_0000630 10 + studyAssayMeasurementType histology OBI_0600020 11 + studyAssayMeasurementType Histone Modification (ChIP-Seq) OBI_0002017 12 + studyAssayMeasurementType loss of heterozygosity profiling SO_0001786 13 + studyAssayMeasurementType metabolite profiling OBI_0000366 14 + studyAssayMeasurementType metagenome sequencing METAGENOME_SEQ 15 + studyAssayMeasurementType protein expression profiling OBI_0000615 16 + studyAssayMeasurementType protein identification ERO_0000346 17 + studyAssayMeasurementType protein-DNA binding site identification PROTEIN_DNA_BINDING 18 + studyAssayMeasurementType protein-protein interaction detection OBI_0000288 19 + studyAssayMeasurementType protein-RNA binding (RIP-Seq) PROTEIN_RNA_BINDING 20 + studyAssayMeasurementType SNP analysis OBI_0000435 21 + studyAssayMeasurementType targeted sequencing TARGETED_SEQ 22 + studyAssayMeasurementType transcription factor binding (ChIP-Seq) OBI_0002018 23 + studyAssayMeasurementType transcription factor binding site identification OBI_0000291 24 + studyAssayMeasurementType transcription profiling OBI_0000424 25 + studyAssayMeasurementType transcription profiling EFO_0001032 26 + studyAssayMeasurementType transcription profiling (Microarray) TRANSCRIPTION_PROF 27 + studyAssayMeasurementType transcription profiling (RNA-Seq) OBI_0001271 28 + studyAssayMeasurementType TRAP translational profiling TRAP_TRANS_PROF 29 + studyAssayMeasurementType Other OTHER_MEASUREMENT 30 + studyAssayOrganism Arabidopsis thaliana NCBITaxon_3702 0 + studyAssayOrganism Bos taurus NCBITaxon_9913 1 + studyAssayOrganism Caenorhabditis elegans NCBITaxon_6239 2 + studyAssayOrganism Chlamydomonas reinhardtii NCBITaxon_3055 3 + studyAssayOrganism Danio rerio 
(zebrafish) NCBITaxon_7955 4 + studyAssayOrganism Dictyostelium discoideum NCBITaxon_44689 5 + studyAssayOrganism Drosophila melanogaster NCBITaxon_7227 6 + studyAssayOrganism Escherichia coli NCBITaxon_562 7 + studyAssayOrganism Hepatitis C virus NCBITaxon_11103 8 + studyAssayOrganism Homo sapiens NCBITaxon_9606 9 + studyAssayOrganism Mus musculus NCBITaxon_10090 10 + studyAssayOrganism Mycobacterium africanum NCBITaxon_33894 11 + studyAssayOrganism Mycobacterium canetti NCBITaxon_78331 12 + studyAssayOrganism Mycobacterium tuberculosis NCBITaxon_1773 13 + studyAssayOrganism Mycoplasma pneumoniae NCBITaxon_2104 14 + studyAssayOrganism Oryza sativa NCBITaxon_4530 15 + studyAssayOrganism Plasmodium falciparum NCBITaxon_5833 16 + studyAssayOrganism Pneumocystis carinii NCBITaxon_4754 17 + studyAssayOrganism Rattus norvegicus NCBITaxon_10116 18 + studyAssayOrganism Saccharomyces cerevisiae (brewer's yeast) NCBITaxon_4932 19 + studyAssayOrganism Schizosaccharomyces pombe NCBITaxon_4896 20 + studyAssayOrganism Takifugu rubripes NCBITaxon_31033 21 + studyAssayOrganism Xenopus laevis NCBITaxon_8355 22 + studyAssayOrganism Zea mays NCBITaxon_4577 23 + studyAssayOrganism Other OTHER_TAXONOMY 24 + studyAssayTechnologyType culture based drug susceptibility testing, single concentration CULTURE_DRUG_TEST_SINGLE 0 + studyAssayTechnologyType culture based drug susceptibility testing, two concentrations CULTURE_DRUG_TEST_TWO 1 + studyAssayTechnologyType culture based drug susceptibility testing, three or more concentrations (minimium inhibitory concentration measurement) CULTURE_DRUG_TEST_THREE 2 + studyAssayTechnologyType DNA microarray OBI_0400148 3 + studyAssayTechnologyType flow cytometry OBI_0000916 4 + studyAssayTechnologyType gel electrophoresis OBI_0600053 5 + studyAssayTechnologyType mass spectrometry OBI_0000470 6 + studyAssayTechnologyType NMR spectroscopy OBI_0000623 7 + studyAssayTechnologyType nucleotide sequencing OBI_0000626 8 + studyAssayTechnologyType protein 
microarray OBI_0400149 9 + studyAssayTechnologyType real time PCR OBI_0000893 10 + studyAssayTechnologyType no technology required NO_TECHNOLOGY 11 + studyAssayTechnologyType Other OTHER_TECHNOLOGY 12 + studyAssayPlatform 210-MS GC Ion Trap (Varian) 210_MS_GC 0 + studyAssayPlatform 220-MS GC Ion Trap (Varian) 220_MS_GC 1 + studyAssayPlatform 225-MS GC Ion Trap (Varian) 225_MS_GC 2 + studyAssayPlatform 240-MS GC Ion Trap (Varian) 240_MS_GC 3 + studyAssayPlatform 300-MS quadrupole GC/MS (Varian) 300_MS_GCMS 4 + studyAssayPlatform 320-MS LC/MS (Varian) 320_MS_LCMS 5 + studyAssayPlatform 325-MS LC/MS (Varian) 325_MS_LCMS 6 + studyAssayPlatform 320-MS GC/MS (Varian) 500_MS_GCMS 7 + studyAssayPlatform 500-MS LC/MS (Varian) 500_MS_LCMS 8 + studyAssayPlatform 800D (Jeol) 800D 9 + studyAssayPlatform 910-MS TQ-FT (Varian) 910_MS_TQFT 10 + studyAssayPlatform 920-MS TQ-FT (Varian) 920_MS_TQFT 11 + studyAssayPlatform 3100 Mass Detector (Waters) 3100_MASS_D 12 + studyAssayPlatform 6110 Quadrupole LC/MS (Agilent) 6110_QUAD_LCMS 13 + studyAssayPlatform 6120 Quadrupole LC/MS (Agilent) 6120_QUAD_LCMS 14 + studyAssayPlatform 6130 Quadrupole LC/MS (Agilent) 6130_QUAD_LCMS 15 + studyAssayPlatform 6140 Quadrupole LC/MS (Agilent) 6140_QUAD_LCMS 16 + studyAssayPlatform 6310 Ion Trap LC/MS (Agilent) 6310_ION_LCMS 17 + studyAssayPlatform 6320 Ion Trap LC/MS (Agilent) 6320_ION_LCMS 18 + studyAssayPlatform 6330 Ion Trap LC/MS (Agilent) 6330_ION_LCMS 19 + studyAssayPlatform 6340 Ion Trap LC/MS (Agilent) 6340_ION_LCMS 20 + studyAssayPlatform 6410 Triple Quadrupole LC/MS (Agilent) 6410_TRIPLE_LCMS 21 + studyAssayPlatform 6430 Triple Quadrupole LC/MS (Agilent) 6430_TRIPLE_LCMS 22 + studyAssayPlatform 6460 Triple Quadrupole LC/MS (Agilent) 6460_TRIPLE_LCMS 23 + studyAssayPlatform 6490 Triple Quadrupole LC/MS (Agilent) 6490_TRIPLE_LCMS 24 + studyAssayPlatform 6530 Q-TOF LC/MS (Agilent) 6530_Q_TOF_LCMS 25 + studyAssayPlatform 6540 Q-TOF LC/MS (Agilent) 6540_Q_TOF_LCMS 26 + studyAssayPlatform 6210 
TOF LC/MS (Agilent) 6210_Q_TOF_LCMS 27 + studyAssayPlatform 6220 TOF LC/MS (Agilent) 6220_Q_TOF_LCMS 28 + studyAssayPlatform 6230 TOF LC/MS (Agilent) 6230_Q_TOF_LCMS 29 + studyAssayPlatform 7000B Triple Quadrupole GC/MS (Agilent) 700B_TRIPLE_GCMS 30 + studyAssayPlatform AccuTO DART (Jeol) ACCUTO_DART 31 + studyAssayPlatform AccuTOF GC (Jeol) ACCUTOF_GC 32 + studyAssayPlatform AccuTOF LC (Jeol) ACCUTOF_LC 33 + studyAssayPlatform ACQUITY SQD (Waters) ACQUITY_SQD 34 + studyAssayPlatform ACQUITY TQD (Waters) ACQUITY_TQD 35 + studyAssayPlatform Agilent AGILENT 36 + studyAssayPlatform Agilent 5975E GC/MSD (Agilent) AGILENT_ 5975E_GCMSD 37 + studyAssayPlatform Agilent 5975T LTM GC/MSD (Agilent) AGILENT_5975T_LTM_GCMSD 38 + studyAssayPlatform 5975C Series GC/MSD (Agilent) 5975C_GCMSD 39 + studyAssayPlatform Affymetrix AFFYMETRIX 40 + studyAssayPlatform amaZon ETD ESI Ion Trap (Bruker) AMAZON_ETD_ESI 41 + studyAssayPlatform amaZon X ESI Ion Trap (Bruker) AMAZON_X_ESI 42 + studyAssayPlatform apex-ultra hybrid Qq-FTMS (Bruker) APEX_ULTRA_QQ_FTMS 43 + studyAssayPlatform API 2000 (AB Sciex) API_2000 44 + studyAssayPlatform API 3200 (AB Sciex) API_3200 45 + studyAssayPlatform API 3200 QTRAP (AB Sciex) API_3200_QTRAP 46 + studyAssayPlatform API 4000 (AB Sciex) API_4000 47 + studyAssayPlatform API 4000 QTRAP (AB Sciex) API_4000_QTRAP 48 + studyAssayPlatform API 5000 (AB Sciex) API_5000 49 + studyAssayPlatform API 5500 (AB Sciex) API_5500 50 + studyAssayPlatform API 5500 QTRAP (AB Sciex) API_5500_QTRAP 51 + studyAssayPlatform Applied Biosystems Group (ABI) APPLIED_BIOSYSTEMS 52 + studyAssayPlatform AQI Biosciences AQI_BIOSCIENCES 53 + studyAssayPlatform Atmospheric Pressure GC (Waters) ATMOS_GC 54 + studyAssayPlatform autoflex III MALDI-TOF MS (Bruker) AUTOFLEX_III_MALDI_TOF_MS 55 + studyAssayPlatform autoflex speed(Bruker) AUTOFLEX_SPEED 56 + studyAssayPlatform AutoSpec Premier (Waters) AUTOSPEC_PREMIER 57 + studyAssayPlatform AXIMA Mega TOF (Shimadzu) AXIMA_MEGA_TOF 58 + 
studyAssayPlatform AXIMA Performance MALDI TOF/TOF (Shimadzu) AXIMA_PERF_MALDI_TOF 59 + studyAssayPlatform A-10 Analyzer (Apogee) A_10_ANALYZER 60 + studyAssayPlatform A-40-MiniFCM (Apogee) A_40_MINIFCM 61 + studyAssayPlatform Bactiflow (Chemunex SA) BACTIFLOW 62 + studyAssayPlatform Base4innovation BASE4INNOVATION 63 + studyAssayPlatform BD BACTEC MGIT 320 BD_BACTEC_MGIT_320 64 + studyAssayPlatform BD BACTEC MGIT 960 BD_BACTEC_MGIT_960 65 + studyAssayPlatform BD Radiometric BACTEC 460TB BD_RADIO_BACTEC_460TB 66 + studyAssayPlatform BioNanomatrix BIONANOMATRIX 67 + studyAssayPlatform Cell Lab Quanta SC (Becman Coulter) CELL_LAB_QUANTA_SC 68 + studyAssayPlatform Clarus 560 D GC/MS (PerkinElmer) CLARUS_560_D_GCMS 69 + studyAssayPlatform Clarus 560 S GC/MS (PerkinElmer) CLARUS_560_S_GCMS 70 + studyAssayPlatform Clarus 600 GC/MS (PerkinElmer) CLARUS_600_GCMS 71 + studyAssayPlatform Complete Genomics COMPLETE_GENOMICS 72 + studyAssayPlatform Cyan (Dako Cytomation) CYAN 73 + studyAssayPlatform CyFlow ML (Partec) CYFLOW_ML 74 + studyAssayPlatform Cyow SL (Partec) CYFLOW_SL 75 + studyAssayPlatform CyFlow SL3 (Partec) CYFLOW_SL3 76 + studyAssayPlatform CytoBuoy (Cyto Buoy Inc) CYTOBUOY 77 + studyAssayPlatform CytoSence (Cyto Buoy Inc) CYTOSENCE 78 + studyAssayPlatform CytoSub (Cyto Buoy Inc) CYTOSUB 79 + studyAssayPlatform Danaher DANAHER 80 + studyAssayPlatform DFS (Thermo Scientific) DFS 81 + studyAssayPlatform Exactive(Thermo Scientific) EXACTIVE 82 + studyAssayPlatform FACS Canto (Becton Dickinson) FACS_CANTO 83 + studyAssayPlatform FACS Canto2 (Becton Dickinson) FACS_CANTO2 84 + studyAssayPlatform FACS Scan (Becton Dickinson) FACS_SCAN 85 + studyAssayPlatform FC 500 (Becman Coulter) FC_500 86 + studyAssayPlatform GCmate II GC/MS (Jeol) GCMATE_II 87 + studyAssayPlatform GCMS-QP2010 Plus (Shimadzu) GCMS_QP2010_PLUS 88 + studyAssayPlatform GCMS-QP2010S Plus (Shimadzu) GCMS_QP2010S_PLUS 89 + studyAssayPlatform GCT Premier (Waters) GCT_PREMIER 90 + studyAssayPlatform GENEQ 
GENEQ 91 + studyAssayPlatform Genome Corp. GENOME_CORP 92 + studyAssayPlatform GenoVoxx GENOVOXX 93 + studyAssayPlatform GnuBio GNUBIO 94 + studyAssayPlatform Guava EasyCyte Mini (Millipore) GUAVA_EASYCYTE_MINI 95 + studyAssayPlatform Guava EasyCyte Plus (Millipore) GUAVA_EASYCYTE_PLUS 96 + studyAssayPlatform Guava Personal Cell Analysis (Millipore) GUAVA_PERSONAL_CELL 97 + studyAssayPlatform Guava Personal Cell Analysis-96 (Millipore) GUAVA_PERSONAL_CELL_96 98 + studyAssayPlatform Helicos BioSciences HELICOS_BIO 99 + studyAssayPlatform Illumina ILLUMINA 100 + studyAssayPlatform Indirect proportion method on LJ medium INDIRECT_LJ_MEDIUM 101 + studyAssayPlatform Indirect proportion method on Middlebrook Agar 7H9 INDIRECT_AGAR_7H9 102 + studyAssayPlatform Indirect proportion method on Middlebrook Agar 7H10 INDIRECT_AGAR_7H10 103 + studyAssayPlatform Indirect proportion method on Middlebrook Agar 7H11 INDIRECT_AGAR_7H11 104 + studyAssayPlatform inFlux Analyzer (Cytopeia) INFLUX_ANALYZER 105 + studyAssayPlatform Intelligent Bio-Systems INTELLIGENT_BIOSYSTEMS 106 + studyAssayPlatform ITQ 700 (Thermo Scientific) ITQ_700 107 + studyAssayPlatform ITQ 900 (Thermo Scientific) ITQ_900 108 + studyAssayPlatform ITQ 1100 (Thermo Scientific) ITQ_1100 109 + studyAssayPlatform JMS-53000 SpiralTOF (Jeol) JMS_53000_SPIRAL 110 + studyAssayPlatform LaserGen LASERGEN 111 + studyAssayPlatform LCMS-2020 (Shimadzu) LCMS_2020 112 + studyAssayPlatform LCMS-2010EV (Shimadzu) LCMS_2010EV 113 + studyAssayPlatform LCMS-IT-TOF (Shimadzu) LCMS_IT_TOF 114 + studyAssayPlatform Li-Cor LI_COR 115 + studyAssayPlatform Life Tech LIFE_TECH 116 + studyAssayPlatform LightSpeed Genomics LIGHTSPEED_GENOMICS 117 + studyAssayPlatform LCT Premier XE (Waters) LCT_PREMIER_XE 118 + studyAssayPlatform LCQ Deca XP MAX (Thermo Scientific) LCQ_DECA_XP_MAX 119 + studyAssayPlatform LCQ Fleet (Thermo Scientific) LCQ_FLEET 120 + studyAssayPlatform LXQ (Thermo Scientific) LXQ_THERMO 121 + studyAssayPlatform LTQ Classic 
(Thermo Scientific) LTQ_CLASSIC 122 + studyAssayPlatform LTQ XL (Thermo Scientific) LTQ_XL 123 + studyAssayPlatform LTQ Velos (Thermo Scientific) LTQ_VELOS 124 + studyAssayPlatform LTQ Orbitrap Classic (Thermo Scientific) LTQ_ORBITRAP_CLASSIC 125 + studyAssayPlatform LTQ Orbitrap XL (Thermo Scientific) LTQ_ORBITRAP_XL 126 + studyAssayPlatform LTQ Orbitrap Discovery (Thermo Scientific) LTQ_ORBITRAP_DISCOVERY 127 + studyAssayPlatform LTQ Orbitrap Velos (Thermo Scientific) LTQ_ORBITRAP_VELOS 128 + studyAssayPlatform Luminex 100 (Luminex) LUMINEX_100 129 + studyAssayPlatform Luminex 200 (Luminex) LUMINEX_200 130 + studyAssayPlatform MACS Quant (Miltenyi) MACS_QUANT 131 + studyAssayPlatform MALDI SYNAPT G2 HDMS (Waters) MALDI_SYNAPT_G2_HDMS 132 + studyAssayPlatform MALDI SYNAPT G2 MS (Waters) MALDI_SYNAPT_G2_MS 133 + studyAssayPlatform MALDI SYNAPT HDMS (Waters) MALDI_SYNAPT_HDMS 134 + studyAssayPlatform MALDI SYNAPT MS (Waters) MALDI_SYNAPT_MS 135 + studyAssayPlatform MALDI micro MX (Waters) MALDI_MICROMX 136 + studyAssayPlatform maXis (Bruker) MAXIS 137 + studyAssayPlatform maXis G4 (Bruker) MAXISG4 138 + studyAssayPlatform microflex LT MALDI-TOF MS (Bruker) MICROFLEX_LT_MALDI_TOF_MS 139 + studyAssayPlatform microflex LRF MALDI-TOF MS (Bruker) MICROFLEX_LRF_MALDI_TOF_MS 140 + studyAssayPlatform microflex III MALDI-TOF MS (Bruker) MICROFLEX_III_TOF_MS 141 + studyAssayPlatform micrOTOF II ESI TOF (Bruker) MICROTOF_II_ESI_TOF 142 + studyAssayPlatform micrOTOF-Q II ESI-Qq-TOF (Bruker) MICROTOF_Q_II_ESI_QQ_TOF 143 + studyAssayPlatform microplate Alamar Blue (resazurin) colorimetric method MICROPLATE_ALAMAR_BLUE_COLORIMETRIC 144 + studyAssayPlatform Mstation (Jeol) MSTATION 145 + studyAssayPlatform MSQ Plus (Thermo Scientific) MSQ_PLUS 146 + studyAssayPlatform NABsys NABSYS 147 + studyAssayPlatform Nanophotonics Biosciences NANOPHOTONICS_BIOSCIENCES 148 + studyAssayPlatform Network Biosystems NETWORK_BIOSYSTEMS 149 + studyAssayPlatform Nimblegen NIMBLEGEN 150 + 
studyAssayPlatform Oxford Nanopore Technologies OXFORD_NANOPORE_TECHNOLOGIES 151 + studyAssayPlatform Pacific Biosciences PACIFIC_BIOSCIENCES 152 + studyAssayPlatform Population Genetics Technologies POPULATION_GENETICS_TECHNOLOGIES 153 + studyAssayPlatform Q1000GC UltraQuad (Jeol) Q1000GC_ULTRAQUAD 154 + studyAssayPlatform Quattro micro API (Waters) QUATTRO_MICRO_API 155 + studyAssayPlatform Quattro micro GC (Waters) QUATTRO_MICRO_GC 156 + studyAssayPlatform Quattro Premier XE (Waters) QUATTRO_PREMIER_XE 157 + studyAssayPlatform QSTAR (AB Sciex) QSTAR 158 + studyAssayPlatform Reveo REVEO 159 + studyAssayPlatform Roche ROCHE 160 + studyAssayPlatform Seirad SEIRAD 161 + studyAssayPlatform solariX hybrid Qq-FTMS (Bruker) SOLARIX_HYBRID_QQ_FTMS 162 + studyAssayPlatform Somacount (Bently Instruments) SOMACOUNT 163 + studyAssayPlatform SomaScope (Bently Instruments) SOMASCOPE 164 + studyAssayPlatform SYNAPT G2 HDMS (Waters) SYNAPT_G2_HDMS 165 + studyAssayPlatform SYNAPT G2 MS (Waters) SYNAPT_G2_MS 166 + studyAssayPlatform SYNAPT HDMS (Waters) SYNAPT_HDMS 167 + studyAssayPlatform SYNAPT MS (Waters) SYNAPT_MS 168 + studyAssayPlatform TripleTOF 5600 (AB Sciex) TRIPLETOF_5600 169 + studyAssayPlatform TSQ Quantum Ultra (Thermo Scientific) TSQ_QUANTUM_ULTRA 170 + studyAssayPlatform TSQ Quantum Access (Thermo Scientific) TSQ_QUANTUM_ACCESS 171 + studyAssayPlatform TSQ Quantum Access MAX (Thermo Scientific) TSQ_QUANTUM_ACCESS_MAX 172 + studyAssayPlatform TSQ Quantum Discovery MAX (Thermo Scientific) TSQ_QUANTUM_DISCOVERY_MAX 173 + studyAssayPlatform TSQ Quantum GC (Thermo Scientific) TSQ_QUANTUM_GC 174 + studyAssayPlatform TSQ Quantum XLS (Thermo Scientific) TSQ_QUANTUM_XLS 175 + studyAssayPlatform TSQ Vantage (Thermo Scientific) TSQ_VANTAGE 176 + studyAssayPlatform ultrafleXtreme MALDI-TOF MS (Bruker) ULTRAFLEXTREME_MALDI_TOF_MS 177 + studyAssayPlatform VisiGen Biotechnologies VISIGEN_BIO 178 + studyAssayPlatform Xevo G2 QTOF (Waters) XEVO_G2_QTOF 179 + studyAssayPlatform Xevo 
QTof MS (Waters) XEVO_QTOF_MS 180 + studyAssayPlatform Xevo TQ MS (Waters) XEVO_TQ_MS 181 + studyAssayPlatform Xevo TQ-S (Waters) XEVO_TQ_S 182 + studyAssayPlatform Other OTHER_PLATFORM 183 \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/metadatablocks/citation.tsv b/postgresql/testdata/scripts/api/data/metadatablocks/citation.tsv new file mode 100644 index 0000000..f7c4447 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/metadatablocks/citation.tsv @@ -0,0 +1,318 @@ +#metadataBlock name dataverseAlias displayName + citation Citation Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + title Title Full title by which the Dataset is known. Enter title... text 0 TRUE FALSE FALSE FALSE TRUE TRUE citation + subtitle Subtitle A secondary title used to amplify or state certain limitations on the main title. text 1 FALSE FALSE FALSE FALSE FALSE FALSE citation + alternativeTitle Alternative Title A title by which the work is commonly referred, or an abbreviation of the title. text 2 FALSE FALSE FALSE FALSE FALSE FALSE citation + alternativeURL Alternative URL A URL where the dataset can be viewed, such as a personal or project website. Enter full URL, starting with http:// url 3 FALSE FALSE FALSE FALSE FALSE FALSE citation + otherId Other ID Another unique identifier that identifies this Dataset (e.g., producer's or another repository's number). none 4 : FALSE FALSE TRUE FALSE FALSE FALSE citation + otherIdAgency Agency Name of agency which generated this identifier. text 5 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation + otherIdValue Identifier Other identifier that corresponds to this Dataset. text 6 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation + author Author The person(s), corporate body(ies), or agency(ies) responsible for creating the work. 
none 7 FALSE FALSE TRUE FALSE TRUE FALSE citation + authorName Name The author's Family Name, Given Name or the name of the organization responsible for this Dataset. FamilyName, GivenName or Organization text 8 #VALUE TRUE FALSE FALSE TRUE TRUE TRUE author citation + authorAffiliation Affiliation The organization with which the author is affiliated. text 9 (#VALUE) TRUE FALSE FALSE TRUE TRUE FALSE author citation + authorIdentifierScheme Identifier Scheme Name of the identifier scheme (ORCID, ISNI). text 10 - #VALUE: FALSE TRUE FALSE FALSE TRUE FALSE author citation + authorIdentifier Identifier Uniquely identifies an individual author or organization, according to various schemes. text 11 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE author citation + datasetContact Contact The contact(s) for this Dataset. none 12 FALSE FALSE TRUE FALSE TRUE FALSE citation + datasetContactName Name The contact's Family Name, Given Name or the name of the organization. FamilyName, GivenName or Organization text 13 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE datasetContact citation + datasetContactAffiliation Affiliation The organization with which the contact is affiliated. text 14 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE datasetContact citation + datasetContactEmail E-mail The e-mail address(es) of the contact(s) for the Dataset. This will not be displayed. email 15 #EMAIL FALSE FALSE FALSE FALSE TRUE TRUE datasetContact citation + dsDescription Description A summary describing the purpose, nature, and scope of the Dataset. none 16 FALSE FALSE TRUE FALSE TRUE FALSE citation + dsDescriptionValue Text A summary describing the purpose, nature, and scope of the Dataset. 
textbox 17 #VALUE TRUE FALSE FALSE FALSE TRUE TRUE dsDescription citation + dsDescriptionDate Date In cases where a Dataset contains more than one description (for example, one might be supplied by the data producer and another prepared by the data repository where the data are deposited), the date attribute is used to distinguish between the two descriptions. The date attribute follows the ISO convention of YYYY-MM-DD. YYYY-MM-DD date 18 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE dsDescription citation + subject Subject Domain-specific Subject Categories that are topically relevant to the Dataset. text 19 TRUE TRUE TRUE TRUE TRUE TRUE citation + keyword Keyword Key terms that describe important aspects of the Dataset. none 20 FALSE FALSE TRUE FALSE TRUE FALSE citation + keywordValue Term Key terms that describe important aspects of the Dataset. Can be used for building keyword indexes and for classification and retrieval purposes. A controlled vocabulary can be employed. The vocab attribute is provided for specification of the controlled vocabulary in use, such as LCSH, MeSH, or others. The vocabURI attribute specifies the location for the full controlled vocabulary. text 21 #VALUE TRUE FALSE FALSE TRUE TRUE FALSE keyword citation + keywordVocabulary Vocabulary For the specification of the keyword controlled vocabulary in use, such as LCSH, MeSH, or others. text 22 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE keyword citation + keywordVocabularyURI Vocabulary URL Keyword vocabulary URL points to the web presence that describes the keyword vocabulary, if appropriate. Enter an absolute URL where the keyword vocabulary web site is found, such as http://www.my.org. Enter full URL, starting with http:// url 23 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE keyword citation + topicClassification Topic Classification The classification field indicates the broad important topic(s) and subjects that the data cover. Library of Congress subject terms may be used here. 
none 24 FALSE FALSE TRUE FALSE FALSE FALSE citation + topicClassValue Term Topic or Subject term that is relevant to this Dataset. text 25 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE topicClassification citation + topicClassVocab Vocabulary Provided for specification of the controlled vocabulary in use, e.g., LCSH, MeSH, etc. text 26 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation + topicClassVocabURI Vocabulary URL Specifies the URL location for the full controlled vocabulary. Enter full URL, starting with http:// url 27 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation + publication Related Publication Publications that use the data from this Dataset. none 28 FALSE FALSE TRUE FALSE TRUE FALSE citation + publicationCitation Citation The full bibliographic citation for this related publication. textbox 29 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation + publicationIDType ID Type The type of digital identifier used for this publication (e.g., Digital Object Identifier (DOI)). text 30 #VALUE: TRUE TRUE FALSE FALSE TRUE FALSE publication citation + publicationIDNumber ID Number The identifier for the selected ID type. text 31 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation + publicationURL URL Link to the publication web page (e.g., journal article page, archive record page, or other). Enter full URL, starting with http:// url 32 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE publication citation + notesText Notes Additional important information about the Dataset. 
textbox 33 FALSE FALSE FALSE FALSE TRUE FALSE citation + language Language Language of the Dataset text 34 TRUE TRUE TRUE TRUE FALSE FALSE citation + producer Producer Person or organization with the financial or administrative responsibility over this Dataset none 35 FALSE FALSE TRUE FALSE FALSE FALSE citation + producerName Name Producer name FamilyName, GivenName or Organization text 36 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE producer citation + producerAffiliation Affiliation The organization with which the producer is affiliated. text 37 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerAbbreviation Abbreviation The abbreviation by which the producer is commonly known. (ex. IQSS, ICPSR) text 38 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerURL URL Producer URL points to the producer's web presence, if appropriate. Enter an absolute URL where the producer's web site is found, such as http://www.my.org. Enter full URL, starting with http:// url 39 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerLogoURL Logo URL URL for the producer's logo, which points to this producer's web-accessible logo image. Enter an absolute URL where the producer's logo image is found, such as http://www.my.org/images/logo.gif. Enter full URL for image, starting with http:// url 40
                            FALSE FALSE FALSE FALSE FALSE FALSE producer citation + productionDate Production Date Date when the data collection or other materials were produced (not distributed, published or archived). YYYY-MM-DD date 41 TRUE FALSE FALSE TRUE FALSE FALSE citation + productionPlace Production Place The location where the data collection and any other related materials were produced. text 42 FALSE FALSE FALSE FALSE FALSE FALSE citation + contributor Contributor The organization or person responsible for either collecting, managing, or otherwise contributing in some form to the development of the resource. none 43 : FALSE FALSE TRUE FALSE FALSE FALSE citation + contributorType Type The type of contributor of the resource. text 44 #VALUE TRUE TRUE FALSE TRUE FALSE FALSE contributor citation + contributorName Name The Family Name, Given Name or organization name of the contributor. FamilyName, GivenName or Organization text 45 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE contributor citation + grantNumber Grant Information Grant Information none 46 : FALSE FALSE TRUE FALSE FALSE FALSE citation + grantNumberAgency Grant Agency Grant Number Agency text 47 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE grantNumber citation + grantNumberValue Grant Number The grant or contract number of the project that sponsored the effort. text 48 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE grantNumber citation + distributor Distributor The organization designated by the author or producer to generate copies of the particular work including any necessary editions or revisions. none 49 FALSE FALSE TRUE FALSE FALSE FALSE citation + distributorName Name Distributor name FamilyName, GivenName or Organization text 50 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE distributor citation + distributorAffiliation Affiliation The organization with which the distributor contact is affiliated. 
text 51 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorAbbreviation Abbreviation The abbreviation by which this distributor is commonly known (e.g., IQSS, ICPSR). text 52 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorURL URL Distributor URL points to the distributor's web presence, if appropriate. Enter an absolute URL where the distributor's web site is found, such as http://www.my.org. Enter full URL, starting with http:// url 53 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorLogoURL Logo URL URL of the distributor's logo, which points to this distributor's web-accessible logo image. Enter an absolute URL where the distributor's logo image is found, such as http://www.my.org/images/logo.gif. Enter full URL for image, starting with http:// url 54
                            FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributionDate Distribution Date Date that the work was made available for distribution/presentation. YYYY-MM-DD date 55 TRUE FALSE FALSE TRUE FALSE FALSE citation + depositor Depositor The person (Family Name, Given Name) or the name of the organization that deposited this Dataset to the repository. text 56 FALSE FALSE FALSE FALSE FALSE FALSE citation + dateOfDeposit Deposit Date Date that the Dataset was deposited into the repository. YYYY-MM-DD date 57 FALSE FALSE FALSE TRUE FALSE FALSE citation + timePeriodCovered Time Period Covered Time period to which the data refer. This item reflects the time period covered by the data, not the dates of coding or making documents machine-readable or the dates the data were collected. Also known as span. none 58 ; FALSE FALSE TRUE FALSE FALSE FALSE citation + timePeriodCoveredStart Start Start date which reflects the time period covered by the data, not the dates of coding or making documents machine-readable or the dates the data were collected. YYYY-MM-DD date 59 #NAME: #VALUE TRUE FALSE FALSE TRUE FALSE FALSE timePeriodCovered citation + timePeriodCoveredEnd End End date which reflects the time period covered by the data, not the dates of coding or making documents machine-readable or the dates the data were collected. YYYY-MM-DD date 60 #NAME: #VALUE TRUE FALSE FALSE TRUE FALSE FALSE timePeriodCovered citation + dateOfCollection Date of Collection Contains the date(s) when the data were collected. none 61 ; FALSE FALSE TRUE FALSE FALSE FALSE citation + dateOfCollectionStart Start Date when the data collection started. YYYY-MM-DD date 62 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE dateOfCollection citation + dateOfCollectionEnd End Date when the data collection ended. 
YYYY-MM-DD date 63 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE dateOfCollection citation + kindOfData Kind of Data Type of data included in the file: survey data, census/enumeration data, aggregate data, clinical data, event/transaction data, program source code, machine-readable text, administrative records data, experimental data, psychological test, textual data, coded textual, coded documents, time budget diaries, observation data/ratings, process-produced data, or other. text 64 TRUE FALSE TRUE TRUE FALSE FALSE citation + series Series Information about the Dataset series. none 65 : FALSE FALSE FALSE FALSE FALSE FALSE citation + seriesName Name Name of the dataset series to which the Dataset belongs. text 66 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE series citation + seriesInformation Information History of the series and summary of those features that apply to the series as a whole. textbox 67 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE series citation + software Software Information about the software used to generate the Dataset. none 68 , FALSE FALSE TRUE FALSE FALSE FALSE citation + softwareName Name Name of software used to generate the Dataset. text 69 #VALUE FALSE TRUE FALSE FALSE FALSE FALSE software citation + softwareVersion Version Version of the software used to generate the Dataset. text 70 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE software citation + relatedMaterial Related Material Any material related to this Dataset. textbox 71 FALSE FALSE TRUE FALSE FALSE FALSE citation + relatedDatasets Related Datasets Any Datasets that are related to this Dataset, such as previous research on this subject. textbox 72 FALSE FALSE TRUE FALSE FALSE FALSE citation + otherReferences Other References Any references that would serve as background or supporting material to this Dataset. 
text 73 FALSE FALSE TRUE FALSE FALSE FALSE citation + dataSources Data Sources List of books, articles, serials, or machine-readable data files that served as the sources of the data collection. textbox 74 FALSE FALSE TRUE FALSE FALSE FALSE citation + originOfSources Origin of Sources For historical materials, information about the origin of the sources and the rules followed in establishing the sources should be specified. textbox 75 FALSE FALSE FALSE FALSE FALSE FALSE citation + characteristicOfSources Characteristic of Sources Noted Assessment of characteristics and source material. textbox 76 FALSE FALSE FALSE FALSE FALSE FALSE citation + accessToSources Documentation and Access to Sources Level of documentation of the original sources. textbox 77 FALSE FALSE FALSE FALSE FALSE FALSE citation +#controlledVocabulary DatasetField Value identifier displayOrder + subject Agricultural Sciences D01 0 + subject Arts and Humanities D0 1 + subject Astronomy and Astrophysics D1 2 + subject Business and Management D2 3 + subject Chemistry D3 4 + subject Computer and Information Science D7 5 + subject Earth and Environmental Sciences D4 6 + subject Engineering D5 7 + subject Law D8 8 + subject Mathematical Sciences D9 9 + subject Medicine, Health and Life Sciences D6 10 + subject Physics D10 11 + subject Social Sciences D11 12 + subject Other D12 13 + publicationIDType ark 0 + publicationIDType arXiv 1 arxiv + publicationIDType bibcode 2 + publicationIDType doi 3 + publicationIDType ean13 4 + publicationIDType eissn 5 + publicationIDType handle 6 + publicationIDType isbn 7 + publicationIDType issn 8 + publicationIDType istc 9 + publicationIDType lissn 10 + publicationIDType lsid 11 + publicationIDType pmid 12 + publicationIDType purl 13 + publicationIDType upc 14 + publicationIDType url 15 + publicationIDType urn 16 + contributorType Data Collector 0 + contributorType Data Curator 1 + contributorType Data Manager 2 + contributorType Editor 3 + contributorType Funder 4 + 
contributorType Hosting Institution 5 + contributorType Project Leader 6 + contributorType Project Manager 7 + contributorType Project Member 8 + contributorType Related Person 9 + contributorType Researcher 10 + contributorType Research Group 11 + contributorType Rights Holder 12 + contributorType Sponsor 13 + contributorType Supervisor 14 + contributorType Work Package Leader 15 + contributorType Other 16 + authorIdentifierScheme ORCID 0 + authorIdentifierScheme ISNI 1 + authorIdentifierScheme LCNA 2 + language Abkhaz 0 + language Afar 1 + language Afrikaans 2 + language Akan 3 + language Albanian 4 + language Amharic 5 + language Arabic 6 + language Aragonese 7 + language Armenian 8 + language Assamese 9 + language Avaric 10 + language Avestan 11 + language Aymara 12 + language Azerbaijani 13 + language Bambara 14 + language Bashkir 15 + language Basque 16 + language Belarusian 17 + language Bengali, Bangla 18 + language Bihari 19 + language Bislama 20 + language Bosnian 21 + language Breton 22 + language Bulgarian 23 + language Burmese 24 + language Catalan,Valencian 25 + language Chamorro 26 + language Chechen 27 + language Chichewa, Chewa, Nyanja 28 + language Chinese 29 + language Chuvash 30 + language Cornish 31 + language Corsican 32 + language Cree 33 + language Croatian 34 + language Czech 35 + language Danish 36 + language Divehi, Dhivehi, Maldivian 37 + language Dutch 38 + language Dzongkha 39 + language English 40 + language Esperanto 41 + language Estonian 42 + language Ewe 43 + language Faroese 44 + language Fijian 45 + language Finnish 46 + language French 47 + language Fula, Fulah, Pulaar, Pular 48 + language Galician 49 + language Georgian 50 + language German 51 + language Greek (modern) 52 + language Guaraní 53 + language Gujarati 54 + language Haitian, Haitian Creole 55 + language Hausa 56 + language Hebrew (modern) 57 + language Herero 58 + language Hindi 59 + language Hiri Motu 60 + language Hungarian 61 + language Interlingua 62 + language 
Indonesian 63 + language Interlingue 64 + language Irish 65 + language Igbo 66 + language Inupiaq 67 + language Ido 68 + language Icelandic 69 + language Italian 70 + language Inuktitut 71 + language Japanese 72 + language Javanese 73 + language Kalaallisut, Greenlandic 74 + language Kannada 75 + language Kanuri 76 + language Kashmiri 77 + language Kazakh 78 + language Khmer 79 + language Kikuyu, Gikuyu 80 + language Kinyarwanda 81 + language Kyrgyz 82 + language Komi 83 + language Kongo 84 + language Korean 85 + language Kurdish 86 + language Kwanyama, Kuanyama 87 + language Latin 88 + language Luxembourgish, Letzeburgesch 89 + language Ganda 90 + language Limburgish, Limburgan, Limburger 91 + language Lingala 92 + language Lao 93 + language Lithuanian 94 + language Luba-Katanga 95 + language Latvian 96 + language Manx 97 + language Macedonian 98 + language Malagasy 99 + language Malay 100 + language Malayalam 101 + language Maltese 102 + language Māori 103 + language Marathi (Marāṭhī) 104 + language Marshallese 105 + language Mongolian 106 + language Nauru 107 + language Navajo, Navaho 108 + language Northern Ndebele 109 + language Nepali 110 + language Ndonga 111 + language Norwegian Bokmål 112 + language Norwegian Nynorsk 113 + language Norwegian 114 + language Nuosu 115 + language Southern Ndebele 116 + language Occitan 117 + language Ojibwe, Ojibwa 118 + language Old Church Slavonic,Church Slavonic,Old Bulgarian 119 + language Oromo 120 + language Oriya 121 + language Ossetian, Ossetic 122 + language Panjabi, Punjabi 123 + language Pāli 124 + language Persian (Farsi) 125 + language Polish 126 + language Pashto, Pushto 127 + language Portuguese 128 + language Quechua 129 + language Romansh 130 + language Kirundi 131 + language Romanian 132 + language Russian 133 + language Sanskrit (Saṁskṛta) 134 + language Sardinian 135 + language Sindhi 136 + language Northern Sami 137 + language Samoan 138 + language Sango 139 + language Serbian 140 + language Scottish 
Gaelic, Gaelic 141 + language Shona 142 + language Sinhala, Sinhalese 143 + language Slovak 144 + language Slovene 145 + language Somali 146 + language Southern Sotho 147 + language Spanish, Castilian 148 + language Sundanese 149 + language Swahili 150 + language Swati 151 + language Swedish 152 + language Tamil 153 + language Telugu 154 + language Tajik 155 + language Thai 156 + language Tigrinya 157 + language Tibetan Standard, Tibetan, Central 158 + language Turkmen 159 + language Tagalog 160 + language Tswana 161 + language Tonga (Tonga Islands) 162 + language Turkish 163 + language Tsonga 164 + language Tatar 165 + language Twi 166 + language Tahitian 167 + language Uyghur, Uighur 168 + language Ukrainian 169 + language Urdu 170 + language Uzbek 171 + language Venda 172 + language Vietnamese 173 + language Volapük 174 + language Walloon 175 + language Welsh 176 + language Wolof 177 + language Western Frisian 178 + language Xhosa 179 + language Yiddish 180 + language Yoruba 181 + language Zhuang, Chuang 182 + language Zulu 183 + language Not applicable 184 diff --git a/postgresql/testdata/scripts/api/data/metadatablocks/customARCS.tsv b/postgresql/testdata/scripts/api/data/metadatablocks/customARCS.tsv new file mode 100644 index 0000000..e287349 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/metadatablocks/customARCS.tsv @@ -0,0 +1,21 @@ +#metadataBlock name dataverseAlias displayName + customARCS Alliance for Research on Corporate Sustainability Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + ARCS1 1) Were any of these data sets a) purchased, b) obtained through licensed databases, or c) provided by an organization under a nondisclosure or other agreement? Licensed agreement of deposited data. 
text 0 FALSE TRUE FALSE FALSE FALSE FALSE customARCS + ARCS2 2) If you responded Yes to Q1, have you ensured that sharing the data does not violate terms of the agreement? If you responded No to Q1, please enter N/A here. Data sharing does not violate terms. text 1 FALSE TRUE FALSE FALSE FALSE FALSE customARCS + ARCS3 3) Do any of these data sets include individual-level data (either collected or pre-existing in the dataset) that might make them subject to U.S. or international human subjects considerations? Human subjects consideration. text 2 FALSE TRUE FALSE FALSE FALSE FALSE customARCS + ARCS4 4) If you responded Yes to Q3, are these data sets totally de-identified or was sharing approved by your institutional review board ( IRB)? If you responded No to Q3 please enter N/A here. Deidentified data/sharing approved by IRB. text 3 FALSE TRUE FALSE FALSE FALSE FALSE customARCS + ARCS5 5) Do these datasets contain sensitive or personally identifiable private information? (Harvard Research Data Security Policy {www.security.harvard.edu/research-data-security-policy} may apply because this Dataverse is hosted by Harvard University.) Data contain sensitive/identifiable private information. 
text 4 FALSE TRUE FALSE FALSE FALSE FALSE customARCS +#controlledVocabulary DatasetField Value identifier displayOrder + ARCS1 No 0 + ARCS1 Yes 1 + ARCS2 NA 0 + ARCS2 No 1 + ARCS2 Yes 2 + ARCS3 No 0 + ARCS3 Yes 1 + ARCS4 NA 0 + ARCS4 No 1 + ARCS4 Yes 2 + ARCS5 No 0 + ARCS5 Yes 1 \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/metadatablocks/customCHIA.tsv b/postgresql/testdata/scripts/api/data/metadatablocks/customCHIA.tsv new file mode 100644 index 0000000..255981c --- /dev/null +++ b/postgresql/testdata/scripts/api/data/metadatablocks/customCHIA.tsv @@ -0,0 +1,10 @@ +#metadataBlock name dataverseAlias displayName + customCHIA CHIA Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + sourceCHIA Source Source - This describes the source of the data. Is it from the Bureau of Labor and Statistics? Is it data from the United Nations? text 0 TRUE FALSE FALSE TRUE FALSE FALSE customCHIA + datesAdditionalInformationCHIA Dates - Additional Information Dates - Additional Information - Note any additional information about dates or time periods in the dataset including intervals (annual, decennial, centennial, etc.) Also note the column(s) in the dataset where dates and other temporal information can be found. text 1 TRUE FALSE FALSE FALSE FALSE FALSE customCHIA + variablesCHIA Variables Variables - Define the variables in this dataset. Please note the column in the dataset where variable information can be found. textbox 2 TRUE FALSE FALSE FALSE FALSE FALSE customCHIA + classificationSchemaCHIA Classification Schema Classification Schema - If there is a classification scheme in this dataset, please describe it. For example, M_20_24 should be read as Males, aged 20-24. 
textbox 3 TRUE FALSE FALSE TRUE FALSE FALSE customCHIA + provenanceCHIA Provenance Provenance - The provenance of the datasets is the record of ownership and will be used as a guide to the authenticity or quality of the data. For example, the Provenance statement might be, "This dataset was created from data collected by David Ruvolo during a data collection trip to Spain in 1992. Since that time, the data has not been altered other than to migrate it to more current formats." text 4 TRUE FALSE FALSE FALSE FALSE FALSE customCHIA + rightsAvailabilityCHIA Rights/Availability Rights/Availability - Do you have the rights to share this data? text 5 TRUE FALSE FALSE FALSE FALSE FALSE customCHIA +#controlledVocabulary DatasetField Value identifier displayOrder \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/metadatablocks/customDigaai.tsv b/postgresql/testdata/scripts/api/data/metadatablocks/customDigaai.tsv new file mode 100644 index 0000000..8345d52 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/metadatablocks/customDigaai.tsv @@ -0,0 +1,47 @@ +#metadataBlock name dataverseAlias displayName + customDigaai Digaai Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + titulo Título Título do jornal ou revista. text 0 TRUE TRUE TRUE TRUE FALSE FALSE customDigaai + numero Número Número do jornal ou revista. text 1 TRUE FALSE FALSE TRUE FALSE FALSE customDigaai + datadePublicao Data de Publicação Entrar dia/mes/ano. dia/mes/ano text 2 TRUE FALSE FALSE TRUE FALSE FALSE customDigaai + localdePublicao Local de Publicação Local de Publicação. 
text 3 TRUE FALSE FALSE TRUE FALSE FALSE customDigaai + proprietrio Proprietário Proprietário text 4 TRUE FALSE FALSE TRUE FALSE FALSE customDigaai +#controlledVocabulary DatasetField Value identifier displayOrder + titulo Achei USA 0 + titulo Acontece Magazine 1 + titulo A Notícia 2 + titulo Brasil Best 3 + titulo Brasileiros & Brasileiras 4 + titulo Brasil USA 5 + titulo Brazil Explore 6 + titulo Brazilian Press 7 + titulo Brazilian Voice 8 + titulo Brazil News 9 + titulo Brazuca 10 + titulo Cia Brasil 11 + titulo Comunidade News 12 + titulo Diário do Brasil 13 + titulo FaceBrasil 14 + titulo Green and Yellow News 15 + titulo Jornal dos Sports 16 + titulo Jornal Moderno 17 + titulo Metropolitan 18 + titulo National 19 + titulo Negócio Fechado 20 + titulo Nossa Gente 21 + titulo Nossa Terra 22 + titulo O Brasileirinho 23 + titulo O Imigrante Cristão 24 + titulo O Jornal Brasileiro 25 + titulo O Novo Mundo 26 + titulo O Popular 27 + titulo Revista Linha Aberta 28 + titulo Revista MASSA 29 + titulo Revista Tititi 30 + titulo Sucesso USA 31 + titulo Ta na Mão 32 + titulo TC Brazil 33 + titulo Texas Magazine 34 + titulo The Brazilian Journal 35 + titulo Today Magazine 36 + titulo Viver Magazine 37 \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/metadatablocks/customGSD.tsv b/postgresql/testdata/scripts/api/data/metadatablocks/customGSD.tsv new file mode 100644 index 0000000..d15a4e8 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/metadatablocks/customGSD.tsv @@ -0,0 +1,528 @@ +#metadataBlock name dataverseAlias displayName + customGSD Graduate School of Design Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + gsdStudentName Student Name Full name of the student: Last Name, First Name (example: Smith, Jane). Use the name that the GSD Administrator has on file. 
LastName, FirstName text 0 TRUE FALSE TRUE FALSE FALSE FALSE customGSD + gsdStudentProgram Student's Program of Study Student's program of study. text 1 TRUE TRUE TRUE TRUE FALSE FALSE customGSD + gsdCourseName Course Name Name of the course. text 2 TRUE TRUE FALSE TRUE FALSE FALSE customGSD + gsdFacultyName Faculty Name Name of the studio instructor. text 3 TRUE TRUE TRUE TRUE FALSE FALSE customGSD + gsdCoordinator Core Studio Coordinator Name of the studio coordinator(s). text 4 FALSE TRUE TRUE FALSE FALSE FALSE customGSD + gsdSemester Semester / Year Select the semester / year. text 5 TRUE TRUE FALSE TRUE FALSE FALSE customGSD + gsdRecommendation Faculty Recommendation Indicate the recommendation(s) from the faculty for this project. text 6 TRUE TRUE TRUE TRUE FALSE FALSE customGSD + gsdAccreditation Accreditation Selection made by faculty. text 7 TRUE TRUE FALSE TRUE FALSE FALSE customGSD + gsdSiteType Site Type Describe the type of building or site, based on function / purpose. Example: Military base. text 8 TRUE FALSE TRUE TRUE FALSE FALSE customGSD + gsdProgramBrief Program / Brief Example: redevelopment, restoration. textbox 9 TRUE FALSE TRUE TRUE FALSE FALSE customGSD + gsdTypes Types of Representation/ Medium/ Format Choose from the list. text 10 FALSE TRUE TRUE TRUE FALSE FALSE customGSD + gsdPrizes Prizes Choose from the list. text 11 TRUE TRUE FALSE TRUE FALSE FALSE customGSD + gsdTags GSD Tags Use tags to describe the project. Write one keyword per field. To add more tags, click on the plus sign on the right. 
text 12 TRUE FALSE TRUE TRUE FALSE FALSE customGSD +#controlledVocabulary DatasetField Value identifier displayOrder + gsdFacultyName Abalos, Inaki Abalos_Inaki 0 + gsdFacultyName Adjaye, David Adjaye_David 1 + gsdFacultyName Adofo-Wilson, Baye Adofo-Wilson_Baye 2 + gsdFacultyName Agre, Claire Agre_Claire 3 + gsdFacultyName Altringer, Beth Altringer_Beth 4 + gsdFacultyName Apfelbaum, Steven Apfelbaum_Steven 5 + gsdFacultyName Aquino, Gerdo Aquino_Gerdo 6 + gsdFacultyName Asensio Villoria, Leire Asensio_Villoria_Leire 7 + gsdFacultyName Baines, Bridget Baines_Bridget 8 + gsdFacultyName Bandy, Vincent Bandy_Vincent 9 + gsdFacultyName Barkan, Katy Barkan_Katy 10 + gsdFacultyName Barkow, Frank Barkow_Frank 11 + gsdFacultyName Beard, Peter Beard_Peter 12 + gsdFacultyName Belanger, Pierre Belanger_Pierre 13 + gsdFacultyName Benedito, Silvia Benedito_Silvia 14 + gsdFacultyName Berrizbeitia, Ann Berrizbeitia_Ann 15 + gsdFacultyName Bewtra, Manisha Bewtra_Manisha 16 + gsdFacultyName Blau, Eve Blau_Eve 17 + gsdFacultyName Bozdogan, Sibel Bozdogan_Sibel 18 + gsdFacultyName Brandlhuber, Arno Brandlhuber_Arno 19 + gsdFacultyName Brenner, Neil Brenner_Neil 20 + gsdFacultyName Buchard, Jeffry Buchard_Jeffry 21 + gsdFacultyName Buckler, Julie Buckler_Julie 22 + gsdFacultyName Burchard, Jeffry Burchard_Jeffry 23 + gsdFacultyName Busquets, Joan Busquets_Joan 24 + gsdFacultyName Callejas Mujica, Luis Rodrigo Callejas_Mujica_Luis_Rodrigo 25 + gsdFacultyName Calvillo, Nerea Calvillo_Nerea 26 + gsdFacultyName Cantrell, Bradley Cantrell_Bradley 27 + gsdFacultyName Carras, James Carras_James 28 + gsdFacultyName Castillo, Jose Castillo_Jose 29 + gsdFacultyName Cephas, Jana Cephas_Jana 30 + gsdFacultyName Cheng, Christine Cheng_Christine 31 + gsdFacultyName Cohen, Preston Scott Cohen_Preston_Scott 32 + gsdFacultyName Coignet, Philippe Coignet_Philippe 33 + gsdFacultyName Cook, Peter Cook_Peter 34 + gsdFacultyName Corneil, Janne Corneil_Janne 35 + gsdFacultyName Correa, Felipe Correa_Felipe 
36 + gsdFacultyName Craig, Salmaan Craig_Salmaan 37 + gsdFacultyName Curtis, Lawrence Curtis_Lawrence 38 + gsdFacultyName Daoust, Renee Daoust_Renee 39 + gsdFacultyName Davis, Diane Davis_Diane 40 + gsdFacultyName de Broche des Combes, Eric de_Broche_des_Combes_Eric 41 + gsdFacultyName de Castro Mazarro, Alejandro de_Castro_Mazarro_Alejandro 42 + gsdFacultyName de Meuron, Pierre de_Meuron_Pierre 43 + gsdFacultyName Del Tredici, Peter Del_Tredici_Peter 44 + gsdFacultyName Desimini, Jill Desimini_Jill 45 + gsdFacultyName Desvigne, Michel Desvigne_Michel 46 + gsdFacultyName D'Oca, Daniel D_Oca_Daniel 47 + gsdFacultyName Doherty, Gareth Doherty_Gareth 48 + gsdFacultyName Doran, Kelly Doran_Kelly 49 + gsdFacultyName Duempelmann, Sonja Duempelmann_Sonja 50 + gsdFacultyName Echeverria, Inaki Echeverria_Inaki 51 + gsdFacultyName Eigen, Ed Eigen_Ed 52 + gsdFacultyName Elkin, Rosetta Elkin_Rosetta 53 + gsdFacultyName Ellis, Erle Ellis_Erle 54 + gsdFacultyName Etzler, Danielle Etzler_Danielle 55 + gsdFacultyName Evans, Teman Evans_Teman 56 + gsdFacultyName Flores Dewey, Onesimo Flores_Dewey_Onesimo 57 + gsdFacultyName Forsyth, Ann Forsyth_Ann 58 + gsdFacultyName Frederickson, Kristin Frederickson_Kristin 59 + gsdFacultyName Gamble, David Gamble_David 60 + gsdFacultyName Garcia Grinda, Efren Garcia_Grinda_Efren 61 + gsdFacultyName Garciavelez Alfaro, Carlos Garciavelez_Alfaro_Carlos 62 + gsdFacultyName Geers, Kersten Geers_Kersten 63 + gsdFacultyName Gelabert-Sanchez, Ana Gelabert-Sanchez_Ana 64 + gsdFacultyName Georgoulias, Andreas Georgoulias_Andreas 65 + gsdFacultyName Geuze, Adriaan Geuze_Adriaan 66 + gsdFacultyName Gillies-Smith, Shauna Gillies-Smith_Shauna 67 + gsdFacultyName Ham, Derek Ham_Derek 68 + gsdFacultyName Hansch, Inessa Hansch_Inessa 69 + gsdFacultyName Hansen, Andrea Hansen_Andrea 70 + gsdFacultyName Harabasz, Ewa Harabasz_Ewa 71 + gsdFacultyName Hays, K. 
Michael Hays_K._Michael 72 + gsdFacultyName Herzog, Jacques Herzog_Jacques 73 + gsdFacultyName Hilderbrand, Gary Hilderbrand_Gary 74 + gsdFacultyName Hoberman, Chuck Hoberman_Chuck 75 + gsdFacultyName Hong, Zaneta Hong_Zaneta 76 + gsdFacultyName Hooftman, Eelco Hooftman_Eelco 77 + gsdFacultyName Hooper, Michael Hooper_Michael 78 + gsdFacultyName Howeler, Eric Howeler_Eric 79 + gsdFacultyName Hoxie, Christopher Hoxie_Christopher 80 + gsdFacultyName Hung, Ying-Yu Hung_Ying-Yu 81 + gsdFacultyName Hunt, John Hunt_John 82 + gsdFacultyName Hutton, Jane Hutton_Jane 83 + gsdFacultyName Hyde, Timothy Hyde_Timothy 84 + gsdFacultyName Ibanez, Mariana Ibanez_Mariana 85 + gsdFacultyName Idenburg, Florian Idenburg_Florian 86 + gsdFacultyName Johnston, Sharon Johnston_Sharon 87 + gsdFacultyName Kayden, Jerold Kayden_Jerold 88 + gsdFacultyName Khamsi, James Khamsi_James 89 + gsdFacultyName Kiefer, Matthew Kiefer_Matthew 90 + gsdFacultyName Kirkwood, Niall Kirkwood_Niall 91 + gsdFacultyName Koolhaas, Remment Koolhaas_Remment 92 + gsdFacultyName Krieger, Alex Krieger_Alex 93 + gsdFacultyName Kuo, Max Kuo_Max 94 + gsdFacultyName La, Grace La_Grace 95 + gsdFacultyName Lacaton, Anne Lacaton_Anne 96 + gsdFacultyName Laszlo Tait, Rachel Laszlo_Tait_Rachel 97 + gsdFacultyName Leach, Neil Leach_Neil 98 + gsdFacultyName Lee, Chris Lee_Chris 99 + gsdFacultyName Lee, Christopher Lee_Christopher 100 + gsdFacultyName Lee, Mark Lee_Mark 101 + gsdFacultyName Legendre, George L. Legendre_George_L. 
102 + gsdFacultyName Lehrer, Mia Lehrer_Mia 103 + gsdFacultyName Liaropoulos-Legendre, George Liaropoulos-Legendre_George 104 + gsdFacultyName Long, Judith Long_Judith 105 + gsdFacultyName Lopez-Pineiro, Sergio Lopez-Pineiro_Sergio 106 + gsdFacultyName Lott, Jonathan Lott_Jonathan 107 + gsdFacultyName Madden, Kathryn Madden_Kathryn 108 + gsdFacultyName Mah, David Mah_David 109 + gsdFacultyName Malkawi, Ali Malkawi_Ali 110 + gsdFacultyName Maltzan, Michael Maltzan_Michael 111 + gsdFacultyName Manfredi, Michael Manfredi_Michael 112 + gsdFacultyName Marchant, Edward Marchant_Edward 113 + gsdFacultyName Mateo, Josep Lluis Mateo_Josep_Lluis 114 + gsdFacultyName McCafferty, Patrick McCafferty_Patrick 115 + gsdFacultyName McIntosh, Alistair McIntosh_Alistair 116 + gsdFacultyName MCloskey, Karen MCloskey_Karen 117 + gsdFacultyName Mehrotra, Rahul Mehrotra_Rahul 118 + gsdFacultyName Menchaca, Alejandra Menchaca_Alejandra 119 + gsdFacultyName Menges, Achim Menges_Achim 120 + gsdFacultyName Menges, Achim Menges_Achim 121 + gsdFacultyName Michalatos, Panagiotis Michalatos_Panagiotis 122 + gsdFacultyName Moe, Kiel Moe_Kiel 123 + gsdFacultyName Molinsky, Jennifer Molinsky_Jennifer 124 + gsdFacultyName Moreno, Cristina Diaz Moreno_Cristina_Diaz 125 + gsdFacultyName Mori, Toshiko Mori_Toshiko 126 + gsdFacultyName Moussavi, Farshid Moussavi_Farshid 127 + gsdFacultyName Mulligan, Mark Mulligan_Mark 128 + gsdFacultyName Muro, Carles Muro_Carles 129 + gsdFacultyName Naginski, Erika Naginski_Erika 130 + gsdFacultyName Najle, Ciro Najle_Ciro 131 + gsdFacultyName Nakazawa, Paul Nakazawa_Paul 132 + gsdFacultyName Navarro Rios, Victor Navarro_Rios_Victor 133 + gsdFacultyName Nichols, Albert Nichols_Albert 134 + gsdFacultyName O'Carroll, Aisling O_Carroll_Aisling 135 + gsdFacultyName O'Donnell, Sheila O_Donnell_Sheila 136 + gsdFacultyName Oman, Rok Oman_Rok 137 + gsdFacultyName O'Neill-Uzgiris, Kelly Ann O_Neill-Uzgiris_Kelly_Ann 138 + gsdFacultyName Oppenheim, Chad Oppenheim_Chad 139 + 
gsdFacultyName Other Other 140 + gsdFacultyName Ozay, Erkin Ozay_Erkin 141 + gsdFacultyName Panzano, Megan Panzano_Megan 142 + gsdFacultyName Park, Peter Park_Peter 143 + gsdFacultyName Parsons, Katharine Parsons_Katharine 144 + gsdFacultyName Peiser, Richard Peiser_Richard 145 + gsdFacultyName Petcu, Constantin Petcu_Constantin 146 + gsdFacultyName Petrescu, Doina Petrescu_Doina 147 + gsdFacultyName Pietrusko, Robert Pietrusko_Robert 148 + gsdFacultyName Rahm, Philippe Rahm_Philippe 149 + gsdFacultyName Raspall Galli, Carlos Felix Raspall_Galli_Carlos_Felix 150 + gsdFacultyName Reed, Chris Reed_Chris 151 + gsdFacultyName Rein-Cano, Martin Rein-Cano_Martin 152 + gsdFacultyName Restrepo Ochoa, Camilo Restrepo_Ochoa_Camilo 153 + gsdFacultyName Rich, Damon Rich_Damon 154 + gsdFacultyName Rocker, Ingeborg Rocker_Ingeborg 155 + gsdFacultyName Rojo, Marcos Rojo_Marcos 156 + gsdFacultyName Rosenthal, Joyce Klein Rosenthal_Joyce_Klein 157 + gsdFacultyName Rowe, Peter Rowe_Peter 158 + gsdFacultyName Ryan, Thomas Ryan_Thomas 159 + gsdFacultyName Samuelson, Holly Samuelson_Holly 160 + gsdFacultyName Sarkis, A. 
Hashim Sarkis_A._Hashim 161 + gsdFacultyName Schumacher, Patrik Schumacher_Patrik 162 + gsdFacultyName Schwartz, Martha Schwartz_Martha 163 + gsdFacultyName Scogin, Buford Scogin_Buford 164 + gsdFacultyName Scogin, Mack Scogin_Mack 165 + gsdFacultyName Sennett, Richard Sennett_Richard 166 + gsdFacultyName Sentkiewicz, Renata Sentkiewicz_Renata 167 + gsdFacultyName Shigematsu, Shohei Shigematsu_Shohei 168 + gsdFacultyName Silman, Robert Silman_Robert 169 + gsdFacultyName Silver, Mitchell Silver_Mitchell 170 + gsdFacultyName Silvetti, Jorge Silvetti_Jorge 171 + gsdFacultyName Smith, Christine Smith_Christine 172 + gsdFacultyName Snyder, Susan Snyder_Susan 173 + gsdFacultyName Solano, Laura Solano_Laura 174 + gsdFacultyName Sorkin, Michael Sorkin_Michael 175 + gsdFacultyName Spiegelman, Kathy Spiegelman_Kathy 176 + gsdFacultyName Stilgoe, John Stilgoe_John 177 + gsdFacultyName Stockard, James Stockard_James 178 + gsdFacultyName Tato, Belinda Tato_Belinda 179 + gsdFacultyName Thomas, George Thomas_George 180 + gsdFacultyName Thompson, Maryann Thompson_Maryann 181 + gsdFacultyName Torto, Raymond Torto_Raymond 182 + gsdFacultyName Tuomey, John Tuomey_John 183 + gsdFacultyName Urbanski, Matthew Urbanski_Matthew 184 + gsdFacultyName Valenzuela, Luis Valenzuela_Luis 185 + gsdFacultyName Vallejo, Jose Luis Vallejo_Jose_Luis 186 + gsdFacultyName Van Valkenburgh, Michael Van_Valkenburgh_Michael 187 + gsdFacultyName VanDerSys, Keith VanDerSys_Keith 188 + gsdFacultyName Vecitis, Chad Vecitis_Chad 189 + gsdFacultyName Videcnik, Spela Videcnik_Spela 190 + gsdFacultyName Waldheim, Charles Waldheim_Charles 191 + gsdFacultyName Wang, Bing Wang_Bing 192 + gsdFacultyName Weitz, David Weitz_David 193 + gsdFacultyName Wendel, Delia Wendel_Delia 194 + gsdFacultyName Whittaker, Elizabeth Whittaker_Elizabeth 195 + gsdFacultyName Wickersham, Jay Wickersham_Jay 196 + gsdFacultyName Witt, Andrew Witt_Andrew 197 + gsdFacultyName Wodiczko, Krzysztof Wodiczko_Krzysztof 198 + gsdFacultyName Wood, 
Robert Wood_Robert 199 + gsdFacultyName Wu, Cameron Wu_Cameron 200 + gsdFacultyName Zickler, Todd Zickler_Todd 201 + gsdCoordinator Abalos, Inaki Abalos_Inaki 0 + gsdCoordinator Belanger, Pierre Belanger_Pierre 1 + gsdCoordinator Correa, Felipe Correa_Felipe 2 + gsdCoordinator Desimini, Jill Desimini_Jill 3 + gsdCoordinator Forsyth, Ann Forsyth_Ann 4 + gsdCoordinator Etzler, Danielle Etzler_Danielle 5 + gsdCoordinator Gelabert-Sanchez, Ana Gelabert-Sanchez_Ana 6 + gsdCoordinator Hilderbrand, Gary Hilderbrand_Gary 7 + gsdCoordinator Howeler, Eric Howeler_Eric 8 + gsdCoordinator Howler, Eric Howler_Eric 9 + gsdCoordinator Hutton, Jane Hutton_Jane 10 + gsdCoordinator Ibanez, Mariana Ibanez_Mariana 11 + gsdCoordinator Idenburg, Florian Idenburg_Florian 12 + gsdCoordinator La, Grace La_Grace 13 + gsdCoordinator Long, Judith Grant Long_Judith_Grant 14 + gsdCoordinator Moe, Kiel Moe_Kiel 15 + gsdCoordinator Muro, Carles Muro_Carles 16 + gsdCoordinator Wu, Cameron Wu_Cameron 17 + gsdCoordinator Other Other 18 + gsdStudentProgram DDes DDes 0 + gsdStudentProgram MArch I MArch_I 1 + gsdStudentProgram MArch II MArch_II 2 + gsdStudentProgram MAUD or MLAUD MAUD_or_MLAUD 3 + gsdStudentProgram MDes MDes 4 + gsdStudentProgram MLA I MLA_I 5 + gsdStudentProgram MLA I AP MLA_I_AP 6 + gsdStudentProgram MLA II MLA_II 7 + gsdStudentProgram MUD MUD 8 + gsdStudentProgram MUP MUP 9 + gsdStudentProgram MUP/MArch MUP_MArch 10 + gsdStudentProgram MUP/MAUD or MLAUD MUP_MAUD_or_MLAUD 11 + gsdStudentProgram MUP/MDes MUP_MDes 12 + gsdStudentProgram MUP/MLA MUP_MLA 13 + gsdStudentProgram Other Other 14 + gsdStudentProgram PhD PhD 15 + gsdSemester Fall 2013 Fall_2013 0 + gsdSemester Spring 2014 Spring_2014 1 + gsdSemester Fall 2014 Fall_2014 2 + gsdSemester Spring 2015 Spring_2015 3 + gsdSemester Fall 2015 Fall_2015 4 + gsdSemester Spring 2016 Spring_2016 5 + gsdSemester Fall 2016 Fall_2016 6 + gsdRecommendation Accreditation Accreditation 0 + gsdRecommendation Open House Open_House 1 + 
gsdRecommendation Platform Platform 2 + gsdRecommendation Website Website 3 + gsdTypes Animations Animations 0 + gsdTypes Axonometric drawings Axonometric_drawings 1 + gsdTypes Axonometric projections Axonometric_projections 2 + gsdTypes Diagrams Diagrams 3 + gsdTypes Drawings Drawings 4 + gsdTypes Elevations (drawings) Elevations_drawings 5 + gsdTypes Floor plans Floor_plans 6 + gsdTypes Isometric drawings Isometric_drawings 7 + gsdTypes Isometric projections Isometric_projections 8 + gsdTypes Maps Maps 9 + gsdTypes Master plans Master_plans 10 + gsdTypes Models (representations) Models_representations 11 + gsdTypes Other Other 12 + gsdTypes Perspective drawings Perspective_drawings 13 + gsdTypes Photographs Photographs 14 + gsdTypes Plans (drawings) Plans_drawings 15 + gsdTypes Plans (maps) Plans_maps 16 + gsdTypes Renderings Renderings 17 + gsdTypes Sectional elevations Sectional_elevations 18 + gsdTypes Sectional perspectives Sectional_perspectives 19 + gsdTypes Sections Sections 20 + gsdTypes Sections (orthographic projections) Sections_orthographic_projections 21 + gsdTypes Site plans Site_plans 22 + gsdTypes Sketches Sketches 23 + gsdTypes Videos Videos 24 + gsdPrizes Araldo Cossutta Annual Prize for Design Excellence Araldo_Cossutta_Annual_Prize_for_Design_Excellence 0 + gsdPrizes Award for Academic Excellence in Urban Design Award_for_Academic_Excellence_in_Urban_Design 1 + gsdPrizes Award for Academic Excellence in Urban Planning Award_for_Academic_Excellence_in_Urban_Planning 2 + gsdPrizes Award for Outstanding Leadership in Urban Design Award_for_Outstanding_Leadership_in_Urban_Design 3 + gsdPrizes Award for Outstanding Leadership in Urban Planning Award_for_Outstanding_Leadership_in_Urban_Planning 4 + gsdPrizes Charles Eliot Traveling Fellowship in Landscape Architecture Charles_Eliot_Traveling_Fellowship_in_Landscape_Architecture 5 + gsdPrizes Clifford Wong Prize in Housing Design Clifford_Wong_Prize_in_Housing_Design 6 + gsdPrizes Digital Design 
Prize Digital_Design_Prize 7 + gsdPrizes Dimitris Pikionis Award Dimitris_Pikionis_Award 8 + gsdPrizes Druker Traveling Fellowship Druker_Traveling_Fellowship 9 + gsdPrizes Ferdinand Colloredo-Mansfeld Prize for Superior Achievement in Real Estate Studies Ferdinand_Colloredo-Mansfeld_Prize_for_Superior_Achievement_in_Real_Estate_Studies 10 + gsdPrizes Frederick Sheldon Traveling Fellowship Frederick_Sheldon_Traveling_Fellowship 11 + gsdPrizes Howard T. Fisher Prize for Excellence in Geographic Information Science Howard_T_Fisher_Prize_for_Excellence_in_Geographic_Information_Science 12 + gsdPrizes Jacob Weidenmann Prize Jacob_Weidenmann_Prize 13 + gsdPrizes Julia Amory Appleton Traveling Fellowship in Architecture Julia_Amory_Appleton_Traveling_Fellowship_in_Architecture 14 + gsdPrizes Kevin V. Kieran Prize (Kevin Kieran Memorial Scholarship) Kevin_V_Kieran_Prize_(Kevin_Kieran_Memorial_Scholarship) 15 + gsdPrizes Norman T. Newton Prize Norman_T_Newton_Prize 16 + gsdPrizes Peter Rice Prize for Innovation in Architecture and Structural Design Peter_Rice_Prize_for_Innovation_in_Architecture_and_Structural_Design 17 + gsdPrizes Peter Walker & Partners Fellowship for Landscape Architecture Peter_Walker_&_Partners_Fellowship_for_Landscape_Architecture 18 + gsdPrizes Sinclair Kennedy Traveling Fellowship Sinclair_Kennedy_Traveling_Fellowship 19 + gsdPrizes The Daniel L. 
Schodek Award for Technology and Sustainability The_Daniel_L_Schodek_Award_for_Technology_and_Sustainability 20 + gsdAccreditation High High 0 + gsdAccreditation Medium Medium 1 + gsdAccreditation Low Low 2 + gsdCourseName 01101: First Semester Core: PROJECT 01101 0 + gsdCourseName 01102: Second Semester Core: SITUATE 01102 1 + gsdCourseName 01111: LA I: First Semester Core Studio 01111 2 + gsdCourseName 01112: Landscape Architecture II 01112 3 + gsdCourseName 01121: First Semester Core Urban Planning Studio 01121 4 + gsdCourseName 01122: Second Semester Core Urban Planning Studio 01122 5 + gsdCourseName 01201: Third Semester Core: INTEGRATE 01201 6 + gsdCourseName 01202: Fourth Semester Core: RELATE 01202 7 + gsdCourseName 01211: LA III: Third Semester Core Studio 01211 8 + gsdCourseName 01212: Landscape Architecture IV 01212 9 + gsdCourseName 01221: Elements of Urban Design 01221 10 + gsdCourseName 01301: Kyoto Studio II: Seasons and Architecture 01301 11 + gsdCourseName 01301: The Function of Time 01301 12 + gsdCourseName 01302: Architecture Club London 01302 13 + gsdCourseName 01302: Unfinished Work III 01302 14 + gsdCourseName 01303: Alimentary Design 01303 15 + gsdCourseName 01303: Workplan 01303 16 + gsdCourseName 01304: Alimentary Design 01304 17 + gsdCourseName 01304: Socio-Environmental Responsive Design 01304 18 + gsdCourseName 01305: Built Climates 01305 19 + gsdCourseName 01305: Parametric Semiology - High Performance Architecture for Apple, Google and Facebook 01305 20 + gsdCourseName 01306: 21st Cent. 
Arch.of Africa and the Diaspora 01306 21 + gsdCourseName 01306: Material Performance - Fibrous Tectonics 01306 22 + gsdCourseName 01307: La Strada Novissima 01307 23 + gsdCourseName 01307: Material Performance 01307 24 + gsdCourseName 01308: City of Artificial Extrusions 01308 25 + gsdCourseName 01308: Green Card Conversations 01308 26 + gsdCourseName 01309: Studio Alaska 01309 27 + gsdCourseName 01309: Theatre and the City 01309 28 + gsdCourseName 01310: Architecture of Cultural Prosthetics 01310 29 + gsdCourseName 01310: Rotterdam Study Abroad Studio Option: Elements of Architecture 01310 30 + gsdCourseName 01311: Apres Ski: Eco Village Les Diablerets 01311 31 + gsdCourseName 01311: The Forms of Transition 01311 32 + gsdCourseName 01312: "You Can't Die in Disney World" A ZOO 01312 33 + gsdCourseName 01312: Basel Study Abroad Studio Option 01312 34 + gsdCourseName 01313: Indebted Architecture 01313 35 + gsdCourseName 01314: IN THE LAND OF NANDUTi: following the lines, threads, and figures of the river 01314 36 + gsdCourseName 01315: Real and Imaginary Variables (Final): Global Arenas 01315 37 + gsdCourseName 01316: High-rise / High-density 01316 38 + gsdCourseName 01317: Another nature 01317 39 + gsdCourseName 01318: Borrominations, or the Auratic Dome 01318 40 + gsdCourseName 01319: Thermodynamic Materialism Applied to Dense Urban Conglomerates, Two Chinese Case Studies 01319 41 + gsdCourseName 01401: A New [Landscape] Infrastructure for Los Angeles 01401 42 + gsdCourseName 01401: Liminal Space 01401 43 + gsdCourseName 01402: Parallel Motion: Walden Pond, Concord / Central Park, New York 01402 44 + gsdCourseName 01402: Parallel Motion: Walden Pond, Concord/ Central Park , NY 01402 45 + gsdCourseName 01402: The Endless Landscape - River Hudson 01402 46 + gsdCourseName 01403: After La Villette 01403 47 + gsdCourseName 01403: After La Vilette (Paris) 01403 48 + gsdCourseName 01403: LIFE-STYLED - CHINA-TOWN 01403 49 + gsdCourseName 01404: California Limnolarium 01404 
50 + gsdCourseName 01404: California Limnolarium (experiments in projective processes) 01404 51 + gsdCourseName 01404: Post-suburb - Nashua NH 01404 52 + gsdCourseName 01405: Airport Park Zurich 01405 53 + gsdCourseName 01405: Envisioning Miami: Simulated Natures 01405 54 + gsdCourseName 01406: The Ocean State 01406 55 + gsdCourseName 01407: From the City to the Object: Terre des Hommes 2017 01407 56 + gsdCourseName 01408: Caen Island: Public Space 01408 57 + gsdCourseName 01409: Negative Planning in Nanshahe, Haidian District, Beijing 01409 58 + gsdCourseName 01501: Haters Make Me Famous: The Newark Riverfront and the Post-Great Migration City 01501 59 + gsdCourseName 01501: RURBAN 01501 60 + gsdCourseName 01502: Networked Urbanism: Urban Waste - Urban Design 01502 61 + gsdCourseName 01502: The Storm, the Strife, and Everyday Life 01502 62 + gsdCourseName 01503: Planning and Development on the East Boston Waterfront 01503 63 + gsdCourseName 01503: The Countryside as a City 01503 64 + gsdCourseName 01504: Retrofitting the (post?) 
Industrial Metropolis 01504 65 + gsdCourseName 01505: Medellin: Urban Porosity as Social Infrastructure 01505 66 + gsdCourseName 01506: Obsolescence and Pathways to Redevelopment: 01506 67 + gsdCourseName 01507: Design and Politics - Managing Risks and Vulnerabilities 01507 68 + gsdCourseName 01601: Macau: Cross-border Cities 01601 69 + gsdCourseName 01602: Territorialism II 01602 70 + gsdCourseName 01603: Meydan: Designing the Surfaces of Public Space around Beyazit Square, Istanbul 01603 71 + gsdCourseName 01606: Los Angeles Study Abroad Studio: The Possibilities of the Wrong Scale 01606 72 + gsdCourseName 02121: Visual Studies 02121 73 + gsdCourseName 02122: Projective Representation in Architecture 02122 74 + gsdCourseName 02129: Spatial Analysis and Representation 02129 75 + gsdCourseName 02141: Landscape Representation I 02141 76 + gsdCourseName 02142: Landscape Representation I 02142 77 + gsdCourseName 02223: Digital Media I 02223 78 + gsdCourseName 02224: Digital Media II 02224 79 + gsdCourseName 02241: Landscape Representation II 02241 80 + gsdCourseName 02241: Landscape Representation III 02241 81 + gsdCourseName 02322: Digital Media for Design 02322 82 + gsdCourseName 02341: Communication for Designer 02341 83 + gsdCourseName 02415: Paper or Plastic 02415 84 + gsdCourseName 02444: Landscape Material Design Practice and Digital Media 02444 85 + gsdCourseName 02446: Drawing for Designers 02446 86 + gsdCourseName 02448: Landscape as Painting 02448 87 + gsdCourseName 02449: Immersive Landscape 02449 88 + gsdCourseName 02449: Landscape as Video Game 02449 89 + gsdCourseName 02450: Landscape as Weather/Atmosphere 02450 90 + gsdCourseName 02482: Art, Design and the Public Domain 02482 91 + gsdCourseName 02602: Basel Study Abroad Seminar 02602 92 + gsdCourseName 03241: Theories of Landscape as Urbanism 03241 93 + gsdCourseName 03241: Theories of Landscape as Urbanism, Landscape as Infrastructure 03241 94 + gsdCourseName 03242: Theories of Landscape Architecture 
03242 95 + gsdCourseName 03330: Conservation of Older Buildings 03330 96 + gsdCourseName 03333: Culture, Conservation and Design 03333 97 + gsdCourseName 03338: carbonurbanism 03338 98 + gsdCourseName 03345: Emergence in Landscape Architecture 03345 99 + gsdCourseName 03375: Planning for Conservation: 03375 100 + gsdCourseName 03453: Light Structure I 03453 101 + gsdCourseName 03494: Design for Learning 03494 102 + gsdCourseName 03499: The Aperture Analyzed 03499 103 + gsdCourseName 03602: Study Abroad Seminar: Islands 03602 104 + gsdCourseName 03603: The Hitchhikers Guide to Hyperreality 03603 105 + gsdCourseName 04105: Studies of the Built North American Environment 04105 106 + gsdCourseName 04105: Studies of the Built North American Environment 1580 - Present 04105 107 + gsdCourseName 04115: History and Theory of Urban Interventions 04115 108 + gsdCourseName 04121: Buildings, Texts, and Contexts I 04121 109 + gsdCourseName 04141: Histories of Landscape Architecture 04141 110 + gsdCourseName 04142: Histories of Landscape Architecture II 04142 111 + gsdCourseName 04223: Buildings, Texts, and Contexts III 04223 112 + gsdCourseName 04303: Modernization in the Visual U.S. 
Environment 04303 113 + gsdCourseName 04304: North American Seacoasts + Landscapes Discovery Period to the Present 04304 114 + gsdCourseName 04304: North American Seacoasts and Landscape 04304 115 + gsdCourseName 04305: Adventure + Fantasy Simulation 1871-2036 04305 116 + gsdCourseName 04329: Urbanization in the East Asian Region 04329 117 + gsdCourseName 04358: Authority and Invention: Medieval Art and Architecture 04358 118 + gsdCourseName 04362: Structuring Urban Experience 04362 119 + gsdCourseName 04363: Walking 04363 120 + gsdCourseName 04405: Istanbul 04405 121 + gsdCourseName 04408: Situating the Modern 04408 122 + gsdCourseName 04439: "In the Manner of a Picture" 04439 123 + gsdCourseName 04444: Historical Ground 04444 124 + gsdCourseName 04445: Envisioning Landscape: Cultures of Vision in the Air and on the Ground 04445 125 + gsdCourseName 04446: A History of Nature Conservation and Cultural Landscape Preservation 04446 126 + gsdCourseName 04447: Forest, Grove, Tree 04447 127 + gsdCourseName 04477: Slums in Architectural History 04477 128 + gsdCourseName 05204: Real Estate Finance and Development 05204 129 + gsdCourseName 05206: Land Use and Environmental Law 05206 130 + gsdCourseName 05210: Cities by Design I 05210 131 + gsdCourseName 05212: Field Studies in Real Estate, Planning, and Urban Design 05212 132 + gsdCourseName 05213: Policy Making in Urban Settings 05213 133 + gsdCourseName 05222: Markets and Market Failures with Cases 05222 134 + gsdCourseName 05304: Transportation Planning and Development 05304 135 + gsdCourseName 05326: Housing and Urbanization in the United States 05326 136 + gsdCourseName 05330: Healthy Places 05330 137 + gsdCourseName 05338: Planning for the 21st Century 05338 138 + gsdCourseName 05342: Creating Resilient Cities 05342 139 + gsdCourseName 05343: Critical and Social Cartography 05343 140 + gsdCourseName 05360: Territorial Intelligence 05360 141 + gsdCourseName 05433: Modern Housing and Urban Districts 05433 142 + 
gsdCourseName 05492: Real Estate Finance and Development Fundamentals 05492 143 + gsdCourseName 05495: Market Analysis and Urban Economics 05495 144 + gsdCourseName 05502: Urban Governance and the Politics of Planning in the Developing World 05502 145 + gsdCourseName 06121 Construction Lab 06121 146 + gsdCourseName 06122 Energy in Architecture 06122 147 + gsdCourseName 06141: Ecologies, Techniques, Technologies I 06141 148 + gsdCourseName 06141: Ecologies, Techniques, Techs. I 06141 149 + gsdCourseName 06142: Ecologies, Techniques, Techs. II 06142 150 + gsdCourseName 06227: Structural Design 1 06227 151 + gsdCourseName 06230: Cases in Contemporary Construction 06230 152 + gsdCourseName 06241: Ecologies, Techniques, Technologies III 06241 153 + gsdCourseName 06241: Ecologies, Techniques, Techs. III 06241 154 + gsdCourseName 06242: Ecologies, Techniques, Techs. IV 06242 155 + gsdCourseName 06243: Ecologies, Techniques, Techs. V 06243 156 + gsdCourseName 06251: Research Seminar on Urban Ecology 06251 157 + gsdCourseName 06271: The Innovative Practice 06271 158 + gsdCourseName 06272: Innovation in Science and Engineering 06272 159 + gsdCourseName 06273: Water Engineering 06273 160 + gsdCourseName 06274: Advanced Introduction to Robotics 06274 161 + gsdCourseName 06275: Computer Vision 06275 162 + gsdCourseName 06317: Material Practice as Research 06317 163 + gsdCourseName 06318: Urban and Suburban Ecology 06318 164 + gsdCourseName 06322: Mapping: Geographic Representation 06322 165 + gsdCourseName 06323: Brownfields Practicum 06323 166 + gsdCourseName 06333: Aquatic Ecology 06333 167 + gsdCourseName 06335: Phytotechnologies 06335 168 + gsdCourseName 06337: Changing Natural and Built Coastal Environments 06337 169 + gsdCourseName 06337: Changing Natural and Built Coastal Environments 06337 170 + gsdCourseName 06338: Introduction to Computational Design 06338 171 + gsdCourseName 06436: Expanded Mechanisms / Empirical Materialisms 06436 172 + gsdCourseName 06450: High 
Performance Buildings and Systems Integration 06450 173 + gsdCourseName 06451: Research Seminar on Urban Ecology 06451 174 + gsdCourseName 06454: Poetics of Construction: Detail Design 06454 175 + gsdCourseName 06468: Design By Committee 06468 176 + gsdCourseName 06470: Energy Simulation for Design 06470 177 + gsdCourseName 06474: Natural Ventilation 06474 178 + gsdCourseName 06478: Informal Robotics 06478 179 + gsdCourseName 06479: Daylighting 06479 180 + gsdCourseName 07241: Practices of LA 07241 181 + gsdCourseName 07241: Practices of Landscape Architecture 07241 182 + gsdCourseName 07408: Frameworks of Contemporary Practice 07408 183 + gsdCourseName 07410: The Architect in History 07410 184 + gsdCourseName 09123: The Fourth Typology 09123 185 + gsdCourseName 09123: The Fourth Typology: Dominant Type + the Idea of the City 09123 186 + gsdCourseName 09127: Real Estate and City Making in China 09127 187 + gsdCourseName 09131: Cultivating Scale: Territorial Planting Strategies 09131 188 + gsdCourseName 09136: Teaching Creativity 09136 189 + gsdCourseName 09137: Mapping Cultural Space 09137 190 + gsdCourseName 09201: Independent Study Masters Degrees 09201 191 + gsdCourseName 09204: Preparation for Independent Thesis Proposal for MUP, MAUD, or MLAUD 09204 192 + gsdCourseName 09204: Thesis Prep for MUP, MAUD, or MLAUD 09204 193 + gsdCourseName 09301: Independent Thesis in Satisfaction of Degree MArch 09301 194 + gsdCourseName 09302: Independent Thesis in Satisfaction of the Degree MAUD, MLAUD, or MUP 09302 195 + gsdCourseName 09304: Independent Thesis for Mdes 09304 196 + gsdCourseName 09304: Independent Thesis for the Degree Master in Design Studies 09304 197 + gsdCourseName 09305: Master of Design Studies Final Project 09305 198 + gsdCourseName 09341: Preparation of Design Thesis Proposal for MLA 09341 199 + gsdCourseName 09341: Thesis Prep for MLA 09341 200 + gsdCourseName 09342: Independent Thesis 09342 201 + gsdCourseName 09342: Independent Thesis in 
Satisfaction of the Degree MLA 09342 202 + gsdCourseName 09503: Preparation of Doctoral Thesis Proposal 09503 203 + gsdCourseName 09504: Thesis in Satisfaction of the Degree Doctor of Design 09504 204 + gsdCourseName 09506: Thesis Extension in Satisfaction of Degree Doctor of Design 09506 205 + gsdCourseName 09601: MArch II Proseminar 09601 206 + gsdCourseName 09630: Urban Design Proseminar 09630 207 + gsdCourseName 09641: MLA Proseminar 09641 208 + gsdCourseName 09641: Proseminar in Landscape Architecture 09641 209 + gsdCourseName 09661: Proseminar in Urbanism, Landscape, Ecology 09661 210 + gsdCourseName 09663: Risk and Resilience Proseminar 09663 211 + gsdCourseName 09691: Doctoral Program Proseminar 09691 212 + gsdCourseName Other Other 213 \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/metadatablocks/customMRA.tsv b/postgresql/testdata/scripts/api/data/metadatablocks/customMRA.tsv new file mode 100644 index 0000000..ea91557 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/metadatablocks/customMRA.tsv @@ -0,0 +1,16 @@ +#metadataBlock name dataverseAlias displayName + customMRA MRA Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + mraCollection Murray Research Archive Collection Browse the Murray Research Archive collection with the following terms. text 0 FALSE TRUE TRUE TRUE FALSE FALSE customMRA +#controlledVocabulary DatasetField Value identifier displayOrder + mraCollection Diversity samples: Race, Ethnicity, Sexual Orientation, Religion MRA0 0 + mraCollection Early Head Start Research and Evaluation Project, 1996 - 2001 MRA1 1 + mraCollection Economic Theory and Demography MRA2 2 + mraCollection Education MRA3 3 + mraCollection Family. Marriage. 
Women MRA4 4 + mraCollection Health MRA5 5 + mraCollection Politics and Government MRA6 6 + mraCollection Replications, Extensions and Followups MRA7 7 + mraCollection Studies with Audio Data MRA8 8 + mraCollection Studies with Video Data MRA9 9 + mraCollection Work MRA10 10 \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/metadatablocks/customPSI.tsv b/postgresql/testdata/scripts/api/data/metadatablocks/customPSI.tsv new file mode 100644 index 0000000..b5103df --- /dev/null +++ b/postgresql/testdata/scripts/api/data/metadatablocks/customPSI.tsv @@ -0,0 +1,106 @@ +#metadataBlock name dataverseAlias displayName + customPSI PSI Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + psiBehavior Behavior Behavior text 0 TRUE TRUE TRUE TRUE FALSE FALSE customPSI + psiDonor Donor Donor text 1 TRUE TRUE TRUE TRUE FALSE FALSE customPSI + psiHealthArea Health Area Health Area text 2 TRUE TRUE TRUE TRUE FALSE FALSE customPSI + psiIntervention Intervention Intervention text 3 TRUE TRUE TRUE TRUE FALSE FALSE customPSI + psiPopulation Population Population text 4 TRUE TRUE TRUE TRUE FALSE FALSE customPSI + psiProductsServices Products/Services Products/Services text 5 TRUE TRUE TRUE TRUE FALSE FALSE customPSI + psiStudyDesignElement Study Design Element Study Design Element text 6 TRUE TRUE TRUE TRUE FALSE FALSE customPSI + psiStudyType Study Type Study Type text 7 TRUE TRUE TRUE TRUE FALSE FALSE customPSI +#controlledVocabulary DatasetField Value identifier displayOrder + psiBehavior Abstinence 0 + psiBehavior Birth spacing 1 + psiBehavior Cervical cancer screening 2 + psiBehavior Condom use 3 + psiBehavior FGM 4 + psiBehavior HIV risk behaviors 5 + psiBehavior HIV/STI testing 6 + psiBehavior LLIN use 7 + psiBehavior Male circumcision 8 + psiBehavior Modern contraceptive use 9 + psiBehavior 
ORS use 10 + psiBehavior Partner reduction 11 + psiBehavior Referral uptake 12 + psiBehavior Treatment adherence 13 + psiBehavior Water treatment 14 + psiDonor CDC 0 + psiDonor DFID 1 + psiDonor Dutch 2 + psiDonor Gates Foundation 3 + psiDonor Global Fund 4 + psiDonor KfW 5 + psiDonor LAD 6 + psiDonor Other 7 + psiDonor PEPFAR 8 + psiDonor UNFPA 9 + psiDonor USAID 10 + psiHealthArea Diarrhea 0 + psiHealthArea GBV 1 + psiHealthArea HIV 2 + psiHealthArea ICM 3 + psiHealthArea Malaria 4 + psiHealthArea NCDs 5 + psiHealthArea Nutrition 6 + psiHealthArea Pneumonia 7 + psiHealthArea Reproductive health 8 + psiHealthArea TB 9 + psiIntervention BCC 0 + psiIntervention IPC 1 + psiIntervention Medical detailing 2 + psiIntervention mHealth 3 + psiIntervention Provider training 4 + psiIntervention Social franchising 5 + psiPopulation Caregivers 0 + psiPopulation Couples 1 + psiPopulation FSW 2 + psiPopulation General population 3 + psiPopulation IDUs 4 + psiPopulation MARPs 5 + psiPopulation Men 6 + psiPopulation MSM 7 + psiPopulation PLHIV 8 + psiPopulation Providers 9 + psiPopulation Truck drivers 10 + psiPopulation Women 11 + psiPopulation WRA 12 + psiPopulation Youth 13 + psiProductsServices ACT 0 + psiProductsServices ANC 1 + psiProductsServices Antibiotics 2 + psiProductsServices ART 3 + psiProductsServices Clean delivery kit 4 + psiProductsServices Condoms 5 + psiProductsServices Household water treatment 6 + psiProductsServices HTC 7 + psiProductsServices LLIN 8 + psiProductsServices Long-term Methods 9 + psiProductsServices Medicated Abortion 10 + psiProductsServices Misoprostol 11 + psiProductsServices Multivitamin 12 + psiProductsServices Needle and syringe 13 + psiProductsServices Nevirapine 14 + psiProductsServices ORS 15 + psiProductsServices PMTCT 16 + psiProductsServices Short-term methods 17 + psiProductsServices STI kit 18 + psiProductsServices TB DOTS 19 + psiProductsServices VMC 20 + psiProductsServices Zinc 21 + psiStudyDesignElement CEM 0 + 
psiStudyDesignElement Client exit interview 1 + psiStudyDesignElement Control group 2 + psiStudyDesignElement Cross-sectional 3 + psiStudyDesignElement Focus group 4 + psiStudyDesignElement In-depth interview 5 + psiStudyDesignElement Longitudinal 6 + psiStudyDesignElement LQAS 7 + psiStudyDesignElement Mystery client 8 + psiStudyDesignElement Pretesting 9 + psiStudyDesignElement TRaC 10 + psiStudyType MAP 0 + psiStudyType Mixed Methods 1 + psiStudyType Qualitative 2 + psiStudyType Quantitative 3 + psiStudyType Retail audit 4 \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/metadatablocks/customPSRI.tsv b/postgresql/testdata/scripts/api/data/metadatablocks/customPSRI.tsv new file mode 100644 index 0000000..9493687 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/metadatablocks/customPSRI.tsv @@ -0,0 +1,38 @@ +#metadataBlock name dataverseAlias displayName + customPSRI Political Science Replication Initiative Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + PSRI1 Are the original data publicly available? Select from the list of options. text 0 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI + PSRI2 Is the original code available? Select from the list of options. text 1 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI + PSRI3 Where are the original data archived (name and url)? Answer if the data are publicly available. text 2 FALSE FALSE FALSE FALSE FALSE FALSE customPSRI + PSRI4 Where is the original code publicly archived (name and url)? Answer if the code is publicly available. text 3 FALSE FALSE FALSE FALSE FALSE FALSE customPSRI + PSRI5 Will you submit your replication code to this Dataverse (This is a PSRI requirement)? Select from the list of options. 
text 4 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI + PSRI6 Will you submit your replication write-up to this Dataverse (This is a PSRI requirement)? Select from the list of options. text 5 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI + PSRI7 Did you send the replication materials to the original author(s) and notify them that you'd be posting your replication on PSRI? Select from the list of options. text 6 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI + PSRI8 Was the replication done in a course? (If so, please continue to answer the subsequent questions, and if not, select N/A) Select from the list of options. text 7 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI + PSRI9 Did another student attempt to replicate the replication in the class? Select from the list of options. text 8 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI + PSRI10 Did another student replicate this replication successfully? Select from the list of options. text 9 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI + PSRI11 Did a professor read/review a draft before the final version? Select from the list of options. 
text 10 FALSE TRUE FALSE FALSE FALSE FALSE customPSRI +#controlledVocabulary DatasetField Value identifier displayOrder + PSRI1 No 0 + PSRI1 Yes 1 + PSRI2 No 0 + PSRI2 Yes 1 + PSRI2 NA 2 + PSRI5 No 0 + PSRI5 Yes 1 + PSRI6 No 0 + PSRI6 Yes 1 + PSRI7 No 0 + PSRI7 Yes 1 + PSRI8 No 0 + PSRI8 Yes 1 + PSRI8 NA 2 + PSRI9 NA 0 + PSRI9 No 1 + PSRI9 Yes 2 + PSRI10 NA 0 + PSRI10 No 1 + PSRI10 Yes 2 + PSRI11 NA 0 + PSRI11 No 1 + PSRI11 Yes 2 \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/metadatablocks/custom_hbgdki.tsv b/postgresql/testdata/scripts/api/data/metadatablocks/custom_hbgdki.tsv new file mode 100644 index 0000000..bbb098d --- /dev/null +++ b/postgresql/testdata/scripts/api/data/metadatablocks/custom_hbgdki.tsv @@ -0,0 +1,72 @@ +#metadataBlock name dataverseAlias displayName + custom_hbgdki HBGDki HBGDki Custom Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + hbgdkiStudyName Name of Study Name of the study. Limit to 20 characters. text 0 TRUE FALSE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiStudyRegistry Study Registry Which study registry was used? none 1 FALSE FALSE TRUE FALSE TRUE FALSE custom_hbgdki + hbgdkiStudyRegistryType ID Type Which study registry was used? text 2 TRUE TRUE FALSE FALSE TRUE FALSE hbgdkiStudyRegistry custom_hbgdki + hbgdkiStudyRegistryNumber ID Number ID number for the study per the registry. text 3 TRUE FALSE FALSE FALSE TRUE FALSE hbgdkiStudyRegistry custom_hbgdki + hbgdkiStudyType Type of study Type of study. text 4 TRUE TRUE TRUE TRUE TRUE FALSE custom_hbgdki + hbgdkiIntervention Intervention If an interventional study, describe the interventions. textbox 5 TRUE FALSE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiLowerLimitAge Lower limit of age at enrollment Lower limit of age at enrollment. 
int 6 TRUE FALSE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiUnitsLowerLimitAge Units for lower age limit Units for lower age limit. text 7 TRUE FALSE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiUpperLimitAge Upper limit of age at enrollment Upper limit of age at enrollment. int 8 TRUE FALSE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiUnitsUpperLimitAge Units for upper age limit Units for upper age limit. text 9 TRUE FALSE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiOther Other entry criteria Other entry criteria. textbox 10 TRUE FALSE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiBiosampleType Types of biosamples collected, if any Types of biosamples used (e.g., Blood, Stool,...). text 11 TRUE FALSE TRUE TRUE TRUE FALSE custom_hbgdki + hbgdkiGestationalAge Gestational age Gestational age text 12 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiAnthropometry Anthropometry Anthropometry text 13 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiBirthWeight Birth weight Birth weight text 14 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiNeurocognitiveDev Neurocognitive development Neurocognitive development text 15 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiMaternalChar Maternal characteristics Dataset parameters can include: age, height, weight, obstetric history. text 16 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiPregnancyBirth Pregnancy and birth Dataset parameters can include: Morbidity, nutrition, ANC, delivery method and setting. 
text 17 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiSocioeconomicChar Socioeconomic characteristics Socioeconomic characteristics text 18 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiFeedingCare Feeding care & practice Feeding care & practice text 19 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiImmunizations Immunizations Immunizations text 20 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiInfantChildhoodMorbidity Morbidity in infancy and childhood Morbidity in infancy and childhood text 21 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki + hbgdkiWaterSanHygiene Water, sanitation and hygiene standards Water, sanitation and hygiene standards text 22 TRUE TRUE FALSE FALSE TRUE FALSE custom_hbgdki +#controlledVocabulary DatasetField Value identifier displayOrder + hbgdkiStudyRegistryType International Clinical Trials Registry Platform (ICTRP) hbgdki_study_registry_type_ICTRP 0 + hbgdkiStudyRegistryType Australian New Zealand Clinical Trials Registry (ANZCTR) hbgdki_study_registry_type_ANZCTR 1 + hbgdkiStudyRegistryType Brazilian Clinical Trials Registry (ReBec) hbgdki_study_registry_type_ReBec 2 + hbgdkiStudyRegistryType Chinese Clinical Trial Registry (ChiCTR) hbgdki_study_registry_type_ChiCTR 3 + hbgdkiStudyRegistryType Clinical Research Information Service (CRiS), Republic of Korea hbgdki_study_registry_type_CRiS 4 + hbgdkiStudyRegistryType Clinical Trials Registry - India (CTRI) hbgdki_study_registry_type_CTRI 5 + hbgdkiStudyRegistryType Cuban Public Registry of Clinical Trials (RPCEC) hbgdki_study_registry_type_RPCEC 6 + hbgdkiStudyRegistryType EU Clinical Trials Register (EU-CTR) hbgdki_study_registry_type_EU-CTR 7 + hbgdkiStudyRegistryType German Clinical Trials Register (DRKS) hbgdki_study_registry_type_DRKS 8 + hbgdkiStudyRegistryType Iranian Registry of Clinical Trials (IRCT) hbgdki_study_registry_type_IRCT 9 + hbgdkiStudyRegistryType ISRCTN hbgdki_study_registry_type_ISRCTN 10 + hbgdkiStudyRegistryType Japan Primary 
Registries Network (JPRN) hbgdki_study_registry_type_JPRN 11 + hbgdkiStudyRegistryType Pan African Clinical Trial Registry (PACTR) hbgdki_study_registry_type_PACTR 12 + hbgdkiStudyRegistryType Sri Lanka Clinical Trials Registry (SLCTR) hbgdki_study_registry_type_SLCTR 13 + hbgdkiStudyRegistryType Thai Clinical Trials Registry (TCTR) hbgdki_study_registry_type_TCTR 14 + hbgdkiStudyRegistryType The Netherlands National Trial Register (NTR) hbgdki_study_registry_type_NTR 15 + hbgdkiStudyRegistryType US Clinical Trials Registry (clinicaltrials.gov) hbgdki_study_registry_type_USCTR 16 + hbgdkiStudyType Interventional hbgdki_interventional 0 + hbgdkiStudyType Observational hbgdki_observational 1 + hbgdkiStudyType Case Control hbgdki_case_control 2 + hbgdkiStudyType Meta-analysis hbgdki_meta_analysis 3 + hbgdkiStudyType Demographic & Health Survey hbgdki_demographic_health_survey 4 + hbgdkiStudyType Other Survey hbgdki_other_survey 5 + hbgdkiGestationalAge Yes hbgdki_gestation_y 0 + hbgdkiGestationalAge No hbgdki_gestation_n 1 + hbgdkiAnthropometry Yes hbgdki_anthropometry_y 0 + hbgdkiAnthropometry No hbgdki_anthropometry_n 1 + hbgdkiBirthWeight Yes hbgdki_birth_weight_y 0 + hbgdkiBirthWeight No hbgdki_birth_weight_n 1 + hbgdkiNeurocognitiveDev Yes hbgdki_neurocognitive_dev_y 0 + hbgdkiNeurocognitiveDev No hbgdki_neurocognitive_dev_n 1 + hbgdkiMaternalChar Yes hbgdki_maternal_char_y 0 + hbgdkiMaternalChar No hbgdki_maternal_char_ n 1 + hbgdkiPregnancyBirth Yes hbgdki_pregnancy_birth_y 0 + hbgdkiPregnancyBirth No hbgdki_pregnancy_birth_ n 1 + hbgdkiSocioeconomicChar Yes hbgdki_socioeconomic_char_y 0 + hbgdkiSocioeconomicChar No hbgdki_socioeconomic_char_n 1 + hbgdkiFeedingCare Yes hbgdki_feeding_care_y 0 + hbgdkiFeedingCare No hbgdki_feeding_care_n 1 + hbgdkiImmunizations Yes hbgdki_immunizations_y 0 + hbgdkiImmunizations No hbgdki_immunizations_n 1 + hbgdkiInfantChildhoodMorbidity Yes hbgdki_infant_childhood_morbidity_y 0 + hbgdkiInfantChildhoodMorbidity No 
hbgdki_infant_childhood_morbidity_n 1 + hbgdkiWaterSanHygiene Yes hbgdki_water_san_hygiene_y 0 + hbgdkiWaterSanHygiene No hbgdki_water_san_hygiene_n 1 \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/metadatablocks/geospatial.tsv b/postgresql/testdata/scripts/api/data/metadatablocks/geospatial.tsv new file mode 100644 index 0000000..7464d51 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/metadatablocks/geospatial.tsv @@ -0,0 +1,264 @@ +#metadataBlock name dataverseAlias displayName + geospatial Geospatial Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + geographicCoverage Geographic Coverage Information on the geographic coverage of the data. Includes the total geographic scope of the data. none 0 FALSE FALSE TRUE FALSE FALSE FALSE geospatial + country Country / Nation The country or nation that the Dataset is about. text 1 TRUE TRUE FALSE TRUE FALSE FALSE geographicCoverage geospatial + state State / Province The state or province that the Dataset is about. Use GeoNames for correct spelling and avoid abbreviations. text 2 TRUE FALSE FALSE TRUE FALSE FALSE geographicCoverage geospatial + city City The name of the city that the Dataset is about. Use GeoNames for correct spelling and avoid abbreviations. text 3 TRUE FALSE FALSE TRUE FALSE FALSE geographicCoverage geospatial + otherGeographicCoverage Other Other information on the geographic coverage of the data. text 4 FALSE FALSE FALSE FALSE FALSE FALSE geographicCoverage geospatial + geographicUnit Geographic Unit Lowest level of geographic aggregation covered by the Dataset, e.g., village, county, region. text 5 TRUE FALSE TRUE TRUE FALSE FALSE geospatial + geographicBoundingBox Geographic Bounding Box The fundamental geometric description for any Dataset that models geography is the geographic bounding box. 
It describes the minimum box, defined by west and east longitudes and north and south latitudes, which includes the largest geographic extent of the Dataset's geographic coverage. This element is used in the first pass of a coordinate-based search. Inclusion of this element in the codebook is recommended, but is required if the bound polygon box is included. none 6 FALSE FALSE TRUE FALSE FALSE FALSE geospatial + westLongitude West Longitude Westernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180,0 <= West Bounding Longitude Value <= 180,0. text 7 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial + eastLongitude East Longitude Easternmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180,0 <= East Bounding Longitude Value <= 180,0. text 8 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial + northLongitude North Latitude Northernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90,0 <= North Bounding Latitude Value <= 90,0. text 9 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial + southLongitude South Latitude Southernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90,0 <= South Bounding Latitude Value <= 90,0. 
text 10 FALSE FALSE FALSE FALSE FALSE FALSE geographicBoundingBox geospatial +#controlledVocabulary DatasetField Value identifier displayOrder + country Afghanistan 0 + country Albania 1 + country Algeria 2 + country American Samoa 3 + country Andorra 4 + country Angola 5 + country Anguilla 6 + country Antarctica 7 + country Antigua and Barbuda 8 + country Argentina 9 + country Armenia 10 + country Aruba 11 + country Australia 12 + country Austria 13 + country Azerbaijan 14 + country Bahamas 15 + country Bahrain 16 + country Bangladesh 17 + country Barbados 18 + country Belarus 19 + country Belgium 20 + country Belize 21 + country Benin 22 + country Bermuda 23 + country Bhutan 24 + country Bolivia, Plurinational State of 25 + country Bonaire, Sint Eustatius and Saba 26 + country Bosnia and Herzegovina 27 + country Botswana 28 BOTSWANA + country Bouvet Island 29 + country Brazil 30 Brasil + country British Indian Ocean Territory 31 + country Brunei Darussalam 32 + country Bulgaria 33 + country Burkina Faso 34 + country Burundi 35 + country Cambodia 36 + country Cameroon 37 + country Canada 38 + country Cape Verde 39 + country Cayman Islands 40 + country Central African Republic 41 + country Chad 42 + country Chile 43 + country China 44 + country Christmas Island 45 + country Cocos (Keeling) Islands 46 + country Colombia 47 + country Comoros 48 + country Congo 49 + country Congo, the Democratic Republic of the 50 + country Cook Islands 51 + country Costa Rica 52 + country Croatia 53 + country Cuba 54 + country Curaçao 55 + country Cyprus 56 + country Czech Republic 57 + country Côte d'Ivoire 58 + country Denmark 59 + country Djibouti 60 + country Dominica 61 + country Dominican Republic 62 + country Ecuador 63 + country Egypt 64 + country El Salvador 65 + country Equatorial Guinea 66 + country Eritrea 67 + country Estonia 68 + country Ethiopia 69 + country Falkland Islands (Malvinas) 70 + country Faroe Islands 71 + country Fiji 72 + country Finland 73 + country 
France 74 + country French Guiana 75 + country French Polynesia 76 + country French Southern Territories 77 + country Gabon 78 + country Gambia 79 Gambia, The + country Georgia 80 + country Germany 81 Germany (Federal Republic of) + country Ghana 82 GHANA + country Gibraltar 83 + country Greece 84 + country Greenland 85 + country Grenada 86 + country Guadeloupe 87 + country Guam 88 + country Guatemala 89 + country Guernsey 90 + country Guinea 91 + country Guinea-Bissau 92 + country Guyana 93 + country Haiti 94 + country Heard Island and Mcdonald Islands 95 + country Holy See (Vatican City State) 96 + country Honduras 97 + country Hong Kong 98 + country Hungary 99 + country Iceland 100 + country India 101 INDIA + country Indonesia 102 Sumatra + country Iran, Islamic Republic of 103 Iran Iran (Islamic Republic of) + country Iraq 104 IRAQ + country Ireland 105 + country Isle of Man 106 + country Israel 107 + country Italy 108 + country Jamaica 109 + country Japan 110 + country Jersey 111 + country Jordan 112 + country Kazakhstan 113 + country Kenya 114 + country Kiribati 115 + country Korea, Democratic People's Republic of 116 + country Korea, Republic of 117 + country Kuwait 118 + country Kyrgyzstan 119 + country Lao People's Democratic Republic 120 Laos + country Latvia 121 + country Lebanon 122 + country Lesotho 123 LESOTHO + country Liberia 124 + country Libya 125 + country Liechtenstein 126 + country Lithuania 127 + country Luxembourg 128 + country Macao 129 + country Macedonia, the Former Yugoslav Republic of 130 + country Madagascar 131 + country Malawi 132 + country Malaysia 133 + country Maldives 134 + country Mali 135 + country Malta 136 + country Marshall Islands 137 + country Martinique 138 + country Mauritania 139 + country Mauritius 140 + country Mayotte 141 + country Mexico 142 + country Micronesia, Federated States of 143 + country Moldova, Republic of 144 + country Monaco 145 + country Mongolia 146 + country Montenegro 147 + country Montserrat 148 + 
country Morocco 149 + country Mozambique 150 MOZAMBIQUE + country Myanmar 151 + country Namibia 152 NAMIBIA + country Nauru 153 + country Nepal 154 + country Netherlands 155 + country New Caledonia 156 + country New Zealand 157 + country Nicaragua 158 + country Niger 159 + country Nigeria 160 + country Niue 161 + country Norfolk Island 162 + country Northern Mariana Islands 163 + country Norway 164 + country Oman 165 + country Pakistan 166 + country Palau 167 + country Palestine, State of 168 + country Panama 169 + country Papua New Guinea 170 + country Paraguay 171 + country Peru 172 + country Philippines 173 + country Pitcairn 174 + country Poland 175 + country Portugal 176 + country Puerto Rico 177 + country Qatar 178 + country Romania 179 + country Russian Federation 180 + country Rwanda 181 + country Réunion 182 + country Saint Barthélemy 183 + country Saint Helena, Ascension and Tristan da Cunha 184 + country Saint Kitts and Nevis 185 + country Saint Lucia 186 + country Saint Martin (French part) 187 + country Saint Pierre and Miquelon 188 + country Saint Vincent and the Grenadines 189 + country Samoa 190 + country San Marino 191 + country Sao Tome and Principe 192 + country Saudi Arabia 193 + country Senegal 194 + country Serbia 195 + country Seychelles 196 + country Sierra Leone 197 + country Singapore 198 + country Sint Maarten (Dutch part) 199 + country Slovakia 200 + country Slovenia 201 + country Solomon Islands 202 + country Somalia 203 + country South Africa 204 + country South Georgia and the South Sandwich Islands 205 + country South Sudan 206 + country Spain 207 + country Sri Lanka 208 + country Sudan 209 + country Suriname 210 + country Svalbard and Jan Mayen 211 + country Swaziland 212 SWAZILAND + country Sweden 213 + country Switzerland 214 + country Syrian Arab Republic 215 + country Taiwan, Province of China 216 Taiwan + country Tajikistan 217 + country Tanzania, United Republic of 218 Tanzania + country Thailand 219 + country Timor-Leste 220 
+ country Togo 221 + country Tokelau 222 + country Tonga 223 + country Trinidad and Tobago 224 + country Tunisia 225 + country Turkey 226 + country Turkmenistan 227 + country Turks and Caicos Islands 228 + country Tuvalu 229 + country Uganda 230 + country Ukraine 231 + country United Arab Emirates 232 UAE + country United Kingdom 233 + country United States 234 U.S.A USA United States of America U.S.A. + country United States Minor Outlying Islands 235 + country Uruguay 236 + country Uzbekistan 237 + country Vanuatu 238 + country Venezuela, Bolivarian Republic of 239 + country Viet Nam 240 + country Virgin Islands, British 241 + country Virgin Islands, U.S. 242 + country Wallis and Futuna 243 + country Western Sahara 244 + country Yemen 245 YEMEN + country Zambia 246 + country Zimbabwe 247 + country Åland Islands 248 \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/metadatablocks/journals.tsv b/postgresql/testdata/scripts/api/data/metadatablocks/journals.tsv new file mode 100644 index 0000000..097f029 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/metadatablocks/journals.tsv @@ -0,0 +1,41 @@ +#metadataBlock name dataverseAlias displayName + journal Journal Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + journalVolumeIssue Journal Indicates the volume, issue and date of a journal, which this Dataset is associated with. none 0 FALSE FALSE TRUE FALSE FALSE FALSE journal + journalVolume Volume The journal volume which this Dataset is associated with (e.g., Volume 4). text 1 TRUE FALSE FALSE TRUE FALSE FALSE journalVolumeIssue journal + journalIssue Issue The journal issue number which this Dataset is associated with (e.g., Number 2, Autumn). 
text 2 TRUE FALSE FALSE TRUE FALSE FALSE journalVolumeIssue journal + journalPubDate Publication Date The publication date for this journal volume/issue, which this Dataset is associated with (e.g., 1999). YYYY or YYYY-MM or YYYY-MM-DD date 3 TRUE FALSE FALSE TRUE FALSE FALSE journalVolumeIssue journal + journalArticleType Type of Article Indicates what kind of article this is, for example, a research article, a commentary, a book or product review, a case report, a calendar, etc (based on JATS). text 4 TRUE TRUE FALSE TRUE FALSE FALSE journal +#controlledVocabulary DatasetField Value identifier displayOrder + journalArticleType abstract 0 + journalArticleType addendum 1 + journalArticleType announcement 2 + journalArticleType article-commentary 3 + journalArticleType book review 4 + journalArticleType books received 5 + journalArticleType brief report 6 + journalArticleType calendar 7 + journalArticleType case report 8 + journalArticleType collection 9 + journalArticleType correction 10 + journalArticleType data paper 11 + journalArticleType discussion 12 + journalArticleType dissertation 13 + journalArticleType editorial 14 + journalArticleType in brief 15 + journalArticleType introduction 16 + journalArticleType letter 17 + journalArticleType meeting report 18 + journalArticleType news 19 + journalArticleType obituary 20 + journalArticleType oration 21 + journalArticleType partial retraction 22 + journalArticleType product review 23 + journalArticleType rapid communication 24 + journalArticleType reply 25 + journalArticleType reprint 26 + journalArticleType research article 27 + journalArticleType retraction 28 + journalArticleType review article 29 + journalArticleType translation 30 + journalArticleType other 31 \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/metadatablocks/social_science.tsv b/postgresql/testdata/scripts/api/data/metadatablocks/social_science.tsv new file mode 100644 index 0000000..7ef714c --- /dev/null +++ 
b/postgresql/testdata/scripts/api/data/metadatablocks/social_science.tsv @@ -0,0 +1,29 @@ +#metadataBlock name dataverseAlias displayName + socialscience Social Science and Humanities Metadata +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id + unitOfAnalysis Unit of Analysis Basic unit of analysis or observation that this Dataset describes, such as individuals, families/households, groups, institutions/organizations, administrative units, and more. For information about the DDI's controlled vocabulary for this element, please refer to the DDI web page at http://www.ddialliance.org/controlled-vocabularies. textbox 0 TRUE FALSE TRUE TRUE FALSE FALSE socialscience + universe Universe Description of the population covered by the data in the file; the group of people or other elements that are the object of the study and to which the study results refer. Age, nationality, and residence commonly help to delineate a given universe, but any number of other factors may be used, such as age limits, sex, marital status, race, ethnic group, nationality, income, veteran status, criminal convictions, and more. The universe may consist of elements other than persons, such as housing units, court cases, deaths, countries, and so on. In general, it should be possible to tell from the description of the universe whether a given individual or element is a member of the population under study. Also known as the universe of interest, population of interest, and target population. textbox 1 TRUE FALSE TRUE TRUE FALSE FALSE socialscience + timeMethod Time Method The time method or time dimension of the data collection, such as panel, cross-sectional, trend, time- series, or other. 
text 2 TRUE FALSE FALSE TRUE FALSE FALSE socialscience + dataCollector Data Collector Individual, agency or organization responsible for administering the questionnaire or interview or compiling the data. FamilyName, GivenName or Organization text 3 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + collectorTraining Collector Training Type of training provided to the data collector text 4 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + frequencyOfDataCollection Frequency If the data collected includes more than one point in time, indicate the frequency with which the data was collected; that is, monthly, quarterly, or other. text 5 TRUE FALSE FALSE TRUE FALSE FALSE socialscience + samplingProcedure Sampling Procedure Type of sample and sample design used to select the survey respondents to represent the population. May include reference to the target sample size and the sampling fraction. textbox 6 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + targetSampleSize Target Sample Size Specific information regarding the target sample size, actual sample size, and the formula used to determine this. none 7 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + targetSampleActualSize Actual Actual sample size. Enter an integer... int 8 FALSE FALSE FALSE FALSE FALSE FALSE targetSampleSize socialscience + targetSampleSizeFormula Formula Formula used to determine target sample size. text 9 FALSE FALSE FALSE FALSE FALSE FALSE targetSampleSize socialscience + deviationsFromSampleDesign Major Deviations for Sample Design Show correspondence as well as discrepancies between the sampled units (obtained) and available statistics for the population (age, sex-ratio, marital status, etc.) as a whole. text 10 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + collectionMode Collection Mode Method used to collect the data; instrumentation characteristics (e.g., telephone interview, mail questionnaire, or other). 
textbox 11 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + researchInstrument Type of Research Instrument Type of data collection instrument used. Structured indicates an instrument in which all respondents are asked the same questions/tests, possibly with precoded answers. If a small portion of such a questionnaire includes open-ended questions, provide appropriate comments. Semi-structured indicates that the research instrument contains mainly open-ended questions. Unstructured indicates that in-depth interviews were conducted. text 12 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + dataCollectionSituation Characteristics of Data Collection Situation Description of noteworthy aspects of the data collection situation. Includes information on factors such as cooperativeness of respondents, duration of interviews, number of call backs, or similar. textbox 13 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + actionsToMinimizeLoss Actions to Minimize Losses Summary of actions taken to minimize data loss. Include information on actions such as follow-up visits, supervisory checks, historical matching, estimation, and so on. text 14 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + controlOperations Control Operations Control OperationsMethods to facilitate data control performed by the primary investigator or by the data archive. text 15 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + weighting Weighting The use of sampling procedures might make it necessary to apply weights to produce accurate statistical results. Describes the criteria for using weights in analysis of a collection. If a weighting formula or coefficient was developed, the formula is provided, its elements are defined, and it is indicated how the formula was applied to the data. textbox 16 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + cleaningOperations Cleaning Operations Methods used to clean the data collection, such as consistency checking, wildcode checking, or other. 
text 17 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + datasetLevelErrorNotes Study Level Error Notes Note element used for any information annotating or clarifying the methodology and processing of the study. text 18 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + responseRate Response Rate Percentage of sample members who provided information. textbox 19 TRUE FALSE FALSE TRUE FALSE FALSE socialscience + samplingErrorEstimates Estimates of Sampling Error Measure of how precisely one can estimate a population value from a given sample. text 20 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + otherDataAppraisal Other Forms of Data Appraisal Other issues pertaining to the data appraisal. Describe issues such as response variance, nonresponse rate and testing for bias, interviewer and response bias, confidence levels, question bias, or similar. text 21 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + socialScienceNotes Notes General notes about this Dataset. none 22 FALSE FALSE FALSE FALSE FALSE FALSE socialscience + socialScienceNotesType Type Type of note. text 23 FALSE FALSE FALSE FALSE FALSE FALSE socialScienceNotes socialscience + socialScienceNotesSubject Subject Note subject. text 24 FALSE FALSE FALSE FALSE FALSE FALSE socialScienceNotes socialscience + socialScienceNotesText Text Text for this note. 
textbox 25 FALSE FALSE FALSE FALSE FALSE FALSE socialScienceNotes socialscience diff --git a/postgresql/testdata/scripts/api/data/role-admin.json b/postgresql/testdata/scripts/api/data/role-admin.json new file mode 100644 index 0000000..6d13474 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/role-admin.json @@ -0,0 +1,8 @@ +{ + "alias":"admin", + "name":"Admin", + "description":"A person who has all permissions for dataverses, datasets, and files.", + "permissions":[ + "ALL" + ] +} diff --git a/postgresql/testdata/scripts/api/data/role-assign-eg1-curator.json b/postgresql/testdata/scripts/api/data/role-assign-eg1-curator.json new file mode 100644 index 0000000..c375704 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/role-assign-eg1-curator.json @@ -0,0 +1,4 @@ +{ + "assignee": "&explicit/1-EG-1", + "role": "curator" +} diff --git a/postgresql/testdata/scripts/api/data/role-assign-localhost-curator.json b/postgresql/testdata/scripts/api/data/role-assign-localhost-curator.json new file mode 100644 index 0000000..55e2642 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/role-assign-localhost-curator.json @@ -0,0 +1,4 @@ +{ + "assignee": "&ip/localhost", + "role": "curator" +} diff --git a/postgresql/testdata/scripts/api/data/role-assign.json b/postgresql/testdata/scripts/api/data/role-assign.json new file mode 100644 index 0000000..b1a08bb --- /dev/null +++ b/postgresql/testdata/scripts/api/data/role-assign.json @@ -0,0 +1,4 @@ +{ + "assignee": "@gabbi", + "role": "curator" +} diff --git a/postgresql/testdata/scripts/api/data/role-assignee-list.json b/postgresql/testdata/scripts/api/data/role-assignee-list.json new file mode 100644 index 0000000..71c58af --- /dev/null +++ b/postgresql/testdata/scripts/api/data/role-assignee-list.json @@ -0,0 +1 @@ +["@admin",":guest","&ip/ipGroup1",":authenticated-users"] diff --git a/postgresql/testdata/scripts/api/data/role-contrib.json b/postgresql/testdata/scripts/api/data/role-contrib.json new file mode 
100644 index 0000000..40cde38 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/role-contrib.json @@ -0,0 +1,11 @@ +{ + "alias": "contrib", + "name": "Dataverse Contributor", + "description": "Someone that can add data to a dataverse, but not remove it.", + "permissions": [ + "Access", + "AccessRestrictedMetadata", + "UndoableEdit", + "EditMetadata" + ] +} diff --git a/postgresql/testdata/scripts/api/data/role-creator.json b/postgresql/testdata/scripts/api/data/role-creator.json new file mode 100644 index 0000000..947291d --- /dev/null +++ b/postgresql/testdata/scripts/api/data/role-creator.json @@ -0,0 +1,9 @@ +{ + "alias": "creator", + "name": "Creator", + "description": "Allows creation of DataSet/Verse", + "permissions": [ + "CreateDataverse", + "CreateDataset" + ] +} diff --git a/postgresql/testdata/scripts/api/data/role-curator.json b/postgresql/testdata/scripts/api/data/role-curator.json new file mode 100644 index 0000000..2de5b2a --- /dev/null +++ b/postgresql/testdata/scripts/api/data/role-curator.json @@ -0,0 +1,16 @@ +{ + "alias":"curator", + "name":"Curator", + "description":"For datasets, a person who can edit License + Terms, edit Permissions, and publish datasets.", + "permissions":[ + "ViewUnpublishedDataset", + "EditDataset", + "DownloadFile", + "DeleteDatasetDraft", + "PublishDataset", + "ManageDatasetPermissions", + "AddDataverse", + "AddDataset", + "ViewUnpublishedDataverse" + ] +} diff --git a/postgresql/testdata/scripts/api/data/role-dsContributor.json b/postgresql/testdata/scripts/api/data/role-dsContributor.json new file mode 100644 index 0000000..3cd854c --- /dev/null +++ b/postgresql/testdata/scripts/api/data/role-dsContributor.json @@ -0,0 +1,8 @@ +{ + "alias": "dsContributor", + "name": "Dataset Creator", + "description": "A person who can add datasets within a dataverse.", + "permissions": [ + "AddDataset" + ] +} diff --git a/postgresql/testdata/scripts/api/data/role-dvContributor.json 
b/postgresql/testdata/scripts/api/data/role-dvContributor.json new file mode 100644 index 0000000..b0e264f --- /dev/null +++ b/postgresql/testdata/scripts/api/data/role-dvContributor.json @@ -0,0 +1,8 @@ +{ + "alias": "dvContributor", + "name": "Dataverse Creator", + "description": "A person who can add subdataverses within a dataverse.", + "permissions": [ + "AddDataverse" + ] +} diff --git a/postgresql/testdata/scripts/api/data/role-editor.json b/postgresql/testdata/scripts/api/data/role-editor.json new file mode 100644 index 0000000..98c08ac --- /dev/null +++ b/postgresql/testdata/scripts/api/data/role-editor.json @@ -0,0 +1,11 @@ +{ + "alias":"editor", + "name":"Contributor", + "description":"For datasets, a person who can edit License + Terms, and then submit them for review.", + "permissions":[ + "ViewUnpublishedDataset", + "EditDataset", + "DownloadFile", + "DeleteDatasetDraft" + ] +} diff --git a/postgresql/testdata/scripts/api/data/role-filedownloader.json b/postgresql/testdata/scripts/api/data/role-filedownloader.json new file mode 100644 index 0000000..3fb046f --- /dev/null +++ b/postgresql/testdata/scripts/api/data/role-filedownloader.json @@ -0,0 +1,8 @@ +{ + "alias":"fileDownloader", + "name":"File Downloader", + "description":"A person who can download a published file.", + "permissions":[ + "DownloadFile" + ] +} diff --git a/postgresql/testdata/scripts/api/data/role-fullContributor.json b/postgresql/testdata/scripts/api/data/role-fullContributor.json new file mode 100644 index 0000000..c18fcc7 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/role-fullContributor.json @@ -0,0 +1,9 @@ +{ + "alias": "fullContributor", + "name": "Dataverse + Dataset Creator", + "description": "A person who can add subdataverses and datasets within a dataverse.", + "permissions": [ + "AddDataverse", + "AddDataset" + ] +} diff --git a/postgresql/testdata/scripts/api/data/role-guest.json b/postgresql/testdata/scripts/api/data/role-guest.json new file mode 100644 
index 0000000..ef5d236 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/role-guest.json @@ -0,0 +1,8 @@ +{ + "alias": "guest-role", + "name": "What guests can do", + "description": "Guests can browse", + "permissions": [ + "Discover" + ] +} diff --git a/postgresql/testdata/scripts/api/data/role-manager.json b/postgresql/testdata/scripts/api/data/role-manager.json new file mode 100644 index 0000000..1105cbf --- /dev/null +++ b/postgresql/testdata/scripts/api/data/role-manager.json @@ -0,0 +1,11 @@ +{ + "alias":"manager", + "name":"Curator", + "description":"For datasets, a person who can add a dataset, edit License + Terms, and submit datasets for review.", + "permissions":[ + "ViewUnpublishedDataset", + "EditDataset", + "DownloadFile", + "DeleteDatasetDraft" + ] +} diff --git a/postgresql/testdata/scripts/api/data/role-member.json b/postgresql/testdata/scripts/api/data/role-member.json new file mode 100644 index 0000000..d1e1456 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/role-member.json @@ -0,0 +1,10 @@ +{ + "alias":"member", + "name":"Member", + "description":"A person who can view both unpublished dataverses and datasets.", + "permissions":[ + "ViewUnpublishedDataset", + "ViewUnpublishedDataverse", + "DownloadFile" + ] +} diff --git a/postgresql/testdata/scripts/api/data/shibGroupHarvard.json b/postgresql/testdata/scripts/api/data/shibGroupHarvard.json new file mode 100644 index 0000000..d90cde6 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/shibGroupHarvard.json @@ -0,0 +1,5 @@ +{ + "name": "All Harvard PIN/Shibboleth Users", + "attribute": "Shib-Identity-Provider", + "pattern": "https://fed.huit.harvard.edu/idp/shibboleth" +} diff --git a/postgresql/testdata/scripts/api/data/shibGroupMit.json b/postgresql/testdata/scripts/api/data/shibGroupMit.json new file mode 100644 index 0000000..e61e81e --- /dev/null +++ b/postgresql/testdata/scripts/api/data/shibGroupMit.json @@ -0,0 +1,5 @@ +{ + "name": "All MIT Shibboleth Users", + 
"attribute": "Shib-Identity-Provider", + "pattern": "urn:mace:incommon:mit.edu" +} diff --git a/postgresql/testdata/scripts/api/data/shibGroupTestShib.json b/postgresql/testdata/scripts/api/data/shibGroupTestShib.json new file mode 100644 index 0000000..01b2bd5 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/shibGroupTestShib.json @@ -0,0 +1,5 @@ +{ + "name": "All testshib.org Shibboleth Users", + "attribute": "Shib-Identity-Provider", + "pattern": "https://idp.testshib.org/idp/shibboleth" +} diff --git a/postgresql/testdata/scripts/api/data/tsv/tsv2json b/postgresql/testdata/scripts/api/data/tsv/tsv2json new file mode 100755 index 0000000..38a6ded --- /dev/null +++ b/postgresql/testdata/scripts/api/data/tsv/tsv2json @@ -0,0 +1,38 @@ +#!/usr/bin/env python +import sys +from optparse import OptionParser +import csv +try: + import json +except ImportError: + import simplejson as json + +parser = OptionParser() +options, args = parser.parse_args() + +if args: + csv_file = open(args[0]) +else: + csv_file = sys.stdin + +reader = csv.DictReader(csv_file, delimiter="\t") +rows = [row for row in reader] +for row in rows: + row["permissionRoot"] = "false" + parent = row["parent"] + parts = parent.split("/") + if parts[1]: + target = parts[-1] + else: + target = "root" + del row["parent"] + creator = row["creator"] + del row["creator"] + # FIXME: don't simply strip out single quotes + row["description"] = row["description"].replace("'", "") + jsondata = "%s%s%s" % ("'",json.dumps(row),"'") + start = 'curl -H "Content-type:application/json" -X POST "http://localhost:8080/api/dataverses/' + print "echo creating dataverse ", row["alias"] + print "%s%s%s%s%s%s" % (start, target, "?key=",creator, "\" -d ", jsondata) + print +csv_file.close() diff --git a/postgresql/testdata/scripts/api/data/user-admin.json b/postgresql/testdata/scripts/api/data/user-admin.json new file mode 100644 index 0000000..938f34d --- /dev/null +++ 
b/postgresql/testdata/scripts/api/data/user-admin.json @@ -0,0 +1,8 @@ +{ + "firstName":"Dataverse", + "lastName":"Admin", + "userName":"dataverseAdmin", + "affiliation":"Dataverse.org", + "position":"Admin", + "email":"dataverse@mailinator.com" +} diff --git a/postgresql/testdata/scripts/api/data/userCathy.json b/postgresql/testdata/scripts/api/data/userCathy.json new file mode 100644 index 0000000..b75fa8b --- /dev/null +++ b/postgresql/testdata/scripts/api/data/userCathy.json @@ -0,0 +1,9 @@ +{ + "firstName":"Cathy", + "lastName":"Collaborator", + "userName":"cathy", + "affiliation":"mid", + "position":"Data Scientist", + "email":"cathy@malinator.com", + "phone":"(888) 888-8888" +} diff --git a/postgresql/testdata/scripts/api/data/userGabbi.json b/postgresql/testdata/scripts/api/data/userGabbi.json new file mode 100644 index 0000000..2fc8b73 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/userGabbi.json @@ -0,0 +1,9 @@ +{ + "firstName":"Gabbi", + "lastName":"Guest", + "userName":"gabbi", + "affiliation":"low", + "position":"A Guest", + "email":"gabbi@malinator.com", + "phone":"(888) 888-8888" +} \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/userNick.json b/postgresql/testdata/scripts/api/data/userNick.json new file mode 100644 index 0000000..b4b796d --- /dev/null +++ b/postgresql/testdata/scripts/api/data/userNick.json @@ -0,0 +1,9 @@ +{ + "firstName":"Nick", + "lastName":"NSA", + "userName":"nick", + "affiliation":"gov", + "position":"Signals Intelligence", + "email":"nick@malinator.com", + "phone":"(888) 888-8888" +} diff --git a/postgresql/testdata/scripts/api/data/userPete.json b/postgresql/testdata/scripts/api/data/userPete.json new file mode 100644 index 0000000..4958c99 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/userPete.json @@ -0,0 +1,9 @@ +{ + "firstName":"Pete", + "lastName":"Privileged", + "userName":"pete", + "affiliation":"Top", + "position":"The Boss", + "email":"pete@malinator.com", + 
"phone":"(888) 888-8888" +} \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/userUma.json b/postgresql/testdata/scripts/api/data/userUma.json new file mode 100644 index 0000000..fb67daf --- /dev/null +++ b/postgresql/testdata/scripts/api/data/userUma.json @@ -0,0 +1,9 @@ +{ + "firstName":"Uma", + "lastName":"Underprivileged", + "userName":"uma", + "affiliation":"mid", + "position":"The Intern", + "email":"Uma@malinator.com", + "phone":"(888) 888-8888" +} \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/data/workflows/internal-httpSR-workflow.json b/postgresql/testdata/scripts/api/data/workflows/internal-httpSR-workflow.json new file mode 100644 index 0000000..8c233d7 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/workflows/internal-httpSR-workflow.json @@ -0,0 +1,32 @@ +{ + "name": "dump to local RSAL", + "steps": [ + { + "provider":":internal", + "stepType":"log", + "parameters": { + "message": "Pre-http request" + } + }, + { + "provider":":internal", + "stepType":"http/sr", + "parameters": { + "url":"http://localhost:5050/dump/${invocationId}", + "method":"POST", + "contentType":"text/plain", + "body":"${invocationId}\ndataset.id=${dataset.id} /\ndataset.identifier=${dataset.identifier} /dataset.globalId=${dataset.globalId} /\ndataset.displayName=${dataset.displayName} /\ndataset.citation=${dataset.citation} /\nminorVersion=${minorVersion} /\nmajorVersion=${majorVersion} /\nreleaseCompleted=${releaseStatus} /", + "expectedResponse":"OK.*", + "rollbackUrl":"http://localhost:5050/dump/${invocationId}", + "rollbackMethod":"DELETE" + } + }, + { + "provider":":internal", + "stepType":"log", + "parameters": { + "message": "Post-http request" + } + } + ] +} diff --git a/postgresql/testdata/scripts/api/data/workflows/internal-no-pause-long-workflow.json b/postgresql/testdata/scripts/api/data/workflows/internal-no-pause-long-workflow.json new file mode 100644 index 0000000..6675d14 --- /dev/null +++ 
b/postgresql/testdata/scripts/api/data/workflows/internal-no-pause-long-workflow.json @@ -0,0 +1,58 @@ +{ + "name": "no pauses, long", + "steps": [ + { + "provider":":internal", + "stepType":"log", + "parameters": { + "memo":"first step" + } + }, + { + "provider":":internal", + "stepType":"log" + }, + + { + "provider":":internal", + "stepType":"log" + }, + + { + "provider":":internal", + "stepType":"log" + }, + + { + "provider":":internal", + "stepType":"log" + }, + + { + "provider":":internal", + "stepType":"log" + }, + + { + "provider":":internal", + "stepType":"log" + }, + + { + "provider":":internal", + "stepType":"log" + }, + + { + "provider":":internal", + "stepType":"log" + }, + { + "provider":":internal", + "stepType":"log", + "parameters" : { + "memo":"Last step" + } + } + ] +} diff --git a/postgresql/testdata/scripts/api/data/workflows/internal-no-pause-workflow.json b/postgresql/testdata/scripts/api/data/workflows/internal-no-pause-workflow.json new file mode 100644 index 0000000..df44527 --- /dev/null +++ b/postgresql/testdata/scripts/api/data/workflows/internal-no-pause-workflow.json @@ -0,0 +1,21 @@ +{ + "name": "Internal steps only, no with pause", + "steps": [ + { + "provider":":internal", + "stepType":"log", + "parameters": { + "step":1, + "stepName":"first step" + } + }, + { + "provider":":internal", + "stepType":"log", + "parameters": { + "number":42, + "anotherMessage": "This is the last step before releasing." 
+ } + } + ] +} diff --git a/postgresql/testdata/scripts/api/data/workflows/internal-pause-workflow.json b/postgresql/testdata/scripts/api/data/workflows/internal-pause-workflow.json new file mode 100644 index 0000000..c8a5d7b --- /dev/null +++ b/postgresql/testdata/scripts/api/data/workflows/internal-pause-workflow.json @@ -0,0 +1,28 @@ +{ + "name": "Internal steps with pause", + "steps": [ + { + "provider":":internal", + "stepType":"log", + "parameters": { + "step":1, + "stepName":"first step" + } + }, + { + "provider":":internal", + "stepType":"pause", + "parameters": { + "paramName":"parameter value with a longer name." + } + }, + { + "provider":":internal", + "stepType":"log", + "parameters": { + "number":42, + "anotherMessage": "This is the last step before releasing." + } + } + ] +} diff --git a/postgresql/testdata/scripts/api/download/.gitignore b/postgresql/testdata/scripts/api/download/.gitignore new file mode 100644 index 0000000..9b815cf --- /dev/null +++ b/postgresql/testdata/scripts/api/download/.gitignore @@ -0,0 +1,3 @@ +files.tsv +files +downloaded-files diff --git a/postgresql/testdata/scripts/api/download/dbquery b/postgresql/testdata/scripts/api/download/dbquery new file mode 100755 index 0000000..64229bc --- /dev/null +++ b/postgresql/testdata/scripts/api/download/dbquery @@ -0,0 +1,13 @@ +#!/bin/sh +if [ -z "$1" ]; then + echo "No start date in YYYY-MM-DD format provided." + exit 1 +else + if [ -z "$2" ]; then + echo "No end date in YYYY-MM-DD format provided." 
+ exit 1 + fi + START_DATE=$1 + END_DATE=$2 +fi +psql -h $DB_SERVER -U $DB_USER -p $DB_PORT $DB_NAME -F $'\t' --no-align --pset footer -c "select dvobject.id, dvobject.createdate, dvobject.owner_id, datafile.* from dvobject, datafile where dvobject.id=datafile.id and dvobject.dtype='DataFile' and dvobject.createdate>to_date('$START_DATE','YYYY-MM-DD') and dvobject.createdate files.tsv diff --git a/postgresql/testdata/scripts/api/download/download b/postgresql/testdata/scripts/api/download/download new file mode 100755 index 0000000..cb5ca24 --- /dev/null +++ b/postgresql/testdata/scripts/api/download/download @@ -0,0 +1,10 @@ +#!/bin/sh +DOWNLOAD_DIR=downloaded-files +rm -rf $DOWNLOAD_DIR +mkdir -p $DOWNLOAD_DIR && \ +cat files | while read i; do + echo "Downloding file id $i..." + cd $DOWNLOAD_DIR && mkdir $i && cd $i && \ + curl -s -k -O -J https://$DATAVERSE_SERVER/api/access/datafile/$i?key=$API_TOKEN && \ + cd ../.. +done diff --git a/postgresql/testdata/scripts/api/download/tsv2files b/postgresql/testdata/scripts/api/download/tsv2files new file mode 100755 index 0000000..dde1bf6 --- /dev/null +++ b/postgresql/testdata/scripts/api/download/tsv2files @@ -0,0 +1,2 @@ +#!/bin/sh +cut -f1 files.tsv | tail -n +2 > files diff --git a/postgresql/testdata/scripts/api/post-install-api-block.sh b/postgresql/testdata/scripts/api/post-install-api-block.sh new file mode 100755 index 0000000..4cc0ac7 --- /dev/null +++ b/postgresql/testdata/scripts/api/post-install-api-block.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +# This script can be run on a system that was set up with unrestricted access to +# the sensitive API endpoints, in order to block it for the general public. 
+ +# First, revoke the authentication token from the built-in user: +curl -X DELETE $SERVER/admin/settings/BuiltinUsers.KEY + +# Block the sensitive endpoints: +# Relevant settings: +# - :BlockedApiPolicy - one of allow, drop, localhost-only, unblock-key +# - :BlockedApiKey - when using the unblock-key policy, pass this key in the unblock-key query param to allow the call to a blocked endpoint +# - :BlockedApiEndpoints - comma separated list of blocked api endpoints + +# This leaves /api/admin and /api/test blocked to all connections except from those +# coming from localhost: +curl -X PUT -d localhost-only http://localhost:8080/api/admin/settings/:BlockedApiPolicy +curl -X PUT -d admin,test http://localhost:8080/api/admin/settings/:BlockedApiEndpoints + +# In some situations, you may prefer an alternative solution - to block ALL connections to +# these endpoints completely; but allow connections authenticated with the defined +# "unblock key" (password): + +#curl -X PUT -d YOURSUPERSECRETUNBLOCKKEY http://localhost:8080/api/admin/settings/:BlockedApiKey +#curl -X PUT -d unblock-key http://localhost:8080/api/admin/settings/:BlockedApiPolicy + + diff --git a/postgresql/testdata/scripts/api/py_api_wrapper/api_fun.py b/postgresql/testdata/scripts/api/py_api_wrapper/api_fun.py new file mode 100644 index 0000000..e0ee07f --- /dev/null +++ b/postgresql/testdata/scripts/api/py_api_wrapper/api_fun.py @@ -0,0 +1,153 @@ +import os, sys +import time +import json +from dataverse_api_link import DataverseAPILink + +def msg(s): print s +def dashes(char='-'): msg(40*char) +def msgt(s): dashes(); msg(s); dashes() +def msgx(s): dashes('\/'); msg(s); dashes('\/'); sys.exit(0) + +def get_dataverse_link_object(apikey='pete'): + server_with_api = 'http://localhost:8080/' + #server_with_api = 'https://dvn-build.hmdc.harvard.edu' + return DataverseAPILink(server_with_api, use_https=False, apikey=apikey) + +def check_dv(): + dat = get_dataverse_link_object() + 
dat.save_current_metadata('local-data') + + #add_and_publish_dataverses('local-data/dataverses_2014-0612_11.json','pete') + add_and_publish_dataverses('demo-data/dataverses_2014-0609_16.json','pete') + +def load_users_from_api_file(fname): + """ + Given the JSON results of the list users command (/api/builtin-users): + (a) Iterate through the list + (b) Check if a user exists (by id) + (c) If the user is not found, create the user + + :param fname: full path to a file with user info in JSON format + """ + if not os.path.isfile(fname): + msgx('File not found: %s' % fname) + + # Load the JSON file + user_dict = json.loads(open(fname,'r').read()) + + # Get a the DataverseAPILink object + dv_lnk_obj = get_dataverse_link_object('pete') + dv_lnk_obj.set_return_mode_python() + + # Iterate through json + for user_info in user_dict.get('data', []): + # check if user exists via api + current_user_info = dv_lnk_obj.get_user_data(user_info.get('id', None)) + if current_user_info and current_user_info.get('status') == 'OK': + continue # The user exist, loop to the next user + + user_info.pop('id') # Use the param, except for the 'id' + + # Create the user, passing user params and a password + # + + new_password = user_info.get('userName') + dv_lnk_obj.create_user(user_info, new_password) + +def add_and_publish_dataverses(fname, apikey): + if not os.path.isfile(fname): + msgx('File not found: %s' % fname) + + # Load the JSON file + dv_dict = json.loads(open(fname,'r').read()) + + # Get a the DataverseAPILink object + dv_lnk_obj = get_dataverse_link_object(apikey) + dv_lnk_obj.set_return_mode_python() + + # Iterate through json + previous_alias = "root" + for dv_info in dv_dict.get('data', []): + # check if user exists via api + current_dv_info = dv_lnk_obj.get_dataverse_by_id_or_alias(dv_info.get('id', None)) + + # DV exists, continue loop + if current_dv_info and current_dv_info.get('status') == 'OK': + msg('>>> FOUND IT') + previous_alias = current_dv_info['data']['alias'] + 
continue # The user exist, loop to the next user + + # No DV, create it + keys_not_needed = ['id', 'ownerID', 'creationDate', 'creator'] + for key in keys_not_needed: + if dv_info.has_key(key): + dv_info.pop(key) + + msg('params to send: %s' % dv_info) + # If created, publish it + json_resp = dv_lnk_obj.create_dataverse(previous_alias, dv_info) + if json_resp.get('status') == 'OK': + new_dv_data = json_resp.get('data', {}) + new_id = new_dv_data.get('id', None) + if new_id is not None: + dv_lnk_obj.publish_dataverse(new_id) + previous_alias = current_dv_info.get("alias", "root") + #break + +def add_dataverses(name, cnt=1, parent_dv_name_or_id=1, apikey='snoopy'): + # get the DataverseAPILink + dat = get_dataverse_link_object(apikey=apikey) + dat.set_return_mode_python() + + for x in range(249, 260): + dat.publish_dataverse(x) + return + for x in range(0, cnt): + num = x+1 + alias_str = "new_dv_%d" % num + dv_params_str = """{ "alias":"%s", + "name":"%s %s", + "affiliation":"Affiliation value", + "contactEmail":"pete@malinator.com", + "permissionRoot":true, + "description":"More API testing" + }""" % (alias_str, name, num) + + dv_params = json.loads(dv_params_str) + dat.create_dataverse(parent_dv_name_or_id, dv_params, ) + if x % 20 == 0: time.sleep(1) + + +def delete_dataverses_id_greather_than(id_num, apikey): + if not type(id_num) == int: + raise('id_num needs be an int--not a %s' % type(id_num)) + + # get the DataverseAPILink + dat = get_dataverse_link_object(apikey=apikey) + dat.set_return_mode_python() + + # List the dataverses + dv_json = dat.list_dataverses() + print dv_json + # Pull dataverse ids > 30 + dv_ids = [dv['id'] for dv in dv_json.get("data") if dv['id'] > id_num] + + # reverse order ids + dv_ids.sort() + dv_ids.reverse() + + # delete them + for dv_id in dv_ids: + print dat.delete_dataverse_by_id(dv_id) + #print dat.list_datasets() + +if __name__ == '__main__': + check_dv() + #load_users_from_api_file('demo-data/users_2014-0609_14.json') + 
#load_users_from_api_file('demo-data/rp_users.json') + #add_and_publish_dataverses('demo-data/dataverses_2014-0609_14.json', 'gromit') + #add_and_publish_dataverses('demo-data/rp_dataverses.json', 'gromit') + + #add_dataverses('Other DV #', 17, 23, 'snoopy') + #add_dataverses('Uma\'s Other Retricted DVs #', 7, 8, 'pete') + #delete_dataverses_id_greather_than(177, 'pete') diff --git a/postgresql/testdata/scripts/api/py_api_wrapper/dataverse_api_link.py b/postgresql/testdata/scripts/api/py_api_wrapper/dataverse_api_link.py new file mode 100644 index 0000000..4c6df59 --- /dev/null +++ b/postgresql/testdata/scripts/api/py_api_wrapper/dataverse_api_link.py @@ -0,0 +1,430 @@ +""" +Use Dataverse native APIs described here: https://github.com/IQSS/dataverse/tree/master/scripts/api + +5/8/2013 - scratch work, examining API +6/5/2013 - Back to implementing some API work +6/6/2013 - Move function parameters into API_SPECS, create functions on init + +Requires the python requests library: http://docs.python-requests.org + +""" +import os +import sys +import json +import requests +from msg_util import * +import types # MethodType, FunctionType +from datetime import datetime +from single_api_spec import SingleAPISpec + +def msg(s): print s +def dashes(char='-'): msg(40*char) +def msgt(s): dashes(); msg(s); dashes() +def msgx(s): dashes('\/'); msg(s); dashes('\/'); sys.exit(0) + + +class DataverseAPILink: + """ + Convenience class to access the Dataverse API described in github: + + https://github.com/IQSS/dataverse/tree/master/scripts/api + + Example: + from dataverse_api_link import DataverseAPILink + server_with_api = 'https://dvn-build.hmdc.harvard.edu' + + dat = DataverseAPILink(server_with_api, use_https=False, apikey='pete') + dat.set_return_mode_python() + print dat.list_users() + print dat.list_roles() + print dat.list_dataverses() + print dat.list_datasets() + print dat.get_dataverse_by_id_or_alias(5) + print dat.view_dataset_metadata_by_id_version(123, 57) + print 
dat.view_root_dataverse() + print dat.get_user_data(1) + """ + RETURN_MODE_STR = 'RETURN_MODE_STR' + RETURN_MODE_PYTHON = 'RETURN_MODE_PYTHON' + HTTP_GET = 'GET' + HTTP_POST = 'POST' + HTTP_DELETE = 'DELETE' + HTTP_METHODS = [HTTP_GET, HTTP_POST, HTTP_DELETE] + + # Each List corresponds to 'new_function_name', 'name', 'url_path', 'use_api_key', 'num_id_vals', 'use_params_dict' + # + API_READ_SPECS = ( + # USERS + [ 'list_users', 'List Users', '/api/builtin-users', False, 0]\ + , ['get_user_data', 'Get metadata for a specific user', '/api/builtin-users/%s' % SingleAPISpec.URL_PLACEHOLDER, False, 1]\ + + # ROLES + , ['list_roles', 'List Roles', '/api/roles', False, 0]\ + + # Datasets + , ['list_datasets', 'List Datasets', '/api/datasets', True, 0]\ + , ['view_dataset_by_id', 'View Dataset By ID' \ + , '/api/datasets/%s' % (SingleAPISpec.URL_PLACEHOLDER,), True, 1]\ + #, ['view_dataset_versions_by_id', 'View Dataset By ID', '/api/datasets/%s/versions' % SingleAPISpec.URL_PLACEHOLDER, True, True]\ + # Dataverses + , ['list_dataverses', 'List Dataverses', '/api/dataverses', False, 0]\ + , ['get_dataverse_by_id_or_alias', 'View Dataverse by ID or Alias', '/api/dataverses/%s' % (SingleAPISpec.URL_PLACEHOLDER,), False, 1]\ + , ['view_root_dataverse', 'View Root Dataverse', '/api/dataverses/:root', False, 0]\ + + # Metadata + , ['list_metadata_blocks', 'List metadata blocks', '/api/metadatablocks', False, 0] + , ['view_dataset_metadata_by_id_version', 'View Dataset By ID'\ + , '/api/datasets/%s/versions/%s/metadata' % (SingleAPISpec.URL_PLACEHOLDER, SingleAPISpec.URL_PLACEHOLDER), True, 2]\ + + ) + + + API_WRITE_SPECS = ( + + # Create a Dataverse + # curl -H "Content-type:application/json" -X POST -d @data/dv-pete-top.json "http://localhost:8080/api/dataverses/root?key=pete" + # + #[ 'create_dataverse', 'Create Dataverse', '/api/dataverses/%s' % SingleAPISpec.URL_PLACEHOLDER, True, 1, True]\ + + # Create a User + # curl -H "Content-type:application/json" -X POST -d 
@data/userPete.json "http://localhost:8080/api/builtin-users?password=pete" + # + #[ 'create_user', 'Create User', '/api/builtin-users?password=%s' % SingleAPISpec.URL_PLACEHOLDER, False, 1, True]\ + #, + ) + + API_DELETE_SPECS = ( + # Dataset + [ 'delete_dataset', 'Delete Dataset', '/api/builtin-users/%s' % SingleAPISpec.URL_PLACEHOLDER, True, True]\ + #DELETE http://{{SERVER}}/api/datasets/{{id}}?key={{apikey}} + ) + + def __init__(self, server_name, use_https, apikey=None): + """ + :param server_name: e.g. dataverse.org, dvn-build.hmdc.harvard.edu, etc. + :type server_name: str + :param use_https: Use https for api calls? + :type use_https: boolean + """ + self.server_name = server_name + if len(self.server_name.split('//')) > 1: # remove accidental additional of http:// or https:// + self.server_name = self.server_name.split('//')[-1] + if self.server_name.endswith('/'): + self.server_name = self.server_name[:-1] + self.use_https = use_https + self.apikey = apikey + self.update_server_name() + self.return_mode = self.RETURN_MODE_STR + self.bind_basic_functions() + + def set_return_mode_python(self): + """API calls return JSON text response as a Python object + Uses json.loads(json_str) + """ + self.return_mode = self.RETURN_MODE_PYTHON + + def set_return_mode_string(self): + """API calls return JSON responses as a string""" + self.return_mode = self.RETURN_MODE_STR + + + def update_server_name(self): + if self.server_name is None: + raise Exception('Server name is None!') + + if self.server_name.endswith('/'): # cut trailing slash + self.server_name = self.server_name[-1] + + server_name_pieces = self.server_name.split('//') + if len(server_name_pieces) > 1: + self.server_name = server_name_pieces[1] + + def get_server_name(self): + + if self.use_https: + return 'https://' + self.server_name + return 'http://' + self.server_name + + def make_api_call(self, url_str, method, params={}, headers=None): + """ + Use the requests library to make the actual API call + 
+ :param url_str: str, url to call + :param method: str indicating http method: GET, POST, DELETE, etc. Must be in self.HTTP_METHODS: GET, POST, DELETE, + :param params: dict containing python parameters + :param headers: optional dict containing headers. e.g. {'content-type': 'application/json'} + + :returns: response from the request + :rtype: depends on self.RETURN_MODE_PYTHON; either text or JSON converted to python dict + """ + + msg('url_str: [%s]\nmethod:[%s]\nparams:[%s]\nheaders:[%s]' % (url_str, method, params, headers)) + if url_str is None: + return None + if not method in self.HTTP_METHODS: + msgt('Error: Method not found: %s' % method) + if not type(params) == dict: + msgt('Params must be a python dict, {}') + + params = json.dumps(params) + + if method == self.HTTP_GET: + r = requests.get(url_str, data=params) + elif method == self.HTTP_POST: + if headers is not None: + r = requests.post(url_str, data=params, headers=headers) + else: + r = requests.post(url_str, data=params) + elif method == self.HTTP_DELETE: + r = requests.delete(url_str, data=params) + + msg('Status Code: %s' % r.status_code) + msg('Encoding: %s' % r.encoding) + msg('Text: %s' % r.text) + + if self.return_mode == self.RETURN_MODE_PYTHON: + return r.json() + + #print json.dumps(json.loads(s), indent=4) + try: + return json.dumps(json.loads(r.text), indent=4) + except: + pass + return r.text + + + def create_user(self, dv_params, new_password): + """ + Create a user + + :param dv_params: dict containing the parameters for the new user + :param new_password: str for the user's password + """ + msgt('create_user') + if not type(dv_params) is dict: + msgx('dv_params is None') + + # [ 'create_user', 'Create User', '/api/builtin-users?password=%s' % SingleAPISpec.URL_PLACEHOLDER, False, 1, True]\ + + url_str = self.get_server_name() + '/api/builtin-users?password=%s' % (new_password) + headers = {'content-type': 'application/json'} + return self.make_api_call(url_str, self.HTTP_POST, 
params=dv_params, headers=headers) + + + def create_dataverse(self, parent_dv_alias_or_id, dv_params): + """Create a dataverse + POST http://{{SERVER}}/api/dataverses/{{ parent_dv_name }}?key={{username}} + + :param parent_dv_alias_or_id: str or integer, the alias or id of an existing datavese + :param dv_params: dict containing the parameters for the new dataverse + + Sample: Create Dataverse + + from dataverse_api import DataverseAPILink + server_with_api = 'dvn-build.hmdc.harvard.edu' + dat = DataverseAPILink(server_with_api, use_https=False, apikey='pete') + dv_params = { + "alias":"hm_dv", + "name":"Home, Home on the Dataverse", + "affiliation":"Affiliation value", + "contactEmail":"pete@mailinator.com", + "permissionRoot":False, + "description":"API testing" + } + parent_dv_alias_or_id = 'root' + print dat.create_dataverse(parent_dv_alias_or_id, dv_params) + """ + msgt('create_dataverse') + if not type(dv_params) is dict: + msgx('dv_params is None') + + url_str = self.get_server_name() + '/api/dataverses/%s?key=%s' % (parent_dv_alias_or_id, self.apikey) + headers = {'content-type': 'application/json'} + return self.make_api_call(url_str, self.HTTP_POST, params=dv_params, headers=headers) + + def publish_dataverse(self, dv_id_or_name): + """ + Publish a dataverse based on its id or alias + #POST http://{{SERVER}}/api/dataverses/{{identifier}}/actions/:publish?key={{apikey}} + + :param dv_id_or_name: Dataverse id (str or int) or alias (str) + """ + msgt('publish_dataverse') + print 'dv_id_or_name', dv_id_or_name + if dv_id_or_name is None: + msgx('dv_id_or_name is None') + + url_str = self.get_server_name() + '/api/dataverses/%s/actions/:publish?key=%s' % (dv_id_or_name, self.apikey) + headers = {'content-type': 'application/json'} + return self.make_api_call(url_str, self.HTTP_POST) + + + def show_api_info(self): + for spec in self.API_READ_SPECS: + print spec[0] + + + def bind_single_function(self, spec_list, function_name_for_api_call): + """ + :param 
spec_list: list or tuple defining function sepcs + :param function_name_for_api_call: str naming coded function in the DataverseAPILink + """ + # Load the function specs + single_api_spec = SingleAPISpec(spec_list) + + # Pull the code to generate the function. e.g. def function_name(params): etc, etc + code_str = single_api_spec.get_code_str(function_name_for_api_call) # ---- GET ---- + + # Create the function + exec(code_str) + + # Bind the function to this instance of DataverseAPILink + self.__dict__[single_api_spec.new_function_name] = types.MethodType(eval(single_api_spec.new_function_name), self) + + + def bind_basic_functions(self): + """ + Go through API specs and add the functions to DataverseAPILink + """ + + # Add read functions + for spec in self.API_READ_SPECS: + self.bind_single_function(spec, 'make_api_get_call') + + # Decided to explicitly write add functions for clarity + # Add write functions + #for spec in self.API_WRITE_SPECS: + # self.bind_single_function(spec, 'make_api_write_call') + + + + + def make_api_write_call(self, call_name, url_path, use_api_key=False, id_val=None, params_dict={}): + msgt(call_name) + print 'params_dict', params_dict + if not type(params_dict) is dict: + msgx('params_dict is not a dict. 
Found: %s' % type(params_dict)) + + if use_api_key: + url_str = '%s%s?key=%s' % (self.get_server_name(), url_path, self.apikey) + else: + url_str = '%s%s' % (self.get_server_name(), url_path) + + headers = {'content-type': 'application/json'} + return self.make_api_call(url_str, self.HTTP_POST, params=params_dict, headers=headers) + + + + def make_api_get_call(self, call_name, url_path, use_api_key=False, id_val=None): + msgt(call_name) + if use_api_key: + url_str = '%s%s?key=%s' % (self.get_server_name(), url_path, self.apikey) + else: + url_str = '%s%s' % (self.get_server_name(), url_path) + + return self.make_api_call(url_str, self.HTTP_GET) + + + def make_api_delete_call(self, call_name, url_path, use_api_key=False, id_val=None): + msgt(call_name) + if use_api_key: + url_str = '%s%s?key=%s' % (self.get_server_name(), url_path, self.apikey) + else: + url_str = '%s%s' % (self.get_server_name(), url_path) + + return self.make_api_call(url_str, self.HTTP_DELETE)#, kwargs) + + + def save_to_file(self, fname, content): + dirname = os.path.dirname(fname) + if not os.path.isdir(dirname): + msgx('This directory does not exist: %s' % dirname) + fh = open(fname, 'w') + fh.write(content) + fh.close() + msg('File written: %s' % fname) + + + def save_current_metadata(self, output_dir): + """ + For the current server, save JSON with information on: + - Users + - Dataverses + - Datasets + """ + msgt('run_dataverse_backup') + if not os.path.isdir(output_dir): + msgx('This directory does not exist: %s' % output_dir) + + #date_str = datetime.now().strftime('%Y-%m%d_%H%M') + date_str = datetime.now().strftime('%Y-%m%d_%H') + + self.set_return_mode_string() + + #--------------------------- + # Retrieve the users + #--------------------------- + user_json = self.list_users() + self.save_to_file(os.path.join(output_dir, 'users_%s.json' % date_str), user_json) + + #--------------------------- + # Retrieve the roles + #--------------------------- + #roles_json = self.list_roles() + 
#self.save_to_file(os.path.join(output_dir, 'roles_%s.json' % date_str), roles_json) + + #--------------------------- + # Retrieve the dataverses + #--------------------------- + dv_json = self.list_dataverses() + self.save_to_file(os.path.join(output_dir, 'dataverses_%s.json' % date_str), dv_json) + + #--------------------------- + # Retrieve the datasets + #--------------------------- + dset_json = self.list_datasets() + self.save_to_file(os.path.join(output_dir, 'datasets_%s.json' % date_str), dset_json) + + + def delete_dataverse_by_id(self, id_val): + msgt('delete_dataverse_by_id: %s' % id_val) + url_str = self.get_server_name() + '/api/dataverses/%s?key=%s' % (id_val, self.apikey) + return self.make_api_call(url_str, self.HTTP_DELETE) + + + + +if __name__=='__main__': + import time + + #POST http://{{SERVER}}/api/dataverses/{{identifier}}/actions/:publish?key={{apikey}} + + server_with_api = 'https://dvn-build.hmdc.harvard.edu' + dat = DataverseAPILink(server_with_api, use_https=False, apikey='pete') + #dat.save_current_metadata('demo-data') + #sys.exit(0) + #dat.set_return_mode_string() + + """ """ + dv_params = { + "alias":"hm_dv", + "name":"Home, Home on the Dataverse", + "affiliation":"Affiliation value", + "contactEmail":"pete@mailinator.com", + "permissionRoot":False, + "description":"API testing" + } + print dat.create_dataverse('root', dv_params) + #print dat.create_user('some_pw', dv_params) + """ + print dat.get_dataverse_by_id_or_alias(5) + print dat.view_dataset_metadata_by_id_version(123, 57) + print dat.list_users() + print dat.list_roles() + print dat.list_datasets() + print dat.list_dataverses() + print dat.view_root_dataverse() + print dat.get_user_data(1) + print dat.list_metadata_blocks() + """ + \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/py_api_wrapper/msg_util.py b/postgresql/testdata/scripts/api/py_api_wrapper/msg_util.py new file mode 100644 index 0000000..bd16af1 --- /dev/null +++ 
b/postgresql/testdata/scripts/api/py_api_wrapper/msg_util.py @@ -0,0 +1,10 @@ +import sys +def msg(s): print s +def dashes(char='-'): msg(40*char) +def msgt(s): dashes(); msg(s); dashes() +def msgx(s): dashes('\/'); msg(s); dashes('\/'); sys.exit(0) + +""" + +curl -H "Content-type:application/json" -X POST -d user_params.json "http://dvn-build.hmdc.harvard.edu/api/builtin-users?password=linus" +""" diff --git a/postgresql/testdata/scripts/api/py_api_wrapper/readme.md b/postgresql/testdata/scripts/api/py_api_wrapper/readme.md new file mode 100644 index 0000000..f062567 --- /dev/null +++ b/postgresql/testdata/scripts/api/py_api_wrapper/readme.md @@ -0,0 +1,92 @@ +## note: not yet updated to work with new permissions +---- + +# Python API Wrapper Guide + +(6/5/2014 - work in progress) + +This is a python class "DataverseAPILink" which may be used to make the API calls described in the Dataverse [API Guide](https://github.com/IQSS/dataverse/tree/master/scripts/api/readme.md) + +Results of API calls may be returned as JSON (string format) or as python dictionaries. 
+ + +## Dependency + +[python requests module](http://docs.python-requests.org/) + +## Quick example + +List the dataverses + + +```python +from dataverse_api_link import DataverseAPILink + +server_with_api = 'demo.dataverse.org' +dal = DataverseAPILink(server_with_api, use_https=False, apikey='admin') +json_text = dal.list_dataverses() +print json_text +``` + +Output: +```javascript +{ + "status":"OK", + "data":[ + { + "id":93, + "alias":"b", + "name":"b", + "affiliation":"b", + "contactEmail":"b@b", + "permissionRoot":false, + "creator":{ + "id":13, + "firstName":"b", + "lastName":"b", + "userName":"b", + "affiliation":"b", + "position":"b", + "email":"b@b" + }, + "description":"b", + "ownerId":1, + "creationDate":"2014-05-12 02:38:36 -04" + }, + + (etc, etc) +``` + +Return the same list as a python object + +```python + +dat.set_return_mode_python() # Return python dict instead of a string +d = dat.list_dataverses() # python dictionary {} +print d.keys() +dv_names = [dv_info.get('name', 'no name?') for dv_info in d['data']] +print dv_names +``` + +Output: +```python +[u'status', u'data'] +[u'b', u'Beta Candidate', u'kc58', u'Kevin Smoke Test 5/8', u'Penultimate Smoke Test', u"Pete's public place", u"Pete's restricted data", u"Pete's secrets", u'Root', u'smoke 5/7', u'testadd', u'testauthor', u'Test Cliosed', u'Test Open', u'testpete', u'Top dataverse of Pete', u'Top dataverse of Uma', u"Uma's first", u"Uma's restricted"] +``` +### Users + +List Users: + +```python +dat.set_return_mode_python() +user_info = dat.list_users() +print user_info +``` + +Iterate through each user and pull the same data by 'id' + +```python +user_ids = [info['id'] for info in user_info['data'] if info['id'] is not None] +for uid in user_ids: + print dat.get_user_data(uid) +``` diff --git a/postgresql/testdata/scripts/api/py_api_wrapper/single_api_spec.py b/postgresql/testdata/scripts/api/py_api_wrapper/single_api_spec.py new file mode 100644 index 0000000..0dafaaa --- /dev/null +++ 
b/postgresql/testdata/scripts/api/py_api_wrapper/single_api_spec.py @@ -0,0 +1,73 @@ + +class SingleAPISpec: + """ + Convenience class used to help DataverseAPILink when making API functions + """ + + ATTR_NAMES = ['new_function_name', 'name', 'url_path', 'use_api_key', 'num_id_vals', 'use_param_dict'] + URL_PLACEHOLDER = '{{ID_VAL}}' + + def __init__(self, spec_list): + if not type(spec_list) in (list,tuple): + raise Exception('Bad spec. Expected list or tuple.\nReceived: %s' % type(spec_list)) + + num_params = len(spec_list) + if not num_params in (5,6): + raise Exception('Expected 5 or 6 values.\nReceived: %s' % spec_list) + + # Lazy way to add attributes + for idx, attr in enumerate(self.ATTR_NAMES): + if (idx) == num_params: + self.__dict__[attr] = None # only 5 params given, param_dict not needed + else: + self.__dict__[attr] = spec_list[idx] + # e.g., 1st iteration is equivalent of "self.new_function_name = spec_list[0]" + + + def get_code_str(self, dv_link_function_to_call='make_api_get_call'): + """ + Used to create functions within the DataverseAPILink class + """ + if self.use_param_dict is True: + # call_name, url_path, use_api_key=False, id_val=None, params_dict={} + code_str = """ +def %s(self, param_dict, *args): + url_path = '%s' + if args: + for val in args: + if not type(val) in (str, unicode): + val = `val` + url_path = url_path.replace('%s', val, 1) + #url_path += '/' + str(id_val) + print 'OK!' 
+ print 'param_dict', param_dict + return self.%s('%s', url_path, %s, None, param_dict)""" \ + % (self.new_function_name\ + , self.url_path + , SingleAPISpec.URL_PLACEHOLDER + , dv_link_function_to_call + , self.name + , self.use_api_key) + + else: + code_str = """ +def %s(self, *args): + url_path = '%s' + if args: + for val in args: + if not type(val) in (str, unicode): + val = `val` + url_path = url_path.replace('%s', val, 1) + #url_path += '/' + str(id_val) + + return self.%s('%s', url_path, %s)""" \ + % (self.new_function_name\ + , self.url_path + , SingleAPISpec.URL_PLACEHOLDER + , dv_link_function_to_call + , self.name + , self.use_api_key) + print code_str + return code_str + + \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/setup-all.sh b/postgresql/testdata/scripts/api/setup-all.sh new file mode 100755 index 0000000..bde54c1 --- /dev/null +++ b/postgresql/testdata/scripts/api/setup-all.sh @@ -0,0 +1,99 @@ +#!/bin/bash + +SECURESETUP=1 + +for opt in $* +do + case $opt in + "--insecure") + SECURESETUP=0 + ;; + "-insecure") + SECURESETUP=0; + ;; + *) + echo "invalid option: $opt" + exit 1 >&2 + ;; + esac +done + +command -v jq >/dev/null 2>&1 || { echo >&2 '`jq` ("sed for JSON") is required, but not installed. Download the binary for your platform from http://stedolan.github.io/jq/ and make sure it is in your $PATH (/usr/bin/jq is fine) and executable with `sudo chmod +x /usr/bin/jq`. On Mac, you can install it with `brew install jq` if you use homebrew: http://brew.sh . 
Aborting.'; exit 1; } + +echo "deleting all data from Solr" +curl http://localhost:8983/solr/update/json?commit=true -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"*:*\"}}" + +SERVER=http://localhost:8080/api + +# Everything + the kitchen sink, in a single script +# - Setup the metadata blocks and controlled vocabulary +# - Setup the builtin roles +# - Setup the authentication providers +# - setup the settings (local sign-in) +# - Create admin user and root dataverse +# - (optional) Setup optional users and dataverses + + +echo "Setup the metadata blocks" +./setup-datasetfields.sh + +echo "Setup the builtin roles" +./setup-builtin-roles.sh + +echo "Setup the authentication providers" +./setup-identity-providers.sh + +echo "Setting up the settings" +echo "- Allow internal signup" +curl -X PUT -d yes "$SERVER/admin/settings/:AllowSignUp" +curl -X PUT -d /dataverseuser.xhtml?editMode=CREATE "$SERVER/admin/settings/:SignUpUrl" + +curl -X PUT -d doi "$SERVER/admin/settings/:Protocol" +curl -X PUT -d 10.5072/FK2 "$SERVER/admin/settings/:Authority" +curl -X PUT -d EZID "$SERVER/admin/settings/:DoiProvider" +curl -X PUT -d / "$SERVER/admin/settings/:DoiSeparator" +curl -X PUT -d burrito $SERVER/admin/settings/BuiltinUsers.KEY +curl -X PUT -d localhost-only $SERVER/admin/settings/:BlockedApiPolicy +echo + +echo "Setting up the admin user (and as superuser)" +adminResp=$(curl -s -H "Content-type:application/json" -X POST -d @data/user-admin.json "$SERVER/builtin-users?password=admin&key=burrito") +echo $adminResp +curl -X POST "$SERVER/admin/superuser/dataverseAdmin" +echo + +echo "Setting up the root dataverse" +adminKey=$(echo $adminResp | jq .data.apiToken | tr -d \") +curl -s -H "Content-type:application/json" -X POST -d @data/dv-root.json "$SERVER/dataverses/?key=$adminKey" +echo +echo "Set the metadata block for Root" +curl -s -X POST -H "Content-type:application/json" -d "[\"citation\"]" 
$SERVER/dataverses/:root/metadatablocks/?key=$adminKey +echo +echo "Set the default facets for Root" +curl -s -X POST -H "Content-type:application/json" -d "[\"authorName\",\"subject\",\"keywordValue\",\"dateOfDeposit\"]" $SERVER/dataverses/:root/facets/?key=$adminKey +echo + +# OPTIONAL USERS AND DATAVERSES +#./setup-optional.sh + +if [ $SECURESETUP = 1 ] +then + # Revoke the "burrito" super-key; + # Block the sensitive API endpoints; + curl -X DELETE $SERVER/admin/settings/BuiltinUsers.KEY + curl -X PUT -d admin,test $SERVER/admin/settings/:BlockedApiEndpoints + echo "Access to the /api/admin and /api/test is now disabled, except for connections from localhost." +else + echo "IMPORTANT!!!" + echo "You have run the setup script in the INSECURE mode!" + echo "Do keep in mind, that access to your admin API is now WIDE-OPEN!" + echo "Also, your built-in user is still set up with the default authentication token" + echo "(that is distributed as part of this script, hence EVERYBODY KNOWS WHAT IT IS!)" + echo "Please consider the consequences of this choice. You can block access to the" + echo "/api/admin and /api/test endpoints, for all connections except from localhost," + echo "and revoke the authentication token from the built-in user by executing the" + echo "script post-install-api-block.sh." +fi + +echo +echo "Setup done." 
diff --git a/postgresql/testdata/scripts/api/setup-builtin-roles.sh b/postgresql/testdata/scripts/api/setup-builtin-roles.sh new file mode 100755 index 0000000..0f3c1c1 --- /dev/null +++ b/postgresql/testdata/scripts/api/setup-builtin-roles.sh @@ -0,0 +1,34 @@ +SERVER=http://localhost:8080/api + +# Setup the builtin roles +echo "Setting up admin role" +curl -H "Content-type:application/json" -d @data/role-admin.json http://localhost:8080/api/admin/roles/ +echo + +echo "Setting up file downloader role" +curl -H "Content-type:application/json" -d @data/role-filedownloader.json http://localhost:8080/api/admin/roles/ +echo + +echo "Setting up full contributor role" +curl -H "Content-type:application/json" -d @data/role-fullContributor.json http://localhost:8080/api/admin/roles/ +echo + +echo "Setting up dv contributor role" +curl -H "Content-type:application/json" -d @data/role-dvContributor.json http://localhost:8080/api/admin/roles/ +echo + +echo "Setting up ds contributor role" +curl -H "Content-type:application/json" -d @data/role-dsContributor.json http://localhost:8080/api/admin/roles/ +echo + +echo "Setting up editor role" +curl -H "Content-type:application/json" -d @data/role-editor.json http://localhost:8080/api/admin/roles/ +echo + +echo "Setting up curator role" +curl -H "Content-type:application/json" -d @data/role-curator.json http://localhost:8080/api/admin/roles/ +echo + +echo "Setting up member role" +curl -H "Content-type:application/json" -d @data/role-member.json http://localhost:8080/api/admin/roles/ +echo diff --git a/postgresql/testdata/scripts/api/setup-datasetfields.sh b/postgresql/testdata/scripts/api/setup-datasetfields.sh new file mode 100755 index 0000000..4ce27bc --- /dev/null +++ b/postgresql/testdata/scripts/api/setup-datasetfields.sh @@ -0,0 +1,8 @@ +#!/bin/sh +curl http://localhost:8080/api/admin/datasetfield/loadNAControlledVocabularyValue +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary 
@data/metadatablocks/citation.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/geospatial.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/social_science.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/astrophysics.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/biomedical.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/journals.tsv -H "Content-type: text/tab-separated-values" diff --git a/postgresql/testdata/scripts/api/setup-dvs.sh b/postgresql/testdata/scripts/api/setup-dvs.sh new file mode 100755 index 0000000..9110935 --- /dev/null +++ b/postgresql/testdata/scripts/api/setup-dvs.sh @@ -0,0 +1,34 @@ +#!/bin/bash -f +SERVER=http://localhost:8080/api +echo Setting up dataverses on $SERVER +echo ============================================== +if [ $# -eq 0 ] + then + echo "Please supply Pete and Uma's API keys like so:" + echo "$0 [pete's key] [uma's key]" + echo "The keys are printed at the end of the setup-users.sh script" + echo "Or, just get them from the database" + exit 1 +fi + +echo Pete +curl -s -H "Content-type:application/json" -X POST -d @data/dv-pete-top.json "$SERVER/dataverses/root?key=$1" +echo +curl -s -H "Content-type:application/json" -X POST -d @data/dv-pete-sub-normal.json "$SERVER/dataverses/peteTop?key=$1" +echo +curl -s -H "Content-type:application/json" -X POST -d @data/dv-pete-sub-restricted.json "$SERVER/dataverses/peteTop?key=$1" +echo +curl -s -H "Content-type:application/json" -X POST -d @data/dv-pete-sub-secret.json 
"$SERVER/dataverses/peteTop?key=$1" +echo + +echo Uma +echo Pete creates top-level for Uma +curl -s -H "Content-type:application/json" -H "X-Dataverse-key:$1" -X POST -d @data/dv-uma-top.json "$SERVER/dataverses/root" +echo +echo Pete makes Uma an admin on her own DV +curl -s -H "Content-type:application/json" -H "X-Dataverse-key:$1" -X POST -d"{\"assignee\":\"@uma\",\"role\":\"admin\"}" $SERVER/dataverses/umaTop/assignments/ +echo +curl -s -H "Content-type:application/json" -H "X-Dataverse-key:$2" -X POST -d @data/dv-uma-sub1.json "$SERVER/dataverses/umaTop" +echo +curl -s -H "Content-type:application/json" -H "X-Dataverse-key:$2" -X POST -d @data/dv-uma-sub2.json "$SERVER/dataverses/umaTop" +echo diff --git a/postgresql/testdata/scripts/api/setup-identity-providers.sh b/postgresql/testdata/scripts/api/setup-identity-providers.sh new file mode 100755 index 0000000..89ac59d --- /dev/null +++ b/postgresql/testdata/scripts/api/setup-identity-providers.sh @@ -0,0 +1,9 @@ +SERVER=http://localhost:8080/api + +# Setup the authentication providers +echo "Setting up internal user provider" +curl -H "Content-type:application/json" -d @data/authentication-providers/builtin.json http://localhost:8080/api/admin/authenticationProviders/ + +#echo "Setting up Echo providers" +#curl -H "Content-type:application/json" -d @data/authentication-providers/echo.json http://localhost:8080/api/admin/authenticationProviders/ +#curl -H "Content-type:application/json" -d @data/authentication-providers/echo-dignified.json http://localhost:8080/api/admin/authenticationProviders/ diff --git a/postgresql/testdata/scripts/api/setup-optional-harvard.sh b/postgresql/testdata/scripts/api/setup-optional-harvard.sh new file mode 100755 index 0000000..a5553a6 --- /dev/null +++ b/postgresql/testdata/scripts/api/setup-optional-harvard.sh @@ -0,0 +1,55 @@ +#!/bin/bash +SERVER=http://localhost:8080/api + +echo "Setting up Harvard-specific settings" +echo "- Application Status header" +curl -s -X PUT -d 
'Upgrade in progress...' $SERVER/admin/settings/:StatusMessageHeader +echo "- Application Status message" +curl -s -X PUT -d 'Dataverse is currently being upgraded. You can see the features, bug fixes, and other upgrades for this release in the Dataverse Roadmap.' $SERVER/admin/settings/:StatusMessageText +echo "- Harvard Privacy Policy" +curl -s -X PUT -d http://best-practices.dataverse.org/harvard-policies/harvard-privacy-policy.html $SERVER/admin/settings/:ApplicationPrivacyPolicyUrl +curl -s -X PUT -d http://best-practices.dataverse.org/harvard-policies/harvard-api-tou.html $SERVER/admin/settings/:ApiTermsOfUse +echo "- Configuring Harvard's password policy in Dataverse" +# Min length is 10 because that is the minimum Harvard requires without periodic expiration +curl -s -X PUT -d 10 $SERVER/admin/settings/:PVMinLength +# If password 20+ characters, other rules do not apply +curl -s -X PUT -d 20 $SERVER/admin/settings/:PVGoodStrength +# The character classes users can choose between and the number of each needed +curl -X PUT -d 'UpperCase:1,Digit:1,LowerCase:1,Special:1' $SERVER/admin/settings/:PVCharacterRules +# The number of character classes a password needs to be valid +curl -s -X PUT -d 3 $SERVER/admin/settings/:PVNumberOfCharacteristics +# The number of character classes a password needs to be valid +curl -s -X PUT -d 4 $SERVER/admin/settings/:PVNumberOfConsecutiveDigitsAllowed +# Harvard requires a dictionary check on common words & names. We use the unix 'words' file, removing ones less than 4 characters. Policy clarification received by Harvard Key was no words 4 characters or longer. 
+DIR="/usr/local/glassfish4/glassfish/domains/domain1/files" #this can be replaced with a different file path for storing the dictionary +sed '/^.\{,3\}$/d' /usr/share/dict/words > $DIR/pwdictionary +curl -s -X PUT -d "$DIR/pwdictionary" $SERVER/admin/settings/:PVDictionaries +echo "- Adjust Solr frag size" +curl -s -X PUT -d 320 $SERVER/admin/settings/:SearchHighlightFragmentSize +echo "- Google Analytics setting" +curl -X PUT -d true "$SERVER/admin/settings/:ScrubMigrationData" +echo "- Enabling Shibboleth" +curl -X POST -H "Content-type: application/json" http://localhost:8080/api/admin/authenticationProviders --upload-file ../../doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/shibAuthProvider.json +echo "- Enabling TwoRavens" +curl -s -X PUT -d true "$SERVER/admin/settings/:TwoRavensTabularView" +echo "- Enabling Geoconnect" +curl -s -X PUT -d true "$SERVER/admin/settings/:GeoconnectCreateEditMaps" +curl -s -X PUT -d true "$SERVER/admin/settings/:GeoconnectViewMaps" +echo "- Setting system email" +curl -X PUT -d "Harvard Dataverse Support " http://localhost:8080/api/admin/settings/:SystemEmail +curl -X PUT -d ", The President & Fellows of Harvard College" http://localhost:8080/api/admin/settings/:FooterCopyright +echo "- Setting up the Harvard Shibboleth institutional group" +curl -s -X POST -H 'Content-type:application/json' --upload-file data/shibGroupHarvard.json "$SERVER/admin/groups/shib?key=$adminKey" +echo +echo "- Setting up the MIT Shibboleth institutional group" +curl -s -X POST -H 'Content-type:application/json' --upload-file data/shibGroupMit.json "$SERVER/admin/groups/shib?key=$adminKey" +echo +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/customMRA.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/customGSD.tsv -H "Content-type: text/tab-separated-values" +curl 
http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/customARCS.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/customPSRI.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/customPSI.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/customCHIA.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/customDigaai.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/custom_hbgdki.tsv -H "Content-type: text/tab-separated-values" +echo diff --git a/postgresql/testdata/scripts/api/setup-optional-publish-terms.sh b/postgresql/testdata/scripts/api/setup-optional-publish-terms.sh new file mode 100644 index 0000000..f0bb0ff --- /dev/null +++ b/postgresql/testdata/scripts/api/setup-optional-publish-terms.sh @@ -0,0 +1,8 @@ +#!/bin/bash + + +SERVER=http://localhost:8080/api + +echo "- Enabling Publish Popup Custom Text" +curl -s -X PUT -d true "$SERVER/admin/settings/:DatasetPublishPopupCustomTextOnAllVersions" +curl -X PUT -d "By default datasets are published with the CC0-“Public Domain Dedication” waiver. Learn more about the CC0 waiver here.

                            To publish with custom Terms of Use, click the Cancel button and go to the Terms tab for this dataset." $SERVER/admin/settings/:DatasetPublishPopupCustomText \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/setup-optional.sh b/postgresql/testdata/scripts/api/setup-optional.sh new file mode 100755 index 0000000..9a8c852 --- /dev/null +++ b/postgresql/testdata/scripts/api/setup-optional.sh @@ -0,0 +1,13 @@ +#!/bin/bash +command -v jq >/dev/null 2>&1 || { echo >&2 '`jq` ("sed for JSON") is required, but not installed. Download the binary for your platform from http://stedolan.github.io/jq/ and make sure it is in your $PATH (/usr/bin/jq is fine) and executable with `sudo chmod +x /usr/bin/jq`. On Mac, you can install it with `brew install jq` if you use homebrew: http://brew.sh . Aborting.'; exit 1; } + +# OPTIONAL USERS AND DATAVERSES +TMP=setup.temp +./setup-users.sh | tee $TMP + +PETE=$(cat $TMP | grep :result: | grep Pete | cut -d: -f4) +UMA=$(cat $TMP | grep :result: | grep Uma | cut -d: -f4) + +./setup-dvs.sh $PETE $UMA + +rm $TMP diff --git a/postgresql/testdata/scripts/api/setup-users.sh b/postgresql/testdata/scripts/api/setup-users.sh new file mode 100755 index 0000000..141e1b3 --- /dev/null +++ b/postgresql/testdata/scripts/api/setup-users.sh @@ -0,0 +1,30 @@ +#!/bin/bash -f +command -v jq >/dev/null 2>&1 || { echo >&2 "jq required, but it's not installed. On mac, use brew (http://brew.sh) to install it. 
Aborting."; exit 1; } + +SERVER=http://localhost:8080/api +echo Setting up users on $SERVER +echo ============================================== + +curl -X PUT -d burrito $SERVER/admin/settings/BuiltinUsers.KEY + + +peteResp=$(curl -s -H "Content-type:application/json" -X POST -d @data/userPete.json "$SERVER/builtin-users?password=pete&key=burrito") +echo $peteResp + +umaResp=$(curl -s -H "Content-type:application/json" -X POST -d @data/userUma.json "$SERVER/builtin-users?password=uma&key=burrito") +echo $umaResp + +curl -s -H "Content-type:application/json" -X POST -d @data/userGabbi.json "$SERVER/builtin-users?password=gabbi&key=burrito" +echo + +curl -s -H "Content-type:application/json" -X POST -d @data/userCathy.json "$SERVER/builtin-users?password=cathy&key=burrito" +echo + +curl -s -H "Content-type:application/json" -X POST -d @data/userNick.json "$SERVER/builtin-users?password=nick&key=burrito" +echo + +echo reporting API keys +peteKey=$(echo $peteResp | jq .data.apiToken | tr -d \") +echo :result: Pete\'s key is: $peteKey +umaKey=$(echo $umaResp | jq .data.apiToken | tr -d \") +echo :result: Uma\'s key is: $umaKey \ No newline at end of file diff --git a/postgresql/testdata/scripts/api/testBlockEndpoints.sh b/postgresql/testdata/scripts/api/testBlockEndpoints.sh new file mode 100755 index 0000000..59d375e --- /dev/null +++ b/postgresql/testdata/scripts/api/testBlockEndpoints.sh @@ -0,0 +1,53 @@ +#!/bin/bash + +ADMIN_KEY=$1 + +echo Testing Groups +curl http://localhost:8080/api/admin/groups/ip/?key=$ADMIN_KEY +echo + +echo blocking groups +curl -X PUT -d groups http://localhost:8080/api/admin/settings/:BlockedApiEndpoints +echo + +echo Testing Groups again - expecting 503 Unavailable +curl -v http://localhost:8080/api/admin/groups/ip/?key=$ADMIN_KEY +echo + +echo Unblocking groups +curl -X DELETE http://localhost:8080/api/admin/settings/:BlockedApiEndpoints +echo + +echo Testing Groups +curl http://localhost:8080/api/admin/groups/ip/?key=$ADMIN_KEY +echo + 
+echo blocking groups, Roles +curl -X PUT -d groups,roles http://localhost:8080/api/admin/settings/:BlockedApiEndpoints +echo + +echo Testing Groups again - expecting 503 Unavailable +curl -v http://localhost:8080/api/admin/groups/ip/?key=$ADMIN_KEY +echo + +echo Testing Roles - expecting 503 Unavailable +curl -v http://localhost:8080/api/roles/?key=$ADMIN_KEY +echo + +echo blocking Roles only +curl -X PUT -d roles http://localhost:8080/api/admin/settings/:BlockedApiEndpoints +echo + +echo Testing Groups again +curl -v http://localhost:8080/api/admin/groups/ip/?key=$ADMIN_KEY +echo + +echo Testing Roles - expecting 503 Unavailable +curl -v http://localhost:8080/api/roles/?key=$ADMIN_KEY +echo + +echo Unblocking all +curl -X DELETE http://localhost:8080/api/admin/settings/:BlockedApiEndpoints +echo + +echo DONE diff --git a/postgresql/testdata/scripts/api/update-datasetfields.sh b/postgresql/testdata/scripts/api/update-datasetfields.sh new file mode 100644 index 0000000..ae099f8 --- /dev/null +++ b/postgresql/testdata/scripts/api/update-datasetfields.sh @@ -0,0 +1,7 @@ +#!/bin/sh +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/citation.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/geospatial.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/social_science.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/astrophysics.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/biomedical.tsv -H "Content-type: text/tab-separated-values" +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary 
@data/metadatablocks/journals.tsv -H "Content-type: text/tab-separated-values" \ No newline at end of file diff --git a/postgresql/testdata/scripts/backup/run_backup/README_HOWTO.txt b/postgresql/testdata/scripts/backup/run_backup/README_HOWTO.txt new file mode 100644 index 0000000..2e2a0a8 --- /dev/null +++ b/postgresql/testdata/scripts/backup/run_backup/README_HOWTO.txt @@ -0,0 +1,205 @@ +Introduction +============ + +The script, run_backup.py is run on schedule (by a crontab, most +likely). It will back up the files stored in your Dataverse on a +remote storage system. + +As currently implemented, the script can read Dataverse files stored +either on the filesystem or S3; and back them up on a remote storage +server via ssh/scp. It can be easily expanded to support other storage +and backup types (more information is provided below). + +Requirements +============ + +The backup script is written in Python. It was tested with Python v. 2.6 and 2.7. +The following extra modules are required: + +psycopg2 [2.7.3.2] - PostgreSQL driver +boto3 [1.4.7] - AWS sdk, for accessing S3 storage +paramiko [2.2.1] - SSH client, for transferring files via SFTP + +(see below for the exact versions tested) + +Also, an incomplete implementation for backing up files on a remote +swift node is provided. To fully add swift support (left as an +exercise for the reader) an additional module, swiftclient will be +needed. + +Test platforms: + +MacOS 10.12 +----------- + +Python: 2.7.2 - part of standard distribution +paramiko: 2.2.1 - standard +psycopg2: 2.7.3.2 - built with "pip install psycopg2" +boto3: 1.4.7 - built with "pip install boto3" + +CentOS 6 +-------- + +Python: 2.6.6 (from the base distribution for CentOS 6; default /usr/bin/python) +paramiko: 1.7.5 (base distribution) + +distributed as an rpm, python-paramiko.noarch, via the yum repo "base". 
+if not installed: + yum install python-paramiko + +psycopg2: 2.0.14 (base distribution) +distributed as an rpm, python-psycopg2.x86_64, via the yum repo "base". +if not installed: + yum install python-psycopg2 + +boto3: 1.4.8 (built with "pip install boto3") + +- quick and easy build; +make sure you have pip installed. ("yum install python-pip", if not) + +NOTE: v. 2.6 of Python is considered obsolete; the only reason we are +using it is that it is the default version that comes with an equally +obsolete distribution v.6 of CentOS; which just happened to be what we +had available to test this setup on. Similarly, the versions of +paramiko and psycopg2, above, are quite old too. But everything +appears to be working. + +CentOS 7: +--------- + +(TODO) + + +Usage +===== + +In the default mode, the script will attempt to retrieve and back up +only the files that have been created in the Dataverse since the +createdate timestamp on the most recent file already in the backup +database; or all the files, if this is the first run (see the section +below on what the "backup database" is). + +When run with the "--rerun" option (python run_backup.py --rerun) the +script will retrieve the list of ALL the files currently in the +dataverse, but will only attempt to back up the ones not yet backed up +successfully. (i.e. it will skip the files already in the backup +database with the 'OK' backup status) + + +Configuration +============= + +Access credentials, for the Dataverse +and the remote storage system are configured in the file config.ini. + +The following config.ini sections must be configured for the +whole thing to work: + +1. Database. + +The script needs to be able to access the Dataverse database, in order to +obtain the lists of files that have changed since the last backup and +need to be copied. The script can use PostgreSQL running on a +remote server. 
Just make sure that the remote server is configured to +allow connections from the host running the backup script; and that +PostgreSQL is allowing database access from this host too. + +Configure the access credentials as in the example below: + +[Database] +Host: localhost +Port: 5432 +Database: dvndb +Username: dvnapp +Password: xxxxx + +In addition to the main Dataverse database, the script maintains its +own database for keeping track of the backup status of individual +files. The name of the database is specified in the following setting: + +BackupDatabase: backupdb + +The database must be created prior to running of the script. For +example, on the command line: + createdb -U postgres backupdb --owner=dvnapp + +NOTE that the current assumption is that this Postgres database lives +on the same server as the main Dataverse database and is owned by the +same user. + +Also, one table must be created *in this database* (NOT in the main +Dataverse database) before the script can be run. The script +backupdb.sql is provided in this directory. NOTE that the Postgres +user name dvnapp is hard-coded in the script; change it to reflect the +name of the database user on your system, if necessary. + +You can use the standard psql command to create the table; for example: + + psql -d backupdb -f backupdb.sql + +(please note that the example above assumes "backupdb" as the name of +the backup database) + +2. Repository + +This section configures access to the datafiles stored in your +Dataverse. In its present form, the script can read files stored on +the filesystem and S3. There is no support for reading files stored +via swift as of yet. Adding swift support should be straightforward, +by supplying another storage module - similarly to the existing +storage_filesystem.py and storage_s3.py. If you'd like to work on +this, please get in touch. + +For the filesystem storage: the assumption is that the script has +direct access to the filesystem where the files live. 
Meaning that in +order for the script to work on a server that's different from the one +running the Dataverse application, the filesystem must be readable by +the server via NFS, or similarly shared with it. + +The filesystem access requires the single configuration setting, as in +the example below: + +[Repository] +FileSystemDirectory: /usr/local/glassfish4/glassfish/domains/domain1/files + +For S3, no configuration is needed in the config.ini. But AWS +access must be properly configured for the user running the backup +module, in the standard ~/.aws location. + + +3. Backup section. + +This section specifies the method for storing the files on the remote +("secondary") storage subsystem: + +[Backup] +StorageType: ssh + +The currently supported methods are "ssh" (the files are transferred +to the remote location via SSH/SFTP) and "swift" (untested, and +possibly incomplete implementation is provided; see +README_IMPLEMENTATION.txt for more details). + +For ssh access, the following configuration entries are needed: + +SshHost: yyy.zzz.edu +SshPort: 22 +SshUsername: xxxxx + +Additionally, SSH access to the remote server (SshHost, above) must be +provided for the user specified (SshUsername) via ssh keys. + +4. Email notifications + +Once the script completes a backup run it will send a (very minimal) +status report to the email address specified in the config.ini file; +for example: + +[Notifications] +Email: xxx@yyy.zzz.edu + +As currently implemented, the report will only specify how many files +have been processed, and how many succeeded or failed. In order to get +more detailed information about the individual files you'll need to +consult the datafilestatus table in the backup database. 
+ diff --git a/postgresql/testdata/scripts/backup/run_backup/README_IMPLEMENTATION.txt b/postgresql/testdata/scripts/backup/run_backup/README_IMPLEMENTATION.txt new file mode 100644 index 0000000..7e78e0e --- /dev/null +++ b/postgresql/testdata/scripts/backup/run_backup/README_IMPLEMENTATION.txt @@ -0,0 +1,102 @@ +The backup script is implemented in Python (developed and tested with +v. 2.7.10). The following extra modules are needed: + +(versions tested as of the writing of this doc, 11.14.2017) + +psycopg2 [2.7.3.2] - PostgreSQL driver +boto3 [1.4.7] - AWS sdk, for accessing S3 storage +paramiko [2.2.1] - SSH client, for transferring files via SFTP +swiftclient [2.7.0] - for reading [not yet implemented] and writing [incomplete implementation provided] swift objects. + +1. Database access. + +The module uses psycopg2 to access the Dataverse database, to obtain +the lists of files that have changed since the last backup that need +to be copied over. Additionally, it maintains its own database for +keeping track of the backup status of individual files. As of now, +this extra database must reside on the same server as the main +Dataverse database and is owned by the same Postgres user. + +Consult README_HOWTO.txt on how to set up this backup database (needs +to be done prior to running the backup script) + +2. Storage access + +Currently implemented storage access methods, for local filesystem and +S3 are isolated in the files storage_filesystem.py and storage_s3.py, +respectively. To add support for swift a similar fragment of code will +need to be provided, with an open_storage_object... method that can go +to the configured swift end node and return the byte stream associated +with the datafile. Use storage_filesystem.py as the model. Then the +top-level storage.py class will need to be modified to import and use +the extra storage method. + +3. Backup (write) access. 
+ +Similarly, storage type-specific code for writing backed up objects is +isolated in the backup_...py files. The currently implemented storage +methods are ssh/ftp (backup_ssh.py, default) and swift +(backup_swift.py; experimental, untested). To add support for other +storage systems, use backup_ssh.py as the model to create your own +backup_... classes, implementing similar methods, that a) copy the +byte stream associated with a Dataverse datafile onto this storage +system and b) verify the copy against the checksum (MD5 or SHA1) +provided by the Dataverse. In the SSH/SFTP implementation, we can do +the verification step by simply executing md5sum/sha1sum on the remote +server via ssh, once the file is copied. With swift, the only way to +verify against the checksum is to read the file *back* from the swift +end node, and calculate the checksum on the obtained stream. + +4. Keeping track of the backup status + +The module uses the table datafilestatus in the "backup database" to +maintain the backup status information for the individual +datafiles. For the successfully backed up files the 'OK' status is +stored. If the module fails to read the file from the Dataverse +storage, the status 'FAIL_READ' is stored; if it fails to copy over or +verify the backup copy against the checksum, the status 'FAIL_WRITE' +is stored. The Dataverse "createdate" timestamp of the Datafile is +also stored in the database; this way, for incremental backups, the +script tries to retrieve only the Datafiles created after the latest +createdate timestamp currently in the backup db. + +5. TODOs + + +As currently implemented, the status notification report will only +specify how many files have been processed, and how many succeeded or +failed. In order to get more detailed information about the individual +files you'll need to consult the datafilestatus table in the backup +database. 
+ +It could be useful to perhaps extend it to provide a list of specific +files that have been backed up successfully or failed. + +Note that the script relies on the *nix 'mail' command to send the +email notification. I chose to do it this way because it felt easier +than to require the user to configure which smtp server to use in +order to send it from python code... But this requires the mail +command to be there, and the system configured to be able to send +email from the command line. + +If for whatever reason this is not an option, and mail needs to be +sent via remote SMTP, the provided email_notification.py could be +easily modified to use something like + + +import smtplib +from email.mime.text import MIMEText + +... + +msg = MIMEText(text) + +msg['Subject'] = subject_str +msg['To'] = ConfigSectionMap("Notifications")['email'] + +... + +s = smtplib.SMTP(ConfigSectionMap("Notifications")['smtpserver']) +s.sendmail(from, ConfigSectionMap("Notifications")['email'], msg.as_string()) +s.quit() + diff --git a/postgresql/testdata/scripts/backup/run_backup/backup.py b/postgresql/testdata/scripts/backup/run_backup/backup.py new file mode 100644 index 0000000..6004f21 --- /dev/null +++ b/postgresql/testdata/scripts/backup/run_backup/backup.py @@ -0,0 +1,17 @@ +import io +import re +#import backup_swift #TODO +from backup_ssh import (backup_file_ssh) +from config import (ConfigSectionMap) + +def backup_file (file_input, dataset_authority, dataset_identifier, storage_identifier, checksum_type, checksum_value, file_size): + storage_type = ConfigSectionMap("Backup")['storagetype'] + + if storage_type == 'swift': + #backup_file_swift(file_input, dataset_authority, dataset_identifier, storage_identifier, checksum_type, checksum_value, file_size) + raise NotImplementedError('no backup_swift yet') + elif storage_type == 'ssh': + backup_file_ssh(file_input, dataset_authority, dataset_identifier, storage_identifier, checksum_type, checksum_value, file_size) + else: + raise 
ValueError("only ssh/sftp and swift are supported as backup storage media") + diff --git a/postgresql/testdata/scripts/backup/run_backup/backup_ssh.py b/postgresql/testdata/scripts/backup/run_backup/backup_ssh.py new file mode 100644 index 0000000..3355b9c --- /dev/null +++ b/postgresql/testdata/scripts/backup/run_backup/backup_ssh.py @@ -0,0 +1,149 @@ +# Dataverse backup, ssh io module + +import sys +import io +import paramiko +import os +import re +from config import (ConfigSectionMap) + +my_ssh_client = None + +def open_ssh_client(): + ssh_host = ConfigSectionMap("Backup")['sshhost'] + ssh_port = ConfigSectionMap("Backup")['sshport'] + ssh_username = ConfigSectionMap("Backup")['sshusername'] + + print "SSH Host: %s" % (ssh_host) + print "SSH Port: %s" % (ssh_port) + print "SSH Username: %s" % (ssh_username) + + + ssh_client=paramiko.SSHClient() + ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + ssh_client.connect(hostname=ssh_host,username=ssh_username) + + print "Connected!" + + return ssh_client + +# Transfers the file "local_flo" over ssh/sftp to the configured remote server. +# local_flo can be either a string specifying the file path, or a file-like object (stream). +# Note that if a stream is supplied, the method also needs the file size to be specified, +# via the parameter byte_size. 
+def transfer_file(local_flo, dataset_authority, dataset_identifier, storage_identifier, byte_size): + sftp_client=my_ssh_client.open_sftp() + + remote_dir = dataset_authority + "/" + dataset_identifier + + subdirs = remote_dir.split("/") + + cdir = ConfigSectionMap("Backup")['backupdirectory'] + "/" + for subdir in subdirs: + try: + cdir = cdir + subdir + "/" + sftpattr=sftp_client.stat(cdir) + except IOError: + #print "directory "+cdir+" does not exist (creating)" + sftp_client.mkdir(cdir) + #else: + # print "directory "+cdir+" already exists" + + m = re.search('^([a-z0-9]*)://(.*)$', storage_identifier) + if m is not None: + storageTag = m.group(1) + storage_identifier = re.sub('^.*:', '', storage_identifier) + + remote_file = cdir + storage_identifier + + if (type(local_flo) is str): + sftp_client.put(local_flo,remote_file) + else: + # assume it's a stream: + # sftp_client.putfo() is convenient, but appears to be unavailable in older + # versions of paramiko; so we'll be using .read() and .write() instead: + #sftp_client.putfo(local_flo,remote_file,byte_size) + sftp_stream = sftp_client.open(remote_file,"wb") + while True: + buffer = local_flo.read(32*1024) + if len(buffer) == 0: + break; + sftp_stream.write (buffer) + sftp_stream.close() + + sftp_client.close() + + print "File transfered." + + return remote_file + +def verify_remote_file(remote_file, checksum_type, checksum_value): + try: + stdin,stdout,stderr=my_ssh_client.exec_command("ls "+remote_file) + remote_file_checked = stdout.readlines()[0].rstrip("\n\r") + except: + raise ValueError("remote file check failed (" + remote_file + ")") + + if (remote_file != remote_file_checked): + raise ValueError("remote file NOT FOUND! 
(" + remote_file_checked + ")") + + if (checksum_type == "MD5"): + remote_command = "md5sum" + elif (checksum_type == "SHA1"): + remote_command = "sha1sum" + + try: + stdin,stdout,stderr=my_ssh_client.exec_command(remote_command+" "+remote_file) + remote_checksum_value = (stdout.readlines()[0]).split(" ")[0] + except: + raise ValueError("remote checksum check failed (" + remote_file + ")") + + if (checksum_value != remote_checksum_value): + raise ValueError("remote checksum BAD! (" + remote_checksum_value + ")") + + +def backup_file_ssh(file_input, dataset_authority, dataset_identifier, storage_identifier, checksum_type, checksum_value, byte_size=0): + global my_ssh_client + if (my_ssh_client is None): + my_ssh_client = open_ssh_client() + print "ssh client is not defined" + else: + print "reusing the existing ssh client" + + try: + file_transfered = transfer_file(file_input, dataset_authority, dataset_identifier, storage_identifier, byte_size) + except: + raise ValueError("failed to transfer file") + + verify_remote_file(file_transfered, checksum_type, checksum_value) + +def main(): + + print "entering ssh (standalone mode)" + + + print "testing local file:" + try: + file_path="config.ini" + backup_file_ssh("config.ini", "1902.1", "XYZ", "config.ini", "MD5", "8e6995806b1cf27df47c5900869fdd27") + except ValueError: + print "failed to verify file (\"config.ini\")" + else: + print "file ok" + + print "testing file stream:" + try: + file_size = os.stat(file_path).st_size + print ("file size: %d" % file_size) + file_stream = io.open("config.ini", "rb") + backup_file_ssh(file_stream, "1902.1", "XYZ", "config.ini", "MD5", "8e6995806b1cf27df47c5900869fdd27", file_size) + except ValueError: + print "failed to verify file (\"config.ini\")" + else: + print "file ok" + + +if __name__ == "__main__": + main() + + diff --git a/postgresql/testdata/scripts/backup/run_backup/backup_swift.py b/postgresql/testdata/scripts/backup/run_backup/backup_swift.py new file mode 100644 index 
0000000..463c8de --- /dev/null +++ b/postgresql/testdata/scripts/backup/run_backup/backup_swift.py @@ -0,0 +1,25 @@ +import io +import re +import swiftclient +from config import (ConfigSectionMap) + +def backup_file_swift (file_input, dataset_authority, dataset_identifier, storage_identifier): + auth_url = ConfigSectionMap("Backup")['swiftauthurl'] + auth_version = ConfigSectionMap("Backup")['swiftauthversion'] + user = ConfigSectionMap("Backup")['swiftuser'] + tenant = ConfigSectionMap("Backup")['swifttenant'] + key = ConfigSectionMap("Backup")['swiftkey'] + + conn = swiftclient.Connection( + authurl=auth_url, + user=user, + key=key, + tenant_name=tenant, + auth_version=auth_version + ) + + container_name = dataset_authority + ":" + dataset_identifier + conn.put(container_name) + + conn.put_object(container_name, storage_identifier, file_input) + diff --git a/postgresql/testdata/scripts/backup/run_backup/backupdb.sql b/postgresql/testdata/scripts/backup/run_backup/backupdb.sql new file mode 100644 index 0000000..85acb2f --- /dev/null +++ b/postgresql/testdata/scripts/backup/run_backup/backupdb.sql @@ -0,0 +1,31 @@ +CREATE TABLE datafilestatus ( + id integer NOT NULL, + datasetidentifier character varying(255), + storageidentifier character varying(255), + status character varying(255), + createdate timestamp without time zone, + lastbackuptime timestamp without time zone, + lastbackupmethod character varying(16) +); + +ALTER TABLE datafilestatus OWNER TO dvnapp; + +CREATE SEQUENCE datafilestatus_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE datafilestatus_id_seq OWNER TO dvnapp; + +ALTER SEQUENCE datafilestatus_id_seq OWNED BY datafilestatus.id; + +ALTER TABLE ONLY datafilestatus + ADD CONSTRAINT datafilestatus_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY datafilestatus ALTER COLUMN id SET DEFAULT nextval('datafilestatus_id_seq'::regclass); + +ALTER TABLE ONLY datafilestatus + ADD CONSTRAINT 
datafilestatus_storageidentifier_key UNIQUE (storageidentifier); \ No newline at end of file diff --git a/postgresql/testdata/scripts/backup/run_backup/config.ini b/postgresql/testdata/scripts/backup/run_backup/config.ini new file mode 100644 index 0000000..b6bc7a8 --- /dev/null +++ b/postgresql/testdata/scripts/backup/run_backup/config.ini @@ -0,0 +1,66 @@ +[Database] +; Dataverse database access configuration +; Note that this section is REQUIRED! - +; you must be able to access the database in order to run the backup module. +; The database can run on a remote server; but make sure you configure the +; host and access creds (below) correctly, and make sure Postgres is accepting +; connections from this server address. + +Host: localhost +Port: 5432 +Database: dvndb +Username: dvnapp +Password: xxxxxx +BackupDatabase: backupdb + +[Repository] +; This section provides configuration for accessing (reading) the files stored +; in this Dataverse. Note that the files can be physically stored on different +; physical media; if you have files in your Dataverse stored via different +; supported storage drivers - filesystem, swift, S3 - as long as access is properly +; configured here, this script should be able to back them up. 
+ +; configuration for files stored on the filesystem +; (the filesystem needs to be accessible by the system running the backup module) + +FileSystemDirectory: /usr/local/glassfish4/glassfish/domains/domain1/files + +; no configuration needed here for reading files stored on AWS/S3 +; (but the S3 authentication credentials need to be provided in the +; standard ~/.aws location) + +; configuration for files stored on openstack/swift: +; swift NOT SUPPORTED yet + +[Backup] +; ssh configuration: +; (i.e., backup to remote storage accessible via ssh/sftp; default) + +StorageType: ssh +SshHost: backup.dataverse.edu +; ssh port is optional, defaults to 22 +SshPort: 22 +SshUsername: backup +; (the remote server must have ssh key access configured for the user +; specified above) +; the directory on the remote server where the files will be copied to: +BackupDirectory: /dataverse_backup + +; Swift configuration: + +;StorageType: swift +SwiftAuthUrl: https://something.dataverse.edu/swift/v2.0/tokens +SwiftAuthVersion: 2 +SwiftUser: xxx +SwiftKey: yyy +; Note that the 'tenant' setting is only needed for Auth v.1 and 2. +SwiftTenant: zzz +SwiftEndPoint: https://something.dataverse.edu/swift/v1 + +; S3 configuration: +; Dataverse files will be backed up onto AWS/S3, in the bucket specified. 
+; S3 authentication credentials are stored in the +; standard ~/.aws location + +[Notifications] +Email: somebody@dataverse.edu diff --git a/postgresql/testdata/scripts/backup/run_backup/config.py b/postgresql/testdata/scripts/backup/run_backup/config.py new file mode 100644 index 0000000..8faaa4f --- /dev/null +++ b/postgresql/testdata/scripts/backup/run_backup/config.py @@ -0,0 +1,17 @@ +import ConfigParser +import sys +Config = ConfigParser.ConfigParser() +Config.read("config.ini") + +def ConfigSectionMap(section): + dict1 = {} + options = Config.options(section) + for option in options: + try: + dict1[option] = Config.get(section, option) + if dict1[option] == -1: + sys.stderr.write("skip: %s\n" % option) + except: + print("exception on %s!" % option) + dict1[option] = None + return dict1 diff --git a/postgresql/testdata/scripts/backup/run_backup/database.py b/postgresql/testdata/scripts/backup/run_backup/database.py new file mode 100644 index 0000000..9c08038 --- /dev/null +++ b/postgresql/testdata/scripts/backup/run_backup/database.py @@ -0,0 +1,138 @@ +import psycopg2 +import sys +import pprint +from time import (time) +from datetime import (datetime, timedelta) +from config import (ConfigSectionMap) + +dataverse_db_connection=None +backup_db_connection=None + +def create_database_connection(database='database'): + Host = ConfigSectionMap("Database")['host'] + Port = ConfigSectionMap("Database")['port'] + Database = ConfigSectionMap("Database")[database] + Username = ConfigSectionMap("Database")['username'] + Password = ConfigSectionMap("Database")['password'] + + #print "Database Host: %s" % (Host) + #print "Database Port: %s" % (Port) + #print "Database Name: %s" % (Database) + #print "Username: %s" % (Username) + #print "Password: %s" % (Password) + + #Define our connection string + conn_string = "host='"+Host+"' dbname='"+Database+"' user='"+Username+"' password='"+Password+"'" + + #print "Connecting to database\n->%s" % (conn_string) + + # get a 
connection, if a connect cannot be made an exception will be raised here + conn = psycopg2.connect(conn_string) + + #print "Connected!\n" + + return conn + +def get_backupdb_connection(): + global backup_db_connection + + if backup_db_connection is None: + backup_db_connection = create_database_connection('backupdatabase') + + return backup_db_connection + +def query_database(sinceTimestamp=None): + global dataverse_db_connection + + dataverse_db_connection = create_database_connection() + + cursor = dataverse_db_connection.cursor() + + # Select data files from the database + # The query below is a bit monstrous, as we try to get all the information about the stored file + # from multiple tables in the single request. Note the "LEFT JOIN" in it - we want it to return + # the "datatable" object referencing this datafile, if such exists, or NULL otherwise. If the + # value is not NULL, we know this is a tabular data file. + dataverse_query="SELECT s.authority, s.identifier, o.storageidentifier, f.checksumtype, f.checksumvalue, f.filesize,o.createdate, datatable.id FROM datafile f LEFT JOIN datatable ON f.id = datatable.datafile_id, dataset s, dvobject o WHERE o.id = f.id AND o.owner_id = s.id AND s.harvestingclient_id IS null" + if sinceTimestamp is None: + cursor.execute(dataverse_query) + else: + dataverse_query = dataverse_query+" AND o.createdate > %s" + cursor.execute(dataverse_query, (sinceTimestamp,)) + + + records = cursor.fetchall() + + return records + +def get_last_timestamp(): + backup_db_connection = get_backupdb_connection() + + cursor = backup_db_connection.cursor() + + # select the last timestamp from the datafilestatus table: + dataverse_query="SELECT createdate FROM datafilestatus ORDER BY createdate DESC LIMIT 1" + + cursor.execute(dataverse_query) + + record = cursor.fetchone() + + if record is None: + #print "table is empty" + return None + + #timestamp = record[0] + timedelta(seconds=1) + timestamp = record[0] + # milliseconds are important! 
+ timestamp_str = timestamp.strftime('%Y-%m-%d %H:%M:%S.%f') + + return timestamp_str + +def get_datafile_status(dataset_authority, dataset_identifier, storage_identifier): + backup_db_connection = get_backupdb_connection() + cursor = backup_db_connection.cursor() + + # select the last timestamp from the datafilestatus table: + + dataverse_query="SELECT status FROM datafilestatus WHERE datasetidentifier=%s AND storageidentifier=%s;" + + dataset_id=dataset_authority+"/"+dataset_identifier + + cursor.execute(dataverse_query, (dataset_id, storage_identifier)) + + record = cursor.fetchone() + + if record is None: + #print "no backup status for this file" + return None + + backupstatus = record[0] + #print "last backup status: "+backupstatus + return backupstatus + +def record_datafile_status(dataset_authority, dataset_identifier, storage_identifier, status, createdate): + current_status = get_datafile_status(dataset_authority, dataset_identifier, storage_identifier) + + backup_db_connection = get_backupdb_connection() + cursor = backup_db_connection.cursor() + + createdate_str = createdate.strftime('%Y-%m-%d %H:%M:%S.%f') + nowdate_str = datetime.fromtimestamp(time()).strftime('%Y-%m-%d %H:%M:%S') + + if current_status is None: + query = "INSERT INTO datafilestatus (status, createdate, lastbackuptime, lastbackupmethod, datasetidentifier, storageidentifier) VALUES (%s, %s, %s, %s, %s, %s);" + else: + query = "UPDATE datafilestatus SET status=%s, createdate=%s, lastbackuptime=%s, lastbackupmethod=%s WHERE datasetidentifier=%s AND storageidentifier=%s;" + + dataset_id=dataset_authority+"/"+dataset_identifier + backup_method = ConfigSectionMap("Backup")['storagetype'] + + cursor.execute(query, (status, createdate_str, nowdate_str, backup_method, dataset_id, storage_identifier)) + + # finalize transaction: + backup_db_connection.commit() + cursor.close() + + + + diff --git a/postgresql/testdata/scripts/backup/run_backup/email_notification.py 
b/postgresql/testdata/scripts/backup/run_backup/email_notification.py new file mode 100644 index 0000000..ed3504b --- /dev/null +++ b/postgresql/testdata/scripts/backup/run_backup/email_notification.py @@ -0,0 +1,25 @@ +from config import (ConfigSectionMap) +from subprocess import Popen, PIPE, STDOUT +from time import (time) +from datetime import (datetime) + +def send_notification(text): + try: + notification_address = ConfigSectionMap("Notifications")['email'] + except: + notification_address = None + + if (notification_address is None): + raise ValueError('Notification email address is not configured') + + nowdate_str = datetime.fromtimestamp(time()).strftime('%Y-%m-%d %H:%M') + subject_str = ('Dataverse datafile backup report [%s]' % nowdate_str) + + p = Popen(['mail','-s',subject_str,notification_address], stdout=PIPE, stdin=PIPE, stderr=PIPE) + stdout_data = p.communicate(input=text)[0] + +def main(): + send_notification('backup report: test, please disregard') + +if __name__ == "__main__": + main() diff --git a/postgresql/testdata/scripts/backup/run_backup/requirements.txt b/postgresql/testdata/scripts/backup/run_backup/requirements.txt new file mode 100644 index 0000000..5696d13 --- /dev/null +++ b/postgresql/testdata/scripts/backup/run_backup/requirements.txt @@ -0,0 +1,6 @@ +# python2 requirements + +psycopg2 +boto3 +paramiko +# TODO: where to get `swiftclient` from diff --git a/postgresql/testdata/scripts/backup/run_backup/run_backup.py b/postgresql/testdata/scripts/backup/run_backup/run_backup.py new file mode 100644 index 0000000..7124d21 --- /dev/null +++ b/postgresql/testdata/scripts/backup/run_backup/run_backup.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python + +import ConfigParser +import psycopg2 +import sys +import io +import re +from database import (query_database, get_last_timestamp, record_datafile_status, get_datafile_status) +from storage import (open_dataverse_file) +from backup import (backup_file) +from email_notification import 
(send_notification) + +def main(): + rrmode = False + + if (len(sys.argv) > 1 and sys.argv[1] == '--rerun'): + rrmode = True + + if rrmode: + time_stamp = None + else: + time_stamp = get_last_timestamp() + + if time_stamp is None: + print "No time stamp! first run (or a full re-run)." + records = query_database() + else: + print "last backup: "+time_stamp + records = query_database(time_stamp) + + files_total=0 + files_success=0 + files_failed=0 + files_skipped=0 + + for result in records: + dataset_authority = result[0] + dataset_identifier = result[1] + storage_identifier = result[2] + checksum_type = result[3] + checksum_value = result[4] + file_size = result[5] + create_time = result[6] + is_tabular_data = result[7] + + if (checksum_value is None): + checksum_value = "MISSING" + + + if (storage_identifier is not None and dataset_identifier is not None and dataset_authority is not None): + files_total += 1 + print dataset_authority + "/" + dataset_identifier + "/" + storage_identifier + ", " + checksum_type + ": " + checksum_value + + file_input=None + + # if this is a re-run, we are only re-trying the files that have failed previously: + if (rrmode and get_datafile_status(dataset_authority, dataset_identifier, storage_identifier) == 'OK'): + files_skipped += 1 + continue + + try: + file_input = open_dataverse_file(dataset_authority, dataset_identifier, storage_identifier, is_tabular_data) + except: + print "failed to open file "+storage_identifier + file_input=None + + + if (file_input is not None): + try: + backup_file(file_input, dataset_authority, dataset_identifier, storage_identifier, checksum_type, checksum_value, file_size) + print "backed up file "+storage_identifier + record_datafile_status(dataset_authority, dataset_identifier, storage_identifier, 'OK', create_time) + files_success += 1 + except ValueError, ve: + exception_message = str(ve) + print "failed to back up file "+storage_identifier+": "+exception_message + if (re.match("^remote", 
exception_message) is not None): + record_datafile_status(dataset_authority, dataset_identifier, storage_identifier, 'FAIL_VERIFY', create_time) + else: + record_datafile_status(dataset_authority, dataset_identifier, storage_identifier, 'FAIL_WRITE', create_time) + files_failed += 1 + #TODO: add a separate failure status 'FAIL_VERIFY' - for when it looked like we were able to copy the file + # onto the remote storage system, but the checksum verification failed (?) + else: + record_datafile_status(dataset_authority, dataset_identifier, storage_identifier, 'FAIL_READ', create_time) + files_failed += 1 + + if (files_skipped > 0): + report = ('backup script run report: %d files processed; %d skipped (already backed up), %d success, %d failed' % (files_total, files_skipped, files_success, files_failed)) + else: + report = ('backup script run report: %d files processed; %d success, %d failed' % (files_total, files_success, files_failed)) + print report + send_notification(report) + +if __name__ == "__main__": + main() + + + diff --git a/postgresql/testdata/scripts/backup/run_backup/storage.py b/postgresql/testdata/scripts/backup/run_backup/storage.py new file mode 100644 index 0000000..b831e7e --- /dev/null +++ b/postgresql/testdata/scripts/backup/run_backup/storage.py @@ -0,0 +1,28 @@ +import io +import re +import boto3 +from config import (ConfigSectionMap) +from storage_filesystem import (open_storage_object_filesystem) +from storage_s3 import (open_storage_object_s3) + + +def open_dataverse_file(dataset_authority, dataset_identifier, storage_identifier, is_tabular_data): + m = re.search('^([a-z0-9]*)://(.*)$', storage_identifier) + if m is None: + # no storage identifier tag. 
(defaulting to filesystem storage) + storageTag = 'file' + objectLocation = storage_identifier; + else: + storageTag = m.group(1) + objectLocation = m.group(2) + + if storageTag == 'file': + byteStream = open_storage_object_filesystem(dataset_authority, dataset_identifier, objectLocation, is_tabular_data) + return byteStream + elif storageTag == 's3': + byteStream = open_storage_object_s3(dataset_authority, dataset_identifier, objectLocation, is_tabular_data) + return byteStream + elif storageTag == 'swift': + raise ValueError("backup of swift objects not supported yet") + + raise ValueError("Unknown or unsupported storage method: "+storage_identifier) diff --git a/postgresql/testdata/scripts/backup/run_backup/storage_filesystem.py b/postgresql/testdata/scripts/backup/run_backup/storage_filesystem.py new file mode 100644 index 0000000..f5cff99 --- /dev/null +++ b/postgresql/testdata/scripts/backup/run_backup/storage_filesystem.py @@ -0,0 +1,11 @@ +import io +import re +from config import (ConfigSectionMap) + +def open_storage_object_filesystem(dataset_authority, dataset_identifier, object_location, is_tabular_data): + filesystem_directory = ConfigSectionMap("Repository")['filesystemdirectory'] + if (is_tabular_data is not None): + object_location += ".orig" + file_path = filesystem_directory+"/"+dataset_authority+"/"+dataset_identifier+"/"+object_location + byte_stream = io.open(file_path, "rb") + return byte_stream diff --git a/postgresql/testdata/scripts/backup/run_backup/storage_s3.py b/postgresql/testdata/scripts/backup/run_backup/storage_s3.py new file mode 100644 index 0000000..94858ee --- /dev/null +++ b/postgresql/testdata/scripts/backup/run_backup/storage_s3.py @@ -0,0 +1,13 @@ +import io +import re +import boto3 + +def open_storage_object_s3(dataset_authority, dataset_identifier, object_location, is_tabular_data): + s3 = boto3.resource('s3') + bucket_name,object_name = object_location.split(":",1) + key = dataset_authority + "/" + dataset_identifier + "/" 
+ object_name; + if (is_tabular_data is not None): + key += ".orig" + s3_obj = s3.Object(bucket_name=bucket_name, key=key) + # "Body" is a byte stream associated with the object: + return s3_obj.get()['Body'] diff --git a/postgresql/testdata/scripts/database/3561-update.sql b/postgresql/testdata/scripts/database/3561-update.sql new file mode 100644 index 0000000..8ddd3d3 --- /dev/null +++ b/postgresql/testdata/scripts/database/3561-update.sql @@ -0,0 +1,24 @@ +-- create the workflow tables +CREATE TABLE WORKFLOW (ID SERIAL NOT NULL, NAME VARCHAR(255), PRIMARY KEY (ID)); +CREATE TABLE PENDINGWORKFLOWINVOCATION (INVOCATIONID VARCHAR(255) NOT NULL, DOIPROVIDER VARCHAR(255), IPADDRESS VARCHAR(255), NEXTMINORVERSIONNUMBER BIGINT, NEXTVERSIONNUMBER BIGINT, PENDINGSTEPIDX INTEGER, TYPEORDINAL INTEGER, USERID VARCHAR(255), WORKFLOW_ID BIGINT, DATASET_ID BIGINT, PRIMARY KEY (INVOCATIONID)); + +CREATE INDEX INDEX_DATASETLOCK_user_id ON DATASETLOCK (user_id); +CREATE INDEX INDEX_DATASETLOCK_dataset_id ON DATASETLOCK (dataset_id); + +-- Alter Dataset lock +ALTER TABLE DATASETLOCK ADD CONSTRAINT FK_DATASETLOCK_DATASET_ID FOREIGN KEY (DATASET_ID) REFERENCES DVOBJECT (ID); +ALTER TABLE DATASETLOCK ADD CONSTRAINT FK_DATASETLOCK_USER_ID FOREIGN KEY (USER_ID) REFERENCES AUTHENTICATEDUSER (ID); +ALTER TABLE DATASETLOCK ADD COLUMN REASON VARCHAR(255); + +-- All existing dataset locks are due to ingest. +UPDATE DATASETLOCK set REASON='Ingest'; + +-- /!\ Important! +-- change "1" to the an admin user id. 
+-- +INSERT INTO datasetlock (info, starttime, dataset_id, user_id, reason) +SELECT '', localtimestamp, dataset_id, 1, 'InReview' +FROM datasetversion +WHERE inreview=true; + +ALTER TABLE DATASETVERSION DROP COLUMN inreview; diff --git a/postgresql/testdata/scripts/database/drop-all.sh b/postgresql/testdata/scripts/database/drop-all.sh new file mode 100755 index 0000000..782465c --- /dev/null +++ b/postgresql/testdata/scripts/database/drop-all.sh @@ -0,0 +1,8 @@ +#!/bin/bash +PSQL=psql +DB_NAME=dvndb +SQL_FILENAME=dropall.sql + +$PSQL $DB_NAME -t -c"SELECT 'drop table \"' || tablename || '\" cascade;' FROM pg_tables WHERE schemaname='public';" > $SQL_FILENAME +$PSQL $DB_NAME -a -f $SQL_FILENAME +rm $SQL_FILENAME diff --git a/postgresql/testdata/scripts/database/drop-create.sh b/postgresql/testdata/scripts/database/drop-create.sh new file mode 100755 index 0000000..04138ee --- /dev/null +++ b/postgresql/testdata/scripts/database/drop-create.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +# Drops and creates the database. Assumes pg_dump and psql are in $PATH, and that the db does not need password. 
+DUMP=pg_dump +PSQL=psql +DB=dvndb +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +$DUMP -s $DB > temp-schema.sql +$PSQL -d $DB -f $DIR/drop-all.sql +$PSQL -d $DB -f temp-schema.sql +rm temp-schema.sql \ No newline at end of file diff --git a/postgresql/testdata/scripts/database/facetlist.sql b/postgresql/testdata/scripts/database/facetlist.sql new file mode 100644 index 0000000..83eb0f6 --- /dev/null +++ b/postgresql/testdata/scripts/database/facetlist.sql @@ -0,0 +1,5 @@ +-- default facets defined in https://redmine.hmdc.harvard.edu/issues/3490 +-- show selected facets by displayorder +SELECT title,name,datasetfield.id FROM dataversefacet, datasetfield WHERE dataversefacet.datasetfield_id = datasetfield.id ORDER BY dataversefacet.displayorder; +-- more detail +-- SELECT dataversefacet.id, title, name, datasetfield.id, dataversefacet.displayorder, dataverse_id FROM dataversefacet, datasetfield WHERE dataversefacet.datasetfield_id = datasetfield.id ORDER BY displayorder; diff --git a/postgresql/testdata/scripts/database/fedora/rebuild-and-test b/postgresql/testdata/scripts/database/fedora/rebuild-and-test new file mode 100755 index 0000000..07e3b1c --- /dev/null +++ b/postgresql/testdata/scripts/database/fedora/rebuild-and-test @@ -0,0 +1,20 @@ +#!/bin/sh +GLASSFISH_HOME=$HOME/tools/devguide-gf4/glassfish4 +ASADMIN=$GLASSFISH_HOME/glassfish/bin/asadmin +$ASADMIN stop-domain +psql -U postgres -c 'DROP DATABASE "dvnDb"' +scripts/search/clear +psql -U postgres -c 'CREATE DATABASE "dvnDb" WITH OWNER = "dvnApp"' +mvn package +$ASADMIN start-domain +# should probably use this instead: https://maven-glassfish-plugin.java.net/deploy-mojo.html +cp target/dataverse-4.0.war $GLASSFISH_HOME/glassfish/domains/domain1/autodeploy +sleep 30 +psql -U postgres dvnDb -f scripts/database/reference_data.sql +cd scripts/api +./datasetfields.sh +./setup-users.sh +./setup-dvs.sh +cd ../.. 
+scripts/search/tests/permissions +scripts/search/tests/delete-dataverse diff --git a/postgresql/testdata/scripts/database/homebrew/convert b/postgresql/testdata/scripts/database/homebrew/convert new file mode 100755 index 0000000..bd4ccbd --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/convert @@ -0,0 +1,4 @@ +#!/bin/sh +sed -i -e "s/postgres/$USER/" /tmp/dataverse_db.sql +sed -i -e 's/dvnapp/dataverse_app/' /tmp/dataverse_db.sql +sed -i -e 's/dvn-vm7.hmdc.harvard.edu:8983/localhost:8983/' /tmp/dataverse_db.sql diff --git a/postgresql/testdata/scripts/database/homebrew/create-database b/postgresql/testdata/scripts/database/homebrew/create-database new file mode 100755 index 0000000..20b1c75 --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/create-database @@ -0,0 +1,2 @@ +#!/bin/sh +~/.homebrew/bin/psql -c 'CREATE DATABASE "dataverse_db" WITH OWNER = "dataverse_app"' template1 diff --git a/postgresql/testdata/scripts/database/homebrew/create-role b/postgresql/testdata/scripts/database/homebrew/create-role new file mode 100755 index 0000000..114526f --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/create-role @@ -0,0 +1,2 @@ +#!/bin/sh +~/.homebrew/bin/psql -c "CREATE ROLE dataverse_app UNENCRYPTED PASSWORD 'secret' NOSUPERUSER CREATEDB CREATEROLE NOINHERIT LOGIN" template1 diff --git a/postgresql/testdata/scripts/database/homebrew/create-role-superuser b/postgresql/testdata/scripts/database/homebrew/create-role-superuser new file mode 100755 index 0000000..a8b2913 --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/create-role-superuser @@ -0,0 +1,3 @@ +#!/bin/sh +# so you don't have to sudo to postgres to create roles, etc. 
+~/.homebrew/bin/psql -c "CREATE ROLE $USER UNENCRYPTED PASSWORD 'secret' SUPERUSER CREATEDB CREATEROLE INHERIT LOGIN REPLICATION" template1 diff --git a/postgresql/testdata/scripts/database/homebrew/custom-build-number b/postgresql/testdata/scripts/database/homebrew/custom-build-number new file mode 100755 index 0000000..abc074e --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/custom-build-number @@ -0,0 +1,8 @@ +#!/bin/sh +if [ -z "$1" ]; then + BRANCH_COMMIT=$(git rev-parse --abbrev-ref HEAD)-$(git log --oneline | head -1 | awk '{print $1}') + echo "No custom build number specified. Using $BRANCH_COMMIT" + echo "build.number=$BRANCH_COMMIT" > src/main/java/BuildNumber.properties +else + echo "build.number=$@" > src/main/java/BuildNumber.properties +fi diff --git a/postgresql/testdata/scripts/database/homebrew/delete-all b/postgresql/testdata/scripts/database/homebrew/delete-all new file mode 100755 index 0000000..2d2c211 --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/delete-all @@ -0,0 +1,7 @@ +#!/bin/sh +/Applications/NetBeans/glassfish4/glassfish/bin/asadmin stop-domain +rm -rf /Applications/NetBeans/glassfish4/glassfish/domains/domain1/generated +scripts/database/homebrew/drop-database +scripts/search/clear +rm -rf ~/dataverse/files +scripts/database/homebrew/create-database diff --git a/postgresql/testdata/scripts/database/homebrew/devinstall b/postgresql/testdata/scripts/database/homebrew/devinstall new file mode 100755 index 0000000..89284b5 --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/devinstall @@ -0,0 +1,20 @@ +#!/bin/sh +cd scripts/installer +export DB_NAME=dataverse_db +export DB_PORT=5432 +export DB_HOST=localhost +export DB_USER=dataverse_app +export DB_PASS=secret +export RSERVE_HOST=localhost +export RSERVE_PORT=6311 +export RSERVE_USER=rserve +export RSERVE_PASS=rserve +export SMTP_SERVER=localhost +export HOST_ADDRESS=`hostname` +export FILES_DIR=$HOME/dataverse/files +export 
MEM_HEAP_SIZE=2048 +export GLASSFISH_DOMAIN=domain1 +export GLASSFISH_ROOT=/Applications/NetBeans/glassfish4 +cp pgdriver/postgresql-9.1-902.jdbc4.jar $GLASSFISH_ROOT/glassfish/lib +cp ../../conf/jhove/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf +./glassfish-setup.sh diff --git a/postgresql/testdata/scripts/database/homebrew/drop-database b/postgresql/testdata/scripts/database/homebrew/drop-database new file mode 100755 index 0000000..d0d4c3a --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/drop-database @@ -0,0 +1,2 @@ +#!/bin/sh +~/.homebrew/bin/psql -c 'DROP DATABASE "dataverse_db"' template1 diff --git a/postgresql/testdata/scripts/database/homebrew/drop-role b/postgresql/testdata/scripts/database/homebrew/drop-role new file mode 100755 index 0000000..a42711d --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/drop-role @@ -0,0 +1,2 @@ +#!/bin/sh +~/.homebrew/bin/psql -c "DROP ROLE dataverse_app" template1 diff --git a/postgresql/testdata/scripts/database/homebrew/dump b/postgresql/testdata/scripts/database/homebrew/dump new file mode 100755 index 0000000..94e71d1 --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/dump @@ -0,0 +1,2 @@ +#!/bin/sh +~/.homebrew/bin/pg_dump dataverse_db -f /tmp/dataverse_db.sql diff --git a/postgresql/testdata/scripts/database/homebrew/keys2tmp b/postgresql/testdata/scripts/database/homebrew/keys2tmp new file mode 100755 index 0000000..f91be78 --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/keys2tmp @@ -0,0 +1,17 @@ +#!/bin/sh +DIR=/tmp/keys +mkdir -p $DIR +key2tmp () { + #export $2=`grep apiToken /tmp/setup-all.sh.out | grep $1 | jq .data.apiToken | grep -v null | sed s/\"//g` + echo `grep apiToken /tmp/setup-all.sh.out | grep $1 | jq .data.apiToken | grep -v null | sed s/\"//g` > $DIR/$1 +} +key2tmp pete PETEKEY +key2tmp uma UMAKEY +key2tmp gabbi GABBIKEY +key2tmp cathy CATHYKEY +key2tmp nick NICKKEY +#echo "pete's key: $PETEKEY" 
+#echo "uma's key: $UMAKEY" +#echo "gabbi's key: $GABBIKEY" +#echo "cathy's key: $CATHYKEY" +#echo "nick's key: $NICKKEY" diff --git a/postgresql/testdata/scripts/database/homebrew/kill9glassfish b/postgresql/testdata/scripts/database/homebrew/kill9glassfish new file mode 100755 index 0000000..678a8a5 --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/kill9glassfish @@ -0,0 +1,2 @@ +#!/bin/sh +kill -9 `jps | grep ASMain | awk '{print $1}'` diff --git a/postgresql/testdata/scripts/database/homebrew/rebuild-and-test b/postgresql/testdata/scripts/database/homebrew/rebuild-and-test new file mode 100755 index 0000000..670fb84 --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/rebuild-and-test @@ -0,0 +1,11 @@ +#!/bin/sh +scripts/database/homebrew/run-post-create-post-deploy +echo "Publishing root dataverse" +scripts/search/tests/publish-dataverse-root +echo "---" +echo "Creating search users" +scripts/search/populate-users > /dev/null +scripts/search/create-users > /dev/null +scripts/search/tests/grant-authusers-add-on-root +scripts/search/tests/create-all-and-test +#scripts/search/tests/create-saved-search-and-test diff --git a/postgresql/testdata/scripts/database/homebrew/restore b/postgresql/testdata/scripts/database/homebrew/restore new file mode 100755 index 0000000..75e6a22 --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/restore @@ -0,0 +1,2 @@ +#!/bin/sh +~/.homebrew/bin/psql dataverse_db -f /tmp/dataverse_db.sql diff --git a/postgresql/testdata/scripts/database/homebrew/run-post-create-post-deploy b/postgresql/testdata/scripts/database/homebrew/run-post-create-post-deploy new file mode 100755 index 0000000..671958e --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/run-post-create-post-deploy @@ -0,0 +1,7 @@ +#!/bin/sh +scripts/database/homebrew/run-reference_data.sql > /tmp/run-reference_data.sql +psql dataverse_db -f doc/sphinx-guides/source/_static/util/createsequence.sql +psql -c 'ALTER TABLE 
datasetidentifier_seq OWNER TO "dataverse_app";' dataverse_db +cd scripts/api +./setup-all.sh --insecure > /tmp/setup-all.sh.out 2> /tmp/setup-all.sh.err +cd ../.. diff --git a/postgresql/testdata/scripts/database/homebrew/run-reference_data.sql b/postgresql/testdata/scripts/database/homebrew/run-reference_data.sql new file mode 100755 index 0000000..99fa05b --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/run-reference_data.sql @@ -0,0 +1,2 @@ +#!/bin/sh +~/.homebrew/bin/psql dataverse_db -f $HOME/NetBeansProjects/dataverse/scripts/database/reference_data.sql diff --git a/postgresql/testdata/scripts/database/homebrew/set-env-for-setup b/postgresql/testdata/scripts/database/homebrew/set-env-for-setup new file mode 100755 index 0000000..98cbb98 --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/set-env-for-setup @@ -0,0 +1,22 @@ +#!/bin/bash +# put these variables into your env with `source path/to/script` + +export GLASSFISH_ROOT='/Applications/NetBeans/glassfish4' +export -n GLASSFISH_ROOT +unset GLASSFISH_ROOT +echo $GLASSFISH_ROOT + +export DB_NAME_CUSTOM='dataverse_db' +#export -n DB_NAME_CUSTOM +#unset DB_NAME_CUSTOM +echo $DB_NAME_CUSTOM + +export DB_USER_CUSTOM='dataverse_app' +#export -n DB_USER_CUSTOM +#unset DB_USER_CUSTOM +echo $DB_USER_CUSTOM + +export DB_PASS_CUSTOM='secret' +#export -n DB_PASS_CUSTOM +#unset DB_PASS_CUSTOM +echo $DB_PASS_CUSTOM diff --git a/postgresql/testdata/scripts/database/homebrew/superuser-password-update b/postgresql/testdata/scripts/database/homebrew/superuser-password-update new file mode 100755 index 0000000..a955ce6 --- /dev/null +++ b/postgresql/testdata/scripts/database/homebrew/superuser-password-update @@ -0,0 +1,5 @@ +#!/bin/sh +# default "admin" password +# $2a$10$H8jca9BBbvCQAs2fU6TaseQeyD6ho3vZuIBKdlknDaR5lh69effde +~/.homebrew/bin/psql -c 'select username,encryptedpassword from builtinuser where id = 1' dataverse_db +~/.homebrew/bin/psql -c "update builtinuser set 
encryptedpassword='\$2a\$10\$H8jca9BBbvCQAs2fU6TaseQeyD6ho3vZuIBKdlknDaR5lh69effde' where id = 1" dataverse_db diff --git a/postgresql/testdata/scripts/database/reference_data.sql b/postgresql/testdata/scripts/database/reference_data.sql new file mode 100644 index 0000000..2e71f0b --- /dev/null +++ b/postgresql/testdata/scripts/database/reference_data.sql @@ -0,0 +1,51 @@ +-- using http://dublincore.org/schemas/xmls/qdc/dcterms.xsd because at http://dublincore.org/schemas/xmls/ it's the schema location for http://purl.org/dc/terms/ which is referenced in http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html +INSERT INTO foreignmetadataformatmapping(id, name, startelement, displayName, schemalocation) VALUES (1, 'http://purl.org/dc/terms/', 'entry', 'dcterms: DCMI Metadata Terms', 'http://dublincore.org/schemas/xmls/qdc/dcterms.xsd'); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (1, ':title', 'title', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (2, ':identifier', 'otherIdValue', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (3, ':creator', 'authorName', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (4, ':date', 'productionDate', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (5, ':subject', 'keywordValue', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, 
parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (6, ':description', 'dsDescriptionValue', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (7, ':relation', 'relatedMaterial', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (8, ':isReferencedBy', 'publicationCitation', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (9, 'holdingsURI', 'publicationURL', TRUE, 8, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (10, 'agency', 'publicationIDType', TRUE, 8, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (11, 'IDNo', 'publicationIDNumber', TRUE, 8, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (12, ':coverage', 'otherGeographicCoverage', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (13, ':type', 'kindOfData', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (14, ':source', 'dataSources', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (15, 
'affiliation', 'authorAffiliation', TRUE, 3, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (16, ':contributor', 'contributorName', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (17, 'type', 'contributorType', TRUE, 16, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (18, ':publisher', 'producerName', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (19, ':language', 'language', FALSE, NULL, 1 ); + +INSERT INTO guestbook( + emailrequired, enabled, institutionrequired, createtime, + "name", namerequired, positionrequired, dataverse_id) + VALUES ( false, true, false, now(), + 'Default', false, false, null); + +-- TODO: Remove if http://stackoverflow.com/questions/25743191/how-to-add-a-case-insensitive-jpa-unique-constraint +-- gets an answer. 
See also https://github.com/IQSS/dataverse/issues/2598#issuecomment-158219334 +CREATE UNIQUE INDEX dataverse_alias_unique_idx on dataverse (LOWER(alias)); +CREATE UNIQUE INDEX index_authenticateduser_lower_email ON authenticateduser (lower(email)); +CREATE UNIQUE INDEX index_builtinuser_lower_email ON builtinuser (lower(email)); + +--Edit Dataset: Investigate and correct multiple draft issue: https://github.com/IQSS/dataverse/issues/2132 +--This unique index will prevent the multiple draft issue +CREATE UNIQUE INDEX one_draft_version_per_dataset ON datasetversion +(dataset_id) WHERE versionstate='DRAFT'; + + +INSERT INTO worldmapauth_tokentype +( name, + created, + contactemail, hostname, ipaddress, + mapitlink, md5, + modified, timelimitminutes) + VALUES ( 'GEOCONNECT', current_timestamp, + 'support@dataverse.org', 'geoconnect.datascience.iq.harvard.edu', '140.247.115.127', + 'http://geoconnect.datascience.iq.harvard.edu/shapefile/map-it', + '38c0a931b2d582a5c43fc79405b30c22', + current_timestamp, 30); diff --git a/postgresql/testdata/scripts/database/upgrades/upgrade_v4.0.1_to_v4.1.sql b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.0.1_to_v4.1.sql new file mode 100644 index 0000000..4ac1789 --- /dev/null +++ b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.0.1_to_v4.1.sql @@ -0,0 +1,10 @@ +/* ---------------------------------------- + Add unique constraint to prevent multiple drafts + Ticket 2132 +*/ ---------------------------------------- + +ALTER TABLE datasetversion +ADD CONSTRAINT uq_datasetversion UNIQUE(dataset_id, versionnumber, minorversionnumber); + +-- make sure Member role has DownloadFilePermission +update dataverserole set permissionbits=28 where alias='member'; \ No newline at end of file diff --git a/postgresql/testdata/scripts/database/upgrades/upgrade_v4.0_to_v4.0.1.sql b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.0_to_v4.0.1.sql new file mode 100644 index 0000000..c72962a --- /dev/null +++ 
b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.0_to_v4.0.1.sql @@ -0,0 +1,196 @@ +/* ---------------------------------------- + Description: These SQL statements in this file relate to the following tickets + + (1) "Index Check" - https://github.com/IQSS/dataverse/issues/1880 + Summary: Add indices to existing columns. + +*/ ---------------------------------------- +/* ---------------------------------------- + actionlogrecord indices (ActionLogRecord.java) +*/ ---------------------------------------- +CREATE INDEX index_actionlogrecord_useridentifier ON actionlogrecord (useridentifier); +CREATE INDEX index_actionlogrecord_actiontype ON actionlogrecord (actiontype); +CREATE INDEX index_actionlogrecord_starttime ON actionlogrecord (starttime); +/* ---------------------------------------- + authenticationproviderrow index (AuthenticationProviderRow.java) +*/ ---------------------------------------- +CREATE INDEX index_authenticationproviderrow_enabled ON authenticationproviderrow (enabled); +/* ---------------------------------------- + builtinuser index (BuiltInUser.java) +*/ ---------------------------------------- +CREATE INDEX index_builtinuser_lastname ON builtinuser (lastname); +/* ---------------------------------------- + controlledvocabalternate indices (ControlledVocabAlternate.java) +*/ ---------------------------------------- +CREATE INDEX index_controlledvocabalternate_controlledvocabularyvalue_id ON controlledvocabalternate (controlledvocabularyvalue_id); +CREATE INDEX index_controlledvocabalternate_datasetfieldtype_id ON controlledvocabalternate (datasetfieldtype_id); +/* ---------------------------------------- + controlledvocabularyvalue indices (ControlledVocabularyValue.java) +*/ ---------------------------------------- +CREATE INDEX index_controlledvocabularyvalue_datasetfieldtype_id ON controlledvocabularyvalue (datasetfieldtype_id); +CREATE INDEX index_controlledvocabularyvalue_displayorder ON controlledvocabularyvalue 
(displayorder); +/* ---------------------------------------- + customfieldmap indices (CustomFieldMap.java) +*/ ---------------------------------------- +CREATE INDEX index_customfieldmap_sourcedatasetfield ON customfieldmap (sourcedatasetfield); +CREATE INDEX index_customfieldmap_sourcetemplate ON customfieldmap (sourcetemplate); +/* ---------------------------------------- + datafile indices (DataFile.java) +*/ ---------------------------------------- +CREATE INDEX index_datafile_ingeststatus ON datafile (ingeststatus); +CREATE INDEX index_datafile_md5 ON datafile (md5); +CREATE INDEX index_datafile_contenttype ON datafile (contenttype); +CREATE INDEX index_datafile_restricted ON datafile (restricted); +/* ---------------------------------------- + datasetfielddefaultvalue indices (DatasetFieldDefaultValue.java) +*/ ---------------------------------------- +CREATE INDEX index_datasetfielddefaultvalue_datasetfield_id ON datasetfielddefaultvalue (datasetfield_id); +CREATE INDEX index_datasetfielddefaultvalue_defaultvalueset_id ON datasetfielddefaultvalue (defaultvalueset_id); +CREATE INDEX index_datasetfielddefaultvalue_parentdatasetfielddefaultvalue_id ON datasetfielddefaultvalue (parentdatasetfielddefaultvalue_id); +CREATE INDEX index_datasetfielddefaultvalue_displayorder ON datasetfielddefaultvalue (displayorder); +/* ---------------------------------------- + datasetlock indices (DatasetLock.java) +*/ ---------------------------------------- +CREATE INDEX index_datasetlock_user_id ON datasetlock (user_id); +CREATE INDEX index_datasetlock_dataset_id ON datasetlock (dataset_id); +/* ---------------------------------------- + datasetversionuser indices (DatasetVersionUser.java) +*/ ---------------------------------------- +CREATE INDEX index_datasetversionuser_authenticateduser_id ON datasetversionuser (authenticateduser_id); +CREATE INDEX index_datasetversionuser_datasetversion_id ON datasetversionuser (datasetversion_id); +/* 
---------------------------------------- + dataverse indices (Dataverse.java) +*/ ---------------------------------------- +CREATE INDEX index_dataverse_fk_dataverse_id ON dataverse (fk_dataverse_id); +CREATE INDEX index_dataverse_defaultcontributorrole_id ON dataverse (defaultcontributorrole_id); +CREATE INDEX index_dataverse_defaulttemplate_id ON dataverse (defaulttemplate_id); +CREATE INDEX index_dataverse_alias ON dataverse (alias); +CREATE INDEX index_dataverse_affiliation ON dataverse (affiliation); +CREATE INDEX index_dataverse_dataversetype ON dataverse (dataversetype); +CREATE INDEX index_dataverse_facetroot ON dataverse (facetroot); +CREATE INDEX index_dataverse_guestbookroot ON dataverse (guestbookroot); +CREATE INDEX index_dataverse_metadatablockroot ON dataverse (metadatablockroot); +CREATE INDEX index_dataverse_templateroot ON dataverse (templateroot); +CREATE INDEX index_dataverse_permissionroot ON dataverse (permissionroot); +CREATE INDEX index_dataverse_themeroot ON dataverse (themeroot); +/* ---------------------------------------- + dataversecontact indices (DataverseContact.java) +*/ ---------------------------------------- +CREATE INDEX index_dataversecontact_dataverse_id ON dataversecontact (dataverse_id); +CREATE INDEX index_dataversecontact_contactemail ON dataversecontact (contactemail); +CREATE INDEX index_dataversecontact_displayorder ON dataversecontact (displayorder); +/* ---------------------------------------- + dataversefacet indices (DataverseFacet.java) +*/ ---------------------------------------- +CREATE INDEX index_dataversefacet_dataverse_id ON dataversefacet (dataverse_id); +CREATE INDEX index_dataversefacet_datasetfieldtype_id ON dataversefacet (datasetfieldtype_id); +CREATE INDEX index_dataversefacet_displayorder ON dataversefacet (displayorder); +/* ---------------------------------------- + dataversefeatureddataverse indices (DataverseFeaturedDataverse.java) +*/ ---------------------------------------- +CREATE INDEX 
index_dataversefeatureddataverse_dataverse_id ON dataversefeatureddataverse (dataverse_id); +CREATE INDEX index_dataversefeatureddataverse_featureddataverse_id ON dataversefeatureddataverse (featureddataverse_id); +CREATE INDEX index_dataversefeatureddataverse_displayorder ON dataversefeatureddataverse (displayorder); +/* ---------------------------------------- + dataversefieldtypeinputlevel indices (DataverseFieldTypeInputLevel.java) +*/ ---------------------------------------- +CREATE INDEX index_dataversefieldtypeinputlevel_dataverse_id ON dataversefieldtypeinputlevel (dataverse_id); +CREATE INDEX index_dataversefieldtypeinputlevel_datasetfieldtype_id ON dataversefieldtypeinputlevel (datasetfieldtype_id); +CREATE INDEX index_dataversefieldtypeinputlevel_required ON dataversefieldtypeinputlevel (required); +/* ---------------------------------------- + dataverserole indices (DataverseRole.java) +*/ ---------------------------------------- +CREATE INDEX index_dataverserole_owner_id ON dataverserole (owner_id); +CREATE INDEX index_dataverserole_name ON dataverserole (name); +CREATE INDEX index_dataverserole_alias ON dataverserole (alias); +/* ---------------------------------------- + dvobject indices (DvObject.java) +*/ ---------------------------------------- +CREATE INDEX index_dvobject_dtype ON dvobject (dtype); +/* Should already exist: +CREATE INDEX index_dvobject_owner_id ON dvobject (owner_id); +CREATE INDEX index_dvobject_creator_id ON dvobject (creator_id); +CREATE INDEX index_dvobject_releaseuser_id ON dvobject (releaseuser_id); +*/ +/* ---------------------------------------- + explicitgroup indices (ExplicitGroup.java) +*/ ---------------------------------------- +CREATE INDEX index_explicitgroup_owner_id ON explicitgroup (owner_id); +CREATE INDEX index_explicitgroup_groupalias ON explicitgroup (groupalias); +CREATE INDEX index_explicitgroup_groupaliasinowner ON explicitgroup (groupaliasinowner); +/* ---------------------------------------- + 
foreignmetadatafieldmapping indices (ForeignMetadataFieldMapping.java) +*/ ---------------------------------------- +CREATE INDEX index_foreignmetadatafieldmapping_foreignmetadataformatmapping_id ON foreignmetadatafieldmapping (foreignmetadataformatmapping_id); +CREATE INDEX index_foreignmetadatafieldmapping_foreignfieldxpath ON foreignmetadatafieldmapping (foreignfieldxpath); +CREATE INDEX index_foreignmetadatafieldmapping_parentfieldmapping_id ON foreignmetadatafieldmapping (parentfieldmapping_id); +/* ---------------------------------------- + foreignmetadataformatmapping index (ForeignMetadataFormatMapping.java) +*/ ---------------------------------------- +CREATE INDEX index_foreignmetadataformatmapping_name ON foreignmetadataformatmapping (name); +/* ---------------------------------------- + harvestingdataverseconfig indices (HarvestingDataverseConfig.java) +*/ ---------------------------------------- +CREATE INDEX index_harvestingdataverseconfig_dataverse_id ON harvestingdataverseconfig (dataverse_id); +CREATE INDEX index_harvestingdataverseconfig_harvesttype ON harvestingdataverseconfig (harvesttype); +CREATE INDEX index_harvestingdataverseconfig_harveststyle ON harvestingdataverseconfig (harveststyle); +CREATE INDEX index_harvestingdataverseconfig_harvestingurl ON harvestingdataverseconfig (harvestingurl); +/* ---------------------------------------- + ipv4range index (IPv4Range.java) +*/ ---------------------------------------- +CREATE INDEX index_ipv4range_owner_id ON ipv4range (owner_id); +/* ---------------------------------------- + ipv6range index (IPv6Range.java) +*/ ---------------------------------------- +CREATE INDEX index_ipv6range_owner_id ON ipv6range (owner_id); +/* ---------------------------------------- + maplayermetadata indices (MapLayerMetadata.java) +*/ ---------------------------------------- +CREATE INDEX index_maplayermetadata_dataset_id ON maplayermetadata (dataset_id); +CREATE INDEX index_maplayermetadata_datafile_id ON 
maplayermetadata (datafile_id); +/* ---------------------------------------- + metadatablock indices (MetadataBlock.java) +*/ ---------------------------------------- +CREATE INDEX index_metadatablock_name ON metadatablock (name); +CREATE INDEX index_metadatablock_owner_id ON metadatablock (owner_id); +/* ---------------------------------------- + passwordresetdata indices (PasswordResetData.java) +*/ ---------------------------------------- +CREATE INDEX index_passwordresetdata_token ON passwordresetdata (token); +CREATE INDEX index_passwordresetdata_builtinuser_id ON passwordresetdata (builtinuser_id); +/* ---------------------------------------- + persistedglobalgroup indices (PersistedGlobalGroup.java) +*/ ---------------------------------------- +CREATE INDEX index_persistedglobalgroup_persistedgroupalias ON persistedglobalgroup (persistedgroupalias); +CREATE INDEX index_persistedglobalgroup_dtype ON persistedglobalgroup (dtype); +/* ---------------------------------------- + roleassignment indices (RoleAssignment.java) +*/ ---------------------------------------- +CREATE INDEX index_roleassignment_assigneeidentifier ON roleassignment (assigneeidentifier); +CREATE INDEX index_roleassignment_definitionpoint_id ON roleassignment (definitionpoint_id); +CREATE INDEX index_roleassignment_role_id ON roleassignment (role_id); +/* ---------------------------------------- + savedsearch indices (SavedSearch.java) +*/ ---------------------------------------- +CREATE INDEX index_savedsearch_definitionpoint_id ON savedsearch (definitionpoint_id); +CREATE INDEX index_savedsearch_creator_id ON savedsearch (creator_id); +/* ---------------------------------------- + savedsearchfilterquery index (SavedSearchFilterQuery.java) +*/ ---------------------------------------- +CREATE INDEX index_savedsearchfilterquery_savedsearch_id ON savedsearchfilterquery (savedsearch_id); +/* ---------------------------------------- + template index (Template.java) +*/ 
---------------------------------------- +CREATE INDEX index_template_dataverse_id ON template (dataverse_id); +/* ---------------------------------------- + worldmapauth_token indices (WorldMapToken.java) +*/ ---------------------------------------- +CREATE INDEX index_worldmapauth_token_application_id ON worldmapauth_token (application_id); +CREATE INDEX index_worldmapauth_token_datafile_id ON worldmapauth_token (datafile_id); +CREATE INDEX index_worldmapauth_token_dataverseuser_id ON worldmapauth_token (dataverseuser_id); +/*------------------------------------------ + Add Compound Unique Constraint to dataversefieldtypeinputlevel + combining dataverse_id and datasetfieldtype_id +*/------------------------------------------ +ALTER TABLE dataversefieldtypeinputlevel + ADD CONSTRAINT unq_dataversefieldtypeinputlevel_add UNIQUE (dataverse_id, datasetfieldtype_id); \ No newline at end of file diff --git a/postgresql/testdata/scripts/database/upgrades/upgrade_v4.1_to_v4.2.sql b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.1_to_v4.2.sql new file mode 100644 index 0000000..89a8954 --- /dev/null +++ b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.1_to_v4.2.sql @@ -0,0 +1,36 @@ +/* --------------------------------------- +Separate Terms of Use and Access from Dataset Version +and add to Template +*/ --------------------------------------- +ALTER TABLE template +ADD termsofuseandaccess_id bigint; + +ALTER TABLE datasetversion +ADD termsofuseandaccess_id bigint; + +/* ------------------------------------------------- +Migrate terms of use and access to the new table +reset counter of the id for the new table +*/ ------------------------------------------------- + +INSERT INTO termsofuseandaccess + (id, availabilitystatus, citationrequirements, conditions, confidentialitydeclaration, +contactforaccess, depositorrequirements, disclaimer, fileaccessrequest, license, originalarchive, restrictions, sizeofcollection, +specialpermissions, studycompletion, 
termsofaccess, termsofuse) +SELECT id, availabilitystatus, citationrequirements, conditions, confidentialitydeclaration, +contactforaccess, depositorrequirements, disclaimer, fileaccessrequest, license, originalarchive, restrictions, sizeofcollection, +specialpermissions, studycompletion, termsofaccess, termsofuse + FROM datasetversion; + +update datasetversion set termsofuseandaccess_id = id; + +SELECT setval(pg_get_serial_sequence('termsofuseandaccess', 'id'), coalesce(max(id),0) + 1, false) FROM datasetversion; + +/*------------------------------------------- +Clean up bad data where datasets in review +did NOT have their flags reset +on publish +*/------------------------------------------- + +UPDATE datasetversion SET inreview = false where inreview = true +and versionstate = 'RELEASED'; \ No newline at end of file diff --git a/postgresql/testdata/scripts/database/upgrades/upgrade_v4.2.1_to_v4.2.2.sql b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.2.1_to_v4.2.2.sql new file mode 100644 index 0000000..f58a763 --- /dev/null +++ b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.2.1_to_v4.2.2.sql @@ -0,0 +1,23 @@ +-- A dataverse alias should not be case sensitive: https://github.com/IQSS/dataverse/issues/2598 +CREATE UNIQUE INDEX dataverse_alias_unique_idx on dataverse (LOWER(alias)); +-- If creating the index fails, check for dataverse with the same alias using this query: +-- select alias from dataverse where lower(alias) in (select lower(alias) from dataverse group by lower(alias) having count(*) >1) order by lower(alias); + + +--Edit Dataset: Investigate and correct multiple draft issue: https://github.com/IQSS/dataverse/issues/2132 +--This unique index will prevent the multiple draft issue +CREATE UNIQUE INDEX one_draft_version_per_dataset ON datasetversion +(dataset_id) WHERE versionstate='DRAFT'; +--It may not be applied until all of the datasets with +--multiple drafts have been resolved + + +--Guestbook: Entering more text in any 
textbox field, custom or not, fails to write to db but still downloads file.: https://github.com/IQSS/dataverse/issues/2752 +--Modify column to allow essay responses to guestbook custom questions +ALTER TABLE customquestionresponse + ALTER COLUMN response TYPE text; + +-- A new boolean in the DvObject table, to indicate that we have a generated thumbnail/preview image +-- for this object. +-- Added by Leonid, Nov. 23 2015 +ALTER TABLE dvobject ADD COLUMN previewImageAvailable BOOLEAN; diff --git a/postgresql/testdata/scripts/database/upgrades/upgrade_v4.2.4_to_4.3.sql b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.2.4_to_4.3.sql new file mode 100644 index 0000000..8950619 --- /dev/null +++ b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.2.4_to_4.3.sql @@ -0,0 +1,59 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +/** + * Author: skraffmi + * Created: Mar 4, 2016 + */ + + +-- remove non used columns from datasetversion +alter table +datasetversion +drop column if exists availabilitystatus, +drop column if exists citationrequirements, +drop column if exists conditions, +drop column if exists confidentialitydeclaration, +drop column if exists contactforaccess, +drop column if exists dataaccessplace, +drop column if exists depositorrequirements, +drop column if exists disclaimer, +drop column if exists fileaccessrequest, +drop column if exists license, +drop column if exists originalarchive, +drop column if exists restrictions, +drop column if exists sizeofcollection, +drop column if exists specialpermissions, +drop column if exists studycompletion, +drop column if exists termsofaccess, +drop column if exists termsofuse; + + +-- Add new foreign key to dataset for citation date (from datasetfieldtype) +ALTER TABLE dataset ADD COLUMN citationdatedatasetfieldtype_id bigint; + +ALTER TABLE dataset + ADD 
CONSTRAINT fk_dataset_citationdatedatasetfieldtype_id FOREIGN KEY (citationdatedatasetfieldtype_id) + REFERENCES datasetfieldtype (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION; + + +-- Add new indices for case insensitive e-mails +CREATE UNIQUE INDEX index_authenticateduser_lower_email ON authenticateduser (lower(email)); +CREATE UNIQUE INDEX index_builtinuser_lower_email ON builtinuser (lower(email)); + + +/* + For ticket #2957, additional columns for mapping of tabular data +*/ +-- > Distinguishes a mapped Tabular file from a shapefile +ALTER TABLE maplayermetadata ADD COLUMN isjoinlayer BOOLEAN default false; +-- > Description of the tabular join. e.g. joined to layer XYZ on column TRACT, etc +ALTER TABLE maplayermetadata ADD COLUMN joindescription TEXT default NULL; +-- > For all maps, store the WorldMap links to generate alternative versions, +-- e.g. PNG, zipped shapefile, GeoJSON, Excel, etc +ALTER TABLE maplayermetadata ADD COLUMN maplayerlinks TEXT default NULL; + + diff --git a/postgresql/testdata/scripts/database/upgrades/upgrade_v4.4_to_v4.5.sql b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.4_to_v4.5.sql new file mode 100644 index 0000000..6f9a0ca --- /dev/null +++ b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.4_to_v4.5.sql @@ -0,0 +1,11 @@ +-- A Private URL is a specialized role assignment with a token. 
+ALTER TABLE roleassignment ADD COLUMN privateurltoken character varying(255); +-- "Last Export Time" added to the dataset: +ALTER TABLE dataset ADD COLUMN lastExportTime TIMESTAMP; +-- Direct link to the harvesting configuration, for harvested datasets: +ALTER TABLE dataset ADD COLUMN harvestingClient_id bigint; +-- For harvested datasets, native OAI identifier used by the original OAI server +ALTER TABLE dataset ADD COLUMN harvestIdentifier VARCHAR(255); +-- Add extra rules to the Dublin Core import logic: +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (18, ':publisher', 'producerName', FALSE, NULL, 1 ); +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (19, ':language', 'language', FALSE, NULL, 1 ); diff --git a/postgresql/testdata/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql new file mode 100644 index 0000000..eb7d954 --- /dev/null +++ b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql @@ -0,0 +1,8 @@ +ALTER TABLE datafile ADD COLUMN checksumtype character varying(255); +UPDATE datafile SET checksumtype = 'MD5'; +ALTER TABLE datafile ALTER COLUMN checksumtype SET NOT NULL; +-- alternate statement for sbgrid.org and others interested in SHA-1 support +-- note that in the database we use "SHA1" (no hyphen) but the GUI will show "SHA-1" +--UPDATE datafile SET checksumtype = 'SHA1'; +ALTER TABLE datafile RENAME md5 TO checksumvalue; +ALTER TABLE filemetadata ADD COLUMN directorylabel character varying(255); diff --git a/postgresql/testdata/scripts/database/upgrades/upgrade_v4.5_to_v4.5.1.sql b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.5_to_v4.5.1.sql new file mode 100644 index 0000000..6296fca --- /dev/null +++ 
b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.5_to_v4.5.1.sql @@ -0,0 +1 @@ +ALTER TABLE authenticateduser ADD COLUMN emailconfirmed timestamp without time zone; diff --git a/postgresql/testdata/scripts/database/upgrades/upgrade_v4.6.1_to_v4.6.2.sql b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.6.1_to_v4.6.2.sql new file mode 100644 index 0000000..bc06f11 --- /dev/null +++ b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.6.1_to_v4.6.2.sql @@ -0,0 +1,3 @@ +ALTER TABLE dataset ADD COLUMN useGenericThumbnail boolean; +ALTER TABLE maplayermetadata ADD COLUMN lastverifiedtime timestamp without time zone; +ALTER TABLE maplayermetadata ADD COLUMN lastverifiedstatus bigint; diff --git a/postgresql/testdata/scripts/database/upgrades/upgrade_v4.6.2_to_v4.7.sql b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.6.2_to_v4.7.sql new file mode 100644 index 0000000..08d73a6 --- /dev/null +++ b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.6.2_to_v4.7.sql @@ -0,0 +1,2 @@ +--Uncomment to preserve "Dataverse" at end of each dataverse name. 
+--UPDATE dataverse SET name = name || ' Dataverse'; \ No newline at end of file diff --git a/postgresql/testdata/scripts/database/upgrades/upgrade_v4.6_to_v4.6.1.sql b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.6_to_v4.6.1.sql new file mode 100644 index 0000000..d4da4c2 --- /dev/null +++ b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.6_to_v4.6.1.sql @@ -0,0 +1,7 @@ +DELETE FROM authenticationproviderrow where id = 'echo-simple'; +DELETE FROM authenticationproviderrow where id = 'echo-dignified'; +-- For DataFile, file replace functionality: +ALTER TABLE datafile ADD COLUMN rootdatafileid bigint default -1; +ALTER TABLE datafile ADD COLUMN previousdatafileid bigint default null; +-- For existing DataFile objects, update rootDataFileId values: +UPDATE datafile SET rootdatafileid = -1; diff --git a/postgresql/testdata/scripts/database/upgrades/upgrade_v4.7.1_to_v4.8.sql b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.7.1_to_v4.8.sql new file mode 100644 index 0000000..2ec3121 --- /dev/null +++ b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.7.1_to_v4.8.sql @@ -0,0 +1,15 @@ +-- Updates the database to add a storage identifier to each DvObject +ALTER TABLE dvobject ADD COLUMN storageidentifier character varying(255); + +UPDATE dvobject +SET storageidentifier=(SELECT datafile.filesystemname +FROM datafile +WHERE datafile.id=dvobject.id AND dvobject.dtype='DataFile') where dvobject.dtype='DataFile'; + +UPDATE dvobject +SET storageidentifier=(select concat('file://',authority::text,ds.doiseparator::text,ds.identifier::text) +FROM dataset ds +WHERE dvobject.id=ds.id) +WHERE storageidentifier IS NULL; + +ALTER TABLE datafile DROP COLUMN filesystemname; diff --git a/postgresql/testdata/scripts/database/upgrades/upgrade_v4.7_to_v4.7.1.sql b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.7_to_v4.7.1.sql new file mode 100644 index 0000000..5eef4a2 --- /dev/null +++ 
b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.7_to_v4.7.1.sql @@ -0,0 +1,35 @@ +-- Adding new columns for "createdtime", "lastlogintime", and "lastapiusetime" +-- Default "createdtime" to 1/1/2000 +-- Dropping "modificationtime" as it is inconsistent between user auths and best replaced by the new columns. +ALTER TABLE authenticateduser ADD COLUMN createdtime TIMESTAMP NOT NULL DEFAULT '01-01-2000 00:00:00'; +ALTER TABLE authenticateduser ADD COLUMN lastlogintime TIMESTAMP DEFAULT NULL; +ALTER TABLE authenticateduser ADD COLUMN lastapiusetime TIMESTAMP DEFAULT NULL; +ALTER TABLE authenticateduser DROP COLUMN modificationtime; + +-- Removing authenticated builtin users who do not exist in the builtin table because they were created through faulty validation +-- creates view containing authentication ids that you will be deleting +CREATE TEMP VIEW useridstodelete AS (SELECT DISTINCT a.id FROM authenticateduserlookup al, authenticateduser a WHERE al.authenticateduser_id = a.id AND al.authenticationproviderid = 'builtin' AND a.useridentifier NOT IN (SELECT username FROM builtinuser)); +-- commands to remove the users from the appropriate tables +DELETE FROM confirmemaildata WHERE authenticateduser_id IN (SELECT * FROM useridstodelete); +DELETE FROM usernotification WHERE user_id IN (SELECT * FROM useridstodelete); +DELETE FROM guestbookresponse WHERE authenticateduser_id IN (SELECT * FROM useridstodelete); +DELETE FROM authenticateduserlookup WHERE authenticateduser_id IN (SELECT * FROM useridstodelete); +DELETE FROM authenticateduser WHERE id NOT IN (SELECT authenticateduser_id FROM authenticateduserlookup); + +/* +Add validationFormat to DatasetFieldType to + */ +ALTER TABLE datasetfieldtype +ADD COLUMN validationFormat character varying(255); + +/* +for testing display format +This adds a display format that links out to an outside site. 
The format of the #VALUE is +four characters alpha numeric (3fki works) + +update datasetfieldtype +set displayformat = 'PDB (RCSB) #VALUE', +fieldType= 'TEXT' +where id = xxx; + +*/ diff --git a/postgresql/testdata/scripts/database/upgrades/upgrade_v4.8.3_to_v4.8.4.sql b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.8.3_to_v4.8.4.sql new file mode 100644 index 0000000..670a2d1 --- /dev/null +++ b/postgresql/testdata/scripts/database/upgrades/upgrade_v4.8.3_to_v4.8.4.sql @@ -0,0 +1,2 @@ +-- Google login has used 131 characters. 64 is not enough. +ALTER TABLE oauth2tokendata ALTER COLUMN accesstoken TYPE text; diff --git a/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/cert.md b/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/cert.md new file mode 100644 index 0000000..d68910f --- /dev/null +++ b/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/cert.md @@ -0,0 +1,13 @@ +Note that `-sha256` is used but the important thing is making sure SHA-1 is not selected when uploading the CSR to https://cert-manager.com/customer/InCommon + + openssl genrsa -out phoenix.dataverse.org.key 2048 + + openssl req -new -sha256 -key phoenix.dataverse.org.key -out phoenix.dataverse.org.csr + + Country Name (2 letter code) [XX]:US + State or Province Name (full name) []:Massachusetts + Locality Name (eg, city) [Default City]:Cambridge + Organization Name (eg, company) [Default Company Ltd]:Harvard College + Organizational Unit Name (eg, section) []:IQSS + Common Name (eg, your name or your server's hostname) []:phoenix.dataverse.org + Email Address []:support@dataverse.org diff --git a/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/deploy b/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/deploy new file mode 100755 index 0000000..f45d7d6 --- /dev/null +++ b/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/deploy @@ -0,0 +1,4 @@ +#!/bin/sh +scripts/deploy/phoenix.dataverse.org/prep +sudo 
/home/jenkins/dataverse/scripts/deploy/phoenix.dataverse.org/rebuild +scripts/deploy/phoenix.dataverse.org/post diff --git a/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/dv-root.json b/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/dv-root.json new file mode 100644 index 0000000..20fa890 --- /dev/null +++ b/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/dv-root.json @@ -0,0 +1,15 @@ +{ + "alias": "root", + "name": "Root", + "permissionRoot": false, + "facetRoot": true, + "description": "Welcome! phoenix.dataverse.org is so named because data here is deleted on every build of the latest Dataverse code: http://guides.dataverse.org/en/latest/developers", + "dataverseSubjects": [ + "Other" + ], + "dataverseContacts": [ + { + "contactEmail": "root@mailinator.com" +} + ] +} diff --git a/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/install b/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/install new file mode 100755 index 0000000..f3df88a --- /dev/null +++ b/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/install @@ -0,0 +1,20 @@ +#!/bin/sh +export HOST_ADDRESS=phoenix.dataverse.org +export GLASSFISH_ROOT=/usr/local/glassfish4 +export FILES_DIR=/usr/local/glassfish4/glassfish/domains/domain1/files +export DB_NAME=dvndb +export DB_PORT=5432 +export DB_HOST=localhost +export DB_USER=dvnapp +export DB_PASS=secret +export RSERVE_HOST=localhost +export RSERVE_PORT=6311 +export RSERVE_USER=rserve +export RSERVE_PASS=rserve +export SMTP_SERVER=localhost +export MEM_HEAP_SIZE=2048 +export GLASSFISH_DOMAIN=domain1 +cd scripts/installer +cp pgdriver/postgresql-8.4-703.jdbc4.jar $GLASSFISH_ROOT/glassfish/lib +cp ../../conf/jhove/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf +./glassfish-setup.sh diff --git a/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/post b/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/post new file mode 100755 index 0000000..7716fa8 --- 
/dev/null +++ b/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/post @@ -0,0 +1,15 @@ +#!/bin/sh +cd scripts/api +./setup-all.sh --insecure | tee /tmp/setup-all.sh.out +cd ../.. +psql -U dvnapp dvndb -f scripts/database/reference_data.sql +psql -U dvnapp dvndb -f doc/sphinx-guides/source/_static/util/pg8-createsequence-prep.sql +psql -U dvnapp dvndb -f doc/sphinx-guides/source/_static/util/createsequence.sql +scripts/search/tests/publish-dataverse-root +git checkout scripts/api/data/dv-root.json +scripts/search/tests/grant-authusers-add-on-root +scripts/search/populate-users +scripts/search/create-users +scripts/search/tests/create-all-and-test +scripts/search/tests/publish-spruce1-and-test +java -jar downloads/schemaSpy_5.0.0.jar -t pgsql -host localhost -db dvndb -u postgres -p secret -s public -dp scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar -o /var/www/html/schemaspy/latest diff --git a/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/prep b/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/prep new file mode 100755 index 0000000..4660125 --- /dev/null +++ b/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/prep @@ -0,0 +1,2 @@ +#!/bin/bash -x +cp scripts/deploy/phoenix.dataverse.org/dv-root.json scripts/api/data/dv-root.json diff --git a/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/rebuild b/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/rebuild new file mode 100755 index 0000000..ca92ef5 --- /dev/null +++ b/postgresql/testdata/scripts/deploy/phoenix.dataverse.org/rebuild @@ -0,0 +1,18 @@ +#!/bin/sh +LIST_APP=$(/usr/local/glassfish4/glassfish/bin/asadmin list-applications -t) +echo "deployed: $LIST_APP" +OLD_WAR=$(echo $LIST_APP | awk '{print $1}') +NEW_WAR=/tmp/dataverse.war +/usr/local/glassfish4/glassfish/bin/asadmin undeploy $OLD_WAR +/usr/local/glassfish4/glassfish/bin/asadmin stop-domain +# blow away "generated" directory to avoid "EJB Timer Service is not available" 
https://github.com/IQSS/dataverse/issues/3336 +rm -rf /usr/local/glassfish4/glassfish/domains/domain1/generated +rm -rf /usr/local/glassfish4/glassfish/domains/domain1/files +#psql -U postgres -c "CREATE ROLE dvnapp UNENCRYPTED PASSWORD 'secret' SUPERUSER CREATEDB CREATEROLE INHERIT LOGIN" template1 +psql -U dvnapp -c 'DROP DATABASE "dvndb"' template1 +echo $? +curl http://localhost:8983/solr/update/json?commit=true -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"*:*\"}}" +psql -U dvnapp -c 'CREATE DATABASE "dvndb" WITH OWNER = "dvnapp"' template1 +echo $? +/usr/local/glassfish4/glassfish/bin/asadmin start-domain +/usr/local/glassfish4/glassfish/bin/asadmin deploy $NEW_WAR diff --git a/postgresql/testdata/scripts/installer/Makefile b/postgresql/testdata/scripts/installer/Makefile new file mode 100644 index 0000000..046e6cb --- /dev/null +++ b/postgresql/testdata/scripts/installer/Makefile @@ -0,0 +1,76 @@ +INSTALLER_ZIP_DIR=dvinstall +DISTRIBUTION_WAR_FILE=${INSTALLER_ZIP_DIR}/dataverse.war +GLASSFISH_SETUP_SCRIPT=${INSTALLER_ZIP_DIR}/glassfish-setup.sh +POSTGRES_DRIVERS=${INSTALLER_ZIP_DIR}/pgdriver +API_SCRIPTS=${INSTALLER_ZIP_DIR}/setup-datasetfields.sh ${INSTALLER_ZIP_DIR}/setup-users.sh ${INSTALLER_ZIP_DIR}/setup-builtin-roles.sh ${INSTALLER_ZIP_DIR}/setup-dvs.sh ${INSTALLER_ZIP_DIR}/data ${INSTALLER_ZIP_DIR}/setup-identity-providers.sh ${INSTALLER_ZIP_DIR}/setup-all.sh ${INSTALLER_ZIP_DIR}/post-install-api-block.sh +DB_SCRIPT=${INSTALLER_ZIP_DIR}/reference_data.sql +JHOVE_CONFIG=${INSTALLER_ZIP_DIR}/jhove.conf +JHOVE_SCHEMA=${INSTALLER_ZIP_DIR}/jhoveConfig.xsd +SOLR_SCHEMA=${INSTALLER_ZIP_DIR}/schema.xml +INSTALL_SCRIPT=${INSTALLER_ZIP_DIR}/install + +installer: dvinstall.zip + +clean: + /bin/rm -rf ${INSTALLER_ZIP_DIR} dvinstall.zip + +dvinstall.zip: ${GLASSFISH_SETUP_SCRIPT} ${POSTGRES_DRIVERS} ${DISTRIBUTION_WAR_FILE} ${API_SCRIPTS} ${DB_SCRIPT} ${JHOVE_CONFIG} ${JHOVE_SCHEMA} ${SOLR_SCHEMA} ${INSTALL_SCRIPT} + @echo making 
installer... + zip -r dvinstall.zip ${GLASSFISH_SETUP_SCRIPT} ${POSTGRES_DRIVERS} ${DISTRIBUTION_WAR_FILE} ${API_SCRIPTS} ${DB_SCRIPT} ${JHOVE_CONFIG} ${JHOVE_SCHEMA} ${SOLR_SCHEMA} ${INSTALL_SCRIPT} + @echo + @echo "Done!" + +${INSTALL_SCRIPT}: install + @echo copying the installer script + mkdir -p ${INSTALLER_ZIP_DIR} + /bin/cp install ${INSTALLER_ZIP_DIR} + +${DISTRIBUTION_WAR_FILE}: + @echo copying war file... + @mkdir -p ${INSTALLER_ZIP_DIR} + @VERSION_NUMBER=`grep -m1 '' ../../pom.xml | sed 's/ *<\/*version>//g'`; export VERSION_NUMBER; \ + BUILT_WAR_FILE=../../target/dataverse-$$VERSION_NUMBER.war; export BUILT_WAR_FILE; \ + if [ -f $$BUILT_WAR_FILE ]; \ + then \ + /bin/cp $$BUILT_WAR_FILE ${DISTRIBUTION_WAR_FILE}; \ + else \ + echo "ERROR: can't find application .war file ($${BUILT_WAR_FILE})!"; \ + echo "Build the Dataverse application war file, then try again."; \ + exit 1; \ + fi + +${GLASSFISH_SETUP_SCRIPT}: glassfish-setup.sh + @echo copying glassfish setup + mkdir -p ${INSTALLER_ZIP_DIR} + /bin/cp glassfish-setup.sh ${INSTALLER_ZIP_DIR} + + +${POSTGRES_DRIVERS}: pgdriver/postgresql-8.4-703.jdbc4.jar pgdriver/postgresql-9.0-802.jdbc4.jar pgdriver/postgresql-9.1-902.jdbc4.jar pgdriver/postgresql-9.2-1004.jdbc4.jar pgdriver/postgresql-9.3-1104.jdbc4.jar pgdriver/postgresql-9.4.1212.jar pgdriver/postgresql-42.1.4.jar + @echo copying postgres drviers + @mkdir -p ${POSTGRES_DRIVERS} + /bin/cp pgdriver/postgresql-8.4-703.jdbc4.jar pgdriver/postgresql-9.0-802.jdbc4.jar pgdriver/postgresql-9.1-902.jdbc4.jar pgdriver/postgresql-9.2-1004.jdbc4.jar pgdriver/postgresql-9.3-1104.jdbc4.jar pgdriver/postgresql-9.4.1212.jar pgdriver/postgresql-42.1.4.jar ${INSTALLER_ZIP_DIR}/pgdriver + +${API_SCRIPTS}: ../api/setup-datasetfields.sh ../api/setup-users.sh ../api/setup-dvs.sh ../api/setup-identity-providers.sh ../api/setup-all.sh ../api/post-install-api-block.sh ../api/setup-builtin-roles.sh ../api/data + @echo copying api scripts + @mkdir -p ${INSTALLER_ZIP_DIR} + 
/bin/cp -R ../api/setup-datasetfields.sh ../api/setup-users.sh ../api/setup-dvs.sh ../api/setup-identity-providers.sh ../api/setup-all.sh ../api/post-install-api-block.sh ../api/setup-builtin-roles.sh ../api/data ${INSTALLER_ZIP_DIR} + +${DB_SCRIPT}: ../database/reference_data.sql + @echo copying reference data sql script + @mkdir -p ${INSTALLER_ZIP_DIR} + /bin/cp ../database/reference_data.sql ${INSTALLER_ZIP_DIR} + +${JHOVE_CONFIG}: ../../conf/jhove/jhove.conf + @echo copying jhove config file + @mkdir -p ${INSTALLER_ZIP_DIR} + /bin/cp ../../conf/jhove/jhove.conf ${INSTALLER_ZIP_DIR} + +${JHOVE_SCHEMA}: ../../conf/jhove/jhoveConfig.xsd + @echo copying jhove schema file + @mkdir -p ${INSTALLER_ZIP_DIR} + /bin/cp ../../conf/jhove/jhoveConfig.xsd ${INSTALLER_ZIP_DIR} + +${SOLR_SCHEMA}: ../../conf/solr/4.6.0/schema.xml + @echo copying Solr schema file + @mkdir -p ${INSTALLER_ZIP_DIR} + /bin/cp ../../conf/solr/4.6.0/schema.xml ${INSTALLER_ZIP_DIR} diff --git a/postgresql/testdata/scripts/installer/README.txt b/postgresql/testdata/scripts/installer/README.txt new file mode 100644 index 0000000..fb41f4f --- /dev/null +++ b/postgresql/testdata/scripts/installer/README.txt @@ -0,0 +1,45 @@ +The installer script (install) can be run either by a developer (inside the source tree), or by an end-user installing the Dataverse. The latter will obtain the script as part of the distribution bundle; and they will be running it inside the unzipped bundle directory. + +In the former (developer) case, the installer will be looking for the files it needs in the other directories in the source tree. +For example, the war file (once built) can be found in ../../target/. The name of the war file will be dataverse-{VERSION}.war, where +{VERSION} is the version number of the Dataverse, obtained from the pom file (../../pom.xml). 
For example, as of writing this README.txt (July 2015) the war file is ../../target/dataverse-4.1.war/ + +When building a distribution archive, the Makefile will pile all the files that the installer needs in one directory (./dvinstall here) and then zip it up. We upload the resulting zip bundle on github as the actual software release. This way the end user only gets the files they actually need to install the Dataverse app. So they can do so without pulling the entire source tree. + + +The installer script itself (the perl script ./install) knows to look for all these files in 2 places (for example, it will look for the war file in ../../target/; if it's not there, it'll assume this is a distribution bundle and look for it as ./dataverse.war) + +Here's the list of the files that the installer needs: + +the war file: +target/dataverse-{VERSION}.war + +and also: + +from scripts/installer (this directory): + +install +glassfish-setup.sh +pgdriver (the entire directory with all its contents) + +from scripts/api: + +setup-all.sh +setup-builtin-roles.sh +setup-datasetfields.sh +setup-dvs.sh +setup-identity-providers.sh +setup-users.sh +data (the entire directory with all its contents) + +from scripts/database: + +reference_data.sql + +from conf/jhove: + +jhove.conf + +SOLR schema file, from conf/solr/4.6.0: + +schema.xml diff --git a/postgresql/testdata/scripts/installer/dvinstall/glassfish-setup.sh b/postgresql/testdata/scripts/installer/dvinstall/glassfish-setup.sh new file mode 100755 index 0000000..397cebf --- /dev/null +++ b/postgresql/testdata/scripts/installer/dvinstall/glassfish-setup.sh @@ -0,0 +1,261 @@ +#!/bin/bash +# YOU (THE HUMAN USER) SHOULD NEVER RUN THIS SCRIPT DIRECTLY! +# It should be run by higher-level installers. +# The following arguments should be passed to it +# as environmental variables: +# (no defaults for these values are provided here!) 
+# +# glassfish configuration: +# GLASSFISH_ROOT +# GLASSFISH_DOMAIN +# ASADMIN_OPTS +# MEM_HEAP_SIZE +# +# database configuration: +# DB_PORT +# DB_HOST +# DB_NAME +# DB_USER +# DB_PASS +# +# Rserve configuration: +# RSERVE_HOST +# RSERVE_PORT +# RSERVE_USER +# RSERVE_PASS +# +# other local configuration: +# HOST_ADDRESS +# SMTP_SERVER +# FILES_DIR + +# The script is going to fail and exit if any of the +# parameters aren't supplied. It is the job of the +# parent script to set all these env. variables, +# providing default values, if none are supplied by +# the user, etc. + +if [ -z "$DB_NAME" ] + then + echo "You must specify database name (DB_NAME)." + echo "PLEASE NOTE THAT YOU (THE HUMAN USER) SHOULD NEVER RUN THIS SCRIPT DIRECTLY!" + echo "IT SHOULD ONLY BE RUN BY OTHER SCRIPTS." + exit 1 +fi + +if [ -z "$DB_PORT" ] + then + echo "You must specify database port (DB_PORT)." + exit 1 +fi + +if [ -z "$DB_HOST" ] + then + echo "You must specify database host (DB_HOST)." + exit 1 +fi + +if [ -z "$DB_USER" ] + then + echo "You must specify database user (DB_USER)." + exit 1 +fi + +if [ -z "$DB_PASS" ] + then + echo "You must specify database password (DB_PASS)." + exit 1 +fi + +if [ -z "$RSERVE_HOST" ] + then + echo "You must specify Rserve host (RSERVE_HOST)." + exit 1 +fi + +if [ -z "$RSERVE_PORT" ] + then + echo "You must specify Rserve port (RSERVE_PORT)." + exit 1 +fi + +if [ -z "$RSERVE_USER" ] + then + echo "You must specify Rserve user (RSERVE_USER)." + exit 1 +fi + +if [ -z "$RSERVE_PASS" ] + then + echo "You must specify Rserve password (RSERVE_PASS)." + exit 1 +fi + +if [ -z "$SMTP_SERVER" ] + then + echo "You must specify smtp server (SMTP_SERVER)." + exit 1 +fi + +if [ -z "$HOST_ADDRESS" ] + then + echo "You must specify host address (HOST_ADDRESS)." + exit 1 +fi + +if [ -z "$FILES_DIR" ] + then + echo "You must specify files directory (FILES_DIR)." 
+ exit 1 +fi + +if [ -z "$MEM_HEAP_SIZE" ] + then + echo "You must specify the memory heap size for glassfish (MEM_HEAP_SIZE)." + exit 1 +fi + +if [ -z "$GLASSFISH_DOMAIN" ] + then + echo "You must specify glassfish domain (GLASSFISH_DOMAIN)." + exit 1 +fi + +echo "checking glassfish root:"${GLASSFISH_ROOT} + +if [ ! -d "$GLASSFISH_ROOT" ] + then + echo Glassfish root '$GLASSFISH_ROOT' does not exist + exit 1 +fi +GLASSFISH_BIN_DIR=$GLASSFISH_ROOT/bin + +echo "checking glassfish domain:"${GLASSFISH_ROOT}/glassfish/domains/$GLASSFISH_DOMAIN + +DOMAIN_DIR=$GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN +if [ ! -d "$DOMAIN_DIR" ] + then + echo Domain directory '$DOMAIN_DIR' does not exist + exit 2 +fi + +echo "Setting up your glassfish4 to support Dataverse" +echo "Glassfish directory: "$GLASSFISH_ROOT +echo "Domain directory: "$DOMAIN_DIR + +# Move to the glassfish dir +pushd $GLASSFISH_BIN_DIR + +### +# take the domain up, if needed. +DOMAIN_DOWN=$(./asadmin list-domains | grep "$DOMAIN " | grep "not running") +if [ $(echo $DOMAIN_DOWN|wc -c) -ne 1 ]; + then + echo Trying to start domain $GLASSFISH_DOMAIN up... 
+ ./asadmin $ASADMIN_OPTS start-domain $GLASSFISH_DOMAIN + else + echo domain running +fi + +# undeploy the app, if running: + +./asadmin $ASADMIN_OPTS undeploy dataverse-4.0 + +# avoid OutOfMemoryError: PermGen per http://eugenedvorkin.com/java-lang-outofmemoryerror-permgen-space-error-during-deployment-to-glassfish/ +#./asadmin $ASADMIN_OPTS list-jvm-options +./asadmin $ASADMIN_OPTS delete-jvm-options "-XX\:MaxPermSize=192m" +./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:MaxPermSize=512m" +./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:PermSize=256m" +./asadmin $ASADMIN_OPTS delete-jvm-options -Xmx512m +./asadmin $ASADMIN_OPTS create-jvm-options "-Xmx${MEM_HEAP_SIZE}m" +./asadmin $ASADMIN_OPTS delete-jvm-options -client +./asadmin $ASADMIN_OPTS create-jvm-options "-server" + +### +# JDBC connection pool + +# we'll try to delete a pool with this name, if already exists. +# - in case the database name has changed since the last time it +# was configured. +./asadmin $ASADMIN_OPTS delete-jdbc-connection-pool --cascade=true dvnDbPool + + +./asadmin $ASADMIN_OPTS create-jdbc-connection-pool --restype javax.sql.DataSource \ + --datasourceclassname org.postgresql.ds.PGPoolingDataSource \ + --property create=true:User=$DB_USER:PortNumber=$DB_PORT:databaseName=$DB_NAME:password=$DB_PASS:ServerName=$DB_HOST \ + dvnDbPool + +### +# Create data sources +./asadmin $ASADMIN_OPTS create-jdbc-resource --connectionpoolid dvnDbPool jdbc/VDCNetDS + +### +# Set up the data source for the timers +./asadmin $ASADMIN_OPTS set configs.config.server-config.ejb-container.ejb-timer-service.timer-datasource=jdbc/VDCNetDS + +### +# Add the necessary JVM options: +# +# location of the datafiles directory: +# (defaults to dataverse/files in the users home directory) +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files.directory=${FILES_DIR}" +# Rserve-related JVM options: +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.rserve.host=${RSERVE_HOST}" +./asadmin 
$ASADMIN_OPTS create-jvm-options "\-Ddataverse.rserve.port=${RSERVE_PORT}" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.rserve.user=${RSERVE_USER}" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.rserve.password=${RSERVE_PASS}" +# Data Deposit API options +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.fqdn=${HOST_ADDRESS}" +# password reset token timeout in minutes +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.auth.password-reset-timeout-in-minutes=60" + +./asadmin $ASADMIN_OPTS create-jvm-options "\-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl" + +# EZID DOI Settings +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.password=apitest" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.username=apitest" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.baseurlstring=https\://ezid.cdlib.org" +# "I am the timer server" option: +./asadmin $ASADMIN_OPTS create-jvm-options "-Ddataverse.timerServer=true" + +# enable comet support +./asadmin $ASADMIN_OPTS set server-config.network-config.protocols.protocol.http-listener-1.http.comet-support-enabled="true" + +./asadmin $ASADMIN_OPTS delete-connector-connection-pool --cascade=true jms/__defaultConnectionFactory-Connection-Pool + +# no need to explicitly delete the connector resource for the connection pool deleted in the step +# above - the cascade delete takes care of it. 
+#./asadmin $ASADMIN_OPTS delete-connector-resource jms/__defaultConnectionFactory-Connection-Pool + +# http://docs.oracle.com/cd/E19798-01/821-1751/gioce/index.html +./asadmin $ASADMIN_OPTS create-connector-connection-pool --steadypoolsize 1 --maxpoolsize 250 --poolresize 2 --maxwait 60000 --raname jmsra --connectiondefinition javax.jms.QueueConnectionFactory jms/IngestQueueConnectionFactoryPool + +# http://docs.oracle.com/cd/E18930_01/html/821-2416/abllx.html#giogt +./asadmin $ASADMIN_OPTS create-connector-resource --poolname jms/IngestQueueConnectionFactoryPool --description "ingest connector resource" jms/IngestQueueConnectionFactory + +# http://docs.oracle.com/cd/E18930_01/html/821-2416/ablmc.html#giolr +./asadmin $ASADMIN_OPTS create-admin-object --restype javax.jms.Queue --raname jmsra --description "sample administered object" --property Name=DataverseIngest jms/DataverseIngest + +# no need to explicitly create the resource reference for the connection factory created above - +# the "create-connector-resource" creates the reference automatically. +#./asadmin $ASADMIN_OPTS create-resource-ref --target Cluster1 jms/IngestQueueConnectionFactory + +# created mail configuration: + +./asadmin $ASADMIN_OPTS create-javamail-resource --mailhost "$SMTP_SERVER" --mailuser "dataversenotify" --fromaddress "do-not-reply@${HOST_ADDRESS}" mail/notifyMailSession + +# so we can front with apache httpd ( ProxyPass / ajp://localhost:8009/ ) +./asadmin $ASADMIN_OPTS create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector + +### +# Restart +echo Updates done. Restarting... 
+./asadmin $ASADMIN_OPTS restart-domain $GLASSFISH_DOMAIN + +### +# Clean up +popd + +echo "Glassfish setup complete" +date + diff --git a/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-42.1.4.jar b/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-42.1.4.jar new file mode 100644 index 0000000..08a54b1 Binary files /dev/null and b/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-42.1.4.jar differ diff --git a/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-8.4-703.jdbc4.jar b/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-8.4-703.jdbc4.jar new file mode 100644 index 0000000..7c8d5f8 Binary files /dev/null and b/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-8.4-703.jdbc4.jar differ diff --git a/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.0-802.jdbc4.jar b/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.0-802.jdbc4.jar new file mode 100644 index 0000000..9e16af0 Binary files /dev/null and b/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.0-802.jdbc4.jar differ diff --git a/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.1-902.jdbc4.jar b/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.1-902.jdbc4.jar new file mode 100644 index 0000000..078f379 Binary files /dev/null and b/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.1-902.jdbc4.jar differ diff --git a/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.2-1004.jdbc4.jar b/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.2-1004.jdbc4.jar new file mode 100644 index 0000000..b9270d2 Binary files /dev/null and b/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.2-1004.jdbc4.jar differ diff --git a/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.3-1104.jdbc4.jar 
b/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.3-1104.jdbc4.jar new file mode 100644 index 0000000..a79525d Binary files /dev/null and b/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.3-1104.jdbc4.jar differ diff --git a/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.4.1212.jar b/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.4.1212.jar new file mode 100644 index 0000000..b0de752 Binary files /dev/null and b/postgresql/testdata/scripts/installer/dvinstall/pgdriver/postgresql-9.4.1212.jar differ diff --git a/postgresql/testdata/scripts/installer/glassfish-setup.sh b/postgresql/testdata/scripts/installer/glassfish-setup.sh new file mode 100755 index 0000000..397cebf --- /dev/null +++ b/postgresql/testdata/scripts/installer/glassfish-setup.sh @@ -0,0 +1,261 @@ +#!/bin/bash +# YOU (THE HUMAN USER) SHOULD NEVER RUN THIS SCRIPT DIRECTLY! +# It should be run by higher-level installers. +# The following arguments should be passed to it +# as environmental variables: +# (no defaults for these values are provided here!) +# +# glassfish configuration: +# GLASSFISH_ROOT +# GLASSFISH_DOMAIN +# ASADMIN_OPTS +# MEM_HEAP_SIZE +# +# database configuration: +# DB_PORT +# DB_HOST +# DB_NAME +# DB_USER +# DB_PASS +# +# Rserve configuration: +# RSERVE_HOST +# RSERVE_PORT +# RSERVE_USER +# RSERVE_PASS +# +# other local configuration: +# HOST_ADDRESS +# SMTP_SERVER +# FILES_DIR + +# The script is going to fail and exit if any of the +# parameters aren't supplied. It is the job of the +# parent script to set all these env. variables, +# providing default values, if none are supplied by +# the user, etc. + +if [ -z "$DB_NAME" ] + then + echo "You must specify database name (DB_NAME)." + echo "PLEASE NOTE THAT YOU (THE HUMAN USER) SHOULD NEVER RUN THIS SCRIPT DIRECTLY!" + echo "IT SHOULD ONLY BE RUN BY OTHER SCRIPTS." 
+ exit 1 +fi + +if [ -z "$DB_PORT" ] + then + echo "You must specify database port (DB_PORT)." + exit 1 +fi + +if [ -z "$DB_HOST" ] + then + echo "You must specify database host (DB_HOST)." + exit 1 +fi + +if [ -z "$DB_USER" ] + then + echo "You must specify database user (DB_USER)." + exit 1 +fi + +if [ -z "$DB_PASS" ] + then + echo "You must specify database password (DB_PASS)." + exit 1 +fi + +if [ -z "$RSERVE_HOST" ] + then + echo "You must specify Rserve host (RSERVE_HOST)." + exit 1 +fi + +if [ -z "$RSERVE_PORT" ] + then + echo "You must specify Rserve port (RSERVE_PORT)." + exit 1 +fi + +if [ -z "$RSERVE_USER" ] + then + echo "You must specify Rserve user (RSERVE_USER)." + exit 1 +fi + +if [ -z "$RSERVE_PASS" ] + then + echo "You must specify Rserve password (RSERVE_PASS)." + exit 1 +fi + +if [ -z "$SMTP_SERVER" ] + then + echo "You must specify smtp server (SMTP_SERVER)." + exit 1 +fi + +if [ -z "$HOST_ADDRESS" ] + then + echo "You must specify host address (HOST_ADDRESS)." + exit 1 +fi + +if [ -z "$FILES_DIR" ] + then + echo "You must specify files directory (FILES_DIR)." + exit 1 +fi + +if [ -z "$MEM_HEAP_SIZE" ] + then + echo "You must specify the memory heap size for glassfish (MEM_HEAP_SIZE)." + exit 1 +fi + +if [ -z "$GLASSFISH_DOMAIN" ] + then + echo "You must specify glassfish domain (GLASSFISH_DOMAIN)." + exit 1 +fi + +echo "checking glassfish root:"${GLASSFISH_ROOT} + +if [ ! -d "$GLASSFISH_ROOT" ] + then + echo Glassfish root '$GLASSFISH_ROOT' does not exist + exit 1 +fi +GLASSFISH_BIN_DIR=$GLASSFISH_ROOT/bin + +echo "checking glassfish domain:"${GLASSFISH_ROOT}/glassfish/domains/$GLASSFISH_DOMAIN + +DOMAIN_DIR=$GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN +if [ ! 
-d "$DOMAIN_DIR" ] + then + echo Domain directory '$DOMAIN_DIR' does not exist + exit 2 +fi + +echo "Setting up your glassfish4 to support Dataverse" +echo "Glassfish directory: "$GLASSFISH_ROOT +echo "Domain directory: "$DOMAIN_DIR + +# Move to the glassfish dir +pushd $GLASSFISH_BIN_DIR + +### +# take the domain up, if needed. +DOMAIN_DOWN=$(./asadmin list-domains | grep "$DOMAIN " | grep "not running") +if [ $(echo $DOMAIN_DOWN|wc -c) -ne 1 ]; + then + echo Trying to start domain $GLASSFISH_DOMAIN up... + ./asadmin $ASADMIN_OPTS start-domain $GLASSFISH_DOMAIN + else + echo domain running +fi + +# undeploy the app, if running: + +./asadmin $ASADMIN_OPTS undeploy dataverse-4.0 + +# avoid OutOfMemoryError: PermGen per http://eugenedvorkin.com/java-lang-outofmemoryerror-permgen-space-error-during-deployment-to-glassfish/ +#./asadmin $ASADMIN_OPTS list-jvm-options +./asadmin $ASADMIN_OPTS delete-jvm-options "-XX\:MaxPermSize=192m" +./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:MaxPermSize=512m" +./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:PermSize=256m" +./asadmin $ASADMIN_OPTS delete-jvm-options -Xmx512m +./asadmin $ASADMIN_OPTS create-jvm-options "-Xmx${MEM_HEAP_SIZE}m" +./asadmin $ASADMIN_OPTS delete-jvm-options -client +./asadmin $ASADMIN_OPTS create-jvm-options "-server" + +### +# JDBC connection pool + +# we'll try to delete a pool with this name, if already exists. +# - in case the database name has changed since the last time it +# was configured. 
+./asadmin $ASADMIN_OPTS delete-jdbc-connection-pool --cascade=true dvnDbPool + + +./asadmin $ASADMIN_OPTS create-jdbc-connection-pool --restype javax.sql.DataSource \ + --datasourceclassname org.postgresql.ds.PGPoolingDataSource \ + --property create=true:User=$DB_USER:PortNumber=$DB_PORT:databaseName=$DB_NAME:password=$DB_PASS:ServerName=$DB_HOST \ + dvnDbPool + +### +# Create data sources +./asadmin $ASADMIN_OPTS create-jdbc-resource --connectionpoolid dvnDbPool jdbc/VDCNetDS + +### +# Set up the data source for the timers +./asadmin $ASADMIN_OPTS set configs.config.server-config.ejb-container.ejb-timer-service.timer-datasource=jdbc/VDCNetDS + +### +# Add the necessary JVM options: +# +# location of the datafiles directory: +# (defaults to dataverse/files in the users home directory) +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files.directory=${FILES_DIR}" +# Rserve-related JVM options: +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.rserve.host=${RSERVE_HOST}" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.rserve.port=${RSERVE_PORT}" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.rserve.user=${RSERVE_USER}" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.rserve.password=${RSERVE_PASS}" +# Data Deposit API options +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.fqdn=${HOST_ADDRESS}" +# password reset token timeout in minutes +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.auth.password-reset-timeout-in-minutes=60" + +./asadmin $ASADMIN_OPTS create-jvm-options "\-Djavax.xml.parsers.SAXParserFactory=com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl" + +# EZID DOI Settings +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.password=apitest" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.username=apitest" +./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.baseurlstring=https\://ezid.cdlib.org" +# "I am the timer server" option: +./asadmin $ASADMIN_OPTS create-jvm-options 
"-Ddataverse.timerServer=true" + +# enable comet support +./asadmin $ASADMIN_OPTS set server-config.network-config.protocols.protocol.http-listener-1.http.comet-support-enabled="true" + +./asadmin $ASADMIN_OPTS delete-connector-connection-pool --cascade=true jms/__defaultConnectionFactory-Connection-Pool + +# no need to explicitly delete the connector resource for the connection pool deleted in the step +# above - the cascade delete takes care of it. +#./asadmin $ASADMIN_OPTS delete-connector-resource jms/__defaultConnectionFactory-Connection-Pool + +# http://docs.oracle.com/cd/E19798-01/821-1751/gioce/index.html +./asadmin $ASADMIN_OPTS create-connector-connection-pool --steadypoolsize 1 --maxpoolsize 250 --poolresize 2 --maxwait 60000 --raname jmsra --connectiondefinition javax.jms.QueueConnectionFactory jms/IngestQueueConnectionFactoryPool + +# http://docs.oracle.com/cd/E18930_01/html/821-2416/abllx.html#giogt +./asadmin $ASADMIN_OPTS create-connector-resource --poolname jms/IngestQueueConnectionFactoryPool --description "ingest connector resource" jms/IngestQueueConnectionFactory + +# http://docs.oracle.com/cd/E18930_01/html/821-2416/ablmc.html#giolr +./asadmin $ASADMIN_OPTS create-admin-object --restype javax.jms.Queue --raname jmsra --description "sample administered object" --property Name=DataverseIngest jms/DataverseIngest + +# no need to explicitly create the resource reference for the connection factory created above - +# the "create-connector-resource" creates the reference automatically. 
+#./asadmin $ASADMIN_OPTS create-resource-ref --target Cluster1 jms/IngestQueueConnectionFactory + +# created mail configuration: + +./asadmin $ASADMIN_OPTS create-javamail-resource --mailhost "$SMTP_SERVER" --mailuser "dataversenotify" --fromaddress "do-not-reply@${HOST_ADDRESS}" mail/notifyMailSession + +# so we can front with apache httpd ( ProxyPass / ajp://localhost:8009/ ) +./asadmin $ASADMIN_OPTS create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector + +### +# Restart +echo Updates done. Restarting... +./asadmin $ASADMIN_OPTS restart-domain $GLASSFISH_DOMAIN + +### +# Clean up +popd + +echo "Glassfish setup complete" +date + diff --git a/postgresql/testdata/scripts/installer/install b/postgresql/testdata/scripts/installer/install new file mode 100755 index 0000000..9edb8d6 --- /dev/null +++ b/postgresql/testdata/scripts/installer/install @@ -0,0 +1,1544 @@ +#!/usr/bin/perl + +use strict; +use warnings; +use Getopt::Long; +use Socket; +use File::Copy; + +my $verbose; +my $pg_only; +my $hostname; +my $gfuser; +my $gfdir; +my $mailserver; +my $yes; +my $force; +my $nogfpasswd; +my $admin_email; +my ($rez) = GetOptions( + #"length=i" => \$length, # numeric + #"file=s" => \$data, # string + "verbose" => \$verbose, + "pg_only" => \$pg_only, + "hostname=s" => \$hostname, + "gfuser=s" => \$gfuser, + "gfdir=s" => \$gfdir, + "mailserver=s" => \$mailserver, + "y|yes" => \$yes, + "f|force" => \$force, + "nogfpasswd" => \$nogfpasswd, + "admin_email=s" => \$admin_email, +); + +my @CONFIG_VARIABLES; + +my $postgresonly = 0; + +if ($pg_only) +{ + @CONFIG_VARIABLES = + ( 'POSTGRES_SERVER', 'POSTGRES_PORT', 'POSTGRES_DATABASE', 'POSTGRES_USER', 'POSTGRES_PASSWORD', 'POSTGRES_ADMIN_PASSWORD' ); + + $postgresonly = 1; +} +else +{ + + @CONFIG_VARIABLES = ( + 'HOST_DNS_ADDRESS', + 'GLASSFISH_USER', + 'GLASSFISH_DIRECTORY', + 'ADMIN_EMAIL', + 'MAIL_SERVER', + + 'POSTGRES_SERVER', + 'POSTGRES_PORT', + 'POSTGRES_ADMIN_PASSWORD', + 
'POSTGRES_DATABASE', + 'POSTGRES_USER', + 'POSTGRES_PASSWORD', + + 'SOLR_LOCATION', + + 'TWORAVENS_LOCATION', + + 'RSERVE_HOST', + 'RSERVE_PORT', + 'RSERVE_USER', + 'RSERVE_PASSWORD' + + ); +} + +my %CONFIG_DEFAULTS = ( + 'HOST_DNS_ADDRESS', 'localhost', + 'GLASSFISH_USER', '', + 'GLASSFISH_DIRECTORY', '/usr/local/glassfish4', + 'GLASSFISH_USER', '', + 'ADMIN_EMAIL', '', + 'MAIL_SERVER', 'mail.hmdc.harvard.edu', + + 'POSTGRES_ADMIN_PASSWORD', 'secret', + 'POSTGRES_SERVER', '127.0.0.1', + 'POSTGRES_PORT', 5432, + 'POSTGRES_DATABASE', 'dvndb', + 'POSTGRES_USER', 'dvnapp', + 'POSTGRES_PASSWORD', 'secret', + + 'SOLR_LOCATION', 'LOCAL', + + 'TWORAVENS_LOCATION', 'NOT INSTALLED', + + 'RSERVE_HOST', 'localhost', + 'RSERVE_PORT', 6311, + 'RSERVE_USER', 'rserve', + 'RSERVE_PASSWORD', 'rserve' + +); +my %CONFIG_PROMPTS = ( + 'HOST_DNS_ADDRESS', 'Fully Qualified Domain Name of your host', + 'GLASSFISH_USER', 'Glassfish service account username', + 'GLASSFISH_DIRECTORY', 'Glassfish Directory', + 'ADMIN_EMAIL', 'Administrator email address for this Dataverse', + 'MAIL_SERVER', 'SMTP (mail) server to relay notification messages', + + 'POSTGRES_SERVER', 'Postgres Server Address', + 'POSTGRES_PORT', 'Postgres Server Port', + 'POSTGRES_ADMIN_PASSWORD', 'Postgres ADMIN password', + 'POSTGRES_DATABASE', 'Name of the Postgres Database', + 'POSTGRES_USER', 'Name of the Postgres User', + 'POSTGRES_PASSWORD', 'Postgres user password', + + 'SOLR_LOCATION', 'Remote SOLR indexing service', + + 'TWORAVENS_LOCATION', 'Will this Dataverse be using TwoRavens application', + + 'RSERVE_HOST', 'Rserve Server', + 'RSERVE_PORT', 'Rserve Server Port', + 'RSERVE_USER', 'Rserve User Name', + 'RSERVE_PASSWORD', 'Rserve User Password' + +); + + +my %CONFIG_COMMENTS = ( + 'HOST_DNS_ADDRESS', ":\n(enter numeric IP address, if FQDN is unavailable) ", + 'GLASSFISH_USER', ":\nThis user will be running Glassfish service on your system.\n - If this is a dev. 
environment, this should be your own username; \n - In production, we suggest \"glassfish\" or another unprivileged user\n: ", + 'GLASSFISH_DIRECTORY', '', + 'ADMIN_EMAIL', ":\n(please enter a valid email address!) ", + 'MAIL_SERVER', '', + + 'POSTGRES_SERVER', '', + 'POSTGRES_PORT', '', + 'POSTGRES_ADMIN_PASSWORD', ":\n - We will need this to create the user and database that the Dataverse application will be using.\n (Hit RETURN if access control is set to \"trust\" for this connection in pg_hba.conf)\n: ", + 'POSTGRES_USER', ":\n - This is the Postgres user that the Dataverse app will be using to talk to the database\n: ", + 'POSTGRES_DATABASE', '', + 'POSTGRES_PASSWORD', '', + + 'SOLR_LOCATION', "? \n - Leave this set to \"LOCAL\" if the SOLR will be running on the same (this) server.\n Otherwise, please enter the host AND THE PORT NUMBER of the remote SOLR service, colon-separated\n (for example: foo.edu:8983)\n: ", + + 'TWORAVENS_LOCATION', "? \n - If so, please provide the complete URL of the TwoRavens GUI under rApache,\n for example, \"https://foo.edu/dataexplore/gui.html\".\n (PLEASE NOTE, TwoRavens will need to be installed separately! 
- see the installation docs for more info)\n: ", + + 'RSERVE_HOST', '', + 'RSERVE_PORT', '', + 'RSERVE_USER', '', + 'RSERVE_PASSWORD', '' + +); + + +my $API_URL = "http://localhost:8080/api"; + +# Supported Posstgres JDBC drivers: +# (have to be configured explicitely, so that Perl "taint" (security) mode +# doesn't get paranoid) + +my %POSTGRES_DRIVERS = ( + "8_4", "postgresql-8.4-703.jdbc4.jar", + "9_0", "postgresql-9.0-802.jdbc4.jar", + "9_1", "postgresql-9.1-902.jdbc4.jar", + "9_2", "postgresql-9.2-1004.jdbc4.jar", + "9_3", "postgresql-9.3-1104.jdbc4.jar", + "9_4", "postgresql-9.4.1212.jar", + "9_5", "postgresql-42.1.4.jar", + "9_6", "postgresql-42.1.4.jar" +); + +# A few preliminary checks: + +# OS: + +my $uname_out = `uname -a`; + +# hostname: + +my $hostname_from_cmdline = `hostname`; +chop $hostname_from_cmdline; + +if ($hostname) { + $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} = $hostname; +} +else { + $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} = $hostname_from_cmdline; +} + +# read default configuration values from tab separated file "default.config" if it exists +# moved after the $hostname_from_cmdline section to avoid excessively complicating the logic +# of command line argument, automatic selection, or config file. +sub trim { my $s = shift; $s =~ s/^\s+|\s+$//g; return $s }; +my $config_default_file = "default.config"; +if ( -e $config_default_file ) +{ + print("loading default configuration values from $config_default_file\n"); + open( my $inp_cfg, $config_default_file ); + while( my $ln = <$inp_cfg> ) + { + my @xs = split('\t', $ln ); + if ( 2 == @xs ) + { + my $k = $xs[0]; + my $v = trim($xs[1]); + $CONFIG_DEFAULTS{$k}=$v; + } + } +} +else +{ + print("using hard-coded default configuration values ($config_default_file not found)\n"); +} + +# get current user. first one wins. 
+my $current_user = $ENV{LOGNAME} || $ENV{USER} || getpwuid($<); + +if (!$CONFIG_DEFAULTS{'GLASSFISH_USER'}) { + $CONFIG_DEFAULTS{'GLASSFISH_USER'} = $current_user; + print "No pre-configured user found; using $current_user.\n"; +} + +# command-line argument takes precendence +if ($gfuser) { + print "Using CLI-specified user $gfuser.\n"; + $CONFIG_DEFAULTS{'GLASSFISH_USER'} = $gfuser; +} + +# prefer that we not install as root. +unless ( $< != 0 ) { +print "####################################################################\n"; +print " It is recommended that this script not be run as root.\n"; +print " Consider creating a glassfish service account, giving it ownership\n"; +print " on the glassfish/domains/domain1/ and glassfish/lib/ directories,\n"; +print " along with the JVM-specified files.dir location, and running\n"; +print " this installer as the user who will launch Glassfish.\n"; +print "####################################################################\n"; +} + +# ensure $gfuser exists or bail +my $gfidcmd="id $CONFIG_DEFAULTS{'GLASSFISH_USER'}"; +my $gfreturncode=system($gfidcmd); +if ($gfreturncode != 0) { + die "Couldn't find user $gfuser. 
Please ensure the account exists and is readable by the user running this installer.\n"; +} + +if ($mailserver) { + $CONFIG_DEFAULTS{'MAIL_SERVER'} = $mailserver; +} + +if ($gfdir) { + $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} = $gfdir; +} + +print "\nWelcome to the Dataverse installer.\n"; +unless ($postgresonly) { + print "You will be guided through the process of setting up a NEW\n"; + print "instance of the dataverse application\n"; +} +else { + print "You will be guided through the process of configuring the\n"; + print "LOCAL instance of PostgreSQL database for use by the DVN\n"; + print "application.\n"; +} + +my @uname_tokens = split( " ", $uname_out ); + +my $WORKING_OS; +if ( $uname_tokens[0] eq "Darwin" ) { + print "\nThis appears to be a MacOS X system; good.\n"; + # TODO: check the OS version + + $WORKING_OS = "MacOSX"; +} +elsif ( $uname_tokens[0] eq "Linux" ) { + if ( -f "/etc/redhat-release" ) { + print "\nThis appears to be a RedHat system; good.\n"; + $WORKING_OS = "RedHat"; + # TODO: check the distro version + } + else { + print "\nThis appears to be a non-RedHat Linux system;\n"; + print "this installation *may* succeed; but we're not making any promises!\n"; + $WORKING_OS = "Linux"; + } +} +else { + print "\nWARNING: This appears to be neither a Linux or MacOS X system!\n"; + print "This installer script will most likely fail. 
Please refer to the\n"; + print "DVN Installers Guide for more information.\n\n"; + + $WORKING_OS = "Unknown"; + + print "Do you wish to continue?\n [y/n] "; + + my $yesnocont; + + if ($yes) { + $yesnocont = "y"; + } + else { + print "here"; + exit; + $yesnocont = <>; + chop $yesnocont; + } + + while ( $yesnocont ne "y" && $yesnocont ne "n" ) { + print "Please enter 'y' or 'n'!\n"; + print "(or ctrl-C to exit the installer)\n"; + $yesnocont = <>; + chop $yesnocont; + } + + if ( $yesnocont eq "n" ) { + exit 0; + } + +} + +ENTERCONFIG: + +print "\n"; +print "Please enter the following configuration values:\n"; +print "(hit [RETURN] to accept the default value)\n"; +print "\n"; + +for my $ENTRY (@CONFIG_VARIABLES) +{ + my $config_prompt = $CONFIG_PROMPTS{$ENTRY}; + my $config_comment = $CONFIG_COMMENTS{$ENTRY}; + + if ( $config_comment eq '' ) + { + print $config_prompt . ": "; + print "[" . $CONFIG_DEFAULTS{$ENTRY} . "] "; + } + else + { + print $config_prompt . $config_comment; + print "[" . $CONFIG_DEFAULTS{$ENTRY} . "] "; + } + + my $user_entry = ""; + + unless ($yes) + { + $user_entry = <>; + chop $user_entry; + + if ( $user_entry ne "" ) { + $CONFIG_DEFAULTS{$ENTRY} = $user_entry; + } + + + # for some values, we'll try to do some validation right here, in real time: + + if ($ENTRY eq 'ADMIN_EMAIL') + { + $user_entry = $CONFIG_DEFAULTS{$ENTRY}; + my $attempts = 0; + while ($user_entry !~/[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}/) + { + $attempts++; + print "Please enter a valid email address: "; + $user_entry = <>; + chop $user_entry; + } + + if ($attempts) + { + print "OK, looks legit.\n"; + $CONFIG_DEFAULTS{$ENTRY} = $user_entry; + } + + } + elsif ($ENTRY eq 'GLASSFISH_DIRECTORY') + { + # 5a. CHECK IF GLASSFISH DIR LOOKS OK: + print "\nChecking your Glassfish installation..."; + + my $g_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}; + + + unless ( -d $g_dir . 
"/glassfish/domains/domain1" ) + { + # TODO: need better check than this + + while ( !( -d $g_dir . "/glassfish/domains/domain1" ) ) + { + print "\nInvalid Glassfish directory " . $g_dir . "!\n"; + print "Enter the root directory of your Glassfish installation:\n"; + print "(Or ctrl-C to exit the installer): "; + + $g_dir = <>; + chop $g_dir; + } + + # TODO: + # verify that we can write in the Glassfish directory + # (now that we are no longer requiring to run the installer as root) + + my $g_testdir = $g_dir . "/glassfish/domains/domain1"; + my $g_libdir = $g_dir . "/glassfish/lib"; + if (!(-w $g_testdir)) { + die("$g_testdir not writable. Have you created a glassfish user, and given it write permission on $g_testdir?\n"); + } elsif (!(-w $g_libdir)) { + die("$g_libdir not writable. Have you created a glassfish user, and given it write permission on $g_libdir?\n"); + } + + } + + print "OK!\n"; + $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} = $g_dir; + + } + elsif ($ENTRY eq 'MAIL_SERVER') + { + my $smtp_server = ""; + while (! &validate_smtp_server() ) + { + print "Enter a valid SMTP (mail) server:\n"; + print "(Or ctrl-C to exit the installer): "; + + $smtp_server = <>; + chop $smtp_server; + + $CONFIG_DEFAULTS{'MAIL_SERVER'} = $smtp_server unless $smtp_server eq ''; + } + } + } + + print "\n"; +} + +# CONFIRM VALUES ENTERED: + +print "\nOK, please confirm what you've entered:\n\n"; + +for my $ENTRY (@CONFIG_VARIABLES) { + print $CONFIG_PROMPTS{$ENTRY} . ": " . $CONFIG_DEFAULTS{$ENTRY} . "\n"; +} + +my $yesno; +if ($yes) { + $yesno = "y"; +} +else { + print "\nIs this correct? [y/n] "; + $yesno = <>; + chop $yesno; +} + +while ( $yesno ne "y" && $yesno ne "n" ) { + print "Please enter 'y' or 'n'!\n"; + print "(or ctrl-C to exit the installer)\n"; + $yesno = <>; + chop $yesno; +} + +if ( $yesno eq "n" ) { + goto ENTERCONFIG; +} + +# VALIDATION/VERIFICATION OF THE CONFIGURATION VALUES: +# 1. 
VERIFY/VALIDATE THE MAIL SERVER THEY CONFIGURED: +# (has been moved to the top, so that it's validated in real time, when the user enters the value) + +# 2. CHECK IF THE WAR FILE IS AVAILABLE: + +print "\nChecking if the application .war file is available... "; + +# if this installation is running out of the installer zib bundle directory, +# the war file will be sitting right here, named "dataverse.war": + +my $WARFILE_LOCATION = "dataverse.war"; + +# but if it's not here, this is probably a personal development +# setup, so their build should be up in their source tree: + +unless ( -f $WARFILE_LOCATION ) { + my $DATAVERSE_VERSION = ""; + my $DATAVERSE_POM_FILE = "../../pom.xml"; + if ( -f $DATAVERSE_POM_FILE ) + { + open DPF, $DATAVERSE_POM_FILE; + my $pom_line; + while ($pom_line=) + { + chop $pom_line; + if ($pom_line =~/^[ \t]*([0-9\.]+)<\/version>/) + { + $DATAVERSE_VERSION=$1; + last; + } + } + close DPF; + + if ($DATAVERSE_VERSION ne "") { + $WARFILE_LOCATION = "../../target/dataverse-" . $DATAVERSE_VERSION . ".war"; + } + } +} + +# But, if the war file cannot be found in either of the 2 +# places - we'll just have to give up: + +unless ( -f $WARFILE_LOCATION ) { + print "\nWARNING: Can't find the project .war file!\n"; + print "\tAre you running the installer in the right directory?\n"; + print "\tHave you built the war file?\n"; + print "\t(if not, build the project and run the installer again)\n"; + + exit 0; +} + +print " Yes, it is!\n"; + + +# check the working (installer) dir: +my $cwd; +chomp( $cwd = `pwd` ); + +# 2b. 
CHECK IF THE SQL TEMPLATE IS IN PLACE AND CREATE THE SQL FILE + +#my $SQL_REFERENCE_DATA = "reference_data_filtered.sql"; +my $SQL_REFERENCE_TEMPLATE = "../database/reference_data.sql"; + +unless ( -f $SQL_REFERENCE_TEMPLATE ) { + $SQL_REFERENCE_TEMPLATE = "reference_data.sql"; +} + +unless ( -f $SQL_REFERENCE_TEMPLATE ) { + print "\nWARNING: Can't find .sql data template!\n"; + print "(are you running the installer in the right directory?)\n"; + + exit 0; +} + +#open DATATEMPLATEIN, $SQL_REFERENCE_TEMPLATE || die $@; +#open SQLDATAOUT, '>' . $SQL_REFERENCE_DATA || die $@; +# +#while () { +# s/dvnapp/$CONFIG_DEFAULTS{'POSTGRES_USER'}/g; +# print SQLDATAOUT $_; +#} + +#close DATATEMPLATEIN; +#close SQLDATAOUT; + +# 3. CHECK POSTGRES AND JQ AVAILABILITY: + +my $pg_local_connection = 0; +my $psql_exec; +my $jq_exec = ""; +my $pg_major_version = 0; +my $pg_minor_version = 0; + +my $POSTGRES_SYS_UID; +if ( $CONFIG_DEFAULTS{'POSTGRES_SERVER'} eq 'localhost' || $CONFIG_DEFAULTS{'POSTGRES_SERVER'} eq '127.0.0.1' ) +{ + $pg_local_connection = 1; +} +elsif ($pg_only) +{ + print "In the --pg_only mode the script can only be run LOCALLY,\n"; + print "i.e., on the server where PostgresQL is running, with the\n"; + print "Postgres server address as localhost - \"127.0.0.1\".\n"; + exit 1; +} + +### 3a. CHECK FOR USER postgres: (NO LONGER USED!) + +###print "\nChecking system user \"postgres\"... "; + +###my $POSTGRES_SYS_NAME = "postgres"; +###$POSTGRES_SYS_UID = ( getpwnam("postgres") )[2]; + +# 3b. LOCATE THE EXECUTABLES, FOR jq AND psql: + +my $sys_path = $ENV{'PATH'}; +my @sys_path_dirs = split( ":", $sys_path ); + +for my $sys_path_dir (@sys_path_dirs) { + if ( -x $sys_path_dir . "/jq" ) { + $jq_exec = $sys_path_dir; + last; + } +} +if ( $jq_exec eq "" ) { + print STDERR "\nERROR: I haven't been able to find the jq command in your PATH! 
Please install it from http://stedolan.github.io/jq/\n"; + exit 1; + +} + + +$psql_exec = ""; + +for my $sys_path_dir (@sys_path_dirs) { + if ( -x $sys_path_dir . "/psql" ) { + $psql_exec = $sys_path_dir; + last; + } +} + + +my $psql_major_version = 0; +my $psql_minor_version = 0; + +# 3c. IF PSQL WAS FOUND IN THE PATH, CHECK ITS VERSION: + +unless ( $psql_exec eq "" ) { + open( PSQLOUT, $psql_exec . "/psql --version|" ); + + my $psql_version_line = <PSQLOUT>; + chop $psql_version_line; + close PSQLOUT; + + my ( $postgresName, $postgresNameLong, $postgresVersion ) = split( " ", $psql_version_line ); + + unless ( $postgresName eq "psql" && $postgresVersion =~ /^[0-9][0-9\.]*$/ ) { + print STDERR "\nWARNING: Unexpected output from psql command!\n"; + } + else { + my (@psql_version_tokens) = split( '\.', $postgresVersion ); + + print "\n\nFound Postgres psql command, version $postgresVersion.\n\n"; + + $psql_major_version = $psql_version_tokens[0]; + $psql_minor_version = $psql_version_tokens[1]; + + $pg_major_version = $psql_major_version; + $pg_minor_version = $psql_minor_version; + + } +} + +# a frequent problem with MacOSX is that the copy of psql found in the PATH +# belongs to the older version of PostgresQL supplied with the OS, which happens +# to be incompatible with the newer builds from the Postgres project; which are +# recommended to be used with Dataverse. So if this is a MacOSX box, we'll +# check what other versions of PG are available, and select the highest version +# we can find: + +if ( $WORKING_OS eq "MacOSX" ) { + my $macos_pg_major_version = 0; + my $macos_pg_minor_version = 0; + + for $macos_pg_minor_version ( "5", "4", "3", "2", "1", "0" ) { + if ( -x "/Library/PostgreSQL/9." . $macos_pg_minor_version . "/bin/psql" ) { + $macos_pg_major_version = 9; + if ( ( $macos_pg_major_version > $psql_major_version ) + || ( $macos_pg_minor_version >= $psql_minor_version ) ) + { + $psql_exec = "/Library/PostgreSQL/9." . $macos_pg_minor_version .
"/bin"; + $pg_major_version = $macos_pg_major_version; + $pg_minor_version = $macos_pg_minor_version; + } + last; + } + } + + # And if we haven't found an 9.* version of postgresql installed, we'll also check + # for version 8.* available: + + if ( $macos_pg_major_version < 9 ) { + for $macos_pg_minor_version ( "4", "3" ) + # TODO: + # Do we even want to support postgres 8.3? + { + if ( -x "/Library/PostgreSQL/8." . $macos_pg_minor_version . "/bin/psql" ) { + $macos_pg_major_version = 8; + if ( $macos_pg_major_version > $psql_major_version + || $macos_pg_minor_version > $psql_minor_version ) + { + $psql_exec = "/Library/PostgreSQL/8." . $macos_pg_minor_version . "/bin"; + $pg_major_version = $macos_pg_major_version; + $pg_minor_version = $macos_pg_minor_version; + } + last; + } + } + } +} + + + +my $psql_admin_exec = ""; + +if ( $psql_exec eq "" ) +{ + if ( $pg_local_connection ) + { + print STDERR "\nERROR: I haven't been able to find the psql command in your PATH!\n"; + print STDERR "Please make sure PostgresQL is properly installed; if necessary, add\n"; + print STDERR "the location of psql to the PATH, then try again.\n\n"; + + exit 1; + } + else + { + print "WARNING: I haven't been able to find the psql command in your PATH!\n"; + print "But since we are configuring a Dataverse instance to use a remote Postgres server,\n"; + print "we can still set up the database by running a setup script on that remote server\n"; + print "(see below for instructions).\n"; + } +} else { + + if ( $pg_major_version == 0 ) { + # hmm? + } + + print "(We'll be Using psql version " . $pg_major_version . "." . $pg_minor_version . ")\n"; + + + $psql_admin_exec = "PGPASSWORD=" . $CONFIG_DEFAULTS{'POSTGRES_ADMIN_PASSWORD'} . "; export PGPASSWORD; " . $psql_exec; + $psql_exec = "PGPASSWORD=" . $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'} . "; export PGPASSWORD; " . $psql_exec; + + print "Checking if we can talk to Postgres as the admin user...\n"; +} + +# 4. CONFIGURE POSTGRES: + +# 4a. 
BUT FIRST, CHECK IF WE CAN TALK TO POSTGRES AS THE ADMIN: + +if ( $psql_admin_exec eq "" || system( $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U postgres -d postgres -c 'SELECT * FROM pg_roles' > /dev/null 2>&1" ) ) +{ + # No, we can't. :( + if ($pg_local_connection) + { + # If Postgres is running locally, this is a fatal condition. + # We'll give them some (potentially) helpful pointers and exit. + print "Nope, I haven't been able to connect to the local instance of PostgresQL as the admin user.\n"; + print "\nIs postgresql running? \n"; + print " On a RedHat-like system, you can check the status of the daemon with\n\n"; + print " service postgresql status\n\n"; + print " and, if it's not running, start the daemon with\n\n"; + print " service postgresql start\n\n"; + print " On MacOSX, use Applications -> PostgresQL -> Start Server.\n"; + print " (or, if there's no \"Start Server\" item in your PostgresQL folder, \n"; + print " simply restart your MacOSX system!)\n"; + print "\nAlso, please make sure that the daemon is listening to network connections!\n"; + print " - at least on the localhost interface. (See \"Installing Postgres\" section\n"; + print " of the installation manual).\n"; + print "\nFinally, did you supply the correct admin password?\n"; + print " Don't know the admin password for your Postgres installation?\n"; + print " - then simply set the access level to \"trust\" temporarily (for localhost only!)\n"; + print " in your pg_hba.conf file. Again, please consult the \n"; + print " installation manual).\n"; + exit 1; + } + else + { + # If we are configuring the Dataverse instance to use a Postgres server + # running on a remote host, it is possible to configure the database + # without opening remote access for the admin user. 
They will simply + # have to run this script in the "postgres-only" mode on that server, locally, + # then resume the installation here: + + print "Nope, I haven't been able to connect to the remote Postgres server as the admin user.\n"; + print "(Or you simply don't have psql installed on this server)\n"; + print "It IS possible to configure a database for your Dataverse on a remote server,\n"; + print "without having admin access to that remote Postgres installation.\n\n"; + print "In order to do that, please copy the installer (the entire package) to the server\n"; + print "where PostgresQL is running and run the installer with the \"--pg_only\" option:\n\n"; + print " ./install --pg_only\n\n"; + + print "Press any key to continue the installation process once that has been\n"; + print "done. Or press ctrl-C to exit the installer.\n\n"; + + system "stty cbreak </dev/tty >/dev/tty 2>&1"; + my $key = getc(STDIN); + system "stty -cbreak </dev/tty >/dev/tty 2>&1"; + print "\n"; + + # Find out what Postgres version is running remotely: + + $pg_major_version = 9; + $pg_minor_version = 1; + + print "What version of PostgresQL is installed on the remote server?\n [" + . $pg_major_version . "." + . $pg_minor_version .
"] "; + + my $postgresVersion = <>; + chop $postgresVersion; + + while ( $postgresVersion ne "" && !( $postgresVersion =~ /^[0-9]+\.[0-9]+$/ ) ) { + print "Please enter valid Postgres version!\n"; + print "(or ctrl-C to exit the installer)\n"; + $postgresVersion = <>; + chop $postgresVersion; + } + + unless ( $postgresVersion eq "" ) { + my (@postgres_version_tokens) = split( '\.', $postgresVersion ); + + unless ( ( $postgres_version_tokens[0] == 8 && $postgres_version_tokens[1] >= 4 ) + || ( $postgres_version_tokens[0] >= 9 ) ) + { + print STDERR "\nERROR: PostgresQL version 8.4, or newer, is required!\n"; + print STDERR "Please make sure the right version of PostgresQL is properly installed\n"; + print STDERR "on the remote server, then try again.\n"; + + exit 1; + } + + $pg_major_version = $postgres_version_tokens[0]; + $pg_minor_version = $postgres_version_tokens[1]; + } + } +} +else +{ + print "Yes, we can!\n"; + + # ok, we can proceed with configuring things... + + print "\nConfiguring Postgres Database:\n"; + + # 4c. CHECK IF THIS DB ALREADY EXISTS: + + my $psql_command_dbcheck = + $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U postgres -c \"\" -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . ">/dev/null 2>&1"; + + if ( ( my $exitcode = system($psql_command_dbcheck) ) == 0 ) + { + if ($force) + { + print "WARNING! Database " + . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} + . " already exists but --force given... continuing.\n"; + } + else + { + print "WARNING! Database " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " already exists!\n"; + print "\nPlease note that you can only use this installer to create a blank, \n"; + print "new and shiny Dataverse database. I.e., you cannot install on top of an \n"; + print "existing one. 
Please enter a different name for the DVN database.\n"; + print "\nPress any key to continue, or ctrl-C to exit the installer...\n\n"; + + system "stty cbreak /dev/tty 2>&1"; + my $key = getc(STDIN); + system "stty -cbreak /dev/tty 2>&1"; + print "\n"; + + goto ENTERCONFIG; + } + } + + # 4d. CHECK IF THIS USER ALREADY EXISTS: + + my $psql_command_rolecheck = + $psql_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -c \"\" -d postgres " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " >/dev/null 2>&1"; + if ( ( my $exitcode = system($psql_command_rolecheck) ) == 0 ) + { + print "User (role) " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " already exists;\n"; + print "Proceeding."; + } + else + { + # 4e. CREATE DVN DB USER: + + print "\nCreating Postgres user (role) for the DVN:\n"; + + open TMPCMD, ">/tmp/pgcmd.$$.tmp"; + + # with md5-encrypted password: + my $pg_password_md5 = + &create_pg_hash( $CONFIG_DEFAULTS{'POSTGRES_USER'}, $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'} ); + my $sql_command = + "CREATE ROLE \"" + . $CONFIG_DEFAULTS{'POSTGRES_USER'} + . "\" PASSWORD 'md5" + . $pg_password_md5 + . "' NOSUPERUSER CREATEDB CREATEROLE INHERIT LOGIN"; + + print TMPCMD $sql_command; + close TMPCMD; + + my $psql_commandline = $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U postgres -d postgres -f /tmp/pgcmd.$$.tmp >/dev/null 2>&1"; + + my $out = qx($psql_commandline 2>&1); + my $exitcode = $?; + unless ( $exitcode == 0 ) + { + print STDERR "Could not create the DVN Postgres user role!\n"; + print STDERR "(SQL: " . $sql_command . ")\n"; + print STDERR "(psql exit code: " . $exitcode . ")\n"; + print STDERR "(STDERR and STDOUT was: " . $out . ")\n"; + exit 1; + } + + unlink "/tmp/pgcmd.$$.tmp"; + print "done.\n"; + } + + # 4f. CREATE DVN DB: + + print "\nCreating Postgres database:\n"; + + my $psql_command = + $psql_exec + . "/createdb -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U $CONFIG_DEFAULTS{'POSTGRES_USER'} " + . 
$CONFIG_DEFAULTS{'POSTGRES_DATABASE'} + . " --owner=" + . $CONFIG_DEFAULTS{'POSTGRES_USER'}; + + my $out = qx($psql_command 2>&1); + my $exitcode = $?; + unless ( $exitcode == 0 ) + { + print STDERR "Could not create Postgres database for the Dataverse app!\n"; + print STDERR "(command: " . $psql_command . ")\n"; + print STDERR "(psql exit code: " . $exitcode . ")\n"; + print STDERR "(STDOUT and STDERR: " . $out . ")\n"; + if ($force) + { + print STDERR "\n--force called, continuing\n"; + } + else + { + print STDERR "\naborting the installation (sorry!)\n\n"; + exit 1; + } + } + +} + +if ($postgresonly) { + print "\nOK, done.\n"; + print "You can now resume the installation on the main Dataverse host.\n\n"; + + exit 0; +} + + +# Whether the user and the database were created locally or remotely, we'll now +# verify that we can talk to that database, with the credentials of the database +# user that we want the Dataverse application to be using: + +if ( system( $psql_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " -c 'SELECT * FROM pg_roles' > /dev/null 2>&1" ) ) +{ + print STDERR "Oops, haven't been able to connect to the database " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . ",\n"; + print STDERR "running on " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . ", as user " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . ".\n\n"; + print STDERR "Aborting the installation (sorry!)\n"; + exit 1; +} + + +# 5. CONFIGURE GLASSFISH + +my $glassfish_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}; + +print "\nProceeding with the Glassfish setup.\n"; + +# 5b. 
DETERMINE HOW MUCH MEMORY TO GIVE TO GLASSFISH AS HEAP: + +my $gf_heap_default = "2048m"; +my $sys_mem_total = 0; + +if ( -e "/proc/meminfo" && open MEMINFO, "/proc/meminfo" ) { + # Linux + + while ( my $mline = <MEMINFO> ) { + if ( $mline =~ /MemTotal:[ \t]*([0-9]*) kB/ ) { + $sys_mem_total = $1; + } + } + + close MEMINFO; + +# TODO: Figure out how to determine the amount of memory when running in Docker +# because we're wondering if Dataverse can run in the free OpenShift Online +# offering that only gives you 1 GB of memory. Obviously, if this is someone's +# first impression of Dataverse, we want it to run well! What if you try to +# ingest a large file or perform other memory-intensive operations? For more +# context, see https://github.com/IQSS/dataverse/issues/4040#issuecomment-331282286 + if ( -e "/sys/fs/cgroup/memory/memory.limit_in_bytes" && open CGROUPMEM, "/sys/fs/cgroup/memory/memory.limit_in_bytes" ) { + print "We must be running in Docker! Fancy!\n"; + while ( my $limitline = <CGROUPMEM> ) { + # The goal of this cgroup check is for + # "Setting the heap limit for Glassfish to 750MB" + # to change to some other value, based on memory available. + print "/sys/fs/cgroup/memory/memory.limit_in_bytes: $limitline\n"; + my $limit_in_kb = $limitline / 1024; + print "Docker limit_in_kb = $limit_in_kb but ignoring\n"; + # In openshift.json, notice how PostgreSQL and Solr have + # resources.limits.memory set to "256Mi". + # If you try to give the Dataverse/Glassfish container twice + # as much memory (512 MB) and allow $sys_mem_total to + # be set below, you should see the following: + # "Setting the heap limit for Glassfish to 192MB." + # FIXME: dataverse.war will not deploy with only 512 MB of memory. + # Again, the goal is 1 GB total (512MB + 256MB + 256MB) for + # Glassfish, PostgreSQL, and Solr to fit in the free OpenShift tier.
+ #print "setting sys_mem_total to: $limit_in_kb\n"; + #$sys_mem_total = $limit_in_kb; + } + close CGROUPMEM; + } +} +elsif ( -x "/usr/sbin/sysctl" ) { + # MacOS X, probably... + + $sys_mem_total = `/usr/sbin/sysctl -n hw.memsize`; + chop $sys_mem_total; + if ( $sys_mem_total > 0 ) { + $sys_mem_total = int( $sys_mem_total / 1024 ); + # size in kb + } +} + +if ( $sys_mem_total > 0 ) { + # setting the default heap size limit to 3/8 of the available + # amount of memory: + $gf_heap_default = ( int( $sys_mem_total / ( 8 / 3 * 1024 ) ) ); + + print "\nSetting the heap limit for Glassfish to " . $gf_heap_default . "MB. \n"; + print "You may need to adjust this setting to better suit \n"; + print "your system.\n\n"; + + #$gf_heap_default .= "m"; + +} +else { + print "\nCould not determine the amount of memory on your system.\n"; + print "Setting the heap limit for Glassfish to 2GB. You may need \n"; + print "to adjust the value to better suit your system.\n\n"; +} + +push @CONFIG_VARIABLES, "DEF_MEM_SIZE"; +$CONFIG_DEFAULTS{"DEF_MEM_SIZE"} = $gf_heap_default; + +# TODO: +# if the system has more than 4GB of memory (I believe), glassfish must +# be run with the 64 bit flag set explicitly (at least that was the case +# with the MacOS glassfish build...). Verify, and if still the case, +# add a check. + +print "\nInstalling the Glassfish PostgresQL driver... "; + +my $install_driver_jar = ""; + +$install_driver_jar = $POSTGRES_DRIVERS{ $pg_major_version . "_" . $pg_minor_version }; + +unless ( $install_driver_jar && -e "pgdriver/" . $install_driver_jar ) { + die "Installer could not find POSTGRES JDBC driver for your version of PostgresQL!\n(" + . $pg_major_version . "." + . $pg_minor_version . ")"; + +} + +system( "/bin/cp", "pgdriver/" . $install_driver_jar, $glassfish_dir . "/glassfish/lib" ); +# more diagnostics needed? 
+ +print "done!\n"; + +print "\n*********************\n"; +print "PLEASE NOTE, SOME OF THE ASADMIN COMMANDS ARE GOING TO FAIL,\n"; +print "FOR EXAMPLE, IF A CONFIGURATION SETTING THAT WE ARE TRYING\n"; +print "TO CREATE ALREADY EXISTS; OR IF A JVM OPTION THAT WE ARE\n"; +print "DELETING DOESN'T. THESE \"FAILURES\" ARE NORMAL!\n"; +print "*********************\n\n"; +print "When/if asadmin asks you to \"Enter admin user name\",\n"; +print "it should be safe to hit return and accept the default\n"; +print "(which is \"admin\").\n"; + +print "\nPress any key to continue...\n\n"; + +system "stty cbreak /dev/tty 2>&1"; +unless ($yes) { + my $key = getc(STDIN); +} +system "stty -cbreak /dev/tty 2>&1"; +print "\n"; + +# start domain, if not running: + +my $javacheck = `java -version`; +my $exitcode = $?; +unless ( $exitcode == 0 ) { + print STDERR "$javacheck\n" if $javacheck; + print STDERR "Do you have java installed?\n"; + exit 1; +} +my $DOMAIN = "domain1"; +my $DOMAIN_DOWN = + `$CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}/bin/asadmin list-domains | grep "$DOMAIN " | grep "not running"`; +print STDERR $DOMAIN_DOWN . "\n"; +if ($DOMAIN_DOWN) { + print "Trying to start domain up...\n"; + system( "sudo -u $CONFIG_DEFAULTS{'GLASSFISH_USER'} " . $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . "/bin/asadmin start-domain domain1" ); + + # TODO: (?) - retest that the domain is running now? +} +else { + print "domain appears to be up...\n"; +} + +# create asadmin login, so that the user doesn't have to enter +# the username and password for every asadmin command, if +# access to :4848 is password-protected: + +system( $glassfish_dir. "/bin/asadmin login" ); + +# NEW: configure glassfish using ASADMIN commands: + +my $success = &setup_glassfish(); + +# CHECK EXIT STATUS, BARF IF SETUP SCRIPT FAILED: + +unless ($success) { + print "\nERROR! 
Failed to configure Glassfish domain!\n"; + print "(see the error messages above - if any)\n"; + print "Aborting...\n"; + + exit 1; +} + +# Additional config files: + +my $JHOVE_CONFIG = "jhove.conf"; +my $JHOVE_CONF_SCHEMA = "jhoveConfig.xsd"; + + +my $JHOVE_CONFIG_DIST = $JHOVE_CONFIG; +my $JHOVE_CONF_SCHEMA_DIST = $JHOVE_CONF_SCHEMA; + +# (if the installer is being run NOT as part of a distribution zipped bundle, but +# from inside the source tree - adjust the locations of the jhove config files: + +unless ( -f $JHOVE_CONFIG ) { + $JHOVE_CONFIG_DIST = "../../conf/jhove/jhove.conf"; + $JHOVE_CONF_SCHEMA_DIST = "../../conf/jhove/jhoveConfig.xsd"; +} + +# but if we can't find the files in either location, it must mean +# that they are not running the script in the correct directory - so +# nothing else left for us to do but give up: + +unless ( -f $JHOVE_CONFIG_DIST && -f $JHOVE_CONF_SCHEMA_DIST ) { + print "\nERROR! JHOVE configuration files not found in the config dir!\n"; + print "(are you running the installer in the right directory?\n"; + print "Aborting...\n"; + exit 1; +} + +print "\nCopying additional configuration files... "; + +system( "/bin/cp -f " . $JHOVE_CONF_SCHEMA_DIST . " " . $glassfish_dir . "/glassfish/domains/domain1/config" ); + +# The JHOVE conf file has an absolute PATH of the JHOVE config schema file (uh, yeah...) +# - so it may need to be readjusted here: + +if ( $glassfish_dir ne "/usr/local/glassfish4" ) +{ + system( "sed 's:/usr/local/glassfish4:$glassfish_dir:g' < " . $JHOVE_CONFIG_DIST . " > " . $glassfish_dir . "/glassfish/domains/domain1/config/" . $JHOVE_CONFIG); +} +else +{ + system( "/bin/cp -f " . $JHOVE_CONFIG_DIST . " " . $glassfish_dir . "/glassfish/domains/domain1/config" ); +} + +print "done!\n"; + +# check if glassfish is running: +# TODO. + +# 6. DEPLOY THE APPLICATION: + +print "\nAttempting to deploy the application.\n"; +print "Command line: " . $glassfish_dir . "/bin/asadmin deploy " . $WARFILE_LOCATION . 
"\n"; +unless (( + my $exit_code = + system( $glassfish_dir . "/bin/asadmin deploy " . $WARFILE_LOCATION ) + ) == 0 ) +{ + print STDERR "Failed to deploy the application! WAR file: " . $WARFILE_LOCATION . ".\n"; + print STDERR "(exit code: " . $exit_code . ")\n"; + print STDERR "Aborting.\n"; + exit 1; +} + + +# 7. PRE-POPULATE THE DATABASE: +# (in this step some pre-supplied content is inserted into the database that we have just created; +# it is not *necessary* for the application to run in the very basic mode; but some features - certain +# types of metadata imports, for example - will be unavailable if it's not done. + +print "\nPre-populating the database:\n\n"; + +my $psql_command = $psql_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f $SQL_REFERENCE_TEMPLATE"; + +unless ( ( my $exitcode = system("$psql_command") ) == 0 ) +{ + print "WARNING: Could not pre-populate Postgres database for the Dataverse application!\n"; + print "(command: " . $psql_command . ")\n"; + print "(psql exit code: " . $exitcode . ")\n"; + print "\nYou must populate the database in order for all the features of your \n"; + print "new Dataverse to be available. \n"; + print "\n"; + print "You can try this again, by executing the following on the command line:\n"; + print " psql -U $CONFIG_DEFAULTS{'POSTGRES_USER'} -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f $SQL_REFERENCE_TEMPLATE\n"; + print "then re-start glassfish with \n\n"; + print " " . $glassfish_dir . "/bin/asadmin stop-domain domain1\n\n"; + print " " . $glassfish_dir . "/bin/asadmin start-domain domain1\n\n"; + print "\n"; + print "If it's still failing, please consult the installation manual and/or\n"; + print "seek support from the Dataverse team.\n\n"; + + print "Press any key to continue... 
"; + + system "stty cbreak /dev/tty 2>&1"; + my $key = getc(STDIN); + system "stty -cbreak /dev/tty 2>&1"; + print "\n"; +} +else +{ + print "\nOK, done!\n"; +} + +# Check if the App is running: + +unless (( + my $exit_code = + system( $glassfish_dir . "/bin/asadmin list-applications | grep -q '^dataverse'" ) + ) == 0 ) +{ + # If the "asadmin list-applications" has failed, it may only mean that an earlier + # "asadmin login" had failed, and asadmin is now failing to run without the user + # supplying the username and password. (And the fact that we are trying to pile the + # output to grep prevents it from providing the prompts). + # So before we give up, we'll try an alternative: + + unless (( + my $exit_code_2 = + system( "curl http://localhost:8080/robots.txt | grep -q '^User-agent'" ) + ) == 0 ) + { + print STDERR "It appears that the Dataverse application is not running...\n"; + print STDERR "Even though the \"asadmin deploy\" command had succeeded earlier.\n\n"; + print STDERR "Aborting - sorry...\n\n"; + } +} + + +print "\nOK, the Dataverse application appears to be running...\n\n"; + +# Run the additional setup scripts, that populate the metadata block field values, create users +# and dataverses, etc. 
+ +unless ( -d "data" && -f "setup-datasetfields.sh" && -f "setup-users.sh" && -f "setup-dvs.sh" && -f "setup-all.sh" ) { + chdir("../api"); +} + +unless ( -d "data" && -f "setup-datasetfields.sh" && -f "setup-users.sh" && -f "setup-dvs.sh" && -f "setup-builtin-roles.sh" && -f "setup-all.sh" ) { + print "\nERROR: Can't find the metadata and user/dataverse setup scripts!\n"; + print "\tAre you running the installer in the right directory?\n"; + exit 1; +} + +# if there's an admin_email set from arguments, replace the value in `dv-root.json` (called by `setup-all.sh`) +if ($admin_email) +{ + print "setting contact email for root dataverse to: $admin_email\n"; + set_root_contact_email( $admin_email ); +} +else +{ + print "using default contact email for root dataverse\n"; +} + +for my $script ( "setup-all.sh" ) { + # (there's only 1 setup script to run now - it runs all the other required scripts) + print "Executing post-deployment setup script " . $script . "... "; + + my $my_hostname = $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'}; + + # We used to filter the supplied scripts, replacing "localhost" and the port, in + # case they are running Dataverse on a different port... Now we are simply requiring + # that the port 8080 is still configured in domain.xml when they are running the + # installer: + my $run_script; + #if ( $my_hostname ne "localhost" ) { + # system( "sed 's/localhost:8080/$my_hostname/g' < " . $script . " > tmpscript.sh; chmod +x tmpscript.sh" ); + # $run_script = "tmpscript.sh"; + #} + #else { + $run_script = $script; + #} + + unless ( my $exit_code = system( "./" . $run_script . " > $run_script.$$.log 2>&1") == 0 ) + { + print "\nERROR executing script " . $script . "!\n"; + exit 1; + } + print "done!\n"; +} + +# SOME ADDITIONAL SETTINGS THAT ARE NOT TAKEN CARE OF BY THE setup-all SCRIPT +# NEED TO BE CONFIGURED HERE: + +print "Making additional configuration changes...\n\n"; + + +# a. 
Configure the Admin email in the Dataverse settings: + +print "Executing " . "curl -X PUT -d " . $CONFIG_DEFAULTS{'ADMIN_EMAIL'} . " " . $API_URL . "/admin/settings/:SystemEmail" . "\n"; + +my $exit_code = system("curl -X PUT -d " . $CONFIG_DEFAULTS{'ADMIN_EMAIL'} . " " . $API_URL . "/admin/settings/:SystemEmail"); +if ( $exit_code ) +{ + print "WARNING: failed to configure the admin email in the Dataverse settings!\n\n"; +} +else +{ + print "OK.\n\n"; +} + +# b. If this installation is going to be using TwoRavens, configure its location in the Dataverse settings; +# Otherwise, set the "NO TwoRavens FOR YOU!" option in the settings: + + +if ($CONFIG_DEFAULTS{'TWORAVENS_LOCATION'} ne 'NOT INSTALLED') +{ + print "Executing " . "curl -X PUT -d " . $CONFIG_DEFAULTS{'TWORAVENS_LOCATION'} . " " . $API_URL . "/admin/settings/:TwoRavensUrl" . "\n"; + my $exit_code = system("curl -X PUT -d " . $CONFIG_DEFAULTS{'TWORAVENS_LOCATION'} . " " . $API_URL . "/admin/settings/:TwoRavensUrl"); + if ( $exit_code ) + { + print "WARNING: failed to configure the location of the TwoRavens app in the Dataverse settings!\n\n"; + } + else + { + print "OK.\n\n"; + } + + # (and, we also need to explicitly set the tworavens option to "true": + $exit_code = system("curl -X PUT -d true " . $API_URL . "/admin/settings/:TwoRavensTabularView"); + +} else { + print "Executing " . "curl -X PUT -d false " . $API_URL . "/admin/settings/:TwoRavensTabularView" . "\n"; + my $exit_code = system("curl -X PUT -d false " . $API_URL . "/admin/settings/:TwoRavensTabularView"); + if ( $exit_code ) + { + print "WARNING: failed to disable the TwoRavens app in the Dataverse settings!\n\n"; + } + else + { + print "OK.\n\n"; + } +} + +# c. If this installation is going to be using a remote SOLR search engine service, configure its location in the settings: + +if ($CONFIG_DEFAULTS{'SOLR_LOCATION'} ne 'LOCAL') +{ + print "Executing " . "curl -X PUT -d " . $CONFIG_DEFAULTS{'SOLR_LOCATION'} . " " . $API_URL . 
"/admin/settings/:SolrHostColonPort" . "\n"; + my $exit_code = system("curl -X PUT -d " . $CONFIG_DEFAULTS{'SOLR_LOCATION'} . " " . $API_URL . "/admin/settings/:SolrHostColonPort"); + if ( $exit_code ) + { + print "WARNING: failed to configure the location of the remote SOLR service!\n\n"; + } + else + { + print "OK.\n\n"; + } +} + + + +chdir($cwd); + +print "\n\nYou should now have a running DVN instance at\n"; +print " http://" . $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} . ":8080\n\n\n"; + +# (going to skip the Rserve check, for now) + +exit 0; + +# 9. FINALLY, CHECK IF RSERVE IS RUNNING: +print "\n\nFinally, checking if Rserve is running and accessible...\n"; + +unless ( $CONFIG_DEFAULTS{'RSERVE_PORT'} =~ /^[0-9][0-9]*$/ ) { + print $CONFIG_DEFAULTS{'RSERVE_HOST'} . " does not look like a valid port number,\n"; + print "defaulting to 6311.\n\n"; + + $CONFIG_DEFAULTS{'RSERVE_PORT'} = 6311; +} + +my ( $rserve_iaddr, $rserve_paddr, $rserve_proto ); + +unless ( $rserve_iaddr = inet_aton( $CONFIG_DEFAULTS{'RSERVE_HOST'} ) ) { + print STDERR "Could not look up $CONFIG_DEFAULTS{'RSERVE_HOST'},\n"; + print STDERR "the host you specified as your R server.\n"; + print STDERR "\nDVN can function without a working R server, but\n"; + print STDERR "much of the functionality concerning running statistics\n"; + print STDERR "and analysis on quantitative data will not be available.\n"; + print STDERR "Please consult the Installers guide for more info.\n"; + + exit 0; +} + +$rserve_paddr = sockaddr_in( $CONFIG_DEFAULTS{'RSERVE_PORT'}, $rserve_iaddr ); +$rserve_proto = getprotobyname('tcp'); + +unless ( socket( SOCK, PF_INET, SOCK_STREAM, $rserve_proto ) + && connect( SOCK, $rserve_paddr ) ) +{ + print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'RSERVE_HOST'}\n"; + print STDERR "on port $CONFIG_DEFAULTS{'RSERVE_PORT'}, the address you provided\n"; + print STDERR "for your R server.\n"; + print STDERR "DVN can function without a working R server, but\n"; + print STDERR 
"much of the functionality concerning running statistics\n"; + print STDERR "and analysis on quantitative data will not be available.\n"; + print STDERR "Please consult the \"Installing R\" section in the Installers guide\n"; + print STDERR "for more info.\n"; + + exit 0; + +} + +close(SOCK); +print "\nOK!\n"; + +sub setup_glassfish { + my $success = 1; + my $failure = 0; + + # We are going to run a standalone shell script with a bunch of asadmin + # commands to set up all the glassfish components for the application. + # All the parameters must be passed to that script as environmental + # variables: + + $ENV{'GLASSFISH_ROOT'} = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}; + $ENV{'GLASSFISH_DOMAIN'} = "domain1"; + $ENV{'ASADMIN_OPTS'} = ""; + $ENV{'MEM_HEAP_SIZE'} = $CONFIG_DEFAULTS{'DEF_MEM_SIZE'}; + + $ENV{'DB_PORT'} = $CONFIG_DEFAULTS{'POSTGRES_PORT'}; + $ENV{'DB_HOST'} = $CONFIG_DEFAULTS{'POSTGRES_SERVER'}; + $ENV{'DB_NAME'} = $CONFIG_DEFAULTS{'POSTGRES_DATABASE'}; + $ENV{'DB_USER'} = $CONFIG_DEFAULTS{'POSTGRES_USER'}; + $ENV{'DB_PASS'} = $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'}; + + $ENV{'RSERVE_HOST'} = $CONFIG_DEFAULTS{'RSERVE_HOST'}; + $ENV{'RSERVE_PORT'} = $CONFIG_DEFAULTS{'RSERVE_PORT'}; + $ENV{'RSERVE_USER'} = $CONFIG_DEFAULTS{'RSERVE_USER'}; + $ENV{'RSERVE_PASS'} = $CONFIG_DEFAULTS{'RSERVE_PASSWORD'}; + + $ENV{'HOST_ADDRESS'} = $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'}; + $ENV{'SMTP_SERVER'} = $CONFIG_DEFAULTS{'MAIL_SERVER'}; + $ENV{'FILES_DIR'} = + $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . "/glassfish/domains/" . $ENV{'GLASSFISH_DOMAIN'} . "/files"; + + system("./glassfish-setup.sh"); + + if ($?) { + return $failure; + } + return $success; +} + +sub create_pg_hash { + my $pg_username = shift @_; + my $pg_password = shift @_; + + my $encode_line = $pg_password . 
$pg_username; + + # for Redhat: + + ##print STDERR "executing /bin/echo -n $encode_line | md5sum\n"; + + my $hash; + if ( $WORKING_OS eq "MacOSX" ) { + $hash = `/bin/echo -n $encode_line | md5`; + } + else { + $hash = `/bin/echo -n $encode_line | md5sum`; + } + + chop $hash; + + $hash =~ s/ \-$//; + + if ( ( length($hash) != 32 ) || ( $hash !~ /^[0-9a-f]*$/ ) ) { + print STDERR "Failed to generate a MD5-encrypted password hash for the Postgres database.\n"; + exit 1; + } + + return $hash; +} + +sub validate_smtp_server { + my ( $mail_server_iaddr, $mail_server__paddr, $mail_server_proto, $mail_server_status ); + + $mail_server_status = 1; + + unless ( $mail_server_iaddr = inet_aton( $CONFIG_DEFAULTS{'MAIL_SERVER'} ) ) { + print STDERR "Could not look up $CONFIG_DEFAULTS{'MAIL_SERVER'},\n"; + print STDERR "the host you specified as your mail server\n"; + $mail_server_status = 0; + } + + if ($mail_server_status) { + my $mail_server_paddr = sockaddr_in( 25, $mail_server_iaddr ); + $mail_server_proto = getprotobyname('tcp'); + + unless ( socket( SOCK, PF_INET, SOCK_STREAM, $mail_server_proto ) + && connect( SOCK, $mail_server_paddr ) ) + { + print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'MAIL_SERVER'},\n"; + print STDERR "the address you provided for your Mail server.\n"; + print STDERR "Please select a valid mail server, and try again.\n\n"; + + $mail_server_status = 0; + } + + close(SOCK); + } + + return $mail_server_status; +} + +# support function for set_root_contact_email +sub search_replace_file +{ + my ($infile, $pattern, $replacement, $outfile) = @_; + open (my $inp, $infile); + local $/ = undef; + my $txt = <$inp>; + close $inp; + $txt =~s/$pattern/$replacement/g; + open (my $opf, '>:encoding(UTF-8)', $outfile); + print $opf $txt; + close $opf; + return; +} +# set the email address for the default `dataverseAdmin` account +sub set_root_contact_email +{ + my ($contact_email) = @_; + my $config_json = "data/user-admin.json"; + 
search_replace_file($config_json,"\"email\":\"dataverse\@mailinator.com\"","\"email\":\"$contact_email\"",$config_json); + return; +} + diff --git a/postgresql/testdata/scripts/installer/pgdriver/postgresql-42.1.4.jar b/postgresql/testdata/scripts/installer/pgdriver/postgresql-42.1.4.jar new file mode 100644 index 0000000..08a54b1 Binary files /dev/null and b/postgresql/testdata/scripts/installer/pgdriver/postgresql-42.1.4.jar differ diff --git a/postgresql/testdata/scripts/installer/pgdriver/postgresql-8.4-703.jdbc4.jar b/postgresql/testdata/scripts/installer/pgdriver/postgresql-8.4-703.jdbc4.jar new file mode 100644 index 0000000..7c8d5f8 Binary files /dev/null and b/postgresql/testdata/scripts/installer/pgdriver/postgresql-8.4-703.jdbc4.jar differ diff --git a/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.0-802.jdbc4.jar b/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.0-802.jdbc4.jar new file mode 100644 index 0000000..9e16af0 Binary files /dev/null and b/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.0-802.jdbc4.jar differ diff --git a/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar b/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar new file mode 100644 index 0000000..078f379 Binary files /dev/null and b/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar differ diff --git a/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.2-1004.jdbc4.jar b/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.2-1004.jdbc4.jar new file mode 100644 index 0000000..b9270d2 Binary files /dev/null and b/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.2-1004.jdbc4.jar differ diff --git a/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.3-1104.jdbc4.jar b/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.3-1104.jdbc4.jar new file mode 100644 index 0000000..a79525d Binary files /dev/null and 
b/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.3-1104.jdbc4.jar differ diff --git a/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.4.1212.jar b/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.4.1212.jar new file mode 100644 index 0000000..b0de752 Binary files /dev/null and b/postgresql/testdata/scripts/installer/pgdriver/postgresql-9.4.1212.jar differ diff --git a/postgresql/testdata/scripts/issues/1262/create-sparrow1 b/postgresql/testdata/scripts/issues/1262/create-sparrow1 new file mode 100755 index 0000000..6837ca5 --- /dev/null +++ b/postgresql/testdata/scripts/issues/1262/create-sparrow1 @@ -0,0 +1,2 @@ +#!/bin/sh +curl -s -X POST -H "Content-type:application/json" -d @scripts/issues/1262/sparrow1.json "http://localhost:8080/api/dataverses/sparrows/datasets/?key=$SPARROWKEY" diff --git a/postgresql/testdata/scripts/issues/1262/search-sparrow b/postgresql/testdata/scripts/issues/1262/search-sparrow new file mode 100755 index 0000000..393baf7 --- /dev/null +++ b/postgresql/testdata/scripts/issues/1262/search-sparrow @@ -0,0 +1,3 @@ +#!/bin/sh +# relies on experimental SearchApiNonPublicAllowed feature, see https://github.com/IQSS/dataverse/issues/1299 +curl "http://localhost:8080/api/search?key=$SPARROWKEY&show_relevance=true&q=sparrow" diff --git a/postgresql/testdata/scripts/issues/1262/sparrow1.json b/postgresql/testdata/scripts/issues/1262/sparrow1.json new file mode 100644 index 0000000..9235d60 --- /dev/null +++ b/postgresql/testdata/scripts/issues/1262/sparrow1.json @@ -0,0 +1,78 @@ +{ + "datasetVersion": { + "metadataBlocks": { + "citation": { + "displayName": "Citation Metadata", + "fields": [ + { + "typeName": "title", + "multiple": false, + "typeClass": "primitive", + "value": "The Sparrow" + }, + { + "typeName": "author", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "authorName": { + "typeName": "authorName", + "multiple": false, + "typeClass": "primitive", + "value": "Hoxha, Adil" + } + } 
+ ] + }, + { + "typeName": "datasetContact", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "datasetContactEmail": { + "value": "sparrow@mailinator.com", + "typeName": "datasetContactEmail", + "multiple": false, + "typeClass": "primitive" + } + } + ] + }, + { + "typeName": "dsDescription", + "multiple": true, + "typeClass": "compound", + "value": [ + { + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "typeClass": "primitive", + "multiple": false, + "value": "The habits and habitats of Albanian sparrows." + } + }, + { + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "typeClass": "primitive", + "multiple": false, + "value": "Sparrows as shutterbugs." + } + } + ] + }, + { + "typeName": "subject", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": [ + "Medicine, Health & Life Sciences" + ] + } + ] + } + } + } +} diff --git a/postgresql/testdata/scripts/issues/1380/01-add.localhost.sh b/postgresql/testdata/scripts/issues/1380/01-add.localhost.sh new file mode 100755 index 0000000..331011d --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/01-add.localhost.sh @@ -0,0 +1,2 @@ +# Add the localhost group to the system. +curl -X POST -H"Content-Type:application/json" -d@../../api/data/ipGroup-localhost.json localhost:8080/api/admin/groups/ip diff --git a/postgresql/testdata/scripts/issues/1380/02-build-dv-structure.sh b/postgresql/testdata/scripts/issues/1380/02-build-dv-structure.sh new file mode 100755 index 0000000..f0936e3 --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/02-build-dv-structure.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +echo Run this after running setup-users.sh, and making Pete an +echo admin on the root dataverse. 
+ + +PETE=$(grep :result: users.out | grep Pete | cut -f4 -d: | tr -d \ ) +UMA=$(grep :result: users.out | grep Uma | cut -f4 -d: | tr -d \ ) + +pushd ../../api +./setup-dvs.sh $PETE $UMA +popd diff --git a/postgresql/testdata/scripts/issues/1380/add-ip-group.sh b/postgresql/testdata/scripts/issues/1380/add-ip-group.sh new file mode 100755 index 0000000..2fba944 --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/add-ip-group.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +# Add the passed group to the system. +curl -X POST -H"Content-Type:application/json" -d@../../api/data/$1 localhost:8080/api/admin/groups/ip diff --git a/postgresql/testdata/scripts/issues/1380/add-user b/postgresql/testdata/scripts/issues/1380/add-user new file mode 100755 index 0000000..1781181 --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/add-user @@ -0,0 +1,3 @@ +#!/bin/bash +# add-user dv group user api-token +curl -H "Content-type:application/json" -X POST -d"[$3]" localhost:8080/api/dataverses/$1/groups/$2/roleAssignees?key=$4 diff --git a/postgresql/testdata/scripts/issues/1380/data/3-eg1.json b/postgresql/testdata/scripts/issues/1380/data/3-eg1.json new file mode 100644 index 0000000..a874d69 --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/data/3-eg1.json @@ -0,0 +1 @@ +["&explicit/3-eg1"] diff --git a/postgresql/testdata/scripts/issues/1380/data/guest.json b/postgresql/testdata/scripts/issues/1380/data/guest.json new file mode 100644 index 0000000..3e4188a --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/data/guest.json @@ -0,0 +1 @@ +[":guest"] diff --git a/postgresql/testdata/scripts/issues/1380/data/locals.json b/postgresql/testdata/scripts/issues/1380/data/locals.json new file mode 100644 index 0000000..8bb5e3e --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/data/locals.json @@ -0,0 +1 @@ +["&ip/localhost"] diff --git a/postgresql/testdata/scripts/issues/1380/data/pete.json b/postgresql/testdata/scripts/issues/1380/data/pete.json new file 
mode 100644 index 0000000..298e813 --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/data/pete.json @@ -0,0 +1 @@ +["@pete"] diff --git a/postgresql/testdata/scripts/issues/1380/data/uma.json b/postgresql/testdata/scripts/issues/1380/data/uma.json new file mode 100644 index 0000000..3caf8c5 --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/data/uma.json @@ -0,0 +1 @@ +["@uma"] diff --git a/postgresql/testdata/scripts/issues/1380/db-list-dvs b/postgresql/testdata/scripts/issues/1380/db-list-dvs new file mode 100755 index 0000000..4161f7f --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/db-list-dvs @@ -0,0 +1 @@ +psql dvndb -c "select dvobject.id, name, alias, owner_id from dvobject inner join dataverse on dvobject.id = dataverse.id" diff --git a/postgresql/testdata/scripts/issues/1380/delete-ip-group b/postgresql/testdata/scripts/issues/1380/delete-ip-group new file mode 100755 index 0000000..b6138d9 --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/delete-ip-group @@ -0,0 +1,9 @@ +#/bin/bahx +if [ $# -eq 0 ] + then + echo "Please provide IP group id" + echo "e.g $0 845" + exit 1 +fi + +curl -X DELETE http://localhost:8080/api/admin/groups/ip/$1 diff --git a/postgresql/testdata/scripts/issues/1380/dvs.gv b/postgresql/testdata/scripts/issues/1380/dvs.gv new file mode 100644 index 0000000..5260660 --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/dvs.gv @@ -0,0 +1,19 @@ +digraph { +d1[label="Root"] +d2[label="Top dataverse of Pete"] +d3[label="Pete's public place"] +d4[label="Pete's restricted data"] +d5[label="Pete's secrets"] +d6[label="Top dataverse of Uma"] +d7[label="Uma's first"] +d8[label="Uma's restricted"] + +d1 -> d2 +d2 -> d3 +d2 -> d4 +d2 -> d5 +d1 -> d6 +d6 -> d7 +d6 -> d8 + +} diff --git a/postgresql/testdata/scripts/issues/1380/dvs.pdf b/postgresql/testdata/scripts/issues/1380/dvs.pdf new file mode 100644 index 0000000..5169f44 Binary files /dev/null and b/postgresql/testdata/scripts/issues/1380/dvs.pdf 
differ diff --git a/postgresql/testdata/scripts/issues/1380/explicitGroup1.json b/postgresql/testdata/scripts/issues/1380/explicitGroup1.json new file mode 100644 index 0000000..337a0b6 --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/explicitGroup1.json @@ -0,0 +1,5 @@ +{ + "description":"Sample Explicit Group", + "displayName":"Close Collaborators", + "aliasInOwner":"eg1" +} diff --git a/postgresql/testdata/scripts/issues/1380/explicitGroup2.json b/postgresql/testdata/scripts/issues/1380/explicitGroup2.json new file mode 100644 index 0000000..fbac263 --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/explicitGroup2.json @@ -0,0 +1,5 @@ +{ + "description":"Sample Explicit Group", + "displayName":"Not-So-Close Collaborators", + "aliasInOwner":"eg2" +} diff --git a/postgresql/testdata/scripts/issues/1380/keys.txt b/postgresql/testdata/scripts/issues/1380/keys.txt new file mode 100644 index 0000000..9dc47d3 --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/keys.txt @@ -0,0 +1,3 @@ +Keys for P e t e and U m a. Produced by running setup-all.sh from the /scripts/api folder. +Pete:757a6493-456a-4bf0-943e-9b559d551a3f +Uma:8797f19b-b8aa-4f96-a789-1b99506f2eab diff --git a/postgresql/testdata/scripts/issues/1380/list-groups-for b/postgresql/testdata/scripts/issues/1380/list-groups-for new file mode 100755 index 0000000..063b92c --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/list-groups-for @@ -0,0 +1,2 @@ +#!/bin/bash +curl -s -X GET http://localhost:8080/api/test/explicitGroups/$1 | jq . diff --git a/postgresql/testdata/scripts/issues/1380/list-ip-groups.sh b/postgresql/testdata/scripts/issues/1380/list-ip-groups.sh new file mode 100755 index 0000000..fba29cc --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/list-ip-groups.sh @@ -0,0 +1,2 @@ +#!/bin/bash +curl -X GET http://localhost:8080/api/admin/groups/ip | jq . 
diff --git a/postgresql/testdata/scripts/issues/1380/truth-table.numbers b/postgresql/testdata/scripts/issues/1380/truth-table.numbers new file mode 100644 index 0000000..86f6738 Binary files /dev/null and b/postgresql/testdata/scripts/issues/1380/truth-table.numbers differ diff --git a/postgresql/testdata/scripts/issues/1380/users.out b/postgresql/testdata/scripts/issues/1380/users.out new file mode 100644 index 0000000..337b9e2 --- /dev/null +++ b/postgresql/testdata/scripts/issues/1380/users.out @@ -0,0 +1,6 @@ +{"status":"OK","data":{"user":{"id":4,"firstName":"Gabbi","lastName":"Guest","userName":"gabbi","affiliation":"low","position":"A Guest","email":"gabbi@malinator.com"},"authenticatedUser":{"id":4,"identifier":"@gabbi","displayName":"Gabbi Guest","firstName":"Gabbi","lastName":"Guest","email":"gabbi@malinator.com","superuser":false,"affiliation":"low","position":"A Guest","persistentUserId":"gabbi","authenticationProviderId":"builtin"},"apiToken":"d1940786-c315-491e-9812-a8ff809289cc"}} +{"status":"OK","data":{"user":{"id":5,"firstName":"Cathy","lastName":"Collaborator","userName":"cathy","affiliation":"mid","position":"Data Scientist","email":"cathy@malinator.com"},"authenticatedUser":{"id":5,"identifier":"@cathy","displayName":"Cathy Collaborator","firstName":"Cathy","lastName":"Collaborator","email":"cathy@malinator.com","superuser":false,"affiliation":"mid","position":"Data Scientist","persistentUserId":"cathy","authenticationProviderId":"builtin"},"apiToken":"0ddfcb1e-fb51-4ce7-88ab-308b23e13e9a"}} +{"status":"OK","data":{"user":{"id":6,"firstName":"Nick","lastName":"NSA","userName":"nick","affiliation":"gov","position":"Signals Intelligence","email":"nick@malinator.com"},"authenticatedUser":{"id":6,"identifier":"@nick","displayName":"Nick NSA","firstName":"Nick","lastName":"NSA","email":"nick@malinator.com","superuser":false,"affiliation":"gov","position":"Signals 
Intelligence","persistentUserId":"nick","authenticationProviderId":"builtin"},"apiToken":"6d74745d-1733-459a-ae29-422110056ec0"}} +reporting API keys +:result: Pete's key is: 757a6493-456a-4bf0-943e-9b559d551a3f +:result: Uma's key is: 8797f19b-b8aa-4f96-a789-1b99506f2eab \ No newline at end of file diff --git a/postgresql/testdata/scripts/issues/2013/download-zip.sh b/postgresql/testdata/scripts/issues/2013/download-zip.sh new file mode 100755 index 0000000..dd801d4 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2013/download-zip.sh @@ -0,0 +1,5 @@ +#!/bin/bash +APACHE_PORT=8888 +GLASSFISH_PORT=8088 +PORT=$APACHE_PORT +count=0; while true; do echo "downloading 4 GB file as zip attempt $((++count))"; curl -s http://127.0.0.1:$PORT/api/access/datafiles/3 > /tmp/3; done diff --git a/postgresql/testdata/scripts/issues/2013/hit-homepage.sh b/postgresql/testdata/scripts/issues/2013/hit-homepage.sh new file mode 100755 index 0000000..41be470 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2013/hit-homepage.sh @@ -0,0 +1,2 @@ +#!/bin/bash +count=0; while true; echo "hitting homepage attempt $((++count))"; do (curl -s -i http://127.0.0.1:8888 | head -9); sleep 3; done diff --git a/postgresql/testdata/scripts/issues/2021/sort-files b/postgresql/testdata/scripts/issues/2021/sort-files new file mode 100755 index 0000000..e3abc6b --- /dev/null +++ b/postgresql/testdata/scripts/issues/2021/sort-files @@ -0,0 +1,4 @@ +#!/bin/bash -x +OUT=`curl -s "http://localhost:8080/api/admin/index/filemetadata/50825?maxResults=0&sort=$1&order=$2"` +echo $OUT +echo $OUT | jq . 
diff --git a/postgresql/testdata/scripts/issues/2036/delete-ned-assignment b/postgresql/testdata/scripts/issues/2036/delete-ned-assignment new file mode 100755 index 0000000..0b5fe43 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2036/delete-ned-assignment @@ -0,0 +1,7 @@ +#!/bin/sh +~/.homebrew/bin/psql -c " +select * from roleassignment where assigneeidentifier = '@ned' +" dataverse_db +~/.homebrew/bin/psql -c " +delete from roleassignment where assigneeidentifier = '@ned' +" dataverse_db diff --git a/postgresql/testdata/scripts/issues/2036/grant-role-then-revoke b/postgresql/testdata/scripts/issues/2036/grant-role-then-revoke new file mode 100755 index 0000000..4049b73 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2036/grant-role-then-revoke @@ -0,0 +1,22 @@ +#!/bin/sh +SERVER=http://localhost:8080 +if [ -z "$1" ]; then + DATAVERSE=togo +else + DATAVERSE=$1 +fi +USERID="@pdurbin" +ROLE=admin + +echo "Assigning $ROLE to $USERID on $DATAVERSE..." +OUT_ASSIGN=`time curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \"$USERID\",\"role\": \"$ROLE\"}" "$SERVER/api/dataverses/$DATAVERSE/assignments?key=$API_TOKEN"` +echo $OUT_ASSIGN | jq '.data | {assignee,_roleAlias}' + +echo "Retrieving ID of role to revoke..." +ASSIGNMENTS=`time curl -s "$SERVER/api/dataverses/$DATAVERSE/assignments?key=$API_TOKEN"` +echo $ASSIGNMENTS | jq ".data[] | select(.assignee==\"$USERID\") | ." + +echo "Revoking $ROLE from $USERID on $DATAVERSE..." 
+ID_TO_REVOKE=`echo $ASSIGNMENTS | jq ".data[] | select(.assignee==\"$USERID\") | .id"` +OUT_REVOKE=`time curl -s -X DELETE "$SERVER/api/dataverses/$DATAVERSE/assignments/$ID_TO_REVOKE?key=$API_TOKEN"` +echo $OUT_REVOKE | jq '.data.message' diff --git a/postgresql/testdata/scripts/issues/2102/dataset-metadata-next.json b/postgresql/testdata/scripts/issues/2102/dataset-metadata-next.json new file mode 100644 index 0000000..a7619d7 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2102/dataset-metadata-next.json @@ -0,0 +1,105 @@ +{ + "id": 1, + "versionNumber": 1, + "versionMinorNumber": 0, + "versionState": "RELEASED", + "productionDate": "Production Date", + "lastUpdateTime": "2015-06-08T19:40:14Z", + "releaseTime": "2015-06-08T19:40:14Z", + "createTime": "2015-06-08T17:30:09Z", + "metadataBlocks": { + "citation": { + "displayName": "Citation Metadata", + "fields": [{ + "typeName": "title", + "multiple": false, + "typeClass": "primitive", + "value": "Second Version of Sample Dataset" + }, { + "typeName": "author", + "multiple": true, + "typeClass": "compound", + "value": [{ + "authorName": { + "typeName": "authorName", + "multiple": false, + "typeClass": "primitive", + "value": "Admin, Dataverse" + }, + "authorAffiliation": { + "typeName": "authorAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Dataverse.org" + } + }] + }, { + "typeName": "datasetContact", + "multiple": true, + "typeClass": "compound", + "value": [{ + "datasetContactName": { + "typeName": "datasetContactName", + "multiple": false, + "typeClass": "primitive", + "value": "Admin, Dataverse" + }, + "datasetContactAffiliation": { + "typeName": "datasetContactAffiliation", + "multiple": false, + "typeClass": "primitive", + "value": "Dataverse.org" + }, + "datasetContactEmail": { + "typeName": "datasetContactEmail", + "multiple": false, + "typeClass": "primitive", + "value": "dataverse@mailinator.com" + } + }] + }, { + "typeName": "dsDescription", + "multiple": true, + 
"typeClass": "compound", + "value": [{ + "dsDescriptionValue": { + "typeName": "dsDescriptionValue", + "multiple": false, + "typeClass": "primitive", + "value": "We need to add files to this Dataset." + } + }] + }, { + "typeName": "subject", + "multiple": true, + "typeClass": "controlledVocabulary", + "value": ["Arts and Humanities"] + }, { + "typeName": "depositor", + "multiple": false, + "typeClass": "primitive", + "value": "Admin, Dataverse" + }, { + "typeName": "dateOfDeposit", + "multiple": false, + "typeClass": "primitive", + "value": "2015-06-08" + }] + } + }, + "files": [{ + "description": "This is a description of the file.", + "label": "2001, Palestinian Proposal at the Taba Conference.kmz", + "version": 1, + "datasetVersionId": 1, + "datafile": { + "id": 4, + "name": "2001, Palestinian Proposal at the Taba Conference.kmz", + "contentType": "application/vnd.google-earth.kmz", + "filename": "14dd48f37d9-68789d517db2", + "originalFormatLabel": "UNKNOWN", + "md5": "cfaad1e9562443bb07119fcdbe11ccd2", + "description": "This is a description of the file." 
+ } + }] +} diff --git a/postgresql/testdata/scripts/issues/2102/dataset-metadata.json b/postgresql/testdata/scripts/issues/2102/dataset-metadata.json new file mode 100644 index 0000000..7b92d27 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2102/dataset-metadata.json @@ -0,0 +1 @@ +{"status":"OK","data":{"id":1,"versionNumber":1,"versionMinorNumber":0,"versionState":"RELEASED","productionDate":"Production Date","lastUpdateTime":"2015-06-08T19:40:14Z","releaseTime":"2015-06-08T19:40:14Z","createTime":"2015-06-08T17:30:09Z","metadataBlocks":{"citation":{"displayName":"Citation Metadata","fields":[{"typeName":"title","multiple":false,"typeClass":"primitive","value":"Sample Dataset"},{"typeName":"author","multiple":true,"typeClass":"compound","value":[{"authorName":{"typeName":"authorName","multiple":false,"typeClass":"primitive","value":"Admin, Dataverse"},"authorAffiliation":{"typeName":"authorAffiliation","multiple":false,"typeClass":"primitive","value":"Dataverse.org"}}]},{"typeName":"datasetContact","multiple":true,"typeClass":"compound","value":[{"datasetContactName":{"typeName":"datasetContactName","multiple":false,"typeClass":"primitive","value":"Admin, Dataverse"},"datasetContactAffiliation":{"typeName":"datasetContactAffiliation","multiple":false,"typeClass":"primitive","value":"Dataverse.org"},"datasetContactEmail":{"typeName":"datasetContactEmail","multiple":false,"typeClass":"primitive","value":"dataverse@mailinator.com"}}]},{"typeName":"dsDescription","multiple":true,"typeClass":"compound","value":[{"dsDescriptionValue":{"typeName":"dsDescriptionValue","multiple":false,"typeClass":"primitive","value":"We need to add files to this Dataset."}}]},{"typeName":"subject","multiple":true,"typeClass":"controlledVocabulary","value":["Arts and Humanities"]},{"typeName":"depositor","multiple":false,"typeClass":"primitive","value":"Admin, 
Dataverse"},{"typeName":"dateOfDeposit","multiple":false,"typeClass":"primitive","value":"2015-06-08"}]}},"files":[{"description":"This is a description of the file.","label":"2001, Palestinian Proposal at the Taba Conference.kmz","version":1,"datasetVersionId":1,"datafile":{"id":4,"name":"2001, Palestinian Proposal at the Taba Conference.kmz","contentType":"application/vnd.google-earth.kmz","filename":"14dd48f37d9-68789d517db2","originalFormatLabel":"UNKNOWN","md5":"cfaad1e9562443bb07119fcdbe11ccd2","description":"This is a description of the file."}}]}} \ No newline at end of file diff --git a/postgresql/testdata/scripts/issues/2102/ready-state.sql b/postgresql/testdata/scripts/issues/2102/ready-state.sql new file mode 100644 index 0000000..96ccf58 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2102/ready-state.sql @@ -0,0 +1,7269 @@ +-- +-- PostgreSQL database dump +-- + +SET statement_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SET check_function_bodies = false; +SET client_min_messages = warning; + +-- +-- Name: plpgsql; Type: EXTENSION; Schema: -; Owner: +-- + +CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog; + + +-- +-- Name: EXTENSION plpgsql; Type: COMMENT; Schema: -; Owner: +-- + +COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language'; + + +SET search_path = public, pg_catalog; + +SET default_tablespace = ''; + +SET default_with_oids = false; + +-- +-- Name: actionlogrecord; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE actionlogrecord ( + id character varying(36) NOT NULL, + actionresult character varying(255), + actionsubtype character varying(255), + actiontype character varying(255), + endtime timestamp without time zone, + info character varying(1024), + starttime timestamp without time zone, + useridentifier character varying(255) +); + + +ALTER TABLE public.actionlogrecord OWNER TO dataverse_app; + +-- +-- Name: apitoken; Type: TABLE; Schema: 
public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE apitoken ( + id integer NOT NULL, + createtime timestamp without time zone NOT NULL, + disabled boolean NOT NULL, + expiretime timestamp without time zone NOT NULL, + tokenstring character varying(255) NOT NULL, + authenticateduser_id bigint NOT NULL +); + + +ALTER TABLE public.apitoken OWNER TO dataverse_app; + +-- +-- Name: apitoken_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE apitoken_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.apitoken_id_seq OWNER TO dataverse_app; + +-- +-- Name: apitoken_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE apitoken_id_seq OWNED BY apitoken.id; + + +-- +-- Name: apitoken_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('apitoken_id_seq', 1, true); + + +-- +-- Name: authenticateduser; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE authenticateduser ( + id integer NOT NULL, + affiliation character varying(255), + email character varying(255) NOT NULL, + firstname character varying(255), + lastname character varying(255), + modificationtime timestamp without time zone, + name character varying(255), + "position" character varying(255), + superuser boolean, + useridentifier character varying(255) NOT NULL +); + + +ALTER TABLE public.authenticateduser OWNER TO dataverse_app; + +-- +-- Name: authenticateduser_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE authenticateduser_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.authenticateduser_id_seq OWNER TO dataverse_app; + +-- +-- Name: authenticateduser_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE authenticateduser_id_seq OWNED BY authenticateduser.id; + + +-- +-- Name: 
authenticateduser_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('authenticateduser_id_seq', 1, true); + + +-- +-- Name: authenticateduserlookup; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE authenticateduserlookup ( + id integer NOT NULL, + authenticationproviderid character varying(255), + persistentuserid character varying(255), + authenticateduser_id bigint NOT NULL +); + + +ALTER TABLE public.authenticateduserlookup OWNER TO dataverse_app; + +-- +-- Name: authenticateduserlookup_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE authenticateduserlookup_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.authenticateduserlookup_id_seq OWNER TO dataverse_app; + +-- +-- Name: authenticateduserlookup_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE authenticateduserlookup_id_seq OWNED BY authenticateduserlookup.id; + + +-- +-- Name: authenticateduserlookup_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('authenticateduserlookup_id_seq', 1, true); + + +-- +-- Name: authenticationproviderrow; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE authenticationproviderrow ( + id character varying(255) NOT NULL, + enabled boolean, + factoryalias character varying(255), + factorydata text, + subtitle character varying(255), + title character varying(255) +); + + +ALTER TABLE public.authenticationproviderrow OWNER TO dataverse_app; + +-- +-- Name: builtinuser; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE builtinuser ( + id integer NOT NULL, + affiliation character varying(255), + email character varying(255) NOT NULL, + encryptedpassword character varying(255), + firstname character varying(255), + lastname character varying(255), + 
passwordencryptionversion integer, + "position" character varying(255), + username character varying(255) NOT NULL +); + + +ALTER TABLE public.builtinuser OWNER TO dataverse_app; + +-- +-- Name: builtinuser_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE builtinuser_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.builtinuser_id_seq OWNER TO dataverse_app; + +-- +-- Name: builtinuser_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE builtinuser_id_seq OWNED BY builtinuser.id; + + +-- +-- Name: builtinuser_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('builtinuser_id_seq', 1, true); + + +-- +-- Name: controlledvocabalternate; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE controlledvocabalternate ( + id integer NOT NULL, + strvalue text, + controlledvocabularyvalue_id bigint NOT NULL, + datasetfieldtype_id bigint NOT NULL +); + + +ALTER TABLE public.controlledvocabalternate OWNER TO dataverse_app; + +-- +-- Name: controlledvocabalternate_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE controlledvocabalternate_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.controlledvocabalternate_id_seq OWNER TO dataverse_app; + +-- +-- Name: controlledvocabalternate_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE controlledvocabalternate_id_seq OWNED BY controlledvocabalternate.id; + + +-- +-- Name: controlledvocabalternate_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('controlledvocabalternate_id_seq', 24, true); + + +-- +-- Name: controlledvocabularyvalue; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE controlledvocabularyvalue ( + id integer NOT 
NULL, + displayorder integer, + identifier character varying(255), + strvalue text, + datasetfieldtype_id bigint +); + + +ALTER TABLE public.controlledvocabularyvalue OWNER TO dataverse_app; + +-- +-- Name: controlledvocabularyvalue_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE controlledvocabularyvalue_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.controlledvocabularyvalue_id_seq OWNER TO dataverse_app; + +-- +-- Name: controlledvocabularyvalue_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE controlledvocabularyvalue_id_seq OWNED BY controlledvocabularyvalue.id; + + +-- +-- Name: controlledvocabularyvalue_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('controlledvocabularyvalue_id_seq', 824, true); + + +-- +-- Name: customfieldmap; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE customfieldmap ( + id integer NOT NULL, + sourcedatasetfield character varying(255), + sourcetemplate character varying(255), + targetdatasetfield character varying(255) +); + + +ALTER TABLE public.customfieldmap OWNER TO dataverse_app; + +-- +-- Name: customfieldmap_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE customfieldmap_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.customfieldmap_id_seq OWNER TO dataverse_app; + +-- +-- Name: customfieldmap_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE customfieldmap_id_seq OWNED BY customfieldmap.id; + + +-- +-- Name: customfieldmap_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('customfieldmap_id_seq', 1, false); + + +-- +-- Name: customquestion; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE customquestion 
( + id integer NOT NULL, + displayorder integer, + hidden boolean, + questionstring character varying(255) NOT NULL, + questiontype character varying(255) NOT NULL, + required boolean, + guestbook_id bigint NOT NULL +); + + +ALTER TABLE public.customquestion OWNER TO dataverse_app; + +-- +-- Name: customquestion_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE customquestion_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.customquestion_id_seq OWNER TO dataverse_app; + +-- +-- Name: customquestion_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE customquestion_id_seq OWNED BY customquestion.id; + + +-- +-- Name: customquestion_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('customquestion_id_seq', 1, false); + + +-- +-- Name: customquestionresponse; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE customquestionresponse ( + id integer NOT NULL, + response character varying(255), + customquestion_id bigint NOT NULL, + guestbookresponse_id bigint NOT NULL +); + + +ALTER TABLE public.customquestionresponse OWNER TO dataverse_app; + +-- +-- Name: customquestionresponse_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE customquestionresponse_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.customquestionresponse_id_seq OWNER TO dataverse_app; + +-- +-- Name: customquestionresponse_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE customquestionresponse_id_seq OWNED BY customquestionresponse.id; + + +-- +-- Name: customquestionresponse_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('customquestionresponse_id_seq', 1, false); + + +-- +-- Name: customquestionvalue; Type: TABLE; Schema: 
public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE customquestionvalue ( + id integer NOT NULL, + displayorder integer, + valuestring character varying(255) NOT NULL, + customquestion_id bigint NOT NULL +); + + +ALTER TABLE public.customquestionvalue OWNER TO dataverse_app; + +-- +-- Name: customquestionvalue_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE customquestionvalue_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.customquestionvalue_id_seq OWNER TO dataverse_app; + +-- +-- Name: customquestionvalue_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE customquestionvalue_id_seq OWNED BY customquestionvalue.id; + + +-- +-- Name: customquestionvalue_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('customquestionvalue_id_seq', 1, false); + + +-- +-- Name: datafile; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datafile ( + id bigint NOT NULL, + contenttype character varying(255) NOT NULL, + filesystemname character varying(255) NOT NULL, + filesize bigint, + ingeststatus character(1), + md5 character varying(255) NOT NULL, + name character varying(255), + restricted boolean +); + + +ALTER TABLE public.datafile OWNER TO dataverse_app; + +-- +-- Name: datafilecategory; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datafilecategory ( + id integer NOT NULL, + name character varying(255) NOT NULL, + dataset_id bigint NOT NULL +); + + +ALTER TABLE public.datafilecategory OWNER TO dataverse_app; + +-- +-- Name: datafilecategory_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datafilecategory_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datafilecategory_id_seq OWNER TO dataverse_app; + +-- +-- Name: 
datafilecategory_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datafilecategory_id_seq OWNED BY datafilecategory.id; + + +-- +-- Name: datafilecategory_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datafilecategory_id_seq', 1, true); + + +-- +-- Name: datafiletag; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datafiletag ( + id integer NOT NULL, + type integer NOT NULL, + datafile_id bigint NOT NULL +); + + +ALTER TABLE public.datafiletag OWNER TO dataverse_app; + +-- +-- Name: datafiletag_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datafiletag_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datafiletag_id_seq OWNER TO dataverse_app; + +-- +-- Name: datafiletag_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datafiletag_id_seq OWNED BY datafiletag.id; + + +-- +-- Name: datafiletag_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datafiletag_id_seq', 1, false); + + +-- +-- Name: dataset; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataset ( + id bigint NOT NULL, + authority character varying(255), + doiseparator character varying(255), + fileaccessrequest boolean, + globalidcreatetime timestamp without time zone, + identifier character varying(255) NOT NULL, + protocol character varying(255), + guestbook_id bigint, + thumbnailfile_id bigint +); + + +ALTER TABLE public.dataset OWNER TO dataverse_app; + +-- +-- Name: datasetfield; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetfield ( + id integer NOT NULL, + datasetfieldtype_id bigint NOT NULL, + datasetversion_id bigint, + parentdatasetfieldcompoundvalue_id bigint, + template_id bigint +); + + +ALTER TABLE 
public.datasetfield OWNER TO dataverse_app; + +-- +-- Name: datasetfield_controlledvocabularyvalue; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetfield_controlledvocabularyvalue ( + datasetfield_id bigint NOT NULL, + controlledvocabularyvalues_id bigint NOT NULL +); + + +ALTER TABLE public.datasetfield_controlledvocabularyvalue OWNER TO dataverse_app; + +-- +-- Name: datasetfield_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetfield_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datasetfield_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetfield_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetfield_id_seq OWNED BY datasetfield.id; + + +-- +-- Name: datasetfield_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datasetfield_id_seq', 14, true); + + +-- +-- Name: datasetfieldcompoundvalue; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetfieldcompoundvalue ( + id integer NOT NULL, + displayorder integer, + parentdatasetfield_id bigint +); + + +ALTER TABLE public.datasetfieldcompoundvalue OWNER TO dataverse_app; + +-- +-- Name: datasetfieldcompoundvalue_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetfieldcompoundvalue_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datasetfieldcompoundvalue_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetfieldcompoundvalue_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetfieldcompoundvalue_id_seq OWNED BY datasetfieldcompoundvalue.id; + + +-- +-- Name: datasetfieldcompoundvalue_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT 
pg_catalog.setval('datasetfieldcompoundvalue_id_seq', 3, true); + + +-- +-- Name: datasetfielddefaultvalue; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetfielddefaultvalue ( + id integer NOT NULL, + displayorder integer, + strvalue text, + datasetfield_id bigint NOT NULL, + defaultvalueset_id bigint NOT NULL, + parentdatasetfielddefaultvalue_id bigint +); + + +ALTER TABLE public.datasetfielddefaultvalue OWNER TO dataverse_app; + +-- +-- Name: datasetfielddefaultvalue_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetfielddefaultvalue_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datasetfielddefaultvalue_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetfielddefaultvalue_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetfielddefaultvalue_id_seq OWNED BY datasetfielddefaultvalue.id; + + +-- +-- Name: datasetfielddefaultvalue_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datasetfielddefaultvalue_id_seq', 1, false); + + +-- +-- Name: datasetfieldtype; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetfieldtype ( + id integer NOT NULL, + advancedsearchfieldtype boolean, + allowcontrolledvocabulary boolean, + allowmultiples boolean, + description text, + displayformat character varying(255), + displayoncreate boolean, + displayorder integer, + facetable boolean, + fieldtype character varying(255) NOT NULL, + name text, + required boolean, + title text, + watermark character varying(255), + metadatablock_id bigint, + parentdatasetfieldtype_id bigint +); + + +ALTER TABLE public.datasetfieldtype OWNER TO dataverse_app; + +-- +-- Name: datasetfieldtype_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetfieldtype_id_seq + START WITH 1 + INCREMENT BY 1 + NO 
MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datasetfieldtype_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetfieldtype_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetfieldtype_id_seq OWNED BY datasetfieldtype.id; + + +-- +-- Name: datasetfieldtype_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datasetfieldtype_id_seq', 154, true); + + +-- +-- Name: datasetfieldvalue; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetfieldvalue ( + id integer NOT NULL, + displayorder integer, + value text, + datasetfield_id bigint NOT NULL +); + + +ALTER TABLE public.datasetfieldvalue OWNER TO dataverse_app; + +-- +-- Name: datasetfieldvalue_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetfieldvalue_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datasetfieldvalue_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetfieldvalue_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetfieldvalue_id_seq OWNED BY datasetfieldvalue.id; + + +-- +-- Name: datasetfieldvalue_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datasetfieldvalue_id_seq', 9, true); + + +-- +-- Name: datasetlinkingdataverse; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetlinkingdataverse ( + id integer NOT NULL, + linkcreatetime timestamp without time zone NOT NULL, + dataset_id bigint NOT NULL, + linkingdataverse_id bigint NOT NULL +); + + +ALTER TABLE public.datasetlinkingdataverse OWNER TO dataverse_app; + +-- +-- Name: datasetlinkingdataverse_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetlinkingdataverse_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 
1; + + +ALTER TABLE public.datasetlinkingdataverse_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetlinkingdataverse_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetlinkingdataverse_id_seq OWNED BY datasetlinkingdataverse.id; + + +-- +-- Name: datasetlinkingdataverse_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datasetlinkingdataverse_id_seq', 1, false); + + +-- +-- Name: datasetlock; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetlock ( + id integer NOT NULL, + info character varying(255), + starttime timestamp without time zone, + user_id bigint NOT NULL, + dataset_id bigint NOT NULL +); + + +ALTER TABLE public.datasetlock OWNER TO dataverse_app; + +-- +-- Name: datasetlock_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetlock_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datasetlock_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetlock_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetlock_id_seq OWNED BY datasetlock.id; + + +-- +-- Name: datasetlock_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datasetlock_id_seq', 1, false); + + +-- +-- Name: datasetversion; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetversion ( + id integer NOT NULL, + unf character varying(255), + archivenote character varying(1000), + archivetime timestamp without time zone, + availabilitystatus text, + citationrequirements text, + conditions text, + confidentialitydeclaration text, + contactforaccess text, + createtime timestamp without time zone NOT NULL, + dataaccessplace text, + deaccessionlink character varying(255), + depositorrequirements text, + disclaimer text, + fileaccessrequest 
boolean, + inreview boolean, + lastupdatetime timestamp without time zone NOT NULL, + license character varying(255), + minorversionnumber bigint, + originalarchive text, + releasetime timestamp without time zone, + restrictions text, + sizeofcollection text, + specialpermissions text, + studycompletion text, + termsofaccess text, + termsofuse text, + version bigint, + versionnote character varying(1000), + versionnumber bigint, + versionstate character varying(255), + dataset_id bigint +); + + +ALTER TABLE public.datasetversion OWNER TO dataverse_app; + +-- +-- Name: datasetversion_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetversion_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datasetversion_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetversion_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetversion_id_seq OWNED BY datasetversion.id; + + +-- +-- Name: datasetversion_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datasetversion_id_seq', 1, true); + + +-- +-- Name: datasetversionuser; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datasetversionuser ( + id integer NOT NULL, + lastupdatedate timestamp without time zone NOT NULL, + authenticateduser_id bigint, + datasetversion_id bigint +); + + +ALTER TABLE public.datasetversionuser OWNER TO dataverse_app; + +-- +-- Name: datasetversionuser_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datasetversionuser_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datasetversionuser_id_seq OWNER TO dataverse_app; + +-- +-- Name: datasetversionuser_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datasetversionuser_id_seq OWNED BY datasetversionuser.id; 
+ + +-- +-- Name: datasetversionuser_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datasetversionuser_id_seq', 1, true); + + +-- +-- Name: datatable; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datatable ( + id integer NOT NULL, + casequantity bigint, + originalfileformat character varying(255), + originalformatversion character varying(255), + recordspercase bigint, + unf character varying(255) NOT NULL, + varquantity bigint, + datafile_id bigint NOT NULL +); + + +ALTER TABLE public.datatable OWNER TO dataverse_app; + +-- +-- Name: datatable_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datatable_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datatable_id_seq OWNER TO dataverse_app; + +-- +-- Name: datatable_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datatable_id_seq OWNED BY datatable.id; + + +-- +-- Name: datatable_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datatable_id_seq', 1, false); + + +-- +-- Name: datavariable; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE datavariable ( + id integer NOT NULL, + fileendposition bigint, + fileorder integer, + filestartposition bigint, + format character varying(255), + formatcategory character varying(255), + "interval" integer, + label text, + name character varying(255), + numberofdecimalpoints bigint, + orderedfactor boolean, + recordsegmentnumber bigint, + type integer, + unf character varying(255), + universe character varying(255), + weighted boolean, + datatable_id bigint NOT NULL +); + + +ALTER TABLE public.datavariable OWNER TO dataverse_app; + +-- +-- Name: datavariable_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE datavariable_id_seq + START WITH 1 + 
INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.datavariable_id_seq OWNER TO dataverse_app; + +-- +-- Name: datavariable_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE datavariable_id_seq OWNED BY datavariable.id; + + +-- +-- Name: datavariable_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('datavariable_id_seq', 1, false); + + +-- +-- Name: dataverse; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataverse ( + id bigint NOT NULL, + affiliation character varying(255), + alias character varying(255) NOT NULL, + dataversetype character varying(255) NOT NULL, + description text, + facetroot boolean, + guestbookroot boolean, + metadatablockroot boolean, + name character varying(255) NOT NULL, + permissionroot boolean, + templateroot boolean, + themeroot boolean, + defaultcontributorrole_id bigint NOT NULL, + defaulttemplate_id bigint +); + + +ALTER TABLE public.dataverse OWNER TO dataverse_app; + +-- +-- Name: dataverse_metadatablock; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataverse_metadatablock ( + dataverse_id bigint NOT NULL, + metadatablocks_id bigint NOT NULL +); + + +ALTER TABLE public.dataverse_metadatablock OWNER TO dataverse_app; + +-- +-- Name: dataversecontact; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataversecontact ( + id integer NOT NULL, + contactemail character varying(255) NOT NULL, + displayorder integer, + dataverse_id bigint +); + + +ALTER TABLE public.dataversecontact OWNER TO dataverse_app; + +-- +-- Name: dataversecontact_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE dataversecontact_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.dataversecontact_id_seq OWNER TO dataverse_app; + +-- +-- Name: 
dataversecontact_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE dataversecontact_id_seq OWNED BY dataversecontact.id; + + +-- +-- Name: dataversecontact_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('dataversecontact_id_seq', 2, true); + + +-- +-- Name: dataversefacet; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataversefacet ( + id integer NOT NULL, + displayorder integer, + datasetfieldtype_id bigint, + dataverse_id bigint +); + + +ALTER TABLE public.dataversefacet OWNER TO dataverse_app; + +-- +-- Name: dataversefacet_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE dataversefacet_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.dataversefacet_id_seq OWNER TO dataverse_app; + +-- +-- Name: dataversefacet_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE dataversefacet_id_seq OWNED BY dataversefacet.id; + + +-- +-- Name: dataversefacet_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('dataversefacet_id_seq', 4, true); + + +-- +-- Name: dataversefeatureddataverse; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataversefeatureddataverse ( + id integer NOT NULL, + displayorder integer, + dataverse_id bigint, + featureddataverse_id bigint +); + + +ALTER TABLE public.dataversefeatureddataverse OWNER TO dataverse_app; + +-- +-- Name: dataversefeatureddataverse_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE dataversefeatureddataverse_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.dataversefeatureddataverse_id_seq OWNER TO dataverse_app; + +-- +-- Name: dataversefeatureddataverse_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: 
dataverse_app +-- + +ALTER SEQUENCE dataversefeatureddataverse_id_seq OWNED BY dataversefeatureddataverse.id; + + +-- +-- Name: dataversefeatureddataverse_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('dataversefeatureddataverse_id_seq', 1, false); + + +-- +-- Name: dataversefieldtypeinputlevel; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataversefieldtypeinputlevel ( + id integer NOT NULL, + include boolean, + required boolean, + datasetfieldtype_id bigint, + dataverse_id bigint +); + + +ALTER TABLE public.dataversefieldtypeinputlevel OWNER TO dataverse_app; + +-- +-- Name: dataversefieldtypeinputlevel_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE dataversefieldtypeinputlevel_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.dataversefieldtypeinputlevel_id_seq OWNER TO dataverse_app; + +-- +-- Name: dataversefieldtypeinputlevel_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE dataversefieldtypeinputlevel_id_seq OWNED BY dataversefieldtypeinputlevel.id; + + +-- +-- Name: dataversefieldtypeinputlevel_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('dataversefieldtypeinputlevel_id_seq', 1, false); + + +-- +-- Name: dataverselinkingdataverse; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataverselinkingdataverse ( + id integer NOT NULL, + linkcreatetime timestamp without time zone, + dataverse_id bigint NOT NULL, + linkingdataverse_id bigint NOT NULL +); + + +ALTER TABLE public.dataverselinkingdataverse OWNER TO dataverse_app; + +-- +-- Name: dataverselinkingdataverse_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE dataverselinkingdataverse_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER 
TABLE public.dataverselinkingdataverse_id_seq OWNER TO dataverse_app; + +-- +-- Name: dataverselinkingdataverse_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE dataverselinkingdataverse_id_seq OWNED BY dataverselinkingdataverse.id; + + +-- +-- Name: dataverselinkingdataverse_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('dataverselinkingdataverse_id_seq', 1, false); + + +-- +-- Name: dataverserole; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataverserole ( + id integer NOT NULL, + alias character varying(255) NOT NULL, + description character varying(255), + name character varying(255) NOT NULL, + permissionbits bigint, + owner_id bigint +); + + +ALTER TABLE public.dataverserole OWNER TO dataverse_app; + +-- +-- Name: dataverserole_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE dataverserole_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.dataverserole_id_seq OWNER TO dataverse_app; + +-- +-- Name: dataverserole_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE dataverserole_id_seq OWNED BY dataverserole.id; + + +-- +-- Name: dataverserole_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('dataverserole_id_seq', 8, true); + + +-- +-- Name: dataversesubjects; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataversesubjects ( + dataverse_id bigint NOT NULL, + controlledvocabularyvalue_id bigint NOT NULL +); + + +ALTER TABLE public.dataversesubjects OWNER TO dataverse_app; + +-- +-- Name: dataversetheme; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dataversetheme ( + id integer NOT NULL, + backgroundcolor character varying(255), + linkcolor character varying(255), + linkurl character 
varying(255), + logo character varying(255), + logoalignment character varying(255), + logobackgroundcolor character varying(255), + logoformat character varying(255), + tagline character varying(255), + textcolor character varying(255), + dataverse_id bigint +); + + +ALTER TABLE public.dataversetheme OWNER TO dataverse_app; + +-- +-- Name: dataversetheme_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE dataversetheme_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.dataversetheme_id_seq OWNER TO dataverse_app; + +-- +-- Name: dataversetheme_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE dataversetheme_id_seq OWNED BY dataversetheme.id; + + +-- +-- Name: dataversetheme_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('dataversetheme_id_seq', 1, false); + + +-- +-- Name: defaultvalueset; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE defaultvalueset ( + id integer NOT NULL, + name character varying(255) NOT NULL +); + + +ALTER TABLE public.defaultvalueset OWNER TO dataverse_app; + +-- +-- Name: defaultvalueset_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE defaultvalueset_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.defaultvalueset_id_seq OWNER TO dataverse_app; + +-- +-- Name: defaultvalueset_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE defaultvalueset_id_seq OWNED BY defaultvalueset.id; + + +-- +-- Name: defaultvalueset_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('defaultvalueset_id_seq', 1, false); + + +-- +-- Name: dvobject; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE dvobject ( + id integer NOT NULL, + dtype character 
varying(31), + createdate timestamp without time zone NOT NULL, + indextime timestamp without time zone, + modificationtime timestamp without time zone NOT NULL, + permissionindextime timestamp without time zone, + permissionmodificationtime timestamp without time zone, + publicationdate timestamp without time zone, + creator_id bigint, + owner_id bigint, + releaseuser_id bigint +); + + +ALTER TABLE public.dvobject OWNER TO dataverse_app; + +-- +-- Name: dvobject_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE dvobject_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.dvobject_id_seq OWNER TO dataverse_app; + +-- +-- Name: dvobject_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE dvobject_id_seq OWNED BY dvobject.id; + + +-- +-- Name: dvobject_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('dvobject_id_seq', 4, true); + + +-- +-- Name: explicitgroup; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE explicitgroup ( + id integer NOT NULL, + description character varying(1024), + displayname character varying(255), + groupalias character varying(255), + groupaliasinowner character varying(255), + owner_id bigint +); + + +ALTER TABLE public.explicitgroup OWNER TO dataverse_app; + +-- +-- Name: explicitgroup_authenticateduser; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE explicitgroup_authenticateduser ( + explicitgroup_id bigint NOT NULL, + containedauthenticatedusers_id bigint NOT NULL +); + + +ALTER TABLE public.explicitgroup_authenticateduser OWNER TO dataverse_app; + +-- +-- Name: explicitgroup_containedroleassignees; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE explicitgroup_containedroleassignees ( + explicitgroup_id bigint, + containedroleassignees character varying(255) +); + 
+ +ALTER TABLE public.explicitgroup_containedroleassignees OWNER TO dataverse_app; + +-- +-- Name: explicitgroup_explicitgroup; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE explicitgroup_explicitgroup ( + explicitgroup_id bigint NOT NULL, + containedexplicitgroups_id bigint NOT NULL +); + + +ALTER TABLE public.explicitgroup_explicitgroup OWNER TO dataverse_app; + +-- +-- Name: explicitgroup_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE explicitgroup_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.explicitgroup_id_seq OWNER TO dataverse_app; + +-- +-- Name: explicitgroup_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE explicitgroup_id_seq OWNED BY explicitgroup.id; + + +-- +-- Name: explicitgroup_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('explicitgroup_id_seq', 1, false); + + +-- +-- Name: fileaccessrequests; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE fileaccessrequests ( + datafile_id bigint NOT NULL, + authenticated_user_id bigint NOT NULL +); + + +ALTER TABLE public.fileaccessrequests OWNER TO dataverse_app; + +-- +-- Name: filemetadata; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE filemetadata ( + id integer NOT NULL, + description text, + label character varying(255) NOT NULL, + restricted boolean, + version bigint, + datafile_id bigint NOT NULL, + datasetversion_id bigint NOT NULL +); + + +ALTER TABLE public.filemetadata OWNER TO dataverse_app; + +-- +-- Name: filemetadata_datafilecategory; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE filemetadata_datafilecategory ( + filecategories_id bigint NOT NULL, + filemetadatas_id bigint NOT NULL +); + + +ALTER TABLE public.filemetadata_datafilecategory OWNER TO dataverse_app; + 
+-- +-- Name: filemetadata_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE filemetadata_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.filemetadata_id_seq OWNER TO dataverse_app; + +-- +-- Name: filemetadata_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE filemetadata_id_seq OWNED BY filemetadata.id; + + +-- +-- Name: filemetadata_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('filemetadata_id_seq', 1, true); + + +-- +-- Name: foreignmetadatafieldmapping; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE foreignmetadatafieldmapping ( + id integer NOT NULL, + datasetfieldname text, + foreignfieldxpath text, + isattribute boolean, + foreignmetadataformatmapping_id bigint, + parentfieldmapping_id bigint +); + + +ALTER TABLE public.foreignmetadatafieldmapping OWNER TO dataverse_app; + +-- +-- Name: foreignmetadatafieldmapping_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE foreignmetadatafieldmapping_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.foreignmetadatafieldmapping_id_seq OWNER TO dataverse_app; + +-- +-- Name: foreignmetadatafieldmapping_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE foreignmetadatafieldmapping_id_seq OWNED BY foreignmetadatafieldmapping.id; + + +-- +-- Name: foreignmetadatafieldmapping_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('foreignmetadatafieldmapping_id_seq', 1, false); + + +-- +-- Name: foreignmetadataformatmapping; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE foreignmetadataformatmapping ( + id integer NOT NULL, + displayname character varying(255) NOT NULL, + name character varying(255) NOT 
NULL, + schemalocation character varying(255), + startelement character varying(255) +); + + +ALTER TABLE public.foreignmetadataformatmapping OWNER TO dataverse_app; + +-- +-- Name: foreignmetadataformatmapping_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE foreignmetadataformatmapping_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.foreignmetadataformatmapping_id_seq OWNER TO dataverse_app; + +-- +-- Name: foreignmetadataformatmapping_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE foreignmetadataformatmapping_id_seq OWNED BY foreignmetadataformatmapping.id; + + +-- +-- Name: foreignmetadataformatmapping_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('foreignmetadataformatmapping_id_seq', 1, false); + + +-- +-- Name: guestbook; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE guestbook ( + id integer NOT NULL, + createtime timestamp without time zone NOT NULL, + emailrequired boolean, + enabled boolean, + institutionrequired boolean, + name character varying(255), + namerequired boolean, + positionrequired boolean, + dataverse_id bigint +); + + +ALTER TABLE public.guestbook OWNER TO dataverse_app; + +-- +-- Name: guestbook_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE guestbook_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.guestbook_id_seq OWNER TO dataverse_app; + +-- +-- Name: guestbook_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE guestbook_id_seq OWNED BY guestbook.id; + + +-- +-- Name: guestbook_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('guestbook_id_seq', 1, false); + + +-- +-- Name: guestbookresponse; Type: TABLE; Schema: public; Owner: dataverse_app; 
Tablespace: +-- + +CREATE TABLE guestbookresponse ( + id integer NOT NULL, + downloadtype character varying(255), + email character varying(255), + institution character varying(255), + name character varying(255), + "position" character varying(255), + responsetime timestamp without time zone, + sessionid character varying(255), + authenticateduser_id bigint, + datafile_id bigint NOT NULL, + dataset_id bigint NOT NULL, + datasetversion_id bigint, + guestbook_id bigint NOT NULL +); + + +ALTER TABLE public.guestbookresponse OWNER TO dataverse_app; + +-- +-- Name: guestbookresponse_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE guestbookresponse_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.guestbookresponse_id_seq OWNER TO dataverse_app; + +-- +-- Name: guestbookresponse_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE guestbookresponse_id_seq OWNED BY guestbookresponse.id; + + +-- +-- Name: guestbookresponse_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('guestbookresponse_id_seq', 1, false); + + +-- +-- Name: harvestingdataverseconfig; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE harvestingdataverseconfig ( + id bigint NOT NULL, + archivedescription text, + archiveurl character varying(255), + harveststyle character varying(255), + harvesttype character varying(255), + harvestingset character varying(255), + harvestingurl character varying(255), + dataverse_id bigint +); + + +ALTER TABLE public.harvestingdataverseconfig OWNER TO dataverse_app; + +-- +-- Name: ingestreport; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE ingestreport ( + id integer NOT NULL, + endtime timestamp without time zone, + report character varying(255), + starttime timestamp without time zone, + status integer, + type integer, + 
datafile_id bigint NOT NULL +); + + +ALTER TABLE public.ingestreport OWNER TO dataverse_app; + +-- +-- Name: ingestreport_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE ingestreport_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.ingestreport_id_seq OWNER TO dataverse_app; + +-- +-- Name: ingestreport_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE ingestreport_id_seq OWNED BY ingestreport.id; + + +-- +-- Name: ingestreport_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('ingestreport_id_seq', 1, false); + + +-- +-- Name: ingestrequest; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE ingestrequest ( + id integer NOT NULL, + controlcard character varying(255), + labelsfile character varying(255), + textencoding character varying(255), + datafile_id bigint +); + + +ALTER TABLE public.ingestrequest OWNER TO dataverse_app; + +-- +-- Name: ingestrequest_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE ingestrequest_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.ingestrequest_id_seq OWNER TO dataverse_app; + +-- +-- Name: ingestrequest_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE ingestrequest_id_seq OWNED BY ingestrequest.id; + + +-- +-- Name: ingestrequest_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('ingestrequest_id_seq', 1, false); + + +-- +-- Name: ipv4range; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE ipv4range ( + id bigint NOT NULL, + bottomaslong bigint, + topaslong bigint, + owner_id bigint +); + + +ALTER TABLE public.ipv4range OWNER TO dataverse_app; + +-- +-- Name: ipv6range; Type: TABLE; Schema: public; Owner: 
dataverse_app; Tablespace: +-- + +CREATE TABLE ipv6range ( + id bigint NOT NULL, + bottoma bigint, + bottomb bigint, + bottomc bigint, + bottomd bigint, + topa bigint, + topb bigint, + topc bigint, + topd bigint, + owner_id bigint +); + + +ALTER TABLE public.ipv6range OWNER TO dataverse_app; + +-- +-- Name: maplayermetadata; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE maplayermetadata ( + id integer NOT NULL, + embedmaplink character varying(255) NOT NULL, + layerlink character varying(255) NOT NULL, + layername character varying(255) NOT NULL, + mapimagelink character varying(255), + worldmapusername character varying(255) NOT NULL, + dataset_id bigint NOT NULL, + datafile_id bigint NOT NULL +); + + +ALTER TABLE public.maplayermetadata OWNER TO dataverse_app; + +-- +-- Name: maplayermetadata_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE maplayermetadata_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.maplayermetadata_id_seq OWNER TO dataverse_app; + +-- +-- Name: maplayermetadata_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE maplayermetadata_id_seq OWNED BY maplayermetadata.id; + + +-- +-- Name: maplayermetadata_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('maplayermetadata_id_seq', 1, false); + + +-- +-- Name: metadatablock; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE metadatablock ( + id integer NOT NULL, + displayname character varying(255) NOT NULL, + name character varying(255) NOT NULL, + owner_id bigint +); + + +ALTER TABLE public.metadatablock OWNER TO dataverse_app; + +-- +-- Name: metadatablock_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE metadatablock_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE 
public.metadatablock_id_seq OWNER TO dataverse_app; + +-- +-- Name: metadatablock_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE metadatablock_id_seq OWNED BY metadatablock.id; + + +-- +-- Name: metadatablock_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('metadatablock_id_seq', 6, true); + + +-- +-- Name: passwordresetdata; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE passwordresetdata ( + id integer NOT NULL, + created timestamp without time zone NOT NULL, + expires timestamp without time zone NOT NULL, + reason character varying(255), + token character varying(255), + builtinuser_id bigint NOT NULL +); + + +ALTER TABLE public.passwordresetdata OWNER TO dataverse_app; + +-- +-- Name: passwordresetdata_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE passwordresetdata_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.passwordresetdata_id_seq OWNER TO dataverse_app; + +-- +-- Name: passwordresetdata_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE passwordresetdata_id_seq OWNED BY passwordresetdata.id; + + +-- +-- Name: passwordresetdata_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('passwordresetdata_id_seq', 1, false); + + +-- +-- Name: persistedglobalgroup; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE persistedglobalgroup ( + id bigint NOT NULL, + dtype character varying(31), + description character varying(255), + displayname character varying(255), + persistedgroupalias character varying(255) +); + + +ALTER TABLE public.persistedglobalgroup OWNER TO dataverse_app; + +-- +-- Name: roleassignment; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE roleassignment ( + id integer NOT 
NULL, + assigneeidentifier character varying(255) NOT NULL, + definitionpoint_id bigint NOT NULL, + role_id bigint NOT NULL +); + + +ALTER TABLE public.roleassignment OWNER TO dataverse_app; + +-- +-- Name: roleassignment_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE roleassignment_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.roleassignment_id_seq OWNER TO dataverse_app; + +-- +-- Name: roleassignment_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE roleassignment_id_seq OWNED BY roleassignment.id; + + +-- +-- Name: roleassignment_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('roleassignment_id_seq', 3, true); + + +-- +-- Name: savedsearch; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE savedsearch ( + id integer NOT NULL, + query text, + creator_id bigint NOT NULL, + definitionpoint_id bigint NOT NULL +); + + +ALTER TABLE public.savedsearch OWNER TO dataverse_app; + +-- +-- Name: savedsearch_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE savedsearch_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.savedsearch_id_seq OWNER TO dataverse_app; + +-- +-- Name: savedsearch_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE savedsearch_id_seq OWNED BY savedsearch.id; + + +-- +-- Name: savedsearch_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('savedsearch_id_seq', 1, false); + + +-- +-- Name: savedsearchfilterquery; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE savedsearchfilterquery ( + id integer NOT NULL, + filterquery text, + savedsearch_id bigint NOT NULL +); + + +ALTER TABLE public.savedsearchfilterquery OWNER TO dataverse_app; + 
+-- +-- Name: savedsearchfilterquery_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE savedsearchfilterquery_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.savedsearchfilterquery_id_seq OWNER TO dataverse_app; + +-- +-- Name: savedsearchfilterquery_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE savedsearchfilterquery_id_seq OWNED BY savedsearchfilterquery.id; + + +-- +-- Name: savedsearchfilterquery_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('savedsearchfilterquery_id_seq', 1, false); + + +-- +-- Name: sequence; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE sequence ( + seq_name character varying(50) NOT NULL, + seq_count numeric(38,0) +); + + +ALTER TABLE public.sequence OWNER TO dataverse_app; + +-- +-- Name: setting; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE setting ( + name character varying(255) NOT NULL, + content text +); + + +ALTER TABLE public.setting OWNER TO dataverse_app; + +-- +-- Name: shibgroup; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE shibgroup ( + id integer NOT NULL, + attribute character varying(255) NOT NULL, + name character varying(255) NOT NULL, + pattern character varying(255) NOT NULL +); + + +ALTER TABLE public.shibgroup OWNER TO dataverse_app; + +-- +-- Name: shibgroup_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE shibgroup_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.shibgroup_id_seq OWNER TO dataverse_app; + +-- +-- Name: shibgroup_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE shibgroup_id_seq OWNED BY shibgroup.id; + + +-- +-- Name: shibgroup_id_seq; Type: SEQUENCE SET; Schema: public; Owner: 
dataverse_app +-- + +SELECT pg_catalog.setval('shibgroup_id_seq', 1, false); + + +-- +-- Name: summarystatistic; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE summarystatistic ( + id integer NOT NULL, + type integer, + value character varying(255), + datavariable_id bigint NOT NULL +); + + +ALTER TABLE public.summarystatistic OWNER TO dataverse_app; + +-- +-- Name: summarystatistic_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE summarystatistic_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.summarystatistic_id_seq OWNER TO dataverse_app; + +-- +-- Name: summarystatistic_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE summarystatistic_id_seq OWNED BY summarystatistic.id; + + +-- +-- Name: summarystatistic_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('summarystatistic_id_seq', 1, false); + + +-- +-- Name: template; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE template ( + id integer NOT NULL, + createtime timestamp without time zone NOT NULL, + name character varying(255) NOT NULL, + usagecount bigint, + dataverse_id bigint +); + + +ALTER TABLE public.template OWNER TO dataverse_app; + +-- +-- Name: template_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE template_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.template_id_seq OWNER TO dataverse_app; + +-- +-- Name: template_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE template_id_seq OWNED BY template.id; + + +-- +-- Name: template_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('template_id_seq', 1, false); + + +-- +-- Name: usernotification; Type: TABLE; Schema: public; 
Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE usernotification ( + id integer NOT NULL, + emailed boolean, + objectid bigint, + readnotification boolean, + senddate timestamp without time zone, + type integer NOT NULL, + user_id bigint NOT NULL +); + + +ALTER TABLE public.usernotification OWNER TO dataverse_app; + +-- +-- Name: usernotification_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE usernotification_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.usernotification_id_seq OWNER TO dataverse_app; + +-- +-- Name: usernotification_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE usernotification_id_seq OWNED BY usernotification.id; + + +-- +-- Name: usernotification_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('usernotification_id_seq', 2, true); + + +-- +-- Name: variablecategory; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE variablecategory ( + id integer NOT NULL, + catorder integer, + frequency double precision, + label character varying(255), + missing boolean, + value character varying(255), + datavariable_id bigint NOT NULL +); + + +ALTER TABLE public.variablecategory OWNER TO dataverse_app; + +-- +-- Name: variablecategory_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE variablecategory_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.variablecategory_id_seq OWNER TO dataverse_app; + +-- +-- Name: variablecategory_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE variablecategory_id_seq OWNED BY variablecategory.id; + + +-- +-- Name: variablecategory_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('variablecategory_id_seq', 1, false); + + +-- +-- 
Name: variablerange; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE variablerange ( + id integer NOT NULL, + beginvalue character varying(255), + beginvaluetype integer, + endvalue character varying(255), + endvaluetype integer, + datavariable_id bigint NOT NULL +); + + +ALTER TABLE public.variablerange OWNER TO dataverse_app; + +-- +-- Name: variablerange_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE variablerange_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.variablerange_id_seq OWNER TO dataverse_app; + +-- +-- Name: variablerange_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE variablerange_id_seq OWNED BY variablerange.id; + + +-- +-- Name: variablerange_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('variablerange_id_seq', 1, false); + + +-- +-- Name: variablerangeitem; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE variablerangeitem ( + id integer NOT NULL, + value numeric(38,0), + datavariable_id bigint NOT NULL +); + + +ALTER TABLE public.variablerangeitem OWNER TO dataverse_app; + +-- +-- Name: variablerangeitem_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE variablerangeitem_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.variablerangeitem_id_seq OWNER TO dataverse_app; + +-- +-- Name: variablerangeitem_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE variablerangeitem_id_seq OWNED BY variablerangeitem.id; + + +-- +-- Name: variablerangeitem_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('variablerangeitem_id_seq', 1, false); + + +-- +-- Name: worldmapauth_token; Type: TABLE; Schema: public; Owner: dataverse_app; 
Tablespace: +-- + +CREATE TABLE worldmapauth_token ( + id integer NOT NULL, + created timestamp without time zone NOT NULL, + hasexpired boolean NOT NULL, + lastrefreshtime timestamp without time zone NOT NULL, + modified timestamp without time zone NOT NULL, + token character varying(255), + application_id bigint NOT NULL, + datafile_id bigint NOT NULL, + dataverseuser_id bigint NOT NULL +); + + +ALTER TABLE public.worldmapauth_token OWNER TO dataverse_app; + +-- +-- Name: worldmapauth_token_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE worldmapauth_token_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.worldmapauth_token_id_seq OWNER TO dataverse_app; + +-- +-- Name: worldmapauth_token_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE worldmapauth_token_id_seq OWNED BY worldmapauth_token.id; + + +-- +-- Name: worldmapauth_token_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('worldmapauth_token_id_seq', 1, false); + + +-- +-- Name: worldmapauth_tokentype; Type: TABLE; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE TABLE worldmapauth_tokentype ( + id integer NOT NULL, + contactemail character varying(255), + created timestamp without time zone NOT NULL, + hostname character varying(255), + ipaddress character varying(255), + mapitlink character varying(255) NOT NULL, + md5 character varying(255) NOT NULL, + modified timestamp without time zone NOT NULL, + name character varying(255) NOT NULL, + timelimitminutes integer DEFAULT 30, + timelimitseconds bigint DEFAULT 1800 +); + + +ALTER TABLE public.worldmapauth_tokentype OWNER TO dataverse_app; + +-- +-- Name: worldmapauth_tokentype_id_seq; Type: SEQUENCE; Schema: public; Owner: dataverse_app +-- + +CREATE SEQUENCE worldmapauth_tokentype_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + 
+ALTER TABLE public.worldmapauth_tokentype_id_seq OWNER TO dataverse_app; + +-- +-- Name: worldmapauth_tokentype_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dataverse_app +-- + +ALTER SEQUENCE worldmapauth_tokentype_id_seq OWNED BY worldmapauth_tokentype.id; + + +-- +-- Name: worldmapauth_tokentype_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dataverse_app +-- + +SELECT pg_catalog.setval('worldmapauth_tokentype_id_seq', 1, false); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY apitoken ALTER COLUMN id SET DEFAULT nextval('apitoken_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY authenticateduser ALTER COLUMN id SET DEFAULT nextval('authenticateduser_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY authenticateduserlookup ALTER COLUMN id SET DEFAULT nextval('authenticateduserlookup_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY builtinuser ALTER COLUMN id SET DEFAULT nextval('builtinuser_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY controlledvocabalternate ALTER COLUMN id SET DEFAULT nextval('controlledvocabalternate_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY controlledvocabularyvalue ALTER COLUMN id SET DEFAULT nextval('controlledvocabularyvalue_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY customfieldmap ALTER COLUMN id SET DEFAULT nextval('customfieldmap_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY customquestion ALTER COLUMN id SET DEFAULT nextval('customquestion_id_seq'::regclass); + + +-- +-- Name: 
id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY customquestionresponse ALTER COLUMN id SET DEFAULT nextval('customquestionresponse_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY customquestionvalue ALTER COLUMN id SET DEFAULT nextval('customquestionvalue_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datafilecategory ALTER COLUMN id SET DEFAULT nextval('datafilecategory_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datafiletag ALTER COLUMN id SET DEFAULT nextval('datafiletag_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfield ALTER COLUMN id SET DEFAULT nextval('datasetfield_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfieldcompoundvalue ALTER COLUMN id SET DEFAULT nextval('datasetfieldcompoundvalue_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfielddefaultvalue ALTER COLUMN id SET DEFAULT nextval('datasetfielddefaultvalue_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfieldtype ALTER COLUMN id SET DEFAULT nextval('datasetfieldtype_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfieldvalue ALTER COLUMN id SET DEFAULT nextval('datasetfieldvalue_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetlinkingdataverse ALTER COLUMN id SET DEFAULT nextval('datasetlinkingdataverse_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- 
+ +ALTER TABLE ONLY datasetlock ALTER COLUMN id SET DEFAULT nextval('datasetlock_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetversion ALTER COLUMN id SET DEFAULT nextval('datasetversion_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetversionuser ALTER COLUMN id SET DEFAULT nextval('datasetversionuser_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datatable ALTER COLUMN id SET DEFAULT nextval('datatable_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datavariable ALTER COLUMN id SET DEFAULT nextval('datavariable_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversecontact ALTER COLUMN id SET DEFAULT nextval('dataversecontact_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefacet ALTER COLUMN id SET DEFAULT nextval('dataversefacet_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefeatureddataverse ALTER COLUMN id SET DEFAULT nextval('dataversefeatureddataverse_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefieldtypeinputlevel ALTER COLUMN id SET DEFAULT nextval('dataversefieldtypeinputlevel_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverselinkingdataverse ALTER COLUMN id SET DEFAULT nextval('dataverselinkingdataverse_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverserole ALTER COLUMN id SET DEFAULT 
nextval('dataverserole_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversetheme ALTER COLUMN id SET DEFAULT nextval('dataversetheme_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY defaultvalueset ALTER COLUMN id SET DEFAULT nextval('defaultvalueset_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dvobject ALTER COLUMN id SET DEFAULT nextval('dvobject_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY explicitgroup ALTER COLUMN id SET DEFAULT nextval('explicitgroup_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY filemetadata ALTER COLUMN id SET DEFAULT nextval('filemetadata_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY foreignmetadatafieldmapping ALTER COLUMN id SET DEFAULT nextval('foreignmetadatafieldmapping_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY foreignmetadataformatmapping ALTER COLUMN id SET DEFAULT nextval('foreignmetadataformatmapping_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY guestbook ALTER COLUMN id SET DEFAULT nextval('guestbook_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY guestbookresponse ALTER COLUMN id SET DEFAULT nextval('guestbookresponse_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY ingestreport ALTER COLUMN id SET DEFAULT nextval('ingestreport_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app 
+-- + +ALTER TABLE ONLY ingestrequest ALTER COLUMN id SET DEFAULT nextval('ingestrequest_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY maplayermetadata ALTER COLUMN id SET DEFAULT nextval('maplayermetadata_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY metadatablock ALTER COLUMN id SET DEFAULT nextval('metadatablock_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY passwordresetdata ALTER COLUMN id SET DEFAULT nextval('passwordresetdata_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY roleassignment ALTER COLUMN id SET DEFAULT nextval('roleassignment_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY savedsearch ALTER COLUMN id SET DEFAULT nextval('savedsearch_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY savedsearchfilterquery ALTER COLUMN id SET DEFAULT nextval('savedsearchfilterquery_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY shibgroup ALTER COLUMN id SET DEFAULT nextval('shibgroup_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY summarystatistic ALTER COLUMN id SET DEFAULT nextval('summarystatistic_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY template ALTER COLUMN id SET DEFAULT nextval('template_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY usernotification ALTER COLUMN id SET DEFAULT nextval('usernotification_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; 
Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY variablecategory ALTER COLUMN id SET DEFAULT nextval('variablecategory_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY variablerange ALTER COLUMN id SET DEFAULT nextval('variablerange_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY variablerangeitem ALTER COLUMN id SET DEFAULT nextval('variablerangeitem_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY worldmapauth_token ALTER COLUMN id SET DEFAULT nextval('worldmapauth_token_id_seq'::regclass); + + +-- +-- Name: id; Type: DEFAULT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY worldmapauth_tokentype ALTER COLUMN id SET DEFAULT nextval('worldmapauth_tokentype_id_seq'::regclass); + + +-- +-- Data for Name: actionlogrecord; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY actionlogrecord (id, actionresult, actionsubtype, actiontype, endtime, info, starttime, useridentifier) FROM stdin; +111734e5-cc21-4ef1-917c-d5100e596be5 OK loadDatasetFields Admin 2015-06-08 13:08:17.955 rep4508757747349037455tmp 2015-06-08 13:08:15.768 \N +7e484d19-611e-47c6-b0d2-f9f50b63f2f3 OK loadDatasetFields Admin 2015-06-08 13:08:19.44 rep937678722988769217tmp 2015-06-08 13:08:17.985 \N +d6dc80fd-2d43-416e-9df8-2d7b3d552c73 OK loadDatasetFields Admin 2015-06-08 13:08:19.58 rep3716520730701613426tmp 2015-06-08 13:08:19.465 \N +64431d29-3993-4750-aaae-349df637f7a4 OK loadDatasetFields Admin 2015-06-08 13:08:19.825 rep6974913189748432210tmp 2015-06-08 13:08:19.601 \N +ec39e535-02db-4ea3-b92c-24232dc58ce2 OK loadDatasetFields Admin 2015-06-08 13:08:21.104 rep851714502082007892tmp 2015-06-08 13:08:19.863 \N +fbea7dcb-4903-4066-8ac9-df6a2679a9ae OK loadDatasetFields Admin 2015-06-08 13:08:21.268 rep342120996714352751tmp 2015-06-08 13:08:21.127 \N 
+c5dc0649-80a3-4fe0-953d-8d919558ddbf OK createBuiltInRole Admin 2015-06-08 13:08:21.571 admin:A person who has all permissions for dataverses, datasets, and files. 2015-06-08 13:08:21.557 \N +3f8be9a1-9a63-4205-b083-e9037cd2313d OK createBuiltInRole Admin 2015-06-08 13:08:21.602 fileDownloader:A person who can download a file. 2015-06-08 13:08:21.599 \N +1578195e-87b3-4482-a3ee-3496d92ef66a OK createBuiltInRole Admin 2015-06-08 13:08:21.628 fullContributor:A person who can add subdataverses and datasets within a dataverse. 2015-06-08 13:08:21.625 \N +d9e83295-2c89-44cd-afbe-2f555e48e00e OK createBuiltInRole Admin 2015-06-08 13:08:21.652 dvContributor:A person who can add subdataverses within a dataverse. 2015-06-08 13:08:21.65 \N +59661f33-746a-4d69-a412-92c9c4b1d66e OK createBuiltInRole Admin 2015-06-08 13:08:21.675 dsContributor:A person who can add datasets within a dataverse. 2015-06-08 13:08:21.672 \N +c027269c-e06b-4685-97ce-a16ea73e7307 OK createBuiltInRole Admin 2015-06-08 13:08:21.7 editor:For datasets, a person who can edit License + Terms, and then submit them for review. 2015-06-08 13:08:21.698 \N +c6989a37-1b0f-4d10-aad6-6cdc3369d72b OK createBuiltInRole Admin 2015-06-08 13:08:21.754 curator:For datasets, a person who can edit License + Terms, edit Permissions, and publish datasets. 2015-06-08 13:08:21.752 \N +ba926273-10b2-4c19-a945-f48e50d9a6f8 OK createBuiltInRole Admin 2015-06-08 13:08:21.778 member:A person who can view both unpublished dataverses and datasets. 
2015-06-08 13:08:21.776 \N +d842e3ac-3982-4c20-ba5a-64486e08c0c1 OK deregisterProvider Auth 2015-06-08 13:08:21.823 builtin 2015-06-08 13:08:21.823 \N +b1cdc146-466e-4a12-bb00-2d46e318f0c2 OK registerProvider Auth 2015-06-08 13:08:21.827 builtin:Build-in Provider 2015-06-08 13:08:21.826 \N +a7a100fa-de33-4fb9-9892-f7452a8aaa5c OK deregisterProvider Auth 2015-06-08 13:08:21.856 echo-simple 2015-06-08 13:08:21.856 \N +63ce8b53-9abe-4cde-b7bf-c2b3afc178c8 OK registerProvider Auth 2015-06-08 13:08:21.858 echo-simple:Echo provider 2015-06-08 13:08:21.858 \N +949b0e25-b0e6-40a5-905d-3a693d209f82 OK deregisterProvider Auth 2015-06-08 13:08:21.879 echo-dignified 2015-06-08 13:08:21.879 \N +0d8067b9-2bc0-4ec1-be8b-f00a2ec6dac8 OK registerProvider Auth 2015-06-08 13:08:21.881 echo-dignified:Dignified Echo provider 2015-06-08 13:08:21.881 \N +0b4e73b1-f5a1-4dcd-9b4a-00ada47cdc62 OK set Setting 2015-06-08 13:08:21.908 :AllowSignUp: yes 2015-06-08 13:08:21.908 \N +036e7053-7ca0-4500-9e74-0b2754cb7f4f OK set Setting 2015-06-08 13:08:21.932 :SignUpUrl: /dataverseuser.xhtml?editMode=CREATE 2015-06-08 13:08:21.932 \N +206f3de4-c5be-4912-a17d-38647b22ccfd OK set Setting 2015-06-08 13:08:21.953 :Protocol: doi 2015-06-08 13:08:21.953 \N +9280cf0a-fe45-4a99-8fc5-91c26ce88fac OK set Setting 2015-06-08 13:08:21.977 :Authority: 10.5072/FK2 2015-06-08 13:08:21.977 \N +41ecc86a-d851-4738-8347-ebb3fd06da30 OK set Setting 2015-06-08 13:08:22.002 :DoiProvider: EZID 2015-06-08 13:08:22.001 \N +96eca709-071d-4ce8-8af4-43fca2b01595 OK set Setting 2015-06-08 13:08:22.023 :DoiSeparator: / 2015-06-08 13:08:22.023 \N +4a5b8a1a-af57-49f5-8c52-8e9331f85723 OK set Setting 2015-06-08 13:08:22.043 BuiltinUsers.KEY: burrito 2015-06-08 13:08:22.043 \N +8651ac19-16a7-4cf1-88d2-54d949d52b0b OK set Setting 2015-06-08 13:08:22.064 :BlockedApiKey: empanada 2015-06-08 13:08:22.064 \N +68db6078-d857-4e3b-93a4-67286e18bcdc OK set Setting 2015-06-08 13:08:22.083 :BlockedApiPolicy: localhost-only 2015-06-08 
13:08:22.083 \N +908e955e-1b95-4811-b1a1-b5388382f192 OK createUser Auth 2015-06-08 13:08:22.253 @dataverseAdmin 2015-06-08 13:08:22.253 \N +7e06b039-4e9b-45ef-b332-6dbb63755761 OK create BuiltinUser 2015-06-08 13:08:22.276 builtinUser:dataverseAdmin authenticatedUser:@dataverseAdmin 2015-06-08 13:08:22.116 \N +dbe67569-f670-492f-a894-60a6c580ce6b OK toggleSuperuser Admin 2015-06-08 13:08:22.302 dataverseAdmin 2015-06-08 13:08:22.296 \N +00a8631d-83fe-4e72-a0b3-642e7aa2a94a OK edu.harvard.iq.dataverse.engine.command.impl.CreateDataverseCommand Command 2015-06-08 13:08:22.462 : 2015-06-08 13:08:22.367 @dataverseAdmin +3b5e97dd-502e-48ce-a5ff-0a424e9b5ae2 OK edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseMetadataBlocksCommand.SetRoot Command 2015-06-08 13:08:22.589 :[1 Root] 2015-06-08 13:08:22.578 @dataverseAdmin +58a1dc66-778c-4522-ad5c-de1fa7c477cd OK edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseMetadataBlocksCommand.SetBlocks Command 2015-06-08 13:08:22.591 :[1 Root] 2015-06-08 13:08:22.516 @dataverseAdmin +0c31f247-fdfe-4a5e-a3e5-791ce25f8c2e OK edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseCommand Command 2015-06-08 13:08:22.666 :[1 Root] 2015-06-08 13:08:22.629 @dataverseAdmin +14acfd86-aade-4ea5-aed9-a5d7f5d0ff4d OK updateUser Auth 2015-06-08 13:21:29.017 @dataverseAdmin 2015-06-08 13:21:29.017 \N +4a9910ce-ab98-4918-9baa-96fe423e4195 OK login SessionManagement 2015-06-08 13:21:29.023 \N 2015-06-08 13:21:29.023 @dataverseAdmin +c5180d89-c5b9-47c9-a961-e3e4a3879d56 OK edu.harvard.iq.dataverse.engine.command.impl.CreateDataverseCommand Command 2015-06-08 13:29:07.634 :[1 Root] 2015-06-08 13:29:07.303 @dataverseAdmin +b5706636-4797-4202-9cd0-ff2a8a079958 OK edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand Command 2015-06-08 13:29:18.388 :[1 Root] 2015-06-08 13:29:18.363 @dataverseAdmin +49d75936-04bb-4237-823a-7535cdd76ec5 OK edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand Command 
2015-06-08 13:30:10.011 :[2 testDV] 2015-06-08 13:30:08.565 @dataverseAdmin +d1310999-acb9-4b09-831c-2b8ad4f2b00e OK registerProvider Auth 2015-06-08 14:27:00.22 builtin:Build-in Provider 2015-06-08 14:27:00.214 \N +989f37fb-48b7-4fb5-ae0f-9302cd5e87d0 OK registerProvider Auth 2015-06-08 14:27:00.231 echo-simple:Echo provider 2015-06-08 14:27:00.231 \N +03541856-1c8e-4267-9461-ce1328fc29d4 OK registerProvider Auth 2015-06-08 14:27:00.233 echo-dignified:Dignified Echo provider 2015-06-08 14:27:00.233 \N +3d683e6c-2a75-441d-893a-cb302725ad7f OK updateUser Auth 2015-06-08 14:27:07.812 @dataverseAdmin 2015-06-08 14:27:07.811 \N +30710fd9-5947-46b4-8829-5a8eccf9c58d OK login SessionManagement 2015-06-08 14:27:07.824 \N 2015-06-08 14:27:07.823 @dataverseAdmin +e2a0c5d5-d91a-460b-88ed-89ceb6339c6a OK edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand Command 2015-06-08 15:05:01.065 :[3 Sample Dataset] 2015-06-08 15:05:00.469 @dataverseAdmin +42d5c863-48ca-4bee-b0ba-ad9f00a3487f OK registerProvider Auth 2015-06-08 15:40:06.501 builtin:Build-in Provider 2015-06-08 15:40:06.5 \N +628a317e-2b61-406e-89ce-6c05452f2007 OK registerProvider Auth 2015-06-08 15:40:06.506 echo-simple:Echo provider 2015-06-08 15:40:06.506 \N +7025d857-4e0d-43e3-9c84-8e3112279a88 OK registerProvider Auth 2015-06-08 15:40:06.508 echo-dignified:Dignified Echo provider 2015-06-08 15:40:06.508 \N +3a7f405a-7223-48cb-9059-4aa757089367 OK updateUser Auth 2015-06-08 15:40:09.28 @dataverseAdmin 2015-06-08 15:40:09.279 \N +48c9ad6d-1ab2-4886-8d59-fc0a909edde8 OK login SessionManagement 2015-06-08 15:40:09.285 \N 2015-06-08 15:40:09.285 @dataverseAdmin +5b0570c8-a702-46a5-a346-50bf76ead788 OK edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand Command 2015-06-08 15:40:14.328 :[2 testDV] 2015-06-08 15:40:14.147 @dataverseAdmin +b7f4217c-8c53-486f-b3d4-7e42536be1c6 OK edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand Command 2015-06-08 15:40:17.632 :[3 Sample 
Dataset] 2015-06-08 15:40:14.334 @dataverseAdmin +\. + + +-- +-- Data for Name: apitoken; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY apitoken (id, createtime, disabled, expiretime, tokenstring, authenticateduser_id) FROM stdin; +1 2015-06-08 13:08:22.264 f 2016-06-08 13:08:22.264 a65048f8-875c-4479-a91d-33cb8cd12821 1 +\. + + +-- +-- Data for Name: authenticateduser; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY authenticateduser (id, affiliation, email, firstname, lastname, modificationtime, name, "position", superuser, useridentifier) FROM stdin; +1 Dataverse.org dataverse@mailinator.com Dataverse Admin 2015-06-08 15:40:09.283 \N Admin t dataverseAdmin +\. + + +-- +-- Data for Name: authenticateduserlookup; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY authenticateduserlookup (id, authenticationproviderid, persistentuserid, authenticateduser_id) FROM stdin; +1 builtin dataverseAdmin 1 +\. + + +-- +-- Data for Name: authenticationproviderrow; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY authenticationproviderrow (id, enabled, factoryalias, factorydata, subtitle, title) FROM stdin; +builtin t BuiltinAuthenticationProvider Datavers' Internal Authentication provider Dataverse Local +echo-simple t Echo , Approves everyone, based on their credentials Echo provider +echo-dignified t Echo Sir,Esq. Approves everyone, based on their credentials, and adds some flair Dignified Echo provider +\. + + +-- +-- Data for Name: builtinuser; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY builtinuser (id, affiliation, email, encryptedpassword, firstname, lastname, passwordencryptionversion, "position", username) FROM stdin; +1 Dataverse.org dataverse@mailinator.com $2a$10$NGp3jxhSh4IBfiGIb5CPsOUovwfZ2xT7sklweW.LInjKtAZcbWokO Dataverse Admin 1 Admin dataverseAdmin +\. 
+ + +-- +-- Data for Name: controlledvocabalternate; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY controlledvocabalternate (id, strvalue, controlledvocabularyvalue_id, datasetfieldtype_id) FROM stdin; +1 arxiv 17 30 +2 BOTSWANA 266 79 +3 Brasil 268 79 +4 Gambia, The 317 79 +5 Germany (Federal Republic of) 319 79 +6 GHANA 320 79 +7 INDIA 339 79 +8 Sumatra 340 79 +9 Iran 341 79 +10 Iran (Islamic Republic of) 341 79 +11 IRAQ 342 79 +12 Laos 358 79 +13 LESOTHO 361 79 +14 MOZAMBIQUE 388 79 +15 NAMIBIA 390 79 +16 SWAZILAND 450 79 +17 Taiwan 454 79 +18 Tanzania 456 79 +19 UAE 470 79 +20 USA 472 79 +21 U.S.A 472 79 +22 United States of America 472 79 +23 U.S.A. 472 79 +24 YEMEN 483 79 +\. + + +-- +-- Data for Name: controlledvocabularyvalue; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY controlledvocabularyvalue (id, displayorder, identifier, strvalue, datasetfieldtype_id) FROM stdin; +1 0 \N N/A \N +2 0 D01 Agricultural Sciences 19 +3 1 D0 Arts and Humanities 19 +4 2 D1 Astronomy and Astrophysics 19 +5 3 D2 Business and Management 19 +6 4 D3 Chemistry 19 +7 5 D4 Earth and Environmental Sciences 19 +8 6 D5 Engineering 19 +9 7 D6 Medicine, Health and Life Sciences 19 +10 8 D7 Computer and Information Science 19 +11 9 D8 Law 19 +12 10 D9 Mathematical Sciences 19 +13 11 D10 Physics 19 +14 12 D11 Social Sciences 19 +15 13 D12 Other 19 +16 0 ark 30 +17 1 arXiv 30 +18 2 bibcode 30 +19 3 doi 30 +20 4 ean13 30 +21 5 eissn 30 +22 6 handle 30 +23 7 isbn 30 +24 8 issn 30 +25 9 istc 30 +26 10 lissn 30 +27 11 lsid 30 +28 12 pmid 30 +29 13 purl 30 +30 14 upc 30 +31 15 url 30 +32 16 urn 30 +33 0 Data Collector 44 +34 1 Data Curator 44 +35 2 Data Manager 44 +36 3 Editor 44 +37 4 Funder 44 +38 5 Hosting Institution 44 +39 6 Project Leader 44 +40 7 Project Manager 44 +41 8 Project Member 44 +42 9 Related Person 44 +43 10 Researcher 44 +44 11 Research Group 44 +45 12 Rights Holder 44 +46 13 Sponsor 44 +47 14 Supervisor 44 +48 15 Work Package Leader 
44 +49 16 Other 44 +50 0 ORCID 10 +51 1 ISNI 10 +52 2 LCNA 10 +53 0 Abkhaz 34 +54 1 Afar 34 +55 2 Afrikaans 34 +56 3 Akan 34 +57 4 Albanian 34 +58 5 Amharic 34 +59 6 Arabic 34 +60 7 Aragonese 34 +61 8 Armenian 34 +62 9 Assamese 34 +63 10 Avaric 34 +64 11 Avestan 34 +65 12 Aymara 34 +66 13 Azerbaijani 34 +67 14 Bambara 34 +68 15 Bashkir 34 +69 16 Basque 34 +70 17 Belarusian 34 +71 18 Bengali, Bangla 34 +72 19 Bihari 34 +73 20 Bislama 34 +74 21 Bosnian 34 +75 22 Breton 34 +76 23 Bulgarian 34 +77 24 Burmese 34 +78 25 Catalan,Valencian 34 +79 26 Chamorro 34 +80 27 Chechen 34 +81 28 Chichewa, Chewa, Nyanja 34 +82 29 Chinese 34 +83 30 Chuvash 34 +84 31 Cornish 34 +85 32 Corsican 34 +86 33 Cree 34 +87 34 Croatian 34 +88 35 Czech 34 +89 36 Danish 34 +90 37 Divehi, Dhivehi, Maldivian 34 +91 38 Dutch 34 +92 39 Dzongkha 34 +93 40 English 34 +94 41 Esperanto 34 +95 42 Estonian 34 +96 43 Ewe 34 +97 44 Faroese 34 +98 45 Fijian 34 +99 46 Finnish 34 +100 47 French 34 +101 48 Fula, Fulah, Pulaar, Pular 34 +102 49 Galician 34 +103 50 Georgian 34 +104 51 German 34 +105 52 Greek (modern) 34 +106 53 Guaraní 34 +107 54 Gujarati 34 +108 55 Haitian, Haitian Creole 34 +109 56 Hausa 34 +110 57 Hebrew (modern) 34 +111 58 Herero 34 +112 59 Hindi 34 +113 60 Hiri Motu 34 +114 61 Hungarian 34 +115 62 Interlingua 34 +116 63 Indonesian 34 +117 64 Interlingue 34 +118 65 Irish 34 +119 66 Igbo 34 +120 67 Inupiaq 34 +121 68 Ido 34 +122 69 Icelandic 34 +123 70 Italian 34 +124 71 Inuktitut 34 +125 72 Japanese 34 +126 73 Javanese 34 +127 74 Kalaallisut, Greenlandic 34 +128 75 Kannada 34 +129 76 Kanuri 34 +130 77 Kashmiri 34 +131 78 Kazakh 34 +132 79 Khmer 34 +133 80 Kikuyu, Gikuyu 34 +134 81 Kinyarwanda 34 +135 82 Kyrgyz 34 +136 83 Komi 34 +137 84 Kongo 34 +138 85 Korean 34 +139 86 Kurdish 34 +140 87 Kwanyama, Kuanyama 34 +141 88 Latin 34 +142 89 Luxembourgish, Letzeburgesch 34 +143 90 Ganda 34 +144 91 Limburgish, Limburgan, Limburger 34 +145 92 Lingala 34 +146 93 Lao 34 +147 94 Lithuanian 34 +148 95 
Luba-Katanga 34 +149 96 Latvian 34 +150 97 Manx 34 +151 98 Macedonian 34 +152 99 Malagasy 34 +153 100 Malay 34 +154 101 Malayalam 34 +155 102 Maltese 34 +156 103 Māori 34 +157 104 Marathi (Marāṭhī) 34 +158 105 Marshallese 34 +159 106 Mongolian 34 +160 107 Nauru 34 +161 108 Navajo, Navaho 34 +162 109 Northern Ndebele 34 +163 110 Nepali 34 +164 111 Ndonga 34 +165 112 Norwegian Bokmål 34 +166 113 Norwegian Nynorsk 34 +167 114 Norwegian 34 +168 115 Nuosu 34 +169 116 Southern Ndebele 34 +170 117 Occitan 34 +171 118 Ojibwe, Ojibwa 34 +172 119 Old Church Slavonic,Church Slavonic,Old Bulgarian 34 +173 120 Oromo 34 +174 121 Oriya 34 +175 122 Ossetian, Ossetic 34 +176 123 Panjabi, Punjabi 34 +177 124 Pāli 34 +178 125 Persian (Farsi) 34 +179 126 Polish 34 +180 127 Pashto, Pushto 34 +181 128 Portuguese 34 +182 129 Quechua 34 +183 130 Romansh 34 +184 131 Kirundi 34 +185 132 Romanian 34 +186 133 Russian 34 +187 134 Sanskrit (Saṁskṛta) 34 +188 135 Sardinian 34 +189 136 Sindhi 34 +190 137 Northern Sami 34 +191 138 Samoan 34 +192 139 Sango 34 +193 140 Serbian 34 +194 141 Scottish Gaelic, Gaelic 34 +195 142 Shona 34 +196 143 Sinhala, Sinhalese 34 +197 144 Slovak 34 +198 145 Slovene 34 +199 146 Somali 34 +200 147 Southern Sotho 34 +201 148 Spanish, Castilian 34 +202 149 Sundanese 34 +203 150 Swahili 34 +204 151 Swati 34 +205 152 Swedish 34 +206 153 Tamil 34 +207 154 Telugu 34 +208 155 Tajik 34 +209 156 Thai 34 +210 157 Tigrinya 34 +211 158 Tibetan Standard, Tibetan, Central 34 +212 159 Turkmen 34 +213 160 Tagalog 34 +214 161 Tswana 34 +215 162 Tonga (Tonga Islands) 34 +216 163 Turkish 34 +217 164 Tsonga 34 +218 165 Tatar 34 +219 166 Twi 34 +220 167 Tahitian 34 +221 168 Uyghur, Uighur 34 +222 169 Ukrainian 34 +223 170 Urdu 34 +224 171 Uzbek 34 +225 172 Venda 34 +226 173 Vietnamese 34 +227 174 Volapük 34 +228 175 Walloon 34 +229 176 Welsh 34 +230 177 Wolof 34 +231 178 Western Frisian 34 +232 179 Xhosa 34 +233 180 Yiddish 34 +234 181 Yoruba 34 +235 182 Zhuang, Chuang 34 +236 183 Zulu 34 
+237 184 Not applicable 34 +238 0 Afghanistan 79 +239 1 Albania 79 +240 2 Algeria 79 +241 3 American Samoa 79 +242 4 Andorra 79 +243 5 Angola 79 +244 6 Anguilla 79 +245 7 Antarctica 79 +246 8 Antigua and Barbuda 79 +247 9 Argentina 79 +248 10 Armenia 79 +249 11 Aruba 79 +250 12 Australia 79 +251 13 Austria 79 +252 14 Azerbaijan 79 +253 15 Bahamas 79 +254 16 Bahrain 79 +255 17 Bangladesh 79 +256 18 Barbados 79 +257 19 Belarus 79 +258 20 Belgium 79 +259 21 Belize 79 +260 22 Benin 79 +261 23 Bermuda 79 +262 24 Bhutan 79 +263 25 Bolivia, Plurinational State of 79 +264 26 Bonaire, Sint Eustatius and Saba 79 +265 27 Bosnia and Herzegovina 79 +266 28 Botswana 79 +267 29 Bouvet Island 79 +268 30 Brazil 79 +269 31 British Indian Ocean Territory 79 +270 32 Brunei Darussalam 79 +271 33 Bulgaria 79 +272 34 Burkina Faso 79 +273 35 Burundi 79 +274 36 Cambodia 79 +275 37 Cameroon 79 +276 38 Canada 79 +277 39 Cape Verde 79 +278 40 Cayman Islands 79 +279 41 Central African Republic 79 +280 42 Chad 79 +281 43 Chile 79 +282 44 China 79 +283 45 Christmas Island 79 +284 46 Cocos (Keeling) Islands 79 +285 47 Colombia 79 +286 48 Comoros 79 +287 49 Congo 79 +288 50 Congo, the Democratic Republic of the 79 +289 51 Cook Islands 79 +290 52 Costa Rica 79 +291 53 Croatia 79 +292 54 Cuba 79 +293 55 Curaçao 79 +294 56 Cyprus 79 +295 57 Czech Republic 79 +296 58 Côte d'Ivoire 79 +297 59 Denmark 79 +298 60 Djibouti 79 +299 61 Dominica 79 +300 62 Dominican Republic 79 +301 63 Ecuador 79 +302 64 Egypt 79 +303 65 El Salvador 79 +304 66 Equatorial Guinea 79 +305 67 Eritrea 79 +306 68 Estonia 79 +307 69 Ethiopia 79 +308 70 Falkland Islands (Malvinas) 79 +309 71 Faroe Islands 79 +310 72 Fiji 79 +311 73 Finland 79 +312 74 France 79 +313 75 French Guiana 79 +314 76 French Polynesia 79 +315 77 French Southern Territories 79 +316 78 Gabon 79 +317 79 Gambia 79 +318 80 Georgia 79 +319 81 Germany 79 +320 82 Ghana 79 +321 83 Gibraltar 79 +322 84 Greece 79 +323 85 Greenland 79 +324 86 Grenada 79 +325 87 
Guadeloupe 79 +326 88 Guam 79 +327 89 Guatemala 79 +328 90 Guernsey 79 +329 91 Guinea 79 +330 92 Guinea-Bissau 79 +331 93 Guyana 79 +332 94 Haiti 79 +333 95 Heard Island and Mcdonald Islands 79 +334 96 Holy See (Vatican City State) 79 +335 97 Honduras 79 +336 98 Hong Kong 79 +337 99 Hungary 79 +338 100 Iceland 79 +339 101 India 79 +340 102 Indonesia 79 +341 103 Iran, Islamic Republic of 79 +342 104 Iraq 79 +343 105 Ireland 79 +344 106 Isle of Man 79 +345 107 Israel 79 +346 108 Italy 79 +347 109 Jamaica 79 +348 110 Japan 79 +349 111 Jersey 79 +350 112 Jordan 79 +351 113 Kazakhstan 79 +352 114 Kenya 79 +353 115 Kiribati 79 +354 116 Korea, Democratic People's Republic of 79 +355 117 Korea, Republic of 79 +356 118 Kuwait 79 +357 119 Kyrgyzstan 79 +358 120 Lao People's Democratic Republic 79 +359 121 Latvia 79 +360 122 Lebanon 79 +361 123 Lesotho 79 +362 124 Liberia 79 +363 125 Libya 79 +364 126 Liechtenstein 79 +365 127 Lithuania 79 +366 128 Luxembourg 79 +367 129 Macao 79 +368 130 Macedonia, the Former Yugoslav Republic of 79 +369 131 Madagascar 79 +370 132 Malawi 79 +371 133 Malaysia 79 +372 134 Maldives 79 +373 135 Mali 79 +374 136 Malta 79 +375 137 Marshall Islands 79 +376 138 Martinique 79 +377 139 Mauritania 79 +378 140 Mauritius 79 +379 141 Mayotte 79 +380 142 Mexico 79 +381 143 Micronesia, Federated States of 79 +382 144 Moldova, Republic of 79 +383 145 Monaco 79 +384 146 Mongolia 79 +385 147 Montenegro 79 +386 148 Montserrat 79 +387 149 Morocco 79 +388 150 Mozambique 79 +389 151 Myanmar 79 +390 152 Namibia 79 +391 153 Nauru 79 +392 154 Nepal 79 +393 155 Netherlands 79 +394 156 New Caledonia 79 +395 157 New Zealand 79 +396 158 Nicaragua 79 +397 159 Niger 79 +398 160 Nigeria 79 +399 161 Niue 79 +400 162 Norfolk Island 79 +401 163 Northern Mariana Islands 79 +402 164 Norway 79 +403 165 Oman 79 +404 166 Pakistan 79 +405 167 Palau 79 +406 168 Palestine, State of 79 +407 169 Panama 79 +408 170 Papua New Guinea 79 +409 171 Paraguay 79 +410 172 Peru 79 +411 173 
Philippines 79 +412 174 Pitcairn 79 +413 175 Poland 79 +414 176 Portugal 79 +415 177 Puerto Rico 79 +416 178 Qatar 79 +417 179 Romania 79 +418 180 Russian Federation 79 +419 181 Rwanda 79 +420 182 Réunion 79 +421 183 Saint Barthélemy 79 +422 184 Saint Helena, Ascension and Tristan da Cunha 79 +423 185 Saint Kitts and Nevis 79 +424 186 Saint Lucia 79 +425 187 Saint Martin (French part) 79 +426 188 Saint Pierre and Miquelon 79 +427 189 Saint Vincent and the Grenadines 79 +428 190 Samoa 79 +429 191 San Marino 79 +430 192 Sao Tome and Principe 79 +431 193 Saudi Arabia 79 +432 194 Senegal 79 +433 195 Serbia 79 +434 196 Seychelles 79 +435 197 Sierra Leone 79 +436 198 Singapore 79 +437 199 Sint Maarten (Dutch part) 79 +438 200 Slovakia 79 +439 201 Slovenia 79 +440 202 Solomon Islands 79 +441 203 Somalia 79 +442 204 South Africa 79 +443 205 South Georgia and the South Sandwich Islands 79 +444 206 South Sudan 79 +445 207 Spain 79 +446 208 Sri Lanka 79 +447 209 Sudan 79 +448 210 Suriname 79 +449 211 Svalbard and Jan Mayen 79 +450 212 Swaziland 79 +451 213 Sweden 79 +452 214 Switzerland 79 +453 215 Syrian Arab Republic 79 +454 216 Taiwan, Province of China 79 +455 217 Tajikistan 79 +456 218 Tanzania, United Republic of 79 +457 219 Thailand 79 +458 220 Timor-Leste 79 +459 221 Togo 79 +460 222 Tokelau 79 +461 223 Tonga 79 +462 224 Trinidad and Tobago 79 +463 225 Tunisia 79 +464 226 Turkey 79 +465 227 Turkmenistan 79 +466 228 Turks and Caicos Islands 79 +467 229 Tuvalu 79 +468 230 Uganda 79 +469 231 Ukraine 79 +470 232 United Arab Emirates 79 +471 233 United Kingdom 79 +472 234 United States 79 +473 235 United States Minor Outlying Islands 79 +474 236 Uruguay 79 +475 237 Uzbekistan 79 +476 238 Vanuatu 79 +477 239 Venezuela, Bolivarian Republic of 79 +478 240 Viet Nam 79 +479 241 Virgin Islands, British 79 +480 242 Virgin Islands, U.S. 
79 +481 243 Wallis and Futuna 79 +482 244 Western Sahara 79 +483 245 Yemen 79 +484 246 Zambia 79 +485 247 Zimbabwe 79 +486 248 Åland Islands 79 +487 0 Image 115 +488 1 Mosaic 115 +489 2 EventList 115 +490 3 Spectrum 115 +491 4 Cube 115 +492 5 Table 115 +493 6 Catalog 115 +494 7 LightCurve 115 +495 8 Simulation 115 +496 9 Figure 115 +497 10 Artwork 115 +498 11 Animation 115 +499 12 PrettyPicture 115 +500 13 Documentation 115 +501 14 Other 115 +502 15 Library 115 +503 16 Press Release 115 +504 17 Facsimile 115 +505 18 Historical 115 +506 19 Observation 115 +507 20 Object 115 +508 21 Value 115 +509 22 ValuePair 115 +510 23 Survey 115 +511 0 EFO_0001427 Case Control 141 +512 1 EFO_0001428 Cross Sectional 141 +513 2 OCRE100078 Cohort Study 141 +514 3 NCI_C48202 Nested Case Control Design 141 +515 4 OTHER_DESIGN Not Specified 141 +516 5 OBI_0500006 Parallel Group Design 141 +517 6 OBI_0001033 Perturbation Design 141 +518 7 MESH_D016449 Randomized Controlled Trial 141 +519 8 TECH_DESIGN Technological Design 141 +520 0 EFO_0000246 Age 142 +521 1 BIOMARKERS Biomarkers 142 +522 2 CELL_SURFACE_M Cell Surface Markers 142 +523 3 EFO_0000324;EFO_0000322 Cell Type/Cell Line 142 +524 4 EFO_0000399 Developmental Stage 142 +525 5 OBI_0001293 Disease State 142 +526 6 IDO_0000469 Drug Susceptibility 142 +527 7 FBcv_0010001 Extract Molecule 142 +528 8 OBI_0001404 Genetic Characteristics 142 +529 9 OBI_0000690 Immunoprecipitation Antibody 142 +530 10 OBI_0100026 Organism 142 +531 11 OTHER_FACTOR Other 142 +532 12 PASSAGES_FACTOR Passages 142 +533 13 OBI_0000050 Platform 142 +534 14 EFO_0000695 Sex 142 +535 15 EFO_0005135 Strain 142 +536 16 EFO_0000724 Time Point 142 +537 17 BTO_0001384 Tissue Type 142 +538 18 EFO_0000369 Treatment Compound 142 +539 19 EFO_0000727 Treatment Type 142 +540 0 ERO_0001899 cell counting 145 +541 1 CHMO_0001085 cell sorting 145 +542 2 OBI_0000520 clinical chemistry analysis 145 +543 3 OBI_0000537 copy number variation profiling 145 +544 4 OBI_0000634 DNA 
methylation profiling 145 +545 5 OBI_0000748 DNA methylation profiling (Bisulfite-Seq) 145 +546 6 _OBI_0000634 DNA methylation profiling (MeDIP-Seq) 145 +547 7 _IDO_0000469 drug susceptibility 145 +548 8 ENV_GENE_SURVEY environmental gene survey 145 +549 9 ERO_0001183 genome sequencing 145 +550 10 OBI_0000630 hematology 145 +551 11 OBI_0600020 histology 145 +552 12 OBI_0002017 Histone Modification (ChIP-Seq) 145 +553 13 SO_0001786 loss of heterozygosity profiling 145 +554 14 OBI_0000366 metabolite profiling 145 +555 15 METAGENOME_SEQ metagenome sequencing 145 +556 16 OBI_0000615 protein expression profiling 145 +557 17 ERO_0000346 protein identification 145 +558 18 PROTEIN_DNA_BINDING protein-DNA binding site identification 145 +559 19 OBI_0000288 protein-protein interaction detection 145 +560 20 PROTEIN_RNA_BINDING protein-RNA binding (RIP-Seq) 145 +561 21 OBI_0000435 SNP analysis 145 +562 22 TARGETED_SEQ targeted sequencing 145 +563 23 OBI_0002018 transcription factor binding (ChIP-Seq) 145 +564 24 OBI_0000291 transcription factor binding site identification 145 +565 25 OBI_0000424 transcription profiling 145 +566 26 EFO_0001032 transcription profiling 145 +567 27 TRANSCRIPTION_PROF transcription profiling (Microarray) 145 +568 28 OBI_0001271 transcription profiling (RNA-Seq) 145 +569 29 TRAP_TRANS_PROF TRAP translational profiling 145 +570 30 OTHER_MEASUREMENT Other 145 +571 0 NCBITaxon_3702 Arabidopsis thaliana 143 +572 1 NCBITaxon_9913 Bos taurus 143 +573 2 NCBITaxon_6239 Caenorhabditis elegans 143 +574 3 NCBITaxon_3055 Chlamydomonas reinhardtii 143 +575 4 NCBITaxon_7955 Danio rerio (zebrafish) 143 +576 5 NCBITaxon_44689 Dictyostelium discoideum 143 +577 6 NCBITaxon_7227 Drosophila melanogaster 143 +578 7 NCBITaxon_562 Escherichia coli 143 +579 8 NCBITaxon_11103 Hepatitis C virus 143 +580 9 NCBITaxon_9606 Homo sapiens 143 +581 10 NCBITaxon_10090 Mus musculus 143 +582 11 NCBITaxon_33894 Mycobacterium africanum 143 +583 12 NCBITaxon_78331 Mycobacterium canetti 
143 +584 13 NCBITaxon_1773 Mycobacterium tuberculosis 143 +585 14 NCBITaxon_2104 Mycoplasma pneumoniae 143 +586 15 NCBITaxon_4530 Oryza sativa 143 +587 16 NCBITaxon_5833 Plasmodium falciparum 143 +588 17 NCBITaxon_4754 Pneumocystis carinii 143 +589 18 NCBITaxon_10116 Rattus norvegicus 143 +590 19 NCBITaxon_4932 Saccharomyces cerevisiae (brewer's yeast) 143 +591 20 NCBITaxon_4896 Schizosaccharomyces pombe 143 +592 21 NCBITaxon_31033 Takifugu rubripes 143 +593 22 NCBITaxon_8355 Xenopus laevis 143 +594 23 NCBITaxon_4577 Zea mays 143 +595 24 OTHER_TAXONOMY Other 143 +596 0 CULTURE_DRUG_TEST_SINGLE culture based drug susceptibility testing, single concentration 147 +597 1 CULTURE_DRUG_TEST_TWO culture based drug susceptibility testing, two concentrations 147 +598 2 CULTURE_DRUG_TEST_THREE culture based drug susceptibility testing, three or more concentrations (minimium inhibitory concentration measurement) 147 +599 3 OBI_0400148 DNA microarray 147 +600 4 OBI_0000916 flow cytometry 147 +601 5 OBI_0600053 gel electrophoresis 147 +602 6 OBI_0000470 mass spectrometry 147 +603 7 OBI_0000623 NMR spectroscopy 147 +604 8 OBI_0000626 nucleotide sequencing 147 +605 9 OBI_0400149 protein microarray 147 +606 10 OBI_0000893 real time PCR 147 +607 11 NO_TECHNOLOGY no technology required 147 +608 12 OTHER_TECHNOLOGY Other 147 +609 0 210_MS_GC 210-MS GC Ion Trap (Varian) 148 +610 1 220_MS_GC 220-MS GC Ion Trap (Varian) 148 +611 2 225_MS_GC 225-MS GC Ion Trap (Varian) 148 +612 3 240_MS_GC 240-MS GC Ion Trap (Varian) 148 +613 4 300_MS_GCMS 300-MS quadrupole GC/MS (Varian) 148 +614 5 320_MS_LCMS 320-MS LC/MS (Varian) 148 +615 6 325_MS_LCMS 325-MS LC/MS (Varian) 148 +616 7 500_MS_GCMS 320-MS GC/MS (Varian) 148 +617 8 500_MS_LCMS 500-MS LC/MS (Varian) 148 +618 9 800D 800D (Jeol) 148 +619 10 910_MS_TQFT 910-MS TQ-FT (Varian) 148 +620 11 920_MS_TQFT 920-MS TQ-FT (Varian) 148 +621 12 3100_MASS_D 3100 Mass Detector (Waters) 148 +622 13 6110_QUAD_LCMS 6110 Quadrupole LC/MS (Agilent) 148 +623 14 
6120_QUAD_LCMS 6120 Quadrupole LC/MS (Agilent) 148 +624 15 6130_QUAD_LCMS 6130 Quadrupole LC/MS (Agilent) 148 +625 16 6140_QUAD_LCMS 6140 Quadrupole LC/MS (Agilent) 148 +626 17 6310_ION_LCMS 6310 Ion Trap LC/MS (Agilent) 148 +627 18 6320_ION_LCMS 6320 Ion Trap LC/MS (Agilent) 148 +628 19 6330_ION_LCMS 6330 Ion Trap LC/MS (Agilent) 148 +629 20 6340_ION_LCMS 6340 Ion Trap LC/MS (Agilent) 148 +630 21 6410_TRIPLE_LCMS 6410 Triple Quadrupole LC/MS (Agilent) 148 +631 22 6430_TRIPLE_LCMS 6430 Triple Quadrupole LC/MS (Agilent) 148 +632 23 6460_TRIPLE_LCMS 6460 Triple Quadrupole LC/MS (Agilent) 148 +633 24 6490_TRIPLE_LCMS 6490 Triple Quadrupole LC/MS (Agilent) 148 +634 25 6530_Q_TOF_LCMS 6530 Q-TOF LC/MS (Agilent) 148 +635 26 6540_Q_TOF_LCMS 6540 Q-TOF LC/MS (Agilent) 148 +636 27 6210_Q_TOF_LCMS 6210 TOF LC/MS (Agilent) 148 +637 28 6220_Q_TOF_LCMS 6220 TOF LC/MS (Agilent) 148 +638 29 6230_Q_TOF_LCMS 6230 TOF LC/MS (Agilent) 148 +639 30 700B_TRIPLE_GCMS 7000B Triple Quadrupole GC/MS (Agilent) 148 +640 31 ACCUTO_DART AccuTO DART (Jeol) 148 +641 32 ACCUTOF_GC AccuTOF GC (Jeol) 148 +642 33 ACCUTOF_LC AccuTOF LC (Jeol) 148 +643 34 ACQUITY_SQD ACQUITY SQD (Waters) 148 +644 35 ACQUITY_TQD ACQUITY TQD (Waters) 148 +645 36 AGILENT Agilent 148 +646 37 AGILENT_ 5975E_GCMSD Agilent 5975E GC/MSD (Agilent) 148 +647 38 AGILENT_5975T_LTM_GCMSD Agilent 5975T LTM GC/MSD (Agilent) 148 +648 39 5975C_GCMSD 5975C Series GC/MSD (Agilent) 148 +649 40 AFFYMETRIX Affymetrix 148 +650 41 AMAZON_ETD_ESI amaZon ETD ESI Ion Trap (Bruker) 148 +651 42 AMAZON_X_ESI amaZon X ESI Ion Trap (Bruker) 148 +652 43 APEX_ULTRA_QQ_FTMS apex-ultra hybrid Qq-FTMS (Bruker) 148 +653 44 API_2000 API 2000 (AB Sciex) 148 +654 45 API_3200 API 3200 (AB Sciex) 148 +655 46 API_3200_QTRAP API 3200 QTRAP (AB Sciex) 148 +656 47 API_4000 API 4000 (AB Sciex) 148 +657 48 API_4000_QTRAP API 4000 QTRAP (AB Sciex) 148 +658 49 API_5000 API 5000 (AB Sciex) 148 +659 50 API_5500 API 5500 (AB Sciex) 148 +660 51 API_5500_QTRAP API 5500 QTRAP 
(AB Sciex) 148 +661 52 APPLIED_BIOSYSTEMS Applied Biosystems Group (ABI) 148 +662 53 AQI_BIOSCIENCES AQI Biosciences 148 +663 54 ATMOS_GC Atmospheric Pressure GC (Waters) 148 +664 55 AUTOFLEX_III_MALDI_TOF_MS autoflex III MALDI-TOF MS (Bruker) 148 +665 56 AUTOFLEX_SPEED autoflex speed(Bruker) 148 +666 57 AUTOSPEC_PREMIER AutoSpec Premier (Waters) 148 +667 58 AXIMA_MEGA_TOF AXIMA Mega TOF (Shimadzu) 148 +668 59 AXIMA_PERF_MALDI_TOF AXIMA Performance MALDI TOF/TOF (Shimadzu) 148 +669 60 A_10_ANALYZER A-10 Analyzer (Apogee) 148 +670 61 A_40_MINIFCM A-40-MiniFCM (Apogee) 148 +671 62 BACTIFLOW Bactiflow (Chemunex SA) 148 +672 63 BASE4INNOVATION Base4innovation 148 +673 64 BD_BACTEC_MGIT_320 BD BACTEC MGIT 320 148 +674 65 BD_BACTEC_MGIT_960 BD BACTEC MGIT 960 148 +675 66 BD_RADIO_BACTEC_460TB BD Radiometric BACTEC 460TB 148 +676 67 BIONANOMATRIX BioNanomatrix 148 +677 68 CELL_LAB_QUANTA_SC Cell Lab Quanta SC (Becman Coulter) 148 +678 69 CLARUS_560_D_GCMS Clarus 560 D GC/MS (PerkinElmer) 148 +679 70 CLARUS_560_S_GCMS Clarus 560 S GC/MS (PerkinElmer) 148 +680 71 CLARUS_600_GCMS Clarus 600 GC/MS (PerkinElmer) 148 +681 72 COMPLETE_GENOMICS Complete Genomics 148 +682 73 CYAN Cyan (Dako Cytomation) 148 +683 74 CYFLOW_ML CyFlow ML (Partec) 148 +684 75 CYFLOW_SL Cyow SL (Partec) 148 +685 76 CYFLOW_SL3 CyFlow SL3 (Partec) 148 +686 77 CYTOBUOY CytoBuoy (Cyto Buoy Inc) 148 +687 78 CYTOSENCE CytoSence (Cyto Buoy Inc) 148 +688 79 CYTOSUB CytoSub (Cyto Buoy Inc) 148 +689 80 DANAHER Danaher 148 +690 81 DFS DFS (Thermo Scientific) 148 +691 82 EXACTIVE Exactive(Thermo Scientific) 148 +692 83 FACS_CANTO FACS Canto (Becton Dickinson) 148 +693 84 FACS_CANTO2 FACS Canto2 (Becton Dickinson) 148 +694 85 FACS_SCAN FACS Scan (Becton Dickinson) 148 +695 86 FC_500 FC 500 (Becman Coulter) 148 +696 87 GCMATE_II GCmate II GC/MS (Jeol) 148 +697 88 GCMS_QP2010_PLUS GCMS-QP2010 Plus (Shimadzu) 148 +698 89 GCMS_QP2010S_PLUS GCMS-QP2010S Plus (Shimadzu) 148 +699 90 GCT_PREMIER GCT Premier (Waters) 148 
+700 91 GENEQ GENEQ 148 +701 92 GENOME_CORP Genome Corp. 148 +702 93 GENOVOXX GenoVoxx 148 +703 94 GNUBIO GnuBio 148 +704 95 GUAVA_EASYCYTE_MINI Guava EasyCyte Mini (Millipore) 148 +705 96 GUAVA_EASYCYTE_PLUS Guava EasyCyte Plus (Millipore) 148 +706 97 GUAVA_PERSONAL_CELL Guava Personal Cell Analysis (Millipore) 148 +707 98 GUAVA_PERSONAL_CELL_96 Guava Personal Cell Analysis-96 (Millipore) 148 +708 99 HELICOS_BIO Helicos BioSciences 148 +709 100 ILLUMINA Illumina 148 +710 101 INDIRECT_LJ_MEDIUM Indirect proportion method on LJ medium 148 +711 102 INDIRECT_AGAR_7H9 Indirect proportion method on Middlebrook Agar 7H9 148 +712 103 INDIRECT_AGAR_7H10 Indirect proportion method on Middlebrook Agar 7H10 148 +713 104 INDIRECT_AGAR_7H11 Indirect proportion method on Middlebrook Agar 7H11 148 +714 105 INFLUX_ANALYZER inFlux Analyzer (Cytopeia) 148 +715 106 INTELLIGENT_BIOSYSTEMS Intelligent Bio-Systems 148 +716 107 ITQ_700 ITQ 700 (Thermo Scientific) 148 +717 108 ITQ_900 ITQ 900 (Thermo Scientific) 148 +718 109 ITQ_1100 ITQ 1100 (Thermo Scientific) 148 +719 110 JMS_53000_SPIRAL JMS-53000 SpiralTOF (Jeol) 148 +720 111 LASERGEN LaserGen 148 +721 112 LCMS_2020 LCMS-2020 (Shimadzu) 148 +722 113 LCMS_2010EV LCMS-2010EV (Shimadzu) 148 +723 114 LCMS_IT_TOF LCMS-IT-TOF (Shimadzu) 148 +724 115 LI_COR Li-Cor 148 +725 116 LIFE_TECH Life Tech 148 +726 117 LIGHTSPEED_GENOMICS LightSpeed Genomics 148 +727 118 LCT_PREMIER_XE LCT Premier XE (Waters) 148 +728 119 LCQ_DECA_XP_MAX LCQ Deca XP MAX (Thermo Scientific) 148 +729 120 LCQ_FLEET LCQ Fleet (Thermo Scientific) 148 +730 121 LXQ_THERMO LXQ (Thermo Scientific) 148 +731 122 LTQ_CLASSIC LTQ Classic (Thermo Scientific) 148 +732 123 LTQ_XL LTQ XL (Thermo Scientific) 148 +733 124 LTQ_VELOS LTQ Velos (Thermo Scientific) 148 +734 125 LTQ_ORBITRAP_CLASSIC LTQ Orbitrap Classic (Thermo Scientific) 148 +735 126 LTQ_ORBITRAP_XL LTQ Orbitrap XL (Thermo Scientific) 148 +736 127 LTQ_ORBITRAP_DISCOVERY LTQ Orbitrap Discovery (Thermo Scientific) 148 +737 
128 LTQ_ORBITRAP_VELOS LTQ Orbitrap Velos (Thermo Scientific) 148 +738 129 LUMINEX_100 Luminex 100 (Luminex) 148 +739 130 LUMINEX_200 Luminex 200 (Luminex) 148 +740 131 MACS_QUANT MACS Quant (Miltenyi) 148 +741 132 MALDI_SYNAPT_G2_HDMS MALDI SYNAPT G2 HDMS (Waters) 148 +742 133 MALDI_SYNAPT_G2_MS MALDI SYNAPT G2 MS (Waters) 148 +743 134 MALDI_SYNAPT_HDMS MALDI SYNAPT HDMS (Waters) 148 +744 135 MALDI_SYNAPT_MS MALDI SYNAPT MS (Waters) 148 +745 136 MALDI_MICROMX MALDI micro MX (Waters) 148 +746 137 MAXIS maXis (Bruker) 148 +747 138 MAXISG4 maXis G4 (Bruker) 148 +748 139 MICROFLEX_LT_MALDI_TOF_MS microflex LT MALDI-TOF MS (Bruker) 148 +749 140 MICROFLEX_LRF_MALDI_TOF_MS microflex LRF MALDI-TOF MS (Bruker) 148 +750 141 MICROFLEX_III_TOF_MS microflex III MALDI-TOF MS (Bruker) 148 +751 142 MICROTOF_II_ESI_TOF micrOTOF II ESI TOF (Bruker) 148 +752 143 MICROTOF_Q_II_ESI_QQ_TOF micrOTOF-Q II ESI-Qq-TOF (Bruker) 148 +753 144 MICROPLATE_ALAMAR_BLUE_COLORIMETRIC microplate Alamar Blue (resazurin) colorimetric method 148 +754 145 MSTATION Mstation (Jeol) 148 +755 146 MSQ_PLUS MSQ Plus (Thermo Scientific) 148 +756 147 NABSYS NABsys 148 +757 148 NANOPHOTONICS_BIOSCIENCES Nanophotonics Biosciences 148 +758 149 NETWORK_BIOSYSTEMS Network Biosystems 148 +759 150 NIMBLEGEN Nimblegen 148 +760 151 OXFORD_NANOPORE_TECHNOLOGIES Oxford Nanopore Technologies 148 +761 152 PACIFIC_BIOSCIENCES Pacific Biosciences 148 +762 153 POPULATION_GENETICS_TECHNOLOGIES Population Genetics Technologies 148 +763 154 Q1000GC_ULTRAQUAD Q1000GC UltraQuad (Jeol) 148 +764 155 QUATTRO_MICRO_API Quattro micro API (Waters) 148 +765 156 QUATTRO_MICRO_GC Quattro micro GC (Waters) 148 +766 157 QUATTRO_PREMIER_XE Quattro Premier XE (Waters) 148 +767 158 QSTAR QSTAR (AB Sciex) 148 +768 159 REVEO Reveo 148 +769 160 ROCHE Roche 148 +770 161 SEIRAD Seirad 148 +771 162 SOLARIX_HYBRID_QQ_FTMS solariX hybrid Qq-FTMS (Bruker) 148 +772 163 SOMACOUNT Somacount (Bently Instruments) 148 +773 164 SOMASCOPE SomaScope (Bently 
Instruments) 148 +774 165 SYNAPT_G2_HDMS SYNAPT G2 HDMS (Waters) 148 +775 166 SYNAPT_G2_MS SYNAPT G2 MS (Waters) 148 +776 167 SYNAPT_HDMS SYNAPT HDMS (Waters) 148 +777 168 SYNAPT_MS SYNAPT MS (Waters) 148 +778 169 TRIPLETOF_5600 TripleTOF 5600 (AB Sciex) 148 +779 170 TSQ_QUANTUM_ULTRA TSQ Quantum Ultra (Thermo Scientific) 148 +780 171 TSQ_QUANTUM_ACCESS TSQ Quantum Access (Thermo Scientific) 148 +781 172 TSQ_QUANTUM_ACCESS_MAX TSQ Quantum Access MAX (Thermo Scientific) 148 +782 173 TSQ_QUANTUM_DISCOVERY_MAX TSQ Quantum Discovery MAX (Thermo Scientific) 148 +783 174 TSQ_QUANTUM_GC TSQ Quantum GC (Thermo Scientific) 148 +784 175 TSQ_QUANTUM_XLS TSQ Quantum XLS (Thermo Scientific) 148 +785 176 TSQ_VANTAGE TSQ Vantage (Thermo Scientific) 148 +786 177 ULTRAFLEXTREME_MALDI_TOF_MS ultrafleXtreme MALDI-TOF MS (Bruker) 148 +787 178 VISIGEN_BIO VisiGen Biotechnologies 148 +788 179 XEVO_G2_QTOF Xevo G2 QTOF (Waters) 148 +789 180 XEVO_QTOF_MS Xevo QTof MS (Waters) 148 +790 181 XEVO_TQ_MS Xevo TQ MS (Waters) 148 +791 182 XEVO_TQ_S Xevo TQ-S (Waters) 148 +792 183 OTHER_PLATFORM Other 148 +793 0 abstract 154 +794 1 addendum 154 +795 2 announcement 154 +796 3 article-commentary 154 +797 4 book review 154 +798 5 books received 154 +799 6 brief report 154 +800 7 calendar 154 +801 8 case report 154 +802 9 collection 154 +803 10 correction 154 +804 11 data paper 154 +805 12 discussion 154 +806 13 dissertation 154 +807 14 editorial 154 +808 15 in brief 154 +809 16 introduction 154 +810 17 letter 154 +811 18 meeting report 154 +812 19 news 154 +813 20 obituary 154 +814 21 oration 154 +815 22 partial retraction 154 +816 23 product review 154 +817 24 rapid communication 154 +818 25 reply 154 +819 26 reprint 154 +820 27 research article 154 +821 28 retraction 154 +822 29 review article 154 +823 30 translation 154 +824 31 other 154 +\. 
+ + +-- +-- Data for Name: customfieldmap; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY customfieldmap (id, sourcedatasetfield, sourcetemplate, targetdatasetfield) FROM stdin; +\. + + +-- +-- Data for Name: customquestion; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY customquestion (id, displayorder, hidden, questionstring, questiontype, required, guestbook_id) FROM stdin; +\. + + +-- +-- Data for Name: customquestionresponse; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY customquestionresponse (id, response, customquestion_id, guestbookresponse_id) FROM stdin; +\. + + +-- +-- Data for Name: customquestionvalue; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY customquestionvalue (id, displayorder, valuestring, customquestion_id) FROM stdin; +\. + + +-- +-- Data for Name: datafile; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datafile (id, contenttype, filesystemname, filesize, ingeststatus, md5, name, restricted) FROM stdin; +4 application/vnd.google-earth.kmz 14dd48f37d9-68789d517db2 0 A cfaad1e9562443bb07119fcdbe11ccd2 \N f +\. + + +-- +-- Data for Name: datafilecategory; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datafilecategory (id, name, dataset_id) FROM stdin; +1 Code 3 +\. + + +-- +-- Data for Name: datafiletag; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datafiletag (id, type, datafile_id) FROM stdin; +\. + + +-- +-- Data for Name: dataset; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataset (id, authority, doiseparator, fileaccessrequest, globalidcreatetime, identifier, protocol, guestbook_id, thumbnailfile_id) FROM stdin; +3 10.5072/FK2 / f 2015-06-08 13:30:09.023 A0Y3TZ doi \N \N +\. 
+ + +-- +-- Data for Name: datasetfield; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetfield (id, datasetfieldtype_id, datasetversion_id, parentdatasetfieldcompoundvalue_id, template_id) FROM stdin; +1 16 1 \N \N +2 12 1 \N \N +3 7 1 \N \N +4 1 1 \N \N +5 14 \N 2 \N +6 8 \N 3 \N +7 19 1 \N \N +8 17 \N 1 \N +9 57 1 \N \N +10 10 \N 3 \N +11 13 \N 2 \N +12 15 \N 2 \N +13 9 \N 3 \N +14 56 1 \N \N +\. + + +-- +-- Data for Name: datasetfield_controlledvocabularyvalue; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetfield_controlledvocabularyvalue (datasetfield_id, controlledvocabularyvalues_id) FROM stdin; +7 3 +\. + + +-- +-- Data for Name: datasetfieldcompoundvalue; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetfieldcompoundvalue (id, displayorder, parentdatasetfield_id) FROM stdin; +1 0 1 +2 0 2 +3 0 3 +\. + + +-- +-- Data for Name: datasetfielddefaultvalue; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetfielddefaultvalue (id, displayorder, strvalue, datasetfield_id, defaultvalueset_id, parentdatasetfielddefaultvalue_id) FROM stdin; +\. + + +-- +-- Data for Name: datasetfieldtype; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetfieldtype (id, advancedsearchfieldtype, allowcontrolledvocabulary, allowmultiples, description, displayformat, displayoncreate, displayorder, facetable, fieldtype, name, required, title, watermark, metadatablock_id, parentdatasetfieldtype_id) FROM stdin; +1 t f f Full title by which the Dataset is known. t 0 f TEXT title t Title Enter title... 1 \N +2 f f f A secondary title used to amplify or state certain limitations on the main title. f 1 f TEXT subtitle f Subtitle 1 \N +3 f f f A title by which the work is commonly referred, or an abbreviation of the title. 
f 2 f TEXT alternativeTitle f Alternative Title 1 \N +4 f f t Another unique identifier that identifies this Dataset (e.g., producer's or another repository's number). : f 3 f NONE otherId f Other ID 1 \N +5 f f f Name of agency which generated this identifier. #VALUE f 4 f TEXT otherIdAgency f Agency 1 4 +6 f f f Other identifier that corresponds to this Dataset. #VALUE f 5 f TEXT otherIdValue f Identifier 1 4 +7 f f t The person(s), corporate body(ies), or agency(ies) responsible for creating the work. t 6 f NONE author f Author 1 \N +8 t f f The author's Family Name, Given Name or the name of the organization responsible for this Dataset. #VALUE t 7 t TEXT authorName t Name FamilyName, GivenName or Organization 1 7 +9 t f f The organization with which the author is affiliated. (#VALUE) t 8 t TEXT authorAffiliation f Affiliation 1 7 +10 f t f Name of the identifier scheme (ORCID, ISNI). - #VALUE: t 9 f TEXT authorIdentifierScheme f Identifier Scheme 1 7 +11 f f f Uniquely identifies an individual author or organization, according to various schemes. #VALUE t 10 f TEXT authorIdentifier f Identifier 1 7 +12 f f t The contact(s) for this Dataset. t 11 f NONE datasetContact f Contact 1 \N +13 f f f The contact's Family Name, Given Name or the name of the organization. #VALUE t 12 f TEXT datasetContactName f Name FamilyName, GivenName or Organization 1 12 +14 f f f The organization with which the contact is affiliated. (#VALUE) t 13 f TEXT datasetContactAffiliation f Affiliation 1 12 +15 f f f The e-mail address(es) of the contact(s) for the Dataset. This will not be displayed. #EMAIL t 14 f EMAIL datasetContactEmail t E-mail 1 12 +16 f f t A summary describing the purpose, nature, and scope of the Dataset. t 15 f NONE dsDescription f Description 1 \N +17 t f f A summary describing the purpose, nature, and scope of the Dataset. 
#VALUE t 16 f TEXTBOX dsDescriptionValue t Text 1 16 +18 f f f In cases where a Dataset contains more than one description (for example, one might be supplied by the data producer and another prepared by the data repository where the data are deposited), the date attribute is used to distinguish between the two descriptions. The date attribute follows the ISO convention of YYYY-MM-DD. (#VALUE) t 17 f DATE dsDescriptionDate f Date YYYY-MM-DD 1 16 +19 t t t Domain-specific Subject Categories that are topically relevant to the Dataset. t 18 t TEXT subject t Subject 1 \N +20 f f t Key terms that describe important aspects of the Dataset. t 19 f NONE keyword f Keyword 1 \N +21 t f f Key terms that describe important aspects of the Dataset. Can be used for building keyword indexes and for classification and retrieval purposes. A controlled vocabulary can be employed. The vocab attribute is provided for specification of the controlled vocabulary in use, such as LCSH, MeSH, or others. The vocabURI attribute specifies the location for the full controlled vocabulary. #VALUE t 20 t TEXT keywordValue f Term 1 20 +22 f f f For the specification of the keyword controlled vocabulary in use, such as LCSH, MeSH, or others. (#VALUE) t 21 f TEXT keywordVocabulary f Vocabulary 1 20 +23 f f f Keyword vocabulary URL points to the web presence that describes the keyword vocabulary, if appropriate. Enter an absolute URL where the keyword vocabulary web site is found, such as http://www.my.org. #VALUE t 22 f URL keywordVocabularyURI f Vocabulary URL Enter full URL, starting with http:// 1 20 +24 f f t The classification field indicates the broad important topic(s) and subjects that the data cover. Library of Congress subject terms may be used here. f 23 f NONE topicClassification f Topic Classification 1 \N +25 t f f Topic or Subject term that is relevant to this Dataset. 
#VALUE f 24 t TEXT topicClassValue f Term 1 24 +26 f f f Provided for specification of the controlled vocabulary in use, e.g., LCSH, MeSH, etc. (#VALUE) f 25 f TEXT topicClassVocab f Vocabulary 1 24 +27 f f f Specifies the URL location for the full controlled vocabulary. #VALUE f 26 f URL topicClassVocabURI f Vocabulary URL Enter full URL, starting with http:// 1 24 +28 f f t Publications that use the data from this Dataset. f 27 f NONE publication f Related Publication 1 \N +29 t f f The full bibliographic citation for this related publication. #VALUE f 28 f TEXTBOX publicationCitation f Citation 1 28 +30 t t f The type of digital identifier used for this publication (e.g., Digital Object Identifier (DOI)). #VALUE: f 29 f TEXT publicationIDType f ID Type 1 28 +31 t f f The identifier for the selected ID type. #VALUE f 30 f TEXT publicationIDNumber f ID Number 1 28 +32 f f f Link to the publication web page (e.g., journal article page, archive record page, or other). #VALUE f 31 f URL publicationURL f URL Enter full URL, starting with http:// 1 28 +33 f f f Additional important information about the Dataset. t 32 f TEXTBOX notesText f Notes 1 \N +34 t t t Language of the Dataset f 33 t TEXT language f Language 1 \N +35 f f t Person or organization with the financial or administrative responsibility over this Dataset f 34 f NONE producer f Producer 1 \N +36 t f f Producer name #VALUE f 35 t TEXT producerName f Name FamilyName, GivenName or Organization 1 35 +37 f f f The organization with which the producer is affiliated. (#VALUE) f 36 f TEXT producerAffiliation f Affiliation 1 35 +38 f f f The abbreviation by which the producer is commonly known. (ex. IQSS, ICPSR) (#VALUE) f 37 f TEXT producerAbbreviation f Abbreviation 1 35 +39 f f f Producer URL points to the producer's web presence, if appropriate. Enter an absolute URL where the producer's web site is found, such as http://www.my.org. 
#VALUE f 38 f URL producerURL f URL Enter full URL, starting with http:// 1 35 +40 f f f URL for the producer's logo, which points to this producer's web-accessible logo image. Enter an absolute URL where the producer's logo image is found, such as http://www.my.org/images/logo.gif.
                            f 39 f URL producerLogoURL f Logo URL Enter full URL for image, starting with http:// 1 35 +41 t f f Date when the data collection or other materials were produced (not distributed, published or archived). f 40 t DATE productionDate f Production Date YYYY-MM-DD 1 \N +42 f f f The location where the data collection and any other related materials were produced. f 41 f TEXT productionPlace f Production Place 1 \N +43 f f t The organization or person responsible for either collecting, managing, or otherwise contributing in some form to the development of the resource. : f 42 f NONE contributor f Contributor 1 \N +44 t t f The type of contributor of the resource. #VALUE f 43 t TEXT contributorType f Type 1 43 +45 t f f The Family Name, Given Name or organization name of the contributor. #VALUE f 44 t TEXT contributorName f Name FamilyName, GivenName or Organization 1 43 +46 f f t Grant Information : f 45 f NONE grantNumber f Grant Information 1 \N +47 f f f Grant Number Agency #VALUE f 46 f TEXT grantNumberAgency f Grant Agency 1 46 +48 f f f The grant or contract number of the project that sponsored the effort. #VALUE f 47 f TEXT grantNumberValue f Grant Number 1 46 +49 f f t The organization designated by the author or producer to generate copies of the particular work including any necessary editions or revisions. f 48 f NONE distributor f Distributor 1 \N +50 t f f Distributor name #VALUE f 49 t TEXT distributorName f Name FamilyName, GivenName or Organization 1 49 +51 f f f The organization with which the distributor contact is affiliated. (#VALUE) f 50 f TEXT distributorAffiliation f Affiliation 1 49 +52 f f f The abbreviation by which this distributor is commonly known (e.g., IQSS, ICPSR). (#VALUE) f 51 f TEXT distributorAbbreviation f Abbreviation 1 49 +53 f f f Distributor URL points to the distributor's web presence, if appropriate. Enter an absolute URL where the distributor's web site is found, such as http://www.my.org. 
#VALUE f 52 f URL distributorURL f URL Enter full URL, starting with http:// 1 49 +54 f f f URL of the distributor's logo, which points to this distributor's web-accessible logo image. Enter an absolute URL where the distributor's logo image is found, such as http://www.my.org/images/logo.gif.
                            f 53 f URL distributorLogoURL f Logo URL Enter full URL for image, starting with http:// 1 49 +55 t f f Date that the work was made available for distribution/presentation. f 54 t DATE distributionDate f Distribution Date YYYY-MM-DD 1 \N +56 f f f The person (Family Name, Given Name) or the name of the organization that deposited this Dataset to the repository. f 55 f TEXT depositor f Depositor 1 \N +57 f f f Date that the Dataset was deposited into the repository. f 56 t DATE dateOfDeposit f Deposit Date YYYY-MM-DD 1 \N +58 f f t Time period to which the data refer. This item reflects the time period covered by the data, not the dates of coding or making documents machine-readable or the dates the data were collected. Also known as span. ; f 57 f NONE timePeriodCovered f Time Period Covered 1 \N +59 t f f Start date which reflects the time period covered by the data, not the dates of coding or making documents machine-readable or the dates the data were collected. #NAME: #VALUE f 58 t DATE timePeriodCoveredStart f Start YYYY-MM-DD 1 58 +60 t f f End date which reflects the time period covered by the data, not the dates of coding or making documents machine-readable or the dates the data were collected. #NAME: #VALUE f 59 t DATE timePeriodCoveredEnd f End YYYY-MM-DD 1 58 +61 f f t Contains the date(s) when the data were collected. ; f 60 f NONE dateOfCollection f Date of Collection 1 \N +62 f f f Date when the data collection started. #NAME: #VALUE f 61 f DATE dateOfCollectionStart f Start YYYY-MM-DD 1 61 +63 f f f Date when the data collection ended. 
#NAME: #VALUE f 62 f DATE dateOfCollectionEnd f End YYYY-MM-DD 1 61 +64 t f t Type of data included in the file: survey data, census/enumeration data, aggregate data, clinical data, event/transaction data, program source code, machine-readable text, administrative records data, experimental data, psychological test, textual data, coded textual, coded documents, time budget diaries, observation data/ratings, process-produced data, or other. f 63 t TEXT kindOfData f Kind of Data 1 \N +65 f f f Information about the Dataset series. : f 64 f NONE series f Series 1 \N +66 t f f Name of the dataset series to which the Dataset belongs. #VALUE f 65 t TEXT seriesName f Name 1 65 +67 f f f History of the series and summary of those features that apply to the series as a whole. #VALUE f 66 f TEXTBOX seriesInformation f Information 1 65 +68 f f t Information about the software used to generate the Dataset. , f 67 f NONE software f Software 1 \N +69 f t f Name of software used to generate the Dataset. #VALUE f 68 f TEXT softwareName f Name 1 68 +70 f f f Version of the software used to generate the Dataset. #NAME: #VALUE f 69 f TEXT softwareVersion f Version 1 68 +71 f f t Any material related to this Dataset. f 70 f TEXTBOX relatedMaterial f Related Material 1 \N +72 f f t Any Datasets that are related to this Dataset, such as previous research on this subject. f 71 f TEXTBOX relatedDatasets f Related Datasets 1 \N +73 f f t Any references that would serve as background or supporting material to this Dataset. f 72 f TEXT otherReferences f Other References 1 \N +74 f f t List of books, articles, serials, or machine-readable data files that served as the sources of the data collection. f 73 f TEXTBOX dataSources f Data Sources 1 \N +75 f f f For historical materials, information about the origin of the sources and the rules followed in establishing the sources should be specified. 
f 74 f TEXTBOX originOfSources f Origin of Sources 1 \N +76 f f f Assessment of characteristics and source material. f 75 f TEXTBOX characteristicOfSources f Characteristic of Sources Noted 1 \N +77 f f f Level of documentation of the original sources. f 76 f TEXTBOX accessToSources f Documentation and Access to Sources 1 \N +78 f f t Information on the geographic coverage of the data. Includes the total geographic scope of the data. f 0 f NONE geographicCoverage f Geographic Coverage 2 \N +79 t t f The country or nation that the Dataset is about. f 1 t TEXT country f Country / Nation 2 78 +80 t f f The state or province that the Dataset is about. Use GeoNames for correct spelling and avoid abbreviations. f 2 t TEXT state f State / Province 2 78 +81 t f f The name of the city that the Dataset is about. Use GeoNames for correct spelling and avoid abbreviations. f 3 t TEXT city f City 2 78 +82 f f f Other information on the geographic coverage of the data. f 4 f TEXT otherGeographicCoverage f Other 2 78 +83 t f t Lowest level of geographic aggregation covered by the Dataset, e.g., village, county, region. f 5 t TEXT geographicUnit f Geographic Unit 2 \N +84 f f t The fundamental geometric description for any Dataset that models geography is the geographic bounding box. It describes the minimum box, defined by west and east longitudes and north and south latitudes, which includes the largest geographic extent of the Dataset's geographic coverage. This element is used in the first pass of a coordinate-based search. Inclusion of this element in the codebook is recommended, but is required if the bound polygon box is included. f 6 f NONE geographicBoundingBox f Geographic Bounding Box 2 \N +85 f f f Westernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180,0 <= West Bounding Longitude Value <= 180,0. 
f 7 f TEXT westLongitude f West Longitude 2 84 +86 f f f Easternmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -180,0 <= East Bounding Longitude Value <= 180,0. f 8 f TEXT eastLongitude f East Longitude 2 84 +87 f f f Northernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90,0 <= North Bounding Latitude Value <= 90,0. f 9 f TEXT northLongitude f North Latitude 2 84 +88 f f f Southernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values, expressed in decimal degrees, is -90,0 <= South Bounding Latitude Value <= 90,0. f 10 f TEXT southLongitude f South Latitude 2 84 +89 t f t Basic unit of analysis or observation that this Dataset describes, such as individuals, families/households, groups, institutions/organizations, administrative units, and more. For information about the DDI's controlled vocabulary for this element, please refer to the DDI web page at http://www.ddialliance.org/Specification/DDI-CV/. f 0 t TEXTBOX unitOfAnalysis f Unit of Analysis 3 \N +90 t f t Description of the population covered by the data in the file; the group of people or other elements that are the object of the study and to which the study results refer. Age, nationality, and residence commonly help to delineate a given universe, but any number of other factors may be used, such as age limits, sex, marital status, race, ethnic group, nationality, income, veteran status, criminal convictions, and more. The universe may consist of elements other than persons, such as housing units, court cases, deaths, countries, and so on. In general, it should be possible to tell from the description of the universe whether a given individual or element is a member of the population under study. Also known as the universe of interest, population of interest, and target population. 
f 1 t TEXTBOX universe f Universe 3 \N +91 t f f The time method or time dimension of the data collection, such as panel, cross-sectional, trend, time- series, or other. f 2 t TEXT timeMethod f Time Method 3 \N +92 f f f Individual, agency or organization responsible for administering the questionnaire or interview or compiling the data. f 3 f TEXT dataCollector f Data Collector FamilyName, GivenName or Organization 3 \N +93 f f f Type of training provided to the data collector f 4 f TEXT collectorTraining f Collector Training 3 \N +94 t f f If the data collected includes more than one point in time, indicate the frequency with which the data was collected; that is, monthly, quarterly, or other. f 5 t TEXT frequencyOfDataCollection f Frequency 3 \N +95 f f f Type of sample and sample design used to select the survey respondents to represent the population. May include reference to the target sample size and the sampling fraction. f 6 f TEXTBOX samplingProcedure f Sampling Procedure 3 \N +96 f f f Specific information regarding the target sample size, actual sample size, and the formula used to determine this. f 7 f NONE targetSampleSize f Target Sample Size 3 \N +97 f f f Actual sample size. f 8 f INT targetSampleActualSize f Actual Enter an integer... 3 96 +98 f f f Formula used to determine target sample size. f 9 f TEXT targetSampleSizeFormula f Formula 3 96 +99 f f f Show correspondence as well as discrepancies between the sampled units (obtained) and available statistics for the population (age, sex-ratio, marital status, etc.) as a whole. f 10 f TEXT deviationsFromSampleDesign f Major Deviations for Sample Design 3 \N +100 f f f Method used to collect the data; instrumentation characteristics (e.g., telephone interview, mail questionnaire, or other). f 11 f TEXTBOX collectionMode f Collection Mode 3 \N +101 f f f Type of data collection instrument used. 
Structured indicates an instrument in which all respondents are asked the same questions/tests, possibly with precoded answers. If a small portion of such a questionnaire includes open-ended questions, provide appropriate comments. Semi-structured indicates that the research instrument contains mainly open-ended questions. Unstructured indicates that in-depth interviews were conducted. f 12 f TEXT researchInstrument f Type of Research Instrument 3 \N +102 f f f Description of noteworthy aspects of the data collection situation. Includes information on factors such as cooperativeness of respondents, duration of interviews, number of call backs, or similar. f 13 f TEXTBOX dataCollectionSituation f Characteristics of Data Collection Situation 3 \N +103 f f f Summary of actions taken to minimize data loss. Include information on actions such as follow-up visits, supervisory checks, historical matching, estimation, and so on. f 14 f TEXT actionsToMinimizeLoss f Actions to Minimize Losses 3 \N +104 f f f Control OperationsMethods to facilitate data control performed by the primary investigator or by the data archive. f 15 f TEXT controlOperations f Control Operations 3 \N +105 f f f The use of sampling procedures might make it necessary to apply weights to produce accurate statistical results. Describes the criteria for using weights in analysis of a collection. If a weighting formula or coefficient was developed, the formula is provided, its elements are defined, and it is indicated how the formula was applied to the data. f 16 f TEXTBOX weighting f Weighting 3 \N +106 f f f Methods used to clean the data collection, such as consistency checking, wildcode checking, or other. f 17 f TEXT cleaningOperations f Cleaning Operations 3 \N +107 f f f Note element used for any information annotating or clarifying the methodology and processing of the study. 
f 18 f TEXT datasetLevelErrorNotes f Study Level Error Notes 3 \N +108 t f f Percentage of sample members who provided information. f 19 t TEXTBOX responseRate f Response Rate 3 \N +109 f f f Measure of how precisely one can estimate a population value from a given sample. f 20 f TEXT samplingErrorEstimates f Estimates of Sampling Error 3 \N +110 f f f Other issues pertaining to the data appraisal. Describe issues such as response variance, nonresponse rate and testing for bias, interviewer and response bias, confidence levels, question bias, or similar. f 21 f TEXT otherDataAppraisal f Other Forms of Data Appraisal 3 \N +111 f f f General notes about this Dataset. f 22 f NONE socialScienceNotes f Notes 3 \N +112 f f f Type of note. f 23 f TEXT socialScienceNotesType f Type 3 111 +113 f f f Note subject. f 24 f TEXT socialScienceNotesSubject f Subject 3 111 +114 f f f Text for this note. f 25 f TEXTBOX socialScienceNotesText f Text 3 111 +115 t t t The nature or genre of the content of the files in the dataset. f 0 t TEXT astroType f Type 4 \N +116 t t t The observatory or facility where the data was obtained. f 1 t TEXT astroFacility f Facility 4 \N +117 t t t The instrument used to collect the data. f 2 t TEXT astroInstrument f Instrument 4 \N +118 t f t Astronomical Objects represented in the data (Given as SIMBAD recognizable names preferred). f 3 t TEXT astroObject f Object 4 \N +119 t f f The spatial (angular) resolution that is typical of the observations, in decimal degrees. f 4 t TEXT resolution.Spatial f Spatial Resolution 4 \N +120 t f f The spectral resolution that is typical of the observations, given as the ratio λ/Δλ. f 5 t TEXT resolution.Spectral f Spectral Resolution 4 \N +121 f f f The temporal resolution that is typical of the observations, given in seconds. 
f 6 f TEXT resolution.Temporal f Time Resolution 4 \N +122 t t t Conventional bandpass name f 7 t TEXT coverage.Spectral.Bandpass f Bandpass 4 \N +123 t f t The central wavelength of the spectral bandpass, in meters. f 8 t FLOAT coverage.Spectral.CentralWavelength f Central Wavelength (m) Enter a floating-point number. 4 \N +124 f f t The minimum and maximum wavelength of the spectral bandpass. f 9 f NONE coverage.Spectral.Wavelength f Wavelength Range Enter a floating-point number. 4 \N +125 t f f The minimum wavelength of the spectral bandpass, in meters. f 10 t FLOAT coverage.Spectral.MinimumWavelength f Minimum (m) Enter a floating-point number. 4 124 +126 t f f The maximum wavelength of the spectral bandpass, in meters. f 11 t FLOAT coverage.Spectral.MaximumWavelength f Maximum (m) Enter a floating-point number. 4 124 +127 f f t Time period covered by the data. f 12 f NONE coverage.Temporal f Dataset Date Range 4 \N +128 t f f Dataset Start Date f 13 t DATE coverage.Temporal.StartTime f Start YYYY-MM-DD 4 127 +129 t f f Dataset End Date f 14 t DATE coverage.Temporal.StopTime f End YYYY-MM-DD 4 127 +130 f f t The sky coverage of the data object. f 15 f TEXT coverage.Spatial f Sky Coverage 4 \N +131 f f f The (typical) depth coverage, or sensitivity, of the data object in Jy. f 16 f FLOAT coverage.Depth f Depth Coverage Enter a floating-point number. 4 \N +132 f f f The (typical) density of objects, catalog entries, telescope pointings, etc., on the sky, in number per square degree. f 17 f FLOAT coverage.ObjectDensity f Object Density Enter a floating-point number. 4 \N +133 f f f The total number of objects, catalog entries, etc., in the data object. f 18 f INT coverage.ObjectCount f Object Count Enter an integer. 4 \N +134 f f f The fraction of the sky represented in the observations, ranging from 0 to 1. f 19 f FLOAT coverage.SkyFraction f Fraction of Sky Enter a floating-point number. 
4 \N +135 f f f The polarization coverage f 20 f TEXT coverage.Polarization f Polarization 4 \N +136 f f f RedshiftType string C "Redshift"; or "Optical" or "Radio" definitions of Doppler velocity used in the data object. f 21 f TEXT redshiftType f RedshiftType 4 \N +137 f f f The resolution in redshift (unitless) or Doppler velocity (km/s) in the data object. f 22 f FLOAT resolution.Redshift f Redshift Resolution Enter a floating-point number. 4 \N +138 f f t The value of the redshift (unitless) or Doppler velocity (km/s in the data object. f 23 f FLOAT coverage.RedshiftValue f Redshift Value Enter a floating-point number. 4 \N +139 f f f The minimum value of the redshift (unitless) or Doppler velocity (km/s in the data object. f 24 f FLOAT coverage.Redshift.MinimumValue f Minimum Enter a floating-point number. 4 138 +140 f f f The maximum value of the redshift (unitless) or Doppler velocity (km/s in the data object. f 25 f FLOAT coverage.Redshift.MaximumValue f Maximum Enter a floating-point number. 4 138 +141 t t t Design types that are based on the overall experimental design. f 0 t TEXT studyDesignType f Design Type 5 \N +142 t t t Factors used in the Dataset. f 1 t TEXT studyFactorType f Factor Type 5 \N +143 t t t The taxonomic name of the organism used in the Dataset or from which the starting biological material derives. f 2 t TEXT studyAssayOrganism f Organism 5 \N +144 t f t If Other was selected in Organism, list any other organisms that were used in this Dataset. Terms from the NCBI Taxonomy are recommended. f 3 t TEXT studyAssayOtherOrganism f Other Organism 5 \N +145 t t t A term to qualify the endpoint, or what is being measured (e.g. gene expression profiling; protein identification). f 4 t TEXT studyAssayMeasurementType f Measurement Type 5 \N +146 t f t If Other was selected in Measurement Type, list any other measurement types that were used. Terms from NCBO Bioportal are recommended. 
f 5 t TEXT studyAssayOtherMeasurmentType f Other Measurement Type 5 \N +147 t t t A term to identify the technology used to perform the measurement (e.g. DNA microarray; mass spectrometry). f 6 t TEXT studyAssayTechnologyType f Technology Type 5 \N +148 t t t The manufacturer and name of the technology platform used in the assay (e.g. Bruker AVANCE). f 7 t TEXT studyAssayPlatform f Technology Platform 5 \N +149 t t t The name of the cell line from which the source or sample derives. f 8 t TEXT studyAssayCellType f Cell Type 5 \N +150 f f t Indicates the volume, issue and date of a journal, which this Dataset is associated with. f 0 f NONE journalVolumeIssue f Journal 6 \N +151 t f f The journal volume which this Dataset is associated with (e.g., Volume 4). f 1 t TEXT journalVolume f Volume 6 150 +152 t f f The journal issue number which this Dataset is associated with (e.g., Number 2, Autumn). f 2 t TEXT journalIssue f Issue 6 150 +153 t f f The publication date for this journal volume/issue, which this Dataset is associated with (e.g., 1999). f 3 t DATE journalPubDate f Publication Date YYYY or YYYY-MM or YYYY-MM-DD 6 150 +154 t t f Indicates what kind of article this is, for example, a research article, a commentary, a book or product review, a case report, a calendar, etc (based on JATS). f 4 t TEXT journalArticleType f Type of Article 6 \N +\. + + +-- +-- Data for Name: datasetfieldvalue; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetfieldvalue (id, displayorder, value, datasetfield_id) FROM stdin; +1 0 We need to add files to this Dataset. 8 +2 0 Sample Dataset 4 +3 0 Dataverse.org 13 +4 0 Admin, Dataverse 6 +5 0 Admin, Dataverse 11 +6 0 2015-06-08 9 +7 0 dataverse@mailinator.com 12 +8 0 Admin, Dataverse 14 +9 0 Dataverse.org 5 +\. 
+ + +-- +-- Data for Name: datasetlinkingdataverse; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetlinkingdataverse (id, linkcreatetime, dataset_id, linkingdataverse_id) FROM stdin; +\. + + +-- +-- Data for Name: datasetlock; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetlock (id, info, starttime, user_id, dataset_id) FROM stdin; +\. + + +-- +-- Data for Name: datasetversion; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetversion (id, unf, archivenote, archivetime, availabilitystatus, citationrequirements, conditions, confidentialitydeclaration, contactforaccess, createtime, dataaccessplace, deaccessionlink, depositorrequirements, disclaimer, fileaccessrequest, inreview, lastupdatetime, license, minorversionnumber, originalarchive, releasetime, restrictions, sizeofcollection, specialpermissions, studycompletion, termsofaccess, termsofuse, version, versionnote, versionnumber, versionstate, dataset_id) FROM stdin; +1 \N \N \N \N \N \N \N \N 2015-06-08 13:30:09.023 \N \N \N \N f f 2015-06-08 15:40:14.341 CC0 0 \N 2015-06-08 15:40:14.341 \N \N \N \N \N \N 2 \N 1 RELEASED 3 +\. + + +-- +-- Data for Name: datasetversionuser; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datasetversionuser (id, lastupdatedate, authenticateduser_id, datasetversion_id) FROM stdin; +1 2015-06-08 15:40:14.341 1 1 +\. + + +-- +-- Data for Name: datatable; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datatable (id, casequantity, originalfileformat, originalformatversion, recordspercase, unf, varquantity, datafile_id) FROM stdin; +\. + + +-- +-- Data for Name: datavariable; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY datavariable (id, fileendposition, fileorder, filestartposition, format, formatcategory, "interval", label, name, numberofdecimalpoints, orderedfactor, recordsegmentnumber, type, unf, universe, weighted, datatable_id) FROM stdin; +\. 
+ + +-- +-- Data for Name: dataverse; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataverse (id, affiliation, alias, dataversetype, description, facetroot, guestbookroot, metadatablockroot, name, permissionroot, templateroot, themeroot, defaultcontributorrole_id, defaulttemplate_id) FROM stdin; +1 \N root UNCATEGORIZED The root dataverse. t f t Root t f t 6 \N +2 Dataverse.org test-dv RESEARCHERS \N f f f testDV t f t 6 \N +\. + + +-- +-- Data for Name: dataverse_metadatablock; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataverse_metadatablock (dataverse_id, metadatablocks_id) FROM stdin; +1 1 +\. + + +-- +-- Data for Name: dataversecontact; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataversecontact (id, contactemail, displayorder, dataverse_id) FROM stdin; +1 root@mailinator.com 0 1 +2 dataverse@mailinator.com 0 2 +\. + + +-- +-- Data for Name: dataversefacet; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataversefacet (id, displayorder, datasetfieldtype_id, dataverse_id) FROM stdin; +1 3 57 1 +2 2 21 1 +3 0 8 1 +4 1 19 1 +\. + + +-- +-- Data for Name: dataversefeatureddataverse; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataversefeatureddataverse (id, displayorder, dataverse_id, featureddataverse_id) FROM stdin; +\. + + +-- +-- Data for Name: dataversefieldtypeinputlevel; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataversefieldtypeinputlevel (id, include, required, datasetfieldtype_id, dataverse_id) FROM stdin; +\. + + +-- +-- Data for Name: dataverselinkingdataverse; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataverselinkingdataverse (id, linkcreatetime, dataverse_id, linkingdataverse_id) FROM stdin; +\. 
+ + +-- +-- Data for Name: dataverserole; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataverserole (id, alias, description, name, permissionbits, owner_id) FROM stdin; +1 admin A person who has all permissions for dataverses, datasets, and files. Admin 8191 \N +2 fileDownloader A person who can download a file. File Downloader 16 \N +3 fullContributor A person who can add subdataverses and datasets within a dataverse. Dataverse + Dataset Creator 3 \N +4 dvContributor A person who can add subdataverses within a dataverse. Dataverse Creator 1 \N +5 dsContributor A person who can add datasets within a dataverse. Dataset Creator 2 \N +6 editor For datasets, a person who can edit License + Terms, and then submit them for review. Contributor 4184 \N +7 curator For datasets, a person who can edit License + Terms, edit Permissions, and publish datasets. Curator 5471 \N +8 member A person who can view both unpublished dataverses and datasets. Member 12 \N +\. + + +-- +-- Data for Name: dataversesubjects; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataversesubjects (dataverse_id, controlledvocabularyvalue_id) FROM stdin; +2 3 +1 3 +\. + + +-- +-- Data for Name: dataversetheme; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dataversetheme (id, backgroundcolor, linkcolor, linkurl, logo, logoalignment, logobackgroundcolor, logoformat, tagline, textcolor, dataverse_id) FROM stdin; +\. + + +-- +-- Data for Name: defaultvalueset; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY defaultvalueset (id, name) FROM stdin; +\. 
+ + +-- +-- Data for Name: dvobject; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY dvobject (id, dtype, createdate, indextime, modificationtime, permissionindextime, permissionmodificationtime, publicationdate, creator_id, owner_id, releaseuser_id) FROM stdin; +1 Dataverse 2015-06-08 13:08:22.373 \N 2015-06-08 13:29:18.365 2015-06-08 13:29:18.388 2015-06-08 13:08:22.45 2015-06-08 13:29:18.365 1 \N 1 +4 DataFile 2015-06-08 15:05:00.586 \N 2015-06-08 15:05:00.586 2015-06-08 15:40:14.657 2015-06-08 15:04:25.299 2015-06-08 15:40:14.341 1 3 \N +3 Dataset 2015-06-08 13:30:09.023 2015-06-08 15:40:14.504 2015-06-08 15:40:14.341 2015-06-08 15:40:14.691 2015-06-08 13:30:09.845 2015-06-08 15:40:14.341 1 2 1 +2 Dataverse 2015-06-08 13:29:07.308 2015-06-08 15:40:14.739 2015-06-08 15:40:14.152 2015-06-08 15:40:14.768 2015-06-08 13:29:07.485 2015-06-08 15:40:14.152 1 1 1 +\. + + +-- +-- Data for Name: explicitgroup; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY explicitgroup (id, description, displayname, groupalias, groupaliasinowner, owner_id) FROM stdin; +\. + + +-- +-- Data for Name: explicitgroup_authenticateduser; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY explicitgroup_authenticateduser (explicitgroup_id, containedauthenticatedusers_id) FROM stdin; +\. + + +-- +-- Data for Name: explicitgroup_containedroleassignees; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY explicitgroup_containedroleassignees (explicitgroup_id, containedroleassignees) FROM stdin; +\. + + +-- +-- Data for Name: explicitgroup_explicitgroup; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY explicitgroup_explicitgroup (explicitgroup_id, containedexplicitgroups_id) FROM stdin; +\. + + +-- +-- Data for Name: fileaccessrequests; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY fileaccessrequests (datafile_id, authenticated_user_id) FROM stdin; +\. 
+ + +-- +-- Data for Name: filemetadata; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY filemetadata (id, description, label, restricted, version, datafile_id, datasetversion_id) FROM stdin; +1 This is a description of the file. 2001, Palestinian Proposal at the Taba Conference.kmz f 1 4 1 +\. + + +-- +-- Data for Name: filemetadata_datafilecategory; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY filemetadata_datafilecategory (filecategories_id, filemetadatas_id) FROM stdin; +1 1 +\. + + +-- +-- Data for Name: foreignmetadatafieldmapping; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY foreignmetadatafieldmapping (id, datasetfieldname, foreignfieldxpath, isattribute, foreignmetadataformatmapping_id, parentfieldmapping_id) FROM stdin; +\. + + +-- +-- Data for Name: foreignmetadataformatmapping; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY foreignmetadataformatmapping (id, displayname, name, schemalocation, startelement) FROM stdin; +\. + + +-- +-- Data for Name: guestbook; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY guestbook (id, createtime, emailrequired, enabled, institutionrequired, name, namerequired, positionrequired, dataverse_id) FROM stdin; +\. + + +-- +-- Data for Name: guestbookresponse; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY guestbookresponse (id, downloadtype, email, institution, name, "position", responsetime, sessionid, authenticateduser_id, datafile_id, dataset_id, datasetversion_id, guestbook_id) FROM stdin; +\. + + +-- +-- Data for Name: harvestingdataverseconfig; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY harvestingdataverseconfig (id, archivedescription, archiveurl, harveststyle, harvesttype, harvestingset, harvestingurl, dataverse_id) FROM stdin; +\. 
+ + +-- +-- Data for Name: ingestreport; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY ingestreport (id, endtime, report, starttime, status, type, datafile_id) FROM stdin; +\. + + +-- +-- Data for Name: ingestrequest; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY ingestrequest (id, controlcard, labelsfile, textencoding, datafile_id) FROM stdin; +\. + + +-- +-- Data for Name: ipv4range; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY ipv4range (id, bottomaslong, topaslong, owner_id) FROM stdin; +\. + + +-- +-- Data for Name: ipv6range; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY ipv6range (id, bottoma, bottomb, bottomc, bottomd, topa, topb, topc, topd, owner_id) FROM stdin; +\. + + +-- +-- Data for Name: maplayermetadata; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY maplayermetadata (id, embedmaplink, layerlink, layername, mapimagelink, worldmapusername, dataset_id, datafile_id) FROM stdin; +\. + + +-- +-- Data for Name: metadatablock; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY metadatablock (id, displayname, name, owner_id) FROM stdin; +1 Citation Metadata citation \N +2 Geospatial Metadata geospatial \N +3 Social Science and Humanities Metadata socialscience \N +4 Astronomy and Astrophysics Metadata astrophysics \N +5 Life Sciences Metadata biomedical \N +6 Journal Metadata journal \N +\. + + +-- +-- Data for Name: passwordresetdata; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY passwordresetdata (id, created, expires, reason, token, builtinuser_id) FROM stdin; +\. + + +-- +-- Data for Name: persistedglobalgroup; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY persistedglobalgroup (id, dtype, description, displayname, persistedgroupalias) FROM stdin; +\. 
+ + +-- +-- Data for Name: roleassignment; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY roleassignment (id, assigneeidentifier, definitionpoint_id, role_id) FROM stdin; +1 @dataverseAdmin 1 1 +2 @dataverseAdmin 2 1 +3 @dataverseAdmin 3 6 +\. + + +-- +-- Data for Name: savedsearch; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY savedsearch (id, query, creator_id, definitionpoint_id) FROM stdin; +\. + + +-- +-- Data for Name: savedsearchfilterquery; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY savedsearchfilterquery (id, filterquery, savedsearch_id) FROM stdin; +\. + + +-- +-- Data for Name: sequence; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY sequence (seq_name, seq_count) FROM stdin; +SEQ_GEN 0 +\. + + +-- +-- Data for Name: setting; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY setting (name, content) FROM stdin; +:AllowSignUp yes +:SignUpUrl /dataverseuser.xhtml?editMode=CREATE +:Protocol doi +:Authority 10.5072/FK2 +:DoiProvider EZID +:DoiSeparator / +BuiltinUsers.KEY burrito +:BlockedApiKey empanada +:BlockedApiPolicy localhost-only +\. + + +-- +-- Data for Name: shibgroup; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY shibgroup (id, attribute, name, pattern) FROM stdin; +\. + + +-- +-- Data for Name: summarystatistic; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY summarystatistic (id, type, value, datavariable_id) FROM stdin; +\. + + +-- +-- Data for Name: template; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY template (id, createtime, name, usagecount, dataverse_id) FROM stdin; +\. + + +-- +-- Data for Name: usernotification; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY usernotification (id, emailed, objectid, readnotification, senddate, type, user_id) FROM stdin; +1 f 2 f 2015-06-08 13:29:07.308 0 1 +2 f 1 f 2015-06-08 13:30:09.023 1 1 +\. 
+ + +-- +-- Data for Name: variablecategory; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY variablecategory (id, catorder, frequency, label, missing, value, datavariable_id) FROM stdin; +\. + + +-- +-- Data for Name: variablerange; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY variablerange (id, beginvalue, beginvaluetype, endvalue, endvaluetype, datavariable_id) FROM stdin; +\. + + +-- +-- Data for Name: variablerangeitem; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY variablerangeitem (id, value, datavariable_id) FROM stdin; +\. + + +-- +-- Data for Name: worldmapauth_token; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY worldmapauth_token (id, created, hasexpired, lastrefreshtime, modified, token, application_id, datafile_id, dataverseuser_id) FROM stdin; +\. + + +-- +-- Data for Name: worldmapauth_tokentype; Type: TABLE DATA; Schema: public; Owner: dataverse_app +-- + +COPY worldmapauth_tokentype (id, contactemail, created, hostname, ipaddress, mapitlink, md5, modified, name, timelimitminutes, timelimitseconds) FROM stdin; +\. 
+ + +-- +-- Name: actionlogrecord_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY actionlogrecord + ADD CONSTRAINT actionlogrecord_pkey PRIMARY KEY (id); + + +-- +-- Name: apitoken_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY apitoken + ADD CONSTRAINT apitoken_pkey PRIMARY KEY (id); + + +-- +-- Name: apitoken_tokenstring_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY apitoken + ADD CONSTRAINT apitoken_tokenstring_key UNIQUE (tokenstring); + + +-- +-- Name: authenticateduser_email_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY authenticateduser + ADD CONSTRAINT authenticateduser_email_key UNIQUE (email); + + +-- +-- Name: authenticateduser_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY authenticateduser + ADD CONSTRAINT authenticateduser_pkey PRIMARY KEY (id); + + +-- +-- Name: authenticateduser_useridentifier_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY authenticateduser + ADD CONSTRAINT authenticateduser_useridentifier_key UNIQUE (useridentifier); + + +-- +-- Name: authenticateduserlookup_authenticateduser_id_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY authenticateduserlookup + ADD CONSTRAINT authenticateduserlookup_authenticateduser_id_key UNIQUE (authenticateduser_id); + + +-- +-- Name: authenticateduserlookup_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY authenticateduserlookup + ADD CONSTRAINT authenticateduserlookup_pkey PRIMARY KEY (id); + + +-- +-- Name: authenticationproviderrow_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY authenticationproviderrow + ADD CONSTRAINT authenticationproviderrow_pkey 
PRIMARY KEY (id); + + +-- +-- Name: builtinuser_email_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY builtinuser + ADD CONSTRAINT builtinuser_email_key UNIQUE (email); + + +-- +-- Name: builtinuser_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY builtinuser + ADD CONSTRAINT builtinuser_pkey PRIMARY KEY (id); + + +-- +-- Name: builtinuser_username_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY builtinuser + ADD CONSTRAINT builtinuser_username_key UNIQUE (username); + + +-- +-- Name: controlledvocabalternate_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY controlledvocabalternate + ADD CONSTRAINT controlledvocabalternate_pkey PRIMARY KEY (id); + + +-- +-- Name: controlledvocabularyvalue_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY controlledvocabularyvalue + ADD CONSTRAINT controlledvocabularyvalue_pkey PRIMARY KEY (id); + + +-- +-- Name: customfieldmap_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY customfieldmap + ADD CONSTRAINT customfieldmap_pkey PRIMARY KEY (id); + + +-- +-- Name: customquestion_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY customquestion + ADD CONSTRAINT customquestion_pkey PRIMARY KEY (id); + + +-- +-- Name: customquestionresponse_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY customquestionresponse + ADD CONSTRAINT customquestionresponse_pkey PRIMARY KEY (id); + + +-- +-- Name: customquestionvalue_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY customquestionvalue + ADD CONSTRAINT customquestionvalue_pkey PRIMARY KEY (id); + + +-- +-- Name: datafile_pkey; Type: CONSTRAINT; Schema: public; 
Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datafile + ADD CONSTRAINT datafile_pkey PRIMARY KEY (id); + + +-- +-- Name: datafilecategory_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datafilecategory + ADD CONSTRAINT datafilecategory_pkey PRIMARY KEY (id); + + +-- +-- Name: datafiletag_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datafiletag + ADD CONSTRAINT datafiletag_pkey PRIMARY KEY (id); + + +-- +-- Name: dataset_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataset + ADD CONSTRAINT dataset_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetfield_controlledvocabularyvalue_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetfield_controlledvocabularyvalue + ADD CONSTRAINT datasetfield_controlledvocabularyvalue_pkey PRIMARY KEY (datasetfield_id, controlledvocabularyvalues_id); + + +-- +-- Name: datasetfield_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetfield + ADD CONSTRAINT datasetfield_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetfieldcompoundvalue_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetfieldcompoundvalue + ADD CONSTRAINT datasetfieldcompoundvalue_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetfielddefaultvalue_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetfielddefaultvalue + ADD CONSTRAINT datasetfielddefaultvalue_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetfieldtype_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetfieldtype + ADD CONSTRAINT datasetfieldtype_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetfieldvalue_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + 
+ALTER TABLE ONLY datasetfieldvalue + ADD CONSTRAINT datasetfieldvalue_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetlinkingdataverse_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetlinkingdataverse + ADD CONSTRAINT datasetlinkingdataverse_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetlock_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetlock + ADD CONSTRAINT datasetlock_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetversion_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetversion + ADD CONSTRAINT datasetversion_pkey PRIMARY KEY (id); + + +-- +-- Name: datasetversionuser_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datasetversionuser + ADD CONSTRAINT datasetversionuser_pkey PRIMARY KEY (id); + + +-- +-- Name: datatable_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datatable + ADD CONSTRAINT datatable_pkey PRIMARY KEY (id); + + +-- +-- Name: datavariable_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY datavariable + ADD CONSTRAINT datavariable_pkey PRIMARY KEY (id); + + +-- +-- Name: dataverse_alias_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataverse + ADD CONSTRAINT dataverse_alias_key UNIQUE (alias); + + +-- +-- Name: dataverse_metadatablock_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataverse_metadatablock + ADD CONSTRAINT dataverse_metadatablock_pkey PRIMARY KEY (dataverse_id, metadatablocks_id); + + +-- +-- Name: dataverse_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataverse + ADD CONSTRAINT dataverse_pkey PRIMARY KEY (id); + + +-- +-- Name: dataversecontact_pkey; Type: 
CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataversecontact + ADD CONSTRAINT dataversecontact_pkey PRIMARY KEY (id); + + +-- +-- Name: dataversefacet_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataversefacet + ADD CONSTRAINT dataversefacet_pkey PRIMARY KEY (id); + + +-- +-- Name: dataversefeatureddataverse_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataversefeatureddataverse + ADD CONSTRAINT dataversefeatureddataverse_pkey PRIMARY KEY (id); + + +-- +-- Name: dataversefieldtypeinputlevel_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataversefieldtypeinputlevel + ADD CONSTRAINT dataversefieldtypeinputlevel_pkey PRIMARY KEY (id); + + +-- +-- Name: dataverselinkingdataverse_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataverselinkingdataverse + ADD CONSTRAINT dataverselinkingdataverse_pkey PRIMARY KEY (id); + + +-- +-- Name: dataverserole_alias_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataverserole + ADD CONSTRAINT dataverserole_alias_key UNIQUE (alias); + + +-- +-- Name: dataverserole_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataverserole + ADD CONSTRAINT dataverserole_pkey PRIMARY KEY (id); + + +-- +-- Name: dataversesubjects_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataversesubjects + ADD CONSTRAINT dataversesubjects_pkey PRIMARY KEY (dataverse_id, controlledvocabularyvalue_id); + + +-- +-- Name: dataversetheme_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataversetheme + ADD CONSTRAINT dataversetheme_pkey PRIMARY KEY (id); + + +-- +-- Name: defaultvalueset_pkey; Type: CONSTRAINT; Schema: 
public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY defaultvalueset + ADD CONSTRAINT defaultvalueset_pkey PRIMARY KEY (id); + + +-- +-- Name: dvobject_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dvobject + ADD CONSTRAINT dvobject_pkey PRIMARY KEY (id); + + +-- +-- Name: explicitgroup_authenticateduser_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY explicitgroup_authenticateduser + ADD CONSTRAINT explicitgroup_authenticateduser_pkey PRIMARY KEY (explicitgroup_id, containedauthenticatedusers_id); + + +-- +-- Name: explicitgroup_explicitgroup_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY explicitgroup_explicitgroup + ADD CONSTRAINT explicitgroup_explicitgroup_pkey PRIMARY KEY (explicitgroup_id, containedexplicitgroups_id); + + +-- +-- Name: explicitgroup_groupalias_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY explicitgroup + ADD CONSTRAINT explicitgroup_groupalias_key UNIQUE (groupalias); + + +-- +-- Name: explicitgroup_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY explicitgroup + ADD CONSTRAINT explicitgroup_pkey PRIMARY KEY (id); + + +-- +-- Name: fileaccessrequests_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY fileaccessrequests + ADD CONSTRAINT fileaccessrequests_pkey PRIMARY KEY (datafile_id, authenticated_user_id); + + +-- +-- Name: filemetadata_datafilecategory_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY filemetadata_datafilecategory + ADD CONSTRAINT filemetadata_datafilecategory_pkey PRIMARY KEY (filecategories_id, filemetadatas_id); + + +-- +-- Name: filemetadata_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY filemetadata + ADD 
CONSTRAINT filemetadata_pkey PRIMARY KEY (id); + + +-- +-- Name: foreignmetadatafieldmapping_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY foreignmetadatafieldmapping + ADD CONSTRAINT foreignmetadatafieldmapping_pkey PRIMARY KEY (id); + + +-- +-- Name: foreignmetadataformatmapping_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY foreignmetadataformatmapping + ADD CONSTRAINT foreignmetadataformatmapping_pkey PRIMARY KEY (id); + + +-- +-- Name: guestbook_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY guestbook + ADD CONSTRAINT guestbook_pkey PRIMARY KEY (id); + + +-- +-- Name: guestbookresponse_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY guestbookresponse + ADD CONSTRAINT guestbookresponse_pkey PRIMARY KEY (id); + + +-- +-- Name: harvestingdataverseconfig_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY harvestingdataverseconfig + ADD CONSTRAINT harvestingdataverseconfig_pkey PRIMARY KEY (id); + + +-- +-- Name: ingestreport_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY ingestreport + ADD CONSTRAINT ingestreport_pkey PRIMARY KEY (id); + + +-- +-- Name: ingestrequest_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY ingestrequest + ADD CONSTRAINT ingestrequest_pkey PRIMARY KEY (id); + + +-- +-- Name: ipv4range_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY ipv4range + ADD CONSTRAINT ipv4range_pkey PRIMARY KEY (id); + + +-- +-- Name: ipv6range_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY ipv6range + ADD CONSTRAINT ipv6range_pkey PRIMARY KEY (id); + + +-- +-- Name: maplayermetadata_datafile_id_key; Type: CONSTRAINT; 
Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY maplayermetadata + ADD CONSTRAINT maplayermetadata_datafile_id_key UNIQUE (datafile_id); + + +-- +-- Name: maplayermetadata_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY maplayermetadata + ADD CONSTRAINT maplayermetadata_pkey PRIMARY KEY (id); + + +-- +-- Name: metadatablock_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY metadatablock + ADD CONSTRAINT metadatablock_pkey PRIMARY KEY (id); + + +-- +-- Name: passwordresetdata_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY passwordresetdata + ADD CONSTRAINT passwordresetdata_pkey PRIMARY KEY (id); + + +-- +-- Name: persistedglobalgroup_persistedgroupalias_key; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY persistedglobalgroup + ADD CONSTRAINT persistedglobalgroup_persistedgroupalias_key UNIQUE (persistedgroupalias); + + +-- +-- Name: persistedglobalgroup_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY persistedglobalgroup + ADD CONSTRAINT persistedglobalgroup_pkey PRIMARY KEY (id); + + +-- +-- Name: roleassignment_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY roleassignment + ADD CONSTRAINT roleassignment_pkey PRIMARY KEY (id); + + +-- +-- Name: savedsearch_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY savedsearch + ADD CONSTRAINT savedsearch_pkey PRIMARY KEY (id); + + +-- +-- Name: savedsearchfilterquery_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY savedsearchfilterquery + ADD CONSTRAINT savedsearchfilterquery_pkey PRIMARY KEY (id); + + +-- +-- Name: sequence_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER 
TABLE ONLY sequence + ADD CONSTRAINT sequence_pkey PRIMARY KEY (seq_name); + + +-- +-- Name: setting_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY setting + ADD CONSTRAINT setting_pkey PRIMARY KEY (name); + + +-- +-- Name: shibgroup_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY shibgroup + ADD CONSTRAINT shibgroup_pkey PRIMARY KEY (id); + + +-- +-- Name: summarystatistic_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY summarystatistic + ADD CONSTRAINT summarystatistic_pkey PRIMARY KEY (id); + + +-- +-- Name: template_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY template + ADD CONSTRAINT template_pkey PRIMARY KEY (id); + + +-- +-- Name: unq_authenticateduserlookup_0; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY authenticateduserlookup + ADD CONSTRAINT unq_authenticateduserlookup_0 UNIQUE (persistentuserid, authenticationproviderid); + + +-- +-- Name: unq_dataset_0; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataset + ADD CONSTRAINT unq_dataset_0 UNIQUE (authority, protocol, identifier, doiseparator); + + +-- +-- Name: unq_dataversefieldtypeinputlevel_0; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY dataversefieldtypeinputlevel + ADD CONSTRAINT unq_dataversefieldtypeinputlevel_0 UNIQUE (dataverse_id, datasetfieldtype_id); + + +-- +-- Name: unq_foreignmetadatafieldmapping_0; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY foreignmetadatafieldmapping + ADD CONSTRAINT unq_foreignmetadatafieldmapping_0 UNIQUE (foreignmetadataformatmapping_id, foreignfieldxpath); + + +-- +-- Name: unq_roleassignment_0; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER 
TABLE ONLY roleassignment + ADD CONSTRAINT unq_roleassignment_0 UNIQUE (assigneeidentifier, role_id, definitionpoint_id); + + +-- +-- Name: usernotification_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY usernotification + ADD CONSTRAINT usernotification_pkey PRIMARY KEY (id); + + +-- +-- Name: variablecategory_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY variablecategory + ADD CONSTRAINT variablecategory_pkey PRIMARY KEY (id); + + +-- +-- Name: variablerange_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY variablerange + ADD CONSTRAINT variablerange_pkey PRIMARY KEY (id); + + +-- +-- Name: variablerangeitem_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY variablerangeitem + ADD CONSTRAINT variablerangeitem_pkey PRIMARY KEY (id); + + +-- +-- Name: worldmapauth_token_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY worldmapauth_token + ADD CONSTRAINT worldmapauth_token_pkey PRIMARY KEY (id); + + +-- +-- Name: worldmapauth_tokentype_pkey; Type: CONSTRAINT; Schema: public; Owner: dataverse_app; Tablespace: +-- + +ALTER TABLE ONLY worldmapauth_tokentype + ADD CONSTRAINT worldmapauth_tokentype_pkey PRIMARY KEY (id); + + +-- +-- Name: application_name; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE UNIQUE INDEX application_name ON worldmapauth_tokentype USING btree (name); + + +-- +-- Name: index_actionlogrecord_actiontype; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_actionlogrecord_actiontype ON actionlogrecord USING btree (actiontype); + + +-- +-- Name: index_actionlogrecord_starttime; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_actionlogrecord_starttime ON actionlogrecord USING btree (starttime); + + 
+-- +-- Name: index_actionlogrecord_useridentifier; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_actionlogrecord_useridentifier ON actionlogrecord USING btree (useridentifier); + + +-- +-- Name: index_apitoken_authenticateduser_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_apitoken_authenticateduser_id ON apitoken USING btree (authenticateduser_id); + + +-- +-- Name: index_authenticationproviderrow_enabled; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_authenticationproviderrow_enabled ON authenticationproviderrow USING btree (enabled); + + +-- +-- Name: index_builtinuser_lastname; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_builtinuser_lastname ON builtinuser USING btree (lastname); + + +-- +-- Name: index_controlledvocabalternate_controlledvocabularyvalue_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_controlledvocabalternate_controlledvocabularyvalue_id ON controlledvocabalternate USING btree (controlledvocabularyvalue_id); + + +-- +-- Name: index_controlledvocabalternate_datasetfieldtype_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_controlledvocabalternate_datasetfieldtype_id ON controlledvocabalternate USING btree (datasetfieldtype_id); + + +-- +-- Name: index_controlledvocabularyvalue_datasetfieldtype_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_controlledvocabularyvalue_datasetfieldtype_id ON controlledvocabularyvalue USING btree (datasetfieldtype_id); + + +-- +-- Name: index_controlledvocabularyvalue_displayorder; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_controlledvocabularyvalue_displayorder ON controlledvocabularyvalue USING btree (displayorder); + + +-- +-- Name: 
index_customfieldmap_sourcedatasetfield; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_customfieldmap_sourcedatasetfield ON customfieldmap USING btree (sourcedatasetfield); + + +-- +-- Name: index_customfieldmap_sourcetemplate; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_customfieldmap_sourcetemplate ON customfieldmap USING btree (sourcetemplate); + + +-- +-- Name: index_customquestion_guestbook_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_customquestion_guestbook_id ON customquestion USING btree (guestbook_id); + + +-- +-- Name: index_customquestionresponse_guestbookresponse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_customquestionresponse_guestbookresponse_id ON customquestionresponse USING btree (guestbookresponse_id); + + +-- +-- Name: index_datafile_contenttype; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datafile_contenttype ON datafile USING btree (contenttype); + + +-- +-- Name: index_datafile_ingeststatus; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datafile_ingeststatus ON datafile USING btree (ingeststatus); + + +-- +-- Name: index_datafile_md5; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datafile_md5 ON datafile USING btree (md5); + + +-- +-- Name: index_datafile_restricted; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datafile_restricted ON datafile USING btree (restricted); + + +-- +-- Name: index_datafilecategory_dataset_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datafilecategory_dataset_id ON datafilecategory USING btree (dataset_id); + + +-- +-- Name: index_datafiletag_datafile_id; Type: INDEX; Schema: public; Owner: dataverse_app; 
Tablespace: +-- + +CREATE INDEX index_datafiletag_datafile_id ON datafiletag USING btree (datafile_id); + + +-- +-- Name: index_dataset_guestbook_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataset_guestbook_id ON dataset USING btree (guestbook_id); + + +-- +-- Name: index_dataset_thumbnailfile_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataset_thumbnailfile_id ON dataset USING btree (thumbnailfile_id); + + +-- +-- Name: index_datasetfield_controlledvocabularyvalue_controlledvocabula; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfield_controlledvocabularyvalue_controlledvocabula ON datasetfield_controlledvocabularyvalue USING btree (controlledvocabularyvalues_id); + + +-- +-- Name: index_datasetfield_controlledvocabularyvalue_datasetfield_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfield_controlledvocabularyvalue_datasetfield_id ON datasetfield_controlledvocabularyvalue USING btree (datasetfield_id); + + +-- +-- Name: index_datasetfield_datasetfieldtype_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfield_datasetfieldtype_id ON datasetfield USING btree (datasetfieldtype_id); + + +-- +-- Name: index_datasetfield_datasetversion_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfield_datasetversion_id ON datasetfield USING btree (datasetversion_id); + + +-- +-- Name: index_datasetfield_parentdatasetfieldcompoundvalue_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfield_parentdatasetfieldcompoundvalue_id ON datasetfield USING btree (parentdatasetfieldcompoundvalue_id); + + +-- +-- Name: index_datasetfield_template_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX 
index_datasetfield_template_id ON datasetfield USING btree (template_id); + + +-- +-- Name: index_datasetfieldcompoundvalue_parentdatasetfield_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfieldcompoundvalue_parentdatasetfield_id ON datasetfieldcompoundvalue USING btree (parentdatasetfield_id); + + +-- +-- Name: index_datasetfielddefaultvalue_datasetfield_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfielddefaultvalue_datasetfield_id ON datasetfielddefaultvalue USING btree (datasetfield_id); + + +-- +-- Name: index_datasetfielddefaultvalue_defaultvalueset_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfielddefaultvalue_defaultvalueset_id ON datasetfielddefaultvalue USING btree (defaultvalueset_id); + + +-- +-- Name: index_datasetfielddefaultvalue_displayorder; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfielddefaultvalue_displayorder ON datasetfielddefaultvalue USING btree (displayorder); + + +-- +-- Name: index_datasetfielddefaultvalue_parentdatasetfielddefaultvalue_i; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfielddefaultvalue_parentdatasetfielddefaultvalue_i ON datasetfielddefaultvalue USING btree (parentdatasetfielddefaultvalue_id); + + +-- +-- Name: index_datasetfieldtype_metadatablock_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfieldtype_metadatablock_id ON datasetfieldtype USING btree (metadatablock_id); + + +-- +-- Name: index_datasetfieldtype_parentdatasetfieldtype_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfieldtype_parentdatasetfieldtype_id ON datasetfieldtype USING btree (parentdatasetfieldtype_id); + + +-- +-- Name: index_datasetfieldvalue_datasetfield_id; Type: INDEX; Schema: 
public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetfieldvalue_datasetfield_id ON datasetfieldvalue USING btree (datasetfield_id); + + +-- +-- Name: index_datasetlinkingdataverse_dataset_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetlinkingdataverse_dataset_id ON datasetlinkingdataverse USING btree (dataset_id); + + +-- +-- Name: index_datasetlinkingdataverse_linkingdataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetlinkingdataverse_linkingdataverse_id ON datasetlinkingdataverse USING btree (linkingdataverse_id); + + +-- +-- Name: index_datasetlock_dataset_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetlock_dataset_id ON datasetlock USING btree (dataset_id); + + +-- +-- Name: index_datasetlock_user_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetlock_user_id ON datasetlock USING btree (user_id); + + +-- +-- Name: index_datasetversion_dataset_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetversion_dataset_id ON datasetversion USING btree (dataset_id); + + +-- +-- Name: index_datasetversionuser_authenticateduser_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetversionuser_authenticateduser_id ON datasetversionuser USING btree (authenticateduser_id); + + +-- +-- Name: index_datasetversionuser_datasetversion_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datasetversionuser_datasetversion_id ON datasetversionuser USING btree (datasetversion_id); + + +-- +-- Name: index_datatable_datafile_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datatable_datafile_id ON datatable USING btree (datafile_id); + + +-- +-- Name: 
index_datavariable_datatable_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_datavariable_datatable_id ON datavariable USING btree (datatable_id); + + +-- +-- Name: index_dataverse_affiliation; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_affiliation ON dataverse USING btree (affiliation); + + +-- +-- Name: index_dataverse_alias; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_alias ON dataverse USING btree (alias); + + +-- +-- Name: index_dataverse_dataversetype; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_dataversetype ON dataverse USING btree (dataversetype); + + +-- +-- Name: index_dataverse_defaultcontributorrole_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_defaultcontributorrole_id ON dataverse USING btree (defaultcontributorrole_id); + + +-- +-- Name: index_dataverse_defaulttemplate_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_defaulttemplate_id ON dataverse USING btree (defaulttemplate_id); + + +-- +-- Name: index_dataverse_facetroot; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_facetroot ON dataverse USING btree (facetroot); + + +-- +-- Name: index_dataverse_guestbookroot; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_guestbookroot ON dataverse USING btree (guestbookroot); + + +-- +-- Name: index_dataverse_metadatablockroot; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_metadatablockroot ON dataverse USING btree (metadatablockroot); + + +-- +-- Name: index_dataverse_permissionroot; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX 
index_dataverse_permissionroot ON dataverse USING btree (permissionroot); + + +-- +-- Name: index_dataverse_templateroot; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_templateroot ON dataverse USING btree (templateroot); + + +-- +-- Name: index_dataverse_themeroot; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverse_themeroot ON dataverse USING btree (themeroot); + + +-- +-- Name: index_dataversecontact_contactemail; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversecontact_contactemail ON dataversecontact USING btree (contactemail); + + +-- +-- Name: index_dataversecontact_dataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversecontact_dataverse_id ON dataversecontact USING btree (dataverse_id); + + +-- +-- Name: index_dataversecontact_displayorder; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversecontact_displayorder ON dataversecontact USING btree (displayorder); + + +-- +-- Name: index_dataversefacet_datasetfieldtype_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefacet_datasetfieldtype_id ON dataversefacet USING btree (datasetfieldtype_id); + + +-- +-- Name: index_dataversefacet_dataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefacet_dataverse_id ON dataversefacet USING btree (dataverse_id); + + +-- +-- Name: index_dataversefacet_displayorder; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefacet_displayorder ON dataversefacet USING btree (displayorder); + + +-- +-- Name: index_dataversefeatureddataverse_dataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefeatureddataverse_dataverse_id ON 
dataversefeatureddataverse USING btree (dataverse_id); + + +-- +-- Name: index_dataversefeatureddataverse_displayorder; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefeatureddataverse_displayorder ON dataversefeatureddataverse USING btree (displayorder); + + +-- +-- Name: index_dataversefeatureddataverse_featureddataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefeatureddataverse_featureddataverse_id ON dataversefeatureddataverse USING btree (featureddataverse_id); + + +-- +-- Name: index_dataversefieldtypeinputlevel_datasetfieldtype_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefieldtypeinputlevel_datasetfieldtype_id ON dataversefieldtypeinputlevel USING btree (datasetfieldtype_id); + + +-- +-- Name: index_dataversefieldtypeinputlevel_dataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefieldtypeinputlevel_dataverse_id ON dataversefieldtypeinputlevel USING btree (dataverse_id); + + +-- +-- Name: index_dataversefieldtypeinputlevel_required; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversefieldtypeinputlevel_required ON dataversefieldtypeinputlevel USING btree (required); + + +-- +-- Name: index_dataverselinkingdataverse_dataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverselinkingdataverse_dataverse_id ON dataverselinkingdataverse USING btree (dataverse_id); + + +-- +-- Name: index_dataverselinkingdataverse_linkingdataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverselinkingdataverse_linkingdataverse_id ON dataverselinkingdataverse USING btree (linkingdataverse_id); + + +-- +-- Name: index_dataverserole_alias; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + 
+CREATE INDEX index_dataverserole_alias ON dataverserole USING btree (alias); + + +-- +-- Name: index_dataverserole_name; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverserole_name ON dataverserole USING btree (name); + + +-- +-- Name: index_dataverserole_owner_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataverserole_owner_id ON dataverserole USING btree (owner_id); + + +-- +-- Name: index_dataversetheme_dataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dataversetheme_dataverse_id ON dataversetheme USING btree (dataverse_id); + + +-- +-- Name: index_dvobject_creator_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dvobject_creator_id ON dvobject USING btree (creator_id); + + +-- +-- Name: index_dvobject_dtype; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dvobject_dtype ON dvobject USING btree (dtype); + + +-- +-- Name: index_dvobject_owner_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dvobject_owner_id ON dvobject USING btree (owner_id); + + +-- +-- Name: index_dvobject_releaseuser_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_dvobject_releaseuser_id ON dvobject USING btree (releaseuser_id); + + +-- +-- Name: index_explicitgroup_groupaliasinowner; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_explicitgroup_groupaliasinowner ON explicitgroup USING btree (groupaliasinowner); + + +-- +-- Name: index_explicitgroup_owner_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_explicitgroup_owner_id ON explicitgroup USING btree (owner_id); + + +-- +-- Name: index_filemetadata_datafile_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE 
INDEX index_filemetadata_datafile_id ON filemetadata USING btree (datafile_id); + + +-- +-- Name: index_filemetadata_datafilecategory_filecategories_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_filemetadata_datafilecategory_filecategories_id ON filemetadata_datafilecategory USING btree (filecategories_id); + + +-- +-- Name: index_filemetadata_datafilecategory_filemetadatas_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_filemetadata_datafilecategory_filemetadatas_id ON filemetadata_datafilecategory USING btree (filemetadatas_id); + + +-- +-- Name: index_filemetadata_datasetversion_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_filemetadata_datasetversion_id ON filemetadata USING btree (datasetversion_id); + + +-- +-- Name: index_foreignmetadatafieldmapping_foreignfieldxpath; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_foreignmetadatafieldmapping_foreignfieldxpath ON foreignmetadatafieldmapping USING btree (foreignfieldxpath); + + +-- +-- Name: index_foreignmetadatafieldmapping_foreignmetadataformatmapping_; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_foreignmetadatafieldmapping_foreignmetadataformatmapping_ ON foreignmetadatafieldmapping USING btree (foreignmetadataformatmapping_id); + + +-- +-- Name: index_foreignmetadatafieldmapping_parentfieldmapping_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_foreignmetadatafieldmapping_parentfieldmapping_id ON foreignmetadatafieldmapping USING btree (parentfieldmapping_id); + + +-- +-- Name: index_foreignmetadataformatmapping_name; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_foreignmetadataformatmapping_name ON foreignmetadataformatmapping USING btree (name); + + +-- +-- Name: 
index_guestbookresponse_datafile_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_guestbookresponse_datafile_id ON guestbookresponse USING btree (datafile_id); + + +-- +-- Name: index_guestbookresponse_dataset_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_guestbookresponse_dataset_id ON guestbookresponse USING btree (dataset_id); + + +-- +-- Name: index_guestbookresponse_guestbook_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_guestbookresponse_guestbook_id ON guestbookresponse USING btree (guestbook_id); + + +-- +-- Name: index_harvestingdataverseconfig_dataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_harvestingdataverseconfig_dataverse_id ON harvestingdataverseconfig USING btree (dataverse_id); + + +-- +-- Name: index_harvestingdataverseconfig_harvestingurl; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_harvestingdataverseconfig_harvestingurl ON harvestingdataverseconfig USING btree (harvestingurl); + + +-- +-- Name: index_harvestingdataverseconfig_harveststyle; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_harvestingdataverseconfig_harveststyle ON harvestingdataverseconfig USING btree (harveststyle); + + +-- +-- Name: index_harvestingdataverseconfig_harvesttype; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_harvestingdataverseconfig_harvesttype ON harvestingdataverseconfig USING btree (harvesttype); + + +-- +-- Name: index_ingestreport_datafile_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_ingestreport_datafile_id ON ingestreport USING btree (datafile_id); + + +-- +-- Name: index_ingestrequest_datafile_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX 
index_ingestrequest_datafile_id ON ingestrequest USING btree (datafile_id); + + +-- +-- Name: index_ipv4range_owner_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_ipv4range_owner_id ON ipv4range USING btree (owner_id); + + +-- +-- Name: index_ipv6range_owner_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_ipv6range_owner_id ON ipv6range USING btree (owner_id); + + +-- +-- Name: index_maplayermetadata_dataset_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_maplayermetadata_dataset_id ON maplayermetadata USING btree (dataset_id); + + +-- +-- Name: index_metadatablock_name; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_metadatablock_name ON metadatablock USING btree (name); + + +-- +-- Name: index_metadatablock_owner_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_metadatablock_owner_id ON metadatablock USING btree (owner_id); + + +-- +-- Name: index_passwordresetdata_builtinuser_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_passwordresetdata_builtinuser_id ON passwordresetdata USING btree (builtinuser_id); + + +-- +-- Name: index_passwordresetdata_token; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_passwordresetdata_token ON passwordresetdata USING btree (token); + + +-- +-- Name: index_persistedglobalgroup_dtype; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_persistedglobalgroup_dtype ON persistedglobalgroup USING btree (dtype); + + +-- +-- Name: index_roleassignment_assigneeidentifier; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_roleassignment_assigneeidentifier ON roleassignment USING btree (assigneeidentifier); + + +-- +-- Name: 
index_roleassignment_definitionpoint_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_roleassignment_definitionpoint_id ON roleassignment USING btree (definitionpoint_id); + + +-- +-- Name: index_roleassignment_role_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_roleassignment_role_id ON roleassignment USING btree (role_id); + + +-- +-- Name: index_savedsearch_creator_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_savedsearch_creator_id ON savedsearch USING btree (creator_id); + + +-- +-- Name: index_savedsearch_definitionpoint_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_savedsearch_definitionpoint_id ON savedsearch USING btree (definitionpoint_id); + + +-- +-- Name: index_savedsearchfilterquery_savedsearch_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_savedsearchfilterquery_savedsearch_id ON savedsearchfilterquery USING btree (savedsearch_id); + + +-- +-- Name: index_summarystatistic_datavariable_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_summarystatistic_datavariable_id ON summarystatistic USING btree (datavariable_id); + + +-- +-- Name: index_template_dataverse_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_template_dataverse_id ON template USING btree (dataverse_id); + + +-- +-- Name: index_usernotification_user_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_usernotification_user_id ON usernotification USING btree (user_id); + + +-- +-- Name: index_variablecategory_datavariable_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_variablecategory_datavariable_id ON variablecategory USING btree (datavariable_id); + + +-- +-- Name: 
index_variablerange_datavariable_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_variablerange_datavariable_id ON variablerange USING btree (datavariable_id); + + +-- +-- Name: index_variablerangeitem_datavariable_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_variablerangeitem_datavariable_id ON variablerangeitem USING btree (datavariable_id); + + +-- +-- Name: index_worldmapauth_token_application_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_worldmapauth_token_application_id ON worldmapauth_token USING btree (application_id); + + +-- +-- Name: index_worldmapauth_token_datafile_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_worldmapauth_token_datafile_id ON worldmapauth_token USING btree (datafile_id); + + +-- +-- Name: index_worldmapauth_token_dataverseuser_id; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE INDEX index_worldmapauth_token_dataverseuser_id ON worldmapauth_token USING btree (dataverseuser_id); + + +-- +-- Name: token_value; Type: INDEX; Schema: public; Owner: dataverse_app; Tablespace: +-- + +CREATE UNIQUE INDEX token_value ON worldmapauth_token USING btree (token); + + +-- +-- Name: dtasetfieldcontrolledvocabularyvaluecntrolledvocabularyvaluesid; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfield_controlledvocabularyvalue + ADD CONSTRAINT dtasetfieldcontrolledvocabularyvaluecntrolledvocabularyvaluesid FOREIGN KEY (controlledvocabularyvalues_id) REFERENCES controlledvocabularyvalue(id); + + +-- +-- Name: explicitgroup_authenticateduser_containedauthenticatedusers_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY explicitgroup_authenticateduser + ADD CONSTRAINT explicitgroup_authenticateduser_containedauthenticatedusers_id FOREIGN KEY 
(containedauthenticatedusers_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_apitoken_authenticateduser_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY apitoken + ADD CONSTRAINT fk_apitoken_authenticateduser_id FOREIGN KEY (authenticateduser_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_authenticateduserlookup_authenticateduser_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY authenticateduserlookup + ADD CONSTRAINT fk_authenticateduserlookup_authenticateduser_id FOREIGN KEY (authenticateduser_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_controlledvocabalternate_controlledvocabularyvalue_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY controlledvocabalternate + ADD CONSTRAINT fk_controlledvocabalternate_controlledvocabularyvalue_id FOREIGN KEY (controlledvocabularyvalue_id) REFERENCES controlledvocabularyvalue(id); + + +-- +-- Name: fk_controlledvocabalternate_datasetfieldtype_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY controlledvocabalternate + ADD CONSTRAINT fk_controlledvocabalternate_datasetfieldtype_id FOREIGN KEY (datasetfieldtype_id) REFERENCES datasetfieldtype(id); + + +-- +-- Name: fk_controlledvocabularyvalue_datasetfieldtype_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY controlledvocabularyvalue + ADD CONSTRAINT fk_controlledvocabularyvalue_datasetfieldtype_id FOREIGN KEY (datasetfieldtype_id) REFERENCES datasetfieldtype(id); + + +-- +-- Name: fk_customquestion_guestbook_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY customquestion + ADD CONSTRAINT fk_customquestion_guestbook_id FOREIGN KEY (guestbook_id) REFERENCES guestbook(id); + + +-- +-- Name: fk_customquestionresponse_customquestion_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY 
customquestionresponse + ADD CONSTRAINT fk_customquestionresponse_customquestion_id FOREIGN KEY (customquestion_id) REFERENCES customquestion(id); + + +-- +-- Name: fk_customquestionresponse_guestbookresponse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY customquestionresponse + ADD CONSTRAINT fk_customquestionresponse_guestbookresponse_id FOREIGN KEY (guestbookresponse_id) REFERENCES guestbookresponse(id); + + +-- +-- Name: fk_customquestionvalue_customquestion_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY customquestionvalue + ADD CONSTRAINT fk_customquestionvalue_customquestion_id FOREIGN KEY (customquestion_id) REFERENCES customquestion(id); + + +-- +-- Name: fk_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datafile + ADD CONSTRAINT fk_datafile_id FOREIGN KEY (id) REFERENCES dvobject(id); + + +-- +-- Name: fk_datafilecategory_dataset_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datafilecategory + ADD CONSTRAINT fk_datafilecategory_dataset_id FOREIGN KEY (dataset_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_datafiletag_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datafiletag + ADD CONSTRAINT fk_datafiletag_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataset_guestbook_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataset + ADD CONSTRAINT fk_dataset_guestbook_id FOREIGN KEY (guestbook_id) REFERENCES guestbook(id); + + +-- +-- Name: fk_dataset_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataset + ADD CONSTRAINT fk_dataset_id FOREIGN KEY (id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataset_thumbnailfile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataset + ADD CONSTRAINT 
fk_dataset_thumbnailfile_id FOREIGN KEY (thumbnailfile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_datasetfield_controlledvocabularyvalue_datasetfield_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfield_controlledvocabularyvalue + ADD CONSTRAINT fk_datasetfield_controlledvocabularyvalue_datasetfield_id FOREIGN KEY (datasetfield_id) REFERENCES datasetfield(id); + + +-- +-- Name: fk_datasetfield_datasetfieldtype_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfield + ADD CONSTRAINT fk_datasetfield_datasetfieldtype_id FOREIGN KEY (datasetfieldtype_id) REFERENCES datasetfieldtype(id); + + +-- +-- Name: fk_datasetfield_datasetversion_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfield + ADD CONSTRAINT fk_datasetfield_datasetversion_id FOREIGN KEY (datasetversion_id) REFERENCES datasetversion(id); + + +-- +-- Name: fk_datasetfield_parentdatasetfieldcompoundvalue_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfield + ADD CONSTRAINT fk_datasetfield_parentdatasetfieldcompoundvalue_id FOREIGN KEY (parentdatasetfieldcompoundvalue_id) REFERENCES datasetfieldcompoundvalue(id); + + +-- +-- Name: fk_datasetfield_template_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfield + ADD CONSTRAINT fk_datasetfield_template_id FOREIGN KEY (template_id) REFERENCES template(id); + + +-- +-- Name: fk_datasetfieldcompoundvalue_parentdatasetfield_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfieldcompoundvalue + ADD CONSTRAINT fk_datasetfieldcompoundvalue_parentdatasetfield_id FOREIGN KEY (parentdatasetfield_id) REFERENCES datasetfield(id); + + +-- +-- Name: fk_datasetfielddefaultvalue_datasetfield_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfielddefaultvalue + ADD 
CONSTRAINT fk_datasetfielddefaultvalue_datasetfield_id FOREIGN KEY (datasetfield_id) REFERENCES datasetfieldtype(id); + + +-- +-- Name: fk_datasetfielddefaultvalue_defaultvalueset_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfielddefaultvalue + ADD CONSTRAINT fk_datasetfielddefaultvalue_defaultvalueset_id FOREIGN KEY (defaultvalueset_id) REFERENCES defaultvalueset(id); + + +-- +-- Name: fk_datasetfielddefaultvalue_parentdatasetfielddefaultvalue_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfielddefaultvalue + ADD CONSTRAINT fk_datasetfielddefaultvalue_parentdatasetfielddefaultvalue_id FOREIGN KEY (parentdatasetfielddefaultvalue_id) REFERENCES datasetfielddefaultvalue(id); + + +-- +-- Name: fk_datasetfieldtype_metadatablock_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfieldtype + ADD CONSTRAINT fk_datasetfieldtype_metadatablock_id FOREIGN KEY (metadatablock_id) REFERENCES metadatablock(id); + + +-- +-- Name: fk_datasetfieldtype_parentdatasetfieldtype_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfieldtype + ADD CONSTRAINT fk_datasetfieldtype_parentdatasetfieldtype_id FOREIGN KEY (parentdatasetfieldtype_id) REFERENCES datasetfieldtype(id); + + +-- +-- Name: fk_datasetfieldvalue_datasetfield_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetfieldvalue + ADD CONSTRAINT fk_datasetfieldvalue_datasetfield_id FOREIGN KEY (datasetfield_id) REFERENCES datasetfield(id); + + +-- +-- Name: fk_datasetlinkingdataverse_dataset_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetlinkingdataverse + ADD CONSTRAINT fk_datasetlinkingdataverse_dataset_id FOREIGN KEY (dataset_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_datasetlinkingdataverse_linkingdataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: 
dataverse_app +-- + +ALTER TABLE ONLY datasetlinkingdataverse + ADD CONSTRAINT fk_datasetlinkingdataverse_linkingdataverse_id FOREIGN KEY (linkingdataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_datasetlock_dataset_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetlock + ADD CONSTRAINT fk_datasetlock_dataset_id FOREIGN KEY (dataset_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_datasetlock_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetlock + ADD CONSTRAINT fk_datasetlock_user_id FOREIGN KEY (user_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_datasetversion_dataset_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetversion + ADD CONSTRAINT fk_datasetversion_dataset_id FOREIGN KEY (dataset_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_datasetversionuser_authenticateduser_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetversionuser + ADD CONSTRAINT fk_datasetversionuser_authenticateduser_id FOREIGN KEY (authenticateduser_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_datasetversionuser_datasetversion_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datasetversionuser + ADD CONSTRAINT fk_datasetversionuser_datasetversion_id FOREIGN KEY (datasetversion_id) REFERENCES datasetversion(id); + + +-- +-- Name: fk_datatable_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datatable + ADD CONSTRAINT fk_datatable_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_datavariable_datatable_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY datavariable + ADD CONSTRAINT fk_datavariable_datatable_id FOREIGN KEY (datatable_id) REFERENCES datatable(id); + + +-- +-- Name: fk_dataverse_defaultcontributorrole_id; Type: FK 
CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverse + ADD CONSTRAINT fk_dataverse_defaultcontributorrole_id FOREIGN KEY (defaultcontributorrole_id) REFERENCES dataverserole(id); + + +-- +-- Name: fk_dataverse_defaulttemplate_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverse + ADD CONSTRAINT fk_dataverse_defaulttemplate_id FOREIGN KEY (defaulttemplate_id) REFERENCES template(id); + + +-- +-- Name: fk_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverse + ADD CONSTRAINT fk_dataverse_id FOREIGN KEY (id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataverse_metadatablock_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverse_metadatablock + ADD CONSTRAINT fk_dataverse_metadatablock_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataverse_metadatablock_metadatablocks_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverse_metadatablock + ADD CONSTRAINT fk_dataverse_metadatablock_metadatablocks_id FOREIGN KEY (metadatablocks_id) REFERENCES metadatablock(id); + + +-- +-- Name: fk_dataversecontact_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversecontact + ADD CONSTRAINT fk_dataversecontact_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataversefacet_datasetfieldtype_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefacet + ADD CONSTRAINT fk_dataversefacet_datasetfieldtype_id FOREIGN KEY (datasetfieldtype_id) REFERENCES datasetfieldtype(id); + + +-- +-- Name: fk_dataversefacet_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefacet + ADD CONSTRAINT fk_dataversefacet_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES 
dvobject(id); + + +-- +-- Name: fk_dataversefeatureddataverse_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefeatureddataverse + ADD CONSTRAINT fk_dataversefeatureddataverse_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataversefeatureddataverse_featureddataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefeatureddataverse + ADD CONSTRAINT fk_dataversefeatureddataverse_featureddataverse_id FOREIGN KEY (featureddataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataversefieldtypeinputlevel_datasetfieldtype_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefieldtypeinputlevel + ADD CONSTRAINT fk_dataversefieldtypeinputlevel_datasetfieldtype_id FOREIGN KEY (datasetfieldtype_id) REFERENCES datasetfieldtype(id); + + +-- +-- Name: fk_dataversefieldtypeinputlevel_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversefieldtypeinputlevel + ADD CONSTRAINT fk_dataversefieldtypeinputlevel_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataverselinkingdataverse_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverselinkingdataverse + ADD CONSTRAINT fk_dataverselinkingdataverse_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataverselinkingdataverse_linkingdataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverselinkingdataverse + ADD CONSTRAINT fk_dataverselinkingdataverse_linkingdataverse_id FOREIGN KEY (linkingdataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataverserole_owner_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataverserole + ADD CONSTRAINT fk_dataverserole_owner_id FOREIGN KEY (owner_id) 
REFERENCES dvobject(id); + + +-- +-- Name: fk_dataversesubjects_controlledvocabularyvalue_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversesubjects + ADD CONSTRAINT fk_dataversesubjects_controlledvocabularyvalue_id FOREIGN KEY (controlledvocabularyvalue_id) REFERENCES controlledvocabularyvalue(id); + + +-- +-- Name: fk_dataversesubjects_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversesubjects + ADD CONSTRAINT fk_dataversesubjects_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dataversetheme_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dataversetheme + ADD CONSTRAINT fk_dataversetheme_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dvobject_creator_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dvobject + ADD CONSTRAINT fk_dvobject_creator_id FOREIGN KEY (creator_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_dvobject_owner_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dvobject + ADD CONSTRAINT fk_dvobject_owner_id FOREIGN KEY (owner_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_dvobject_releaseuser_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY dvobject + ADD CONSTRAINT fk_dvobject_releaseuser_id FOREIGN KEY (releaseuser_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_explicitgroup_authenticateduser_explicitgroup_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY explicitgroup_authenticateduser + ADD CONSTRAINT fk_explicitgroup_authenticateduser_explicitgroup_id FOREIGN KEY (explicitgroup_id) REFERENCES explicitgroup(id); + + +-- +-- Name: fk_explicitgroup_containedroleassignees_explicitgroup_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + 
+ALTER TABLE ONLY explicitgroup_containedroleassignees + ADD CONSTRAINT fk_explicitgroup_containedroleassignees_explicitgroup_id FOREIGN KEY (explicitgroup_id) REFERENCES explicitgroup(id); + + +-- +-- Name: fk_explicitgroup_explicitgroup_containedexplicitgroups_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY explicitgroup_explicitgroup + ADD CONSTRAINT fk_explicitgroup_explicitgroup_containedexplicitgroups_id FOREIGN KEY (containedexplicitgroups_id) REFERENCES explicitgroup(id); + + +-- +-- Name: fk_explicitgroup_explicitgroup_explicitgroup_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY explicitgroup_explicitgroup + ADD CONSTRAINT fk_explicitgroup_explicitgroup_explicitgroup_id FOREIGN KEY (explicitgroup_id) REFERENCES explicitgroup(id); + + +-- +-- Name: fk_explicitgroup_owner_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY explicitgroup + ADD CONSTRAINT fk_explicitgroup_owner_id FOREIGN KEY (owner_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_fileaccessrequests_authenticated_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY fileaccessrequests + ADD CONSTRAINT fk_fileaccessrequests_authenticated_user_id FOREIGN KEY (authenticated_user_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_fileaccessrequests_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY fileaccessrequests + ADD CONSTRAINT fk_fileaccessrequests_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_filemetadata_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY filemetadata + ADD CONSTRAINT fk_filemetadata_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_filemetadata_datafilecategory_filecategories_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY 
filemetadata_datafilecategory + ADD CONSTRAINT fk_filemetadata_datafilecategory_filecategories_id FOREIGN KEY (filecategories_id) REFERENCES datafilecategory(id); + + +-- +-- Name: fk_filemetadata_datafilecategory_filemetadatas_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY filemetadata_datafilecategory + ADD CONSTRAINT fk_filemetadata_datafilecategory_filemetadatas_id FOREIGN KEY (filemetadatas_id) REFERENCES filemetadata(id); + + +-- +-- Name: fk_filemetadata_datasetversion_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY filemetadata + ADD CONSTRAINT fk_filemetadata_datasetversion_id FOREIGN KEY (datasetversion_id) REFERENCES datasetversion(id); + + +-- +-- Name: fk_foreignmetadatafieldmapping_foreignmetadataformatmapping_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY foreignmetadatafieldmapping + ADD CONSTRAINT fk_foreignmetadatafieldmapping_foreignmetadataformatmapping_id FOREIGN KEY (foreignmetadataformatmapping_id) REFERENCES foreignmetadataformatmapping(id); + + +-- +-- Name: fk_foreignmetadatafieldmapping_parentfieldmapping_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY foreignmetadatafieldmapping + ADD CONSTRAINT fk_foreignmetadatafieldmapping_parentfieldmapping_id FOREIGN KEY (parentfieldmapping_id) REFERENCES foreignmetadatafieldmapping(id); + + +-- +-- Name: fk_guestbook_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY guestbook + ADD CONSTRAINT fk_guestbook_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_guestbookresponse_authenticateduser_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY guestbookresponse + ADD CONSTRAINT fk_guestbookresponse_authenticateduser_id FOREIGN KEY (authenticateduser_id) REFERENCES authenticateduser(id); + + +-- +-- Name: 
fk_guestbookresponse_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY guestbookresponse + ADD CONSTRAINT fk_guestbookresponse_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_guestbookresponse_dataset_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY guestbookresponse + ADD CONSTRAINT fk_guestbookresponse_dataset_id FOREIGN KEY (dataset_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_guestbookresponse_datasetversion_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY guestbookresponse + ADD CONSTRAINT fk_guestbookresponse_datasetversion_id FOREIGN KEY (datasetversion_id) REFERENCES datasetversion(id); + + +-- +-- Name: fk_guestbookresponse_guestbook_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY guestbookresponse + ADD CONSTRAINT fk_guestbookresponse_guestbook_id FOREIGN KEY (guestbook_id) REFERENCES guestbook(id); + + +-- +-- Name: fk_harvestingdataverseconfig_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY harvestingdataverseconfig + ADD CONSTRAINT fk_harvestingdataverseconfig_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_ingestreport_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY ingestreport + ADD CONSTRAINT fk_ingestreport_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_ingestrequest_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY ingestrequest + ADD CONSTRAINT fk_ingestrequest_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_ipv4range_owner_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY ipv4range + ADD CONSTRAINT fk_ipv4range_owner_id FOREIGN KEY (owner_id) REFERENCES 
persistedglobalgroup(id); + + +-- +-- Name: fk_ipv6range_owner_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY ipv6range + ADD CONSTRAINT fk_ipv6range_owner_id FOREIGN KEY (owner_id) REFERENCES persistedglobalgroup(id); + + +-- +-- Name: fk_maplayermetadata_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY maplayermetadata + ADD CONSTRAINT fk_maplayermetadata_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_maplayermetadata_dataset_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY maplayermetadata + ADD CONSTRAINT fk_maplayermetadata_dataset_id FOREIGN KEY (dataset_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_metadatablock_owner_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY metadatablock + ADD CONSTRAINT fk_metadatablock_owner_id FOREIGN KEY (owner_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_passwordresetdata_builtinuser_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY passwordresetdata + ADD CONSTRAINT fk_passwordresetdata_builtinuser_id FOREIGN KEY (builtinuser_id) REFERENCES builtinuser(id); + + +-- +-- Name: fk_roleassignment_definitionpoint_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY roleassignment + ADD CONSTRAINT fk_roleassignment_definitionpoint_id FOREIGN KEY (definitionpoint_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_roleassignment_role_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY roleassignment + ADD CONSTRAINT fk_roleassignment_role_id FOREIGN KEY (role_id) REFERENCES dataverserole(id); + + +-- +-- Name: fk_savedsearch_creator_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY savedsearch + ADD CONSTRAINT fk_savedsearch_creator_id FOREIGN KEY (creator_id) REFERENCES authenticateduser(id); + + 
+-- +-- Name: fk_savedsearch_definitionpoint_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY savedsearch + ADD CONSTRAINT fk_savedsearch_definitionpoint_id FOREIGN KEY (definitionpoint_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_savedsearchfilterquery_savedsearch_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY savedsearchfilterquery + ADD CONSTRAINT fk_savedsearchfilterquery_savedsearch_id FOREIGN KEY (savedsearch_id) REFERENCES savedsearch(id); + + +-- +-- Name: fk_summarystatistic_datavariable_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY summarystatistic + ADD CONSTRAINT fk_summarystatistic_datavariable_id FOREIGN KEY (datavariable_id) REFERENCES datavariable(id); + + +-- +-- Name: fk_template_dataverse_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY template + ADD CONSTRAINT fk_template_dataverse_id FOREIGN KEY (dataverse_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_usernotification_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY usernotification + ADD CONSTRAINT fk_usernotification_user_id FOREIGN KEY (user_id) REFERENCES authenticateduser(id); + + +-- +-- Name: fk_variablecategory_datavariable_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY variablecategory + ADD CONSTRAINT fk_variablecategory_datavariable_id FOREIGN KEY (datavariable_id) REFERENCES datavariable(id); + + +-- +-- Name: fk_variablerange_datavariable_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY variablerange + ADD CONSTRAINT fk_variablerange_datavariable_id FOREIGN KEY (datavariable_id) REFERENCES datavariable(id); + + +-- +-- Name: fk_variablerangeitem_datavariable_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY variablerangeitem + ADD CONSTRAINT 
fk_variablerangeitem_datavariable_id FOREIGN KEY (datavariable_id) REFERENCES datavariable(id); + + +-- +-- Name: fk_worldmapauth_token_application_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY worldmapauth_token + ADD CONSTRAINT fk_worldmapauth_token_application_id FOREIGN KEY (application_id) REFERENCES worldmapauth_tokentype(id); + + +-- +-- Name: fk_worldmapauth_token_datafile_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY worldmapauth_token + ADD CONSTRAINT fk_worldmapauth_token_datafile_id FOREIGN KEY (datafile_id) REFERENCES dvobject(id); + + +-- +-- Name: fk_worldmapauth_token_dataverseuser_id; Type: FK CONSTRAINT; Schema: public; Owner: dataverse_app +-- + +ALTER TABLE ONLY worldmapauth_token + ADD CONSTRAINT fk_worldmapauth_token_dataverseuser_id FOREIGN KEY (dataverseuser_id) REFERENCES authenticateduser(id); + + +-- +-- Name: public; Type: ACL; Schema: -; Owner: michael +-- + +REVOKE ALL ON SCHEMA public FROM PUBLIC; +REVOKE ALL ON SCHEMA public FROM michael; +GRANT ALL ON SCHEMA public TO michael; +GRANT ALL ON SCHEMA public TO dataverse_app; + + +-- +-- PostgreSQL database dump complete +-- + diff --git a/postgresql/testdata/scripts/issues/2102/setup.sh b/postgresql/testdata/scripts/issues/2102/setup.sh new file mode 100644 index 0000000..32b7285 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2102/setup.sh @@ -0,0 +1,13 @@ +ENDPOINT=https://localhost:8181 +APIKEY=a65048f8-875c-4479-a91d-33cb8cd12821 +DATASET=3 + +echo Calling: +echo curl --insecure $ENDPOINT/api/datasets/$DATASET/versions/:latest?key=$APIKEY +echo +echo curl --insecure -X PUT -H "Content-Type:application/json" -d@dataset-metadata-next.json $ENDPOINT/api/datasets/$DATASET/versions/:draft?key=$APIKEY +echo + + +# get data: +# curl --insecure $ENDPOINT/api/datasets/$DATASET/versions/:latest?key=$APIKEY diff --git a/postgresql/testdata/scripts/issues/2132/find-multiple-drafts.sql 
b/postgresql/testdata/scripts/issues/2132/find-multiple-drafts.sql new file mode 100644 index 0000000..5af324c --- /dev/null +++ b/postgresql/testdata/scripts/issues/2132/find-multiple-drafts.sql @@ -0,0 +1 @@ +select dataset_id, count(*) from datasetversion where versionstate='DRAFT' group by dataset_id having count(*) >1; diff --git a/postgresql/testdata/scripts/issues/2132/one-draft-version-per-dataset-constraint.sql b/postgresql/testdata/scripts/issues/2132/one-draft-version-per-dataset-constraint.sql new file mode 100644 index 0000000..d945934 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2132/one-draft-version-per-dataset-constraint.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX one_draft_version_per_dataset ON datasetversion (dataset_id) WHERE versionstate='DRAFT'; diff --git a/postgresql/testdata/scripts/issues/2438/download.R b/postgresql/testdata/scripts/issues/2438/download.R new file mode 100644 index 0000000..eea7f18 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2438/download.R @@ -0,0 +1,26 @@ +arg <- commandArgs(trailingOnly = TRUE) + +download.dataverse.file <- function(url) { + if (length(url) == 0L) { + return( + "Please provide a URL to a file: http://guides.dataverse.org/en/latest/api/dataaccess.html" + ) + } + # Examples of URLs for tsv, original, RData, JSON, DDI/XML: + # https://groups.google.com/d/msg/dataverse-community/fFrJi7NnBus/LNpfXItbtZYJ + # + # This script assumes the tsv URL is used. File id 91 is just an example. You must
As of this writing the easiest way is via SWORD: + # https://github.com/IQSS/dataverse/issues/1837#issuecomment-121736332 + # + # url.to.download = 'https://demo.dataverse.org/api/v1/access/datafile/91' + url.to.download = url + tsvfile = 'file.tsv' + download.file(url = url.to.download, destfile = + tsvfile, method = 'curl') + mydata <- read.table(tsvfile, header = TRUE, sep = "\t") + print(mydata) + unlink(tsvfile) +} + +download.dataverse.file(arg) diff --git a/postgresql/testdata/scripts/issues/2454/anAuthUser.json b/postgresql/testdata/scripts/issues/2454/anAuthUser.json new file mode 100644 index 0000000..621b7fe --- /dev/null +++ b/postgresql/testdata/scripts/issues/2454/anAuthUser.json @@ -0,0 +1,9 @@ +{ + "firstName":"Anau", + "lastName":"Thuser", + "userName":"anAuthUser", + "affiliation":"current Dataverse", + "position":"above and beyond", + "email":"anAuthUser@malinator.com", + "phone":"(888) 888-8888" +} diff --git a/postgresql/testdata/scripts/issues/2454/anotherAuthUser.json b/postgresql/testdata/scripts/issues/2454/anotherAuthUser.json new file mode 100644 index 0000000..94db83b --- /dev/null +++ b/postgresql/testdata/scripts/issues/2454/anotherAuthUser.json @@ -0,0 +1,9 @@ +{ + "firstName":"Another", + "lastName":"Authuser", + "userName":"anotherAuthUser", + "affiliation":"current Dataverse", + "position":"above and beyond", + "email":"anotherAuthUser@malinator.com", + "phone":"(888) 888-8888" +} diff --git a/postgresql/testdata/scripts/issues/2454/assignment.json b/postgresql/testdata/scripts/issues/2454/assignment.json new file mode 100644 index 0000000..69307d0 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2454/assignment.json @@ -0,0 +1 @@ +{"assignee":":authenticated-users", "role":"curator"} diff --git a/postgresql/testdata/scripts/issues/2454/dataverse.json b/postgresql/testdata/scripts/issues/2454/dataverse.json new file mode 100644 index 0000000..0d2047b --- /dev/null +++ b/postgresql/testdata/scripts/issues/2454/dataverse.json 
@@ -0,0 +1,13 @@ +{ + "alias": "permissionsTestDv", + "name": "PermissionsTest", + "affiliation": "Affiliation value", + "permissionRoot": true, + "description": "A Dataverse where we test permissions", + "dataverseContacts": [ + { + "contactEmail": "test.script@mailinator.com" + } + ], + "dataverseSubjects": ["Arts and Humanities"] +} diff --git a/postgresql/testdata/scripts/issues/2454/group.json b/postgresql/testdata/scripts/issues/2454/group.json new file mode 100644 index 0000000..0457565 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2454/group.json @@ -0,0 +1,5 @@ +{ + "displayName":"Permission test group", + "description":"Group for testing permissions", + "aliasInOwner":"PTG" +} diff --git a/postgresql/testdata/scripts/issues/2454/rollback.sh b/postgresql/testdata/scripts/issues/2454/rollback.sh new file mode 100755 index 0000000..9a1f2fa --- /dev/null +++ b/postgresql/testdata/scripts/issues/2454/rollback.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +ENDPOINT=http://localhost:8080/api +DB="psql dvndb -At -c " +ROOT_USER=dataverseAdmin +ROOT_KEY=$($DB "select tokenstring \ + from authenticateduser au inner join apitoken apt \ + on au.id=apt.authenticateduser_id \ + where useridentifier='$ROOT_USER'") + +echo $ROOT_USER api key is $ROOT_KEY + + +# delete DV +curl -X DELETE $ENDPOINT/dataverses/permissionsTestDv?key=$ROOT_KEY +echo +echo dataverses deleted +echo + +# delete user +for USER_NICK in anAuthUser anotherAuthUser +do + echo deleting user $USER_NICK + QUERY="select id from authenticateduser where useridentifier='$USER_NICK'" + AUTH_USER_ID=$($DB "$QUERY") + echo Auth user id is $AUTH_USER_ID + $DB "delete from apitoken where authenticateduser_id=$AUTH_USER_ID" + $DB "delete from authenticateduserlookup where authenticateduser_id=$AUTH_USER_ID" + $DB "delete from authenticateduser where id=$AUTH_USER_ID" + $DB "delete from builtinuser where id=$AUTH_USER_ID" +done diff --git a/postgresql/testdata/scripts/issues/2454/run-test.sh 
b/postgresql/testdata/scripts/issues/2454/run-test.sh new file mode 100755 index 0000000..49eb45a --- /dev/null +++ b/postgresql/testdata/scripts/issues/2454/run-test.sh @@ -0,0 +1,108 @@ +#! /bin/bash + +# This script is an automated test to validate that issue https://github.com/IQSS/dataverse/issues/2454 +# has been properly implemented. +# The issue requires that we "Distinguish between "read" and "write" permissions, make the "write" ones apply only to AuthenticatedUsers" +# To test this, we do the following: +# 1. Create a dataverse D in root +# 2. Create a new explicit group G in D, containing :guest and @anAuthUser +# 3. Assign the Admin role to G +# 4. Validation: +# 4.1 `:guest` can view unpublished dataverse, can't manage permissions there +# 4.2 `@anAuthUser` can do both +# 4.3 `@anotherAuthUSer` can do none + +# +# /!\ This script requires jq, psql and curl. +# /!\ You can set turn off the state setup by setting SETUP_NEEDED to anything that's not "yes" +# + +##### +# Config - edit this to match your system. 
+ENDPOINT=http://localhost:8080/api +DB="psql dvndb -At -c " +ROOT_USER=dataverseAdmin +SETUP_NEEDED=yes + + +##### +# Setup = if needed +# +if [ $SETUP_NEEDED == "yes" ]; then + echo SETTING UP + + ROOT_KEY=$($DB "select tokenstring \ + from authenticateduser au inner join apitoken apt \ + on au.id=apt.authenticateduser_id \ + where useridentifier='$ROOT_USER'") + + echo $ROOT_USER api key is $ROOT_KEY + + # Create @anAuthUser + USER_CREATION_KEY=$($DB "SELECT content FROM setting WHERE name='BuiltinUsers.KEY'") + AN_AUTH_USER_KEY=$( curl -s -X POST -d@anAuthUser.json -H"Content-type:application/json" $ENDPOINT/builtin-users?password=XXX\&key=$USER_CREATION_KEY | jq .data.apiToken | tr -d \") + ANOTHER_AUTH_USER_KEY=$( curl -s -X POST -d@anotherAuthUser.json -H"Content-type:application/json" $ENDPOINT/builtin-users?password=XXX\&key=$USER_CREATION_KEY | jq .data.apiToken | tr -d \") + echo + echo user @anAuthUser created with key $AN_AUTH_USER_KEY + + # Create the test dataverses. + curl -s -X POST -d@dataverse.json -H "Content-type:application/json" $ENDPOINT/dataverses/:root/?key=$ROOT_KEY + echo + echo Dataverse created + + # Create the group and add the users + GROUP_ID=$( curl -s -X POST -d@group.json -H "Content-type:application/json" $ENDPOINT/dataverses/permissionsTestDv/groups/?key=$ROOT_KEY | jq .data.identifier | tr -d \" ) + echo Group created with id $GROUP_ID + curl -s -X POST -d'[":guest","@anAuthUser"]' -H "Content-type:application/json" $ENDPOINT/dataverses/permissionsTestDv/groups/PTG/roleAssignees?key=$ROOT_KEY + echo + echo added users to group + + # Assign the "Admin" role to the group + ASSIGNMENT="{\"assignee\":\"$GROUP_ID\", \"role\":\"admin\"}" + curl -s -X POST -d"$ASSIGNMENT" -H "Content-type:application/json" $ENDPOINT/dataverses/permissionsTestDv/assignments/?key=$ROOT_KEY + + echo + echo SETUP DONE + echo + +else + echo Skipping setup + AN_AUTH_USER_KEY=$($DB "select tokenstring \ + from authenticateduser au inner join apitoken apt \ 
+ on au.id=apt.authenticateduser_id \ + where useridentifier='anAuthUser'") + ANOTHER_AUTH_USER_KEY=$($DB "select tokenstring \ + from authenticateduser au inner join apitoken apt \ + on au.id=apt.authenticateduser_id \ + where useridentifier='anotherAuthUser'") + echo + echo Keys + echo @anAuthUser $AN_AUTH_USER_KEY + echo @anotherAuthUser $ANOTHER_AUTH_USER_KEY +fi + +# Test permissions +echo :guest viewing inner dv ... expecting 200 OK +curl -si $ENDPOINT/dataverses/permissionsTestDv | head -n 1 +echo + +echo @anAuthUser viewing inner dv ... expecting 200 OK +curl -si $ENDPOINT/dataverses/permissionsTestDv?key=$AN_AUTH_USER_KEY | head -n 1 +echo + +echo @anotherAuthUser viewing inner dv ... expecting 401 Unauthorized +curl -si $ENDPOINT/dataverses/permissionsTestDv?key=$ANOTHER_AUTH_USER_KEY | head -n 1 +echo +# Assign the "Admin" role to the group + +echo :guest setting permissions ... Expecting 401 Unauthorized +curl -si -X POST -d@assignment.json -H "Content-type:application/json" $ENDPOINT/dataverses/permissionsTestDv/assignments/ | head -n 1 +echo + +echo @anotherAuthUser setting permissions ... Expecting 401 Unauthorized +curl -si -X POST -d@assignment.json -H "Content-type:application/json" $ENDPOINT/dataverses/permissionsTestDv/assignments/?key=$ANOTHER_AUTH_USER_KEY | head -n 1 +echo + +echo @anAuthUser setting permissions ... 
Expecting 200 OK +curl -si -X POST -d@assignment.json -H "Content-type:application/json" $ENDPOINT/dataverses/permissionsTestDv/assignments/?key=$AN_AUTH_USER_KEY | head -n 1 +echo diff --git a/postgresql/testdata/scripts/issues/2595/monitor.py b/postgresql/testdata/scripts/issues/2595/monitor.py new file mode 100755 index 0000000..3e92dec --- /dev/null +++ b/postgresql/testdata/scripts/issues/2595/monitor.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python +import urllib2 +import json +import datetime +import csv +import os +from shutil import move +base_url = 'http://localhost:4848/monitoring/domain/server/resources/dvnDbPool' +request = urllib2.Request(base_url, headers = { 'Accept' : 'application/json'}) +json1 = urllib2.urlopen(request).read() +data1 = json.loads(json1) +#print json.dumps(data1, indent=2) +war_file = data1['extraProperties']['childResources'].keys()[0] +request = urllib2.Request(base_url + '/' + war_file, headers = { 'Accept' : 'application/json'}) +json2 = urllib2.urlopen(request).read() +data2 = json.loads(json2) +#print json.dumps(data2, indent=2) + +def highwater(data, metric): + columns = ['lastsampletime', 'current', 'highwatermark'] + obj = data['extraProperties']['entity'][metric] + time_readable = epoch2readable (obj, columns[0]) + current = obj[columns[1]] + highwater = obj[columns[2]] + filename = metric + '.tsv' + values = [[time_readable, current, highwater]]; + write_file(metric, columns, values) + +def count(data, metric): + columns = ['lastsampletime', 'count'] + obj = data['extraProperties']['entity'][metric] + time_readable = epoch2readable (obj, columns[0]) + count = obj['count'] + values = [[time_readable, count]]; + write_file(metric, columns, values) + +def epoch2readable(obj, key): + time_epochsec = obj[key] / 1000.0 + time_readable = datetime.datetime.fromtimestamp(time_epochsec).strftime('%Y-%m-%d %H:%M:%S.%f') + return time_readable + +def write_file(metric, columns, values): + filename = metric + '.tsv' + if not 
os.path.isfile(filename): + write_header(columns, filename) + write_values(values, filename) + uniq(filename) + +def write_header(headers, filename): + with open(filename, 'a') as fp: + a = csv.writer(fp, delimiter='\t'); + a.writerows([headers]); + +def write_values(values, filename): + with open(filename, 'a') as fp: + a = csv.writer(fp, delimiter='\t'); + a.writerows(values); + +def uniq(filename): + tmpfile = filename + '.tmp' + lines_seen = set() # holds lines already seen + outfile = open(tmpfile, 'w') + for line in open(filename, 'r'): + if line not in lines_seen: # not a duplicate + outfile.write(line) + lines_seen.add(line) + outfile.close() + move(tmpfile, filename) + +highwater(data1, 'numconnused') +highwater(data1, 'connrequestwaittime') +count(data1, 'numconnacquired') +count(data1, 'numconnreleased') diff --git a/postgresql/testdata/scripts/issues/2595/numconnacquired.tsv b/postgresql/testdata/scripts/issues/2595/numconnacquired.tsv new file mode 100644 index 0000000..97a0640 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2595/numconnacquired.tsv @@ -0,0 +1,3 @@ +lastsampletime count +2015-10-14 09:34:10.553000 81572 +2015-10-14 09:49:10.695000 82053 diff --git a/postgresql/testdata/scripts/issues/2595/plot.py b/postgresql/testdata/scripts/issues/2595/plot.py new file mode 100755 index 0000000..640dba8 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2595/plot.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python +import sys +import numpy as np +import matplotlib.pyplot as plt +import matplotlib.dates as mdates +days, impressions = np.loadtxt("numconnacquired.tsv", delimiter='\t', skiprows=1, unpack=True, + converters={ 0: mdates.strpdate2num('%Y-%m-%d %H:%M:%S.%f')}) +plt.plot_date(x=days, y=impressions, fmt="r-") +plt.title("Number of logical connections acquired from the pool") +plt.ylabel("numconnacquired") +plt.grid(True) +plt.gcf().autofmt_xdate() +plt.savefig('out.png') diff --git 
a/postgresql/testdata/scripts/issues/2598/detect-duplicate-dataverse-aliases.sql b/postgresql/testdata/scripts/issues/2598/detect-duplicate-dataverse-aliases.sql new file mode 100644 index 0000000..58eec81 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2598/detect-duplicate-dataverse-aliases.sql @@ -0,0 +1 @@ +select alias from dataverse where lower(alias) in (select lower(alias) from dataverse group by lower(alias) having count(*) >1); diff --git a/postgresql/testdata/scripts/issues/2598/insert-duplicate-alias.sql b/postgresql/testdata/scripts/issues/2598/insert-duplicate-alias.sql new file mode 100644 index 0000000..559692c --- /dev/null +++ b/postgresql/testdata/scripts/issues/2598/insert-duplicate-alias.sql @@ -0,0 +1,10 @@ +-- This script should fail to insert a duplicate datavers alias (different case) +-- after a constraint has been added in https://github.com/IQSS/dataverse/issues/2598 +DELETE FROM dataverse where id = 100; +DELETE FROM dataverse where id = 101; +DELETE FROM dvobject where id = 100; +DELETE FROM dvobject where id = 101; +INSERT INTO dvobject (id, createdate, modificationtime) VALUES (100, NOW(), NOW()); +INSERT INTO dataverse (id, alias, name, dataversetype, defaultcontributorrole_id) VALUES (100, 'foo', 'foo is mine', 'UNCATEGORIZED', 1); +INSERT INTO dvobject (id, createdate, modificationtime) VALUES (101, NOW(), NOW()); +INSERT INTO dataverse (id, alias, name, dataversetype, defaultcontributorrole_id) VALUES (101, 'FOO', 'uppercase foo', 'UNCATEGORIZED', 1); diff --git a/postgresql/testdata/scripts/issues/2648/reproduce b/postgresql/testdata/scripts/issues/2648/reproduce new file mode 100755 index 0000000..9ff8708 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2648/reproduce @@ -0,0 +1,59 @@ +#!/bin/sh +# Test scenario: User "spruce" has marked "trees.png" as restricted* +# and has given "finch" the File Downloader role on "trees.png" at the file level +# but has not yet published the dataset. 
+# +# * Marking files as restricted can only be done through the GUI: https://github.com/IQSS/dataverse/issues/2497 +# +# Here is where "trees.png" lives: +# * Root Dataverse (entityId:1) +# * Trees Dataverse (entityId:7) +# * Spruce Dataverse (entityId:8) +# * Spruce Goose (entityId:10) +# * trees.png (entityId:12) +# +# Unknowns: +# - What is the behavior if you give the File Downloader role at the dataset level? +# - What is the behavior if you give the File Downloader role at the dataverse level? + +SPRUCE_STORY="spruce uploaded the file in the first place and should be able to download it." +SPARROW_STORY="sparrow has no special access and should not be able to download the file because a) it isn't published and b) it's restricted" +FINCH_STORY="finch has the DownloadFile permission but should not be able to download the file because the dataset is unpublished" + +FORBIDDEN=403 +. scripts/search/export-keys + +API_TOKEN=$SPRUCEKEY +echo "sparrow is attempting to download the file (should be forbidden)" +# Can't use header: https://github.com/IQSS/dataverse/issues/2662 +# curl -H "X-Dataverse-key:$API_TOKEN" http://localhost:8080/api/access/datafile/12 +SPRUCE_OUT=$(curl --write-out %{http_code} --silent --output /dev/null http://localhost:8080/api/access/datafile/12?key=$API_TOKEN ) +if [ $SPRUCE_OUT -ne $FORBIDDEN ]; then + echo "Good. $SPRUCE_STORY" +else + echo "Bug. $SPRUCE_STORY" +fi + +echo "---" + +# Yes, all this could be refactored to make it DRY. +API_TOKEN=$SPARROWKEY +echo "sparrow is attempting to download the file (should be forbidden)" +SPARROW_OUT=$(curl --write-out %{http_code} --silent --output /dev/null http://localhost:8080/api/access/datafile/12?key=$API_TOKEN ) +if [ $SPARROW_OUT -eq $FORBIDDEN ]; then + echo "Good. $SPARROW_STORY" +else + echo "Bug. 
$SPARROW_STORY" +fi + +echo "---" + +API_TOKEN=$FINCHKEY +echo "finch is attempting to download the file (should be forbidden)" +FINCH_OUT=$(curl --write-out %{http_code} --silent --output /dev/null http://localhost:8080/api/access/datafile/12?key=$API_TOKEN ) +#curl -s -i http://localhost:8080/api/access/datafile/12?key=$API_TOKEN | head | grep ^Content-Type +if [ $FINCH_OUT -eq $FORBIDDEN ]; then + echo "Good. $FINCH_STORY" +else + echo "Bug. $FINCH_STORY" +fi diff --git a/postgresql/testdata/scripts/issues/2649/reproduce b/postgresql/testdata/scripts/issues/2649/reproduce new file mode 100755 index 0000000..25985fd --- /dev/null +++ b/postgresql/testdata/scripts/issues/2649/reproduce @@ -0,0 +1,6 @@ +#!/bin/sh +# "File Downloader" role has already been assigned to "finch" to one of: +# - trees.png file +# - Spruce Goose dataset +# - Spruce dataverse +curl -s 'http://localhost:8080/api/mydata/retrieve?selected_page=1&dvobject_types=DataFile&published_states=Published&published_states=Unpublished&published_states=Draft&published_states=In+Review&published_states=Deaccessioned&role_ids=1&role_ids=2&role_ids=6&mydata_search_term=&userIdentifier=finch' | jq . diff --git a/postgresql/testdata/scripts/issues/2681/create-files b/postgresql/testdata/scripts/issues/2681/create-files new file mode 100755 index 0000000..7d5eda3 --- /dev/null +++ b/postgresql/testdata/scripts/issues/2681/create-files @@ -0,0 +1,18 @@ +#!/bin/sh +NUM_FILES=10 +if [ ! 
-z "$1" ]; then + NUM_FILES=$1 +fi +TMP="/tmp" +DIR_NAME="$TMP/${NUM_FILES}files" +TMP_DIR="$DIR_NAME" +rm -rf $TMP_DIR +mkdir $TMP_DIR +cd $TMP_DIR +for i in `seq -f "%04g" $NUM_FILES`; do + echo $i > $i.txt +done +cd $TMP +ZIP=${DIR_NAME}.zip +ls $DIR_NAME/* +zip $ZIP $DIR_NAME/* diff --git a/postgresql/testdata/scripts/issues/3354/createDatasetWithSha1Files.sh b/postgresql/testdata/scripts/issues/3354/createDatasetWithSha1Files.sh new file mode 100755 index 0000000..1792a9e --- /dev/null +++ b/postgresql/testdata/scripts/issues/3354/createDatasetWithSha1Files.sh @@ -0,0 +1,5 @@ +#!/bin/sh +# existing, works, no files, commenting out +#curl -s -X POST -H "Content-type:application/json" -d @scripts/search/tests/data/dataset-finch1.json "http://localhost:8080/api/dataverses/root/datasets/?key=$API_TOKEN" +# new, has files +curl -s -X POST -H "Content-type:application/json" -d @scripts/issues/3354/datasetWithSha1Files.json "http://localhost:8080/api/dataverses/root/datasets/?key=$API_TOKEN" diff --git a/postgresql/testdata/scripts/issues/3354/datasetWithSha1Files.json b/postgresql/testdata/scripts/issues/3354/datasetWithSha1Files.json new file mode 100644 index 0000000..95a4d3b --- /dev/null +++ b/postgresql/testdata/scripts/issues/3354/datasetWithSha1Files.json @@ -0,0 +1,86 @@ +{ + "datasetVersion": { + "files": [ + { + "label": "foo.txt", + "dataFile": { + "filename": "foo.txt", + "contentType": "text/plain", + "storageIdentifier": "157484f9d6c-c36006fa39e5", + "originalFormatLabel": "UNKNOWN", + "checksum": { + "type": "SHA-1", + "value": "f1d2d2f924e986ac86fdf7b36c94bcdf32beec15" + } + } + } + ], + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "Dataset with SHA-1 files", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorName": { + "value": "Finch, Fiona", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + }, + "authorAffiliation": { + "value": "Birds Inc.", + 
"typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": [ + { "datasetContactEmail" : { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactEmail", + "value" : "finch@mailinator.com" + } + }], + "typeClass": "compound", + "multiple": true, + "typeName": "datasetContact" + }, + { + "value": [ { + "dsDescriptionValue":{ + "value": "Some people prefer SHA-1 to MD5 for file fixity.", + "multiple":false, + "typeClass": "primitive", + "typeName": "dsDescriptionValue" + }}], + "typeClass": "compound", + "multiple": true, + "typeName": "dsDescription" + }, + { + "value": [ + "Other" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + } + ], + "displayName": "Citation Metadata" + } + } + } +} diff --git a/postgresql/testdata/scripts/issues/3354/mydata b/postgresql/testdata/scripts/issues/3354/mydata new file mode 100755 index 0000000..eb76d06 --- /dev/null +++ b/postgresql/testdata/scripts/issues/3354/mydata @@ -0,0 +1,3 @@ +#!/bin/sh +# FIXME: Make this into a REST Assured test. +curl -s "http://localhost:8080/api/mydata/retrieve?key=$API_TOKEN&role_ids=1&dvobject_types=DataFile&published_states=Published&published_states=Unpublished&published_states=Draft&published_states=In+Review&published_states=Deaccessioned" | jq .data.items diff --git a/postgresql/testdata/scripts/issues/3543/dv-peteDelete1.json b/postgresql/testdata/scripts/issues/3543/dv-peteDelete1.json new file mode 100644 index 0000000..60b6aed --- /dev/null +++ b/postgresql/testdata/scripts/issues/3543/dv-peteDelete1.json @@ -0,0 +1,8 @@ + { + "alias":"peteDelete1", + "name":"A dataverse for testing", + "affiliation":"Affiliation value", + "contactEmail":"pete@mailinator.com", + "permissionRoot":false, + "description":"A dataverse that's added for testing purposes." 
+} diff --git a/postgresql/testdata/scripts/issues/3543/dv-peteDelete2.json b/postgresql/testdata/scripts/issues/3543/dv-peteDelete2.json new file mode 100644 index 0000000..6703297 --- /dev/null +++ b/postgresql/testdata/scripts/issues/3543/dv-peteDelete2.json @@ -0,0 +1,8 @@ + { + "alias":"peteDelete2", + "name":"A dataverse for testing", + "affiliation":"Affiliation value", + "contactEmail":"pete@mailinator.com", + "permissionRoot":false, + "description":"A dataverse that's added for testing purposes." +} diff --git a/postgresql/testdata/scripts/issues/3543/dv-peteDelete3.json b/postgresql/testdata/scripts/issues/3543/dv-peteDelete3.json new file mode 100644 index 0000000..66d1126 --- /dev/null +++ b/postgresql/testdata/scripts/issues/3543/dv-peteDelete3.json @@ -0,0 +1,8 @@ + { + "alias":"peteDelete3", + "name":"A dataverse for testing", + "affiliation":"Affiliation value", + "contactEmail":"pete@mailinator.com", + "permissionRoot":false, + "description":"A dataverse that's added for testing purposes." +} diff --git a/postgresql/testdata/scripts/issues/3543/dv-peteDeleteTop.json b/postgresql/testdata/scripts/issues/3543/dv-peteDeleteTop.json new file mode 100644 index 0000000..9a9d184 --- /dev/null +++ b/postgresql/testdata/scripts/issues/3543/dv-peteDeleteTop.json @@ -0,0 +1,8 @@ + { + "alias":"peteDeleteTop", + "name":"A dataverse for testing", + "affiliation":"Affiliation value", + "contactEmail":"pete@mailinator.com", + "permissionRoot":false, + "description":"A dataverse that's added for testing purposes." 
+} diff --git a/postgresql/testdata/scripts/issues/3543/setup.sh b/postgresql/testdata/scripts/issues/3543/setup.sh new file mode 100755 index 0000000..6f3483b --- /dev/null +++ b/postgresql/testdata/scripts/issues/3543/setup.sh @@ -0,0 +1,7 @@ +#!/bin/bash +echo Setting up dataverses for deletion, as described in https://redmine.hmdc.harvard.edu/issues/3543 + +curl -H"Content-type:application/json" -d @dv-peteDeleteTop.json http://localhost:8080/api/dataverses/peteTop?key=pete +curl -H"Content-type:application/json" -d @dv-peteDelete1.json http://localhost:8080/api/dataverses/peteDeleteTop?key=pete +curl -H"Content-type:application/json" -d @dv-peteDelete2.json http://localhost:8080/api/dataverses/peteDeleteTop?key=pete +curl -H"Content-type:application/json" -d @dv-peteDelete3.json http://localhost:8080/api/dataverses/peteDeleteTop?key=pete diff --git a/postgresql/testdata/scripts/issues/3543/test.sh b/postgresql/testdata/scripts/issues/3543/test.sh new file mode 100755 index 0000000..14061c7 --- /dev/null +++ b/postgresql/testdata/scripts/issues/3543/test.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +curl -X DELETE http://localhost:8080/api/dataverses/peteDelete1?key=pete +curl -X DELETE http://localhost:8080/api/dataverses/peteDelete2?key=pete +curl -X DELETE http://localhost:8080/api/dataverses/peteDelete3?key=pete +curl -X DELETE http://localhost:8080/api/dataverses/peteDeleteTop?key=pete diff --git a/postgresql/testdata/scripts/issues/3544/delete.sh b/postgresql/testdata/scripts/issues/3544/delete.sh new file mode 100755 index 0000000..658d11d --- /dev/null +++ b/postgresql/testdata/scripts/issues/3544/delete.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +# deleting an unreleased dataset, with a bunch of unreleased files. +# seems to be working like a charm - ? 
+# -- Leonid + +curl -X DELETE http://localhost:8080/api/datasets/43?key=pete + diff --git a/postgresql/testdata/scripts/issues/796/builtin2shib b/postgresql/testdata/scripts/issues/796/builtin2shib new file mode 100755 index 0000000..0cb6d3e --- /dev/null +++ b/postgresql/testdata/scripts/issues/796/builtin2shib @@ -0,0 +1,10 @@ +#!/bin/sh +echo args: $@ +if [ -z "$1" ]; then + echo "call with foo:bar:baz" +else + OUTPUT=`curl -s -X PUT -d "$@" http://localhost:8080/api/test/user/convert/builtin2shib` + echo $OUTPUT + echo + echo $OUTPUT | jq . +fi diff --git a/postgresql/testdata/scripts/issues/907/batchImportDv/version1.xml b/postgresql/testdata/scripts/issues/907/batchImportDv/version1.xml new file mode 100644 index 0000000..2965bfb --- /dev/null +++ b/postgresql/testdata/scripts/issues/907/batchImportDv/version1.xml @@ -0,0 +1,193 @@ + + + + + Black Professional Women, 1969 + hdl:1902.1/00012 + + + IQSS Test Dataverse Network + 2014-02-12 + + + 3 + Dwayne + + + 2 + Dwayne + Replaced by version 3 + 2014-02-13 + + + 1 + sonia, IQSSdvnAdmin + Initial version + Replaced by version 2 + 2013-09-20 + + Cynthia Fuchs Epstein, 1983, "Black Professional Women, 1969 ", http://hdl.handle.net/1902.1/00012 Murray Research Archive [Distributor] V1 [Version] + + + + + + + Black Professional Women, 1969 + hdl:1902.1/00012 + 00012 + + + Cynthia Fuchs Epstein + + + Cynthia Fuchs Epstein + + + Murray Research Archive + + + + 1983 + 1983 + + + 1 + + + + + Achievement + African American + Women + Professional + mra murraydiversity + 50 or fewer + female + mixed + African American + middle + 1 + yes + Women and education + Work + Women + + The purpose of this study was to explore the special conditions which enable African American professional women to create a self-image and achievement value system, the problems attendant to traditional female roles, and the reinforcing components of the work situation. 
The study was also conducted in order to test the findings of a similar study the researcher did with White women lawyers, also archived at the Murray Center (see Related Studies below).<br /> + <br /> In 1969, the researcher interviewed 35 African American women in the following professions: law, medicine, dentistry, university teaching, journalism, business, and social service administration. Nursing, social work, and teaching at other than the university level were excluded.<br /> + <br /> A structured, open-ended interview was employed. The questions included nature of work, clients, disadvantages of being a woman, disadvantages of being African American, relationships at work, professional associations, community organizations, education, demographics, family life, income, and dual-career conflict.<br /> + <br /> The Murray Archive holds additional analogue materials for this study: original record paper data for 35 women, including interviews and other descriptive materials. The Murray Archive also holds audiotaped interviews. If you would like to access these materials, please apply to use the data. + + 1969 + 1969 + United States + individuals + field study + + + + + quota sample + interview + + + + + Henry A. Murray Research Archive, Institute for Quantitative Social Sciences, Harvard University + Available + + + Submission of the following <a href= "http://www.murray.harvard.edu/application" target="_blank">Application For The Use Of Data</a> is required to access the data from this study. + I will use these data solely for the purposes stated in my application to use data, detailed in a written research proposal. I will honor all agreements and conditions made between the Contributor of the Data and the study participants, and between the Contributor of the Data and the Henry A. Murray Research Archive, Harvard University, as specified in the Memorandum of Agreement. + <a href="mailto:mra@help.hmdc.harvard.edu">Manager of Operations</a>, the Henry A. 
Murray Research Archive, Institute for Quantitative Social Sciences, 1737 Cambridge St, Cambridge, MA 02138, USA. + I will include a bibliographic citation acknowledging the use of these data in any publication or presentation in which these data are used. Such citations will appear in footnotes or in the reference section of any such manuscript. I understand the guideline in "How to Cite This Dataset" described in the Summary of this study. + Murray Research Archive will list my publication and manuscripts on the Archive website when I submit a bibliographic citation or title of the manuscript, and indicate the Henry A. Murray Research Archive data used. Doing this will also help Henry A. Murray Research Archive to provide funding agencies with essential information about use of archival resources, to fulfill requirements of some memoranda of agreement, and to promote the broader exchange of information about research activities. + The data are available without additional conditions other than those stated in the "Restrictions" Terms of Use above. + + <div style="padding-left: 30px;"> +<ul style="list-style-type: decimal;" ><li> + The Murray Archive (the Distributor) has granted me a revocable license to use this dataset solely for the purposes of conducting research, and the Distributor may terminate this license at any time and for any reason. +</li> +<li> + I will use the dataset solely for statistical analysis and reporting of aggregated information, and not for investigation of specific individuals or organizations, except when identification is authorized in writing by the Distributor. +</li> +<li>I will produce no links among the Distributor’s datasets or among the Distributor’s data and other datasets that could identify individuals or organizations. +</li> +<li>I represent that neither I, nor anyone I know, has any prior knowledge of the possible identities of any study participants in any dataset that I am being licensed to use. 
+</li> +<li> + I will not knowingly divulge any information that could be used to identify individual participants in the study, nor will I attempt to identify or contact any study participant, and I agree to use any precautions necessary to prevent such identification. +</li> +<li> + I will make no use of the identity of any person or establishment discovered inadvertently. If I suspect that I might recognize or know a study participant, I will immediately inform the Distributor, and I will not use or retain a copy of data regarding that study participant. If these measures to resolve an identity disclosure are not sufficient, the Distributor may terminate my use of the dataset. +</li> +<li> + I will not reproduce the dataset except as is necessary for my scholarly purposes. I will destroy the dataset upon the completion of my scholarly work with it. +</li> +<li> + I will not share data from the dataset (in any form or by any means) with any third party, including other members of my research team, as I understand that all users of data must obtain the data directly from the Distributor. +</li> +<li> + I will make appropriate acknowledgement of the contributor of the dataset as well as the Distributor in any manuscript or presentation (published or unpublished) using the citation standard documented here: <a href="http://thedata.org/citation"> http://thedata.org/citation</a> +</li> +<li> + THE DISTRIBUTOR MAKES NO WARRANTIES, EXPRESS OR IMPLIED, BY OPERATION OF LAW OR OTHERWISE, REGARDING OR RELATING TO THE DATASET. 
+</li> +</ul> +</div> + <b>IQSS Dataverse Network Terms and Conditions</b> + +<p>By downloading these Materials, I agree to the following:</p> + +<ol> +<li>I will not use the Materials to +<ol type="a"> +<li>obtain information that could directly or indirectly identify subjects.</li> +<li>produce links among the Distributor's datasets or among the Distributor's data and other datasets that could identify individuals or organizations.</li> +<li>obtain information about, or further contact with, subjects known to me except where the use and/or release of such identifying information has no potential for constituting an unwarranted invasion of privacy and/or breach of confidentiality.</li> +</ol> +</li> +<li>I agree not to download any Materials where prohibited by applicable law.</li> +<li>I agree not to use the Materials in any way prohibited by applicable law.</li> +<li>I agree that any books, articles, conference papers, theses, dissertations, reports, or other publications that I create which employ data reference the bibliographic citation accompanying this data. These citations include the data authors, data identifier, and other information accord with the Recommended Standard (<strong>http://thedata.org/citation/standard</strong>) for social science data.</li> +<li>THE DISTRIBUTOR MAKES NO WARRANTIES, EXPRESS OR IMPLIED, BY OPERATION OF LAW OR OTHERWISE, REGARDING OR RELATING TO THE DATASET</li> +</ol> + + + + Cynthia Fuchs Epstein, 1983, "Women in Law, 1965-1980", hdl:1902.1/00168 Murray Research Archive [Distributor]. <a href="http://dvn.iq.harvard.edu/dvn/dv/mra/faces/study/StudyPage.jsp?studyId=441" target= "_new">study available here</a> + + + + + 00012Epstein-Professional-Measures.pdf + Collection of blank measures used in the study + 1. Documentation + + + 00012Epstein-Professional-StudyDescription.pdf + Overview: abstract, research methodology, publications, and other info. + 1. 
Documentation + + + 00012Epstein-Professional-MeasuresForm.pdf + Usage guidelines relating to measures created by study investigators + 2. Supplemental Documentation + + + 00012Epstein-Professional-BoxCoverSheets.pdf + Describes contents of each box of a paper data set + 2. Supplemental Documentation + + + 00012Epstein-Professional-MemoOfAgreement.pdf + Legal agreement between data depositor and the Murray Archive + 3. Detailed Usage Terms + + + RM2562 Epstein + Box cover sheet for DVD's of .wavs from audio originals + 5.Other Resources + restricted + + \ No newline at end of file diff --git a/postgresql/testdata/scripts/issues/guestbook/insert-guestbook-responses.sh b/postgresql/testdata/scripts/issues/guestbook/insert-guestbook-responses.sh new file mode 100644 index 0000000..e1888db --- /dev/null +++ b/postgresql/testdata/scripts/issues/guestbook/insert-guestbook-responses.sh @@ -0,0 +1,5 @@ +-- select * from guestbookresponse; +-- 150K would be a better test, see https://github.com/IQSS/dataverse/issues/3609#issuecomment-322559209 +--for i in {0..2000}; do psql dataverse_db -f scripts/issues/3845/insert-guestbook-responses.sh; done +-- id | downloadtype | email | institution | name | position | responsetime | sessionid | authenticateduser_id | datafile_id | dataset_id | datasetversion_id | guestbook_id +insert into guestbookresponse values (default, 1, null, null, null, null, null, null, null, 104, 103, null, 2); diff --git a/postgresql/testdata/scripts/migration/HarvardCustomFields.csv b/postgresql/testdata/scripts/migration/HarvardCustomFields.csv new file mode 100644 index 0000000..3e8abe0 --- /dev/null +++ b/postgresql/testdata/scripts/migration/HarvardCustomFields.csv @@ -0,0 +1 @@ +,,Dataverse 4.0 Template Name,field_name,field_name Alliance for Research on Corporate Sustainability,1Wereanyofthesedatasetsapurchasedbobtainedthroughlicenseddatabasesorcprovidedbyanorganizationunderanondisclosureorotheragreement,ARCS1 Alliance for Research on Corporate 
Sustainability,2IfyourespondedYestoQ1haveyouensuredthatsharingthedatadoesnotviolatetermsoftheagreementIfyourespondedNotoQ1pleaseenterNAhere,ARCS2 Alliance for Research on Corporate Sustainability,3DoanyofthesedatasetsincludeindividualleveldataeithercollectedorpreexistinginthedatasetthatmightmakethemsubjecttoUSorinternationalhumansubjectsconsiderations,ARCS3 Alliance for Research on Corporate Sustainability,4IfyourespondedYestoQ3arethesedatasetstotallydeidentifiedorwassharingapprovedbyyourinstitutionalreviewboardIRBIfyourespondedNotoQ3pleaseenterNAhere,ARCS4 Alliance for Research on Corporate Sustainability,5DothesedatasetscontainsensitiveorpersonallyidentifiableprivateinformationHarvardResearchDataSecurityPolicywwwsecurityharvardeduresearchdatasecuritypolicymayapplybecausethisDataverseishostedbyHarvardUniversity,ARCS5 Multi-donor trust fund within the Human Development Network to carry out and support research evaluating the impact of programs to alleviate poverty.,1Wereanyofthesedatasetsapurchasedbobtainedthroughlicenseddatabasesorcprovidedbyanorganizationunderanondisclosureorotheragreement,ARCS1 Multi-donor trust fund within the Human Development Network to carry out and support research evaluating the impact of programs to alleviate poverty.,2IfyourespondedYestoQ1haveyouensuredthatsharingthedatadoesnotviolatetermsoftheagreementIfyourespondedNotoQ1pleaseenterNAhere,ARCS2 Multi-donor trust fund within the Human Development Network to carry out and support research evaluating the impact of programs to alleviate poverty.,3DoanyofthesedatasetsincludeindividualleveldataeithercollectedorpreexistinginthedatasetthatmightmakethemsubjecttoUSorinternationalhumansubjectsconsiderations,ARCS3 Multi-donor trust fund within the Human Development Network to carry out and support research evaluating the impact of programs to alleviate 
poverty.,4IfyourespondedYestoQ3arethesedatasetstotallydeidentifiedorwassharingapprovedbyyourinstitutionalreviewboardIRBIfyourespondedNotoQ3pleaseenterNAhere,ARCS4 Multi-donor trust fund within the Human Development Network to carry out and support research evaluating the impact of programs to alleviate poverty.,5DothesedatasetscontainsensitiveorpersonallyidentifiableprivateinformationHarvardResearchDataSecurityPolicywwwsecurityharvardeduresearchdatasecuritypolicymayapplybecausethisDataverseishostedbyHarvardUniversity,ARCS5 Project TIER,1Wereanyofthesedatasetsapurchasedbobtainedthroughlicenseddatabasesorcprovidedbyanorganizationunderanondisclosureorotheragreement,ARCS1 Project TIER,2IfyourespondedYestoQ1haveyouensuredthatsharingthedatadoesnotviolatetermsoftheagreementIfyourespondedNotoQ1pleaseenterNAhere,ARCS2 Project TIER,3DoanyofthesedatasetsincludeindividualleveldataeithercollectedorpreexistinginthedatasetthatmightmakethemsubjecttoUSorinternationalhumansubjectsconsiderations,ARCS3 Project TIER,4IfyourespondedYestoQ3arethesedatasetstotallydeidentifiedorwassharingapprovedbyyourinstitutionalreviewboardIRBIfyourespondedNotoQ3pleaseenterNAhere,ARCS4 Project TIER,5DothesedatasetscontainsensitiveorpersonallyidentifiableprivateinformationHarvardResearchDataSecurityPolicywwwsecurityharvardeduresearchdatasecuritypolicymayapplybecausethisDataverseishostedbyHarvardUniversity,ARCS5 GSD Studio Template,Accreditation,gsdAccreditation GSD Studio Template,City,city GSD Studio Template,CoreStudioCoordinator,gsdCoordinator GSD Studio Template,CountryNation,country GSD Studio Template,CourseName,gsdCourseName GSD Studio Template,DataUploadedBy,depositor GSD Studio Template,FacultyName,gsdFacultyName GSD Studio Template,FacultyRecommendation,gsdRecommendation GSD Studio Template,Notes,notesText GSD Studio Template,ProgramBrief,gsdProgramBrief GSD Studio Template,SemesterYear,gsdSemester GSD Studio Template,SiteType,gsdSiteType GSD Studio Template,StateProvince,state GSD Studio 
Template,StudentName,gsdStudentName GSD Studio Template,StudentNameFirstName,#IGNORE GSD Studio Template,StudentNameLastName,#IGNORE GSD Studio Template,StudentsProgramofStudy,gsdStudentProgram GSD Studio Template,Tags,gsdTags GSD Studio Template,TypesofRepresentationMediumFormat,gsdTypes GSD Studio Template Fall 2013 - OLD_OLD_OLD,City,city GSD Studio Template Fall 2013 - OLD_OLD_OLD,CoreStudioCoordinator,gsdCoordinator GSD Studio Template Fall 2013 - OLD_OLD_OLD,CountryNation,country GSD Studio Template Fall 2013 - OLD_OLD_OLD,CourseName,gsdCourseName GSD Studio Template Fall 2013 - OLD_OLD_OLD,DataUploadedBy,depositor GSD Studio Template Fall 2013 - OLD_OLD_OLD,FacultyName,gsdFacultyName GSD Studio Template Fall 2013 - OLD_OLD_OLD,FacultyRecommendation,gsdRecommendation GSD Studio Template Fall 2013 - OLD_OLD_OLD,Notes,notesText GSD Studio Template Fall 2013 - OLD_OLD_OLD,ProgramBrief,gsdProgramBrief GSD Studio Template Fall 2013 - OLD_OLD_OLD,SemesterYear,gsdSemester GSD Studio Template Fall 2013 - OLD_OLD_OLD,SiteType,gsdSiteType GSD Studio Template Fall 2013 - OLD_OLD_OLD,StateProvince,state GSD Studio Template Fall 2013 - OLD_OLD_OLD,StudentName,gsdStudentName GSD Studio Template Fall 2013 - OLD_OLD_OLD,StudentsProgramofStudy,gsdStudentProgram GSD Studio Template Fall 2013 - OLD_OLD_OLD,Tags,gsdTags GSD Studio Template Fall 2013 - OLD_OLD_OLD,TypesofRepresentationMediumFormat,gsdTypes GSD Research Fall 2013,City,city GSD Research Fall 2013,CoreStudioCoordinator,gsdCoordinator GSD Research Fall 2013,CountryNation,country GSD Research Fall 2013,CourseName,gsdCourseName GSD Research Fall 2013,DataUploadedBy,depositor GSD Research Fall 2013,FacultyName,gsdFacultyName GSD Research Fall 2013,FacultyRecommendation,gsdRecommendation GSD Research Fall 2013,Notes,notesText GSD Research Fall 2013,ProgramBrief,gsdProgramBrief GSD Research Fall 2013,SemesterYear,gsdSemester GSD Research Fall 2013,SiteType,gsdSiteType GSD Research Fall 2013,StateProvince,state GSD 
Research Fall 2013,StudentName,gsdStudentName GSD Research Fall 2013,StudentsProgramofStudy,gsdStudentProgram GSD Research Fall 2013,Tags,gsdTags GSD Research Fall 2013,TypesofRepresentationMediumFormat,gsdTypes GSD Studio Template Fall 2013-OLD_OLD,City,city GSD Studio Template Fall 2013-OLD_OLD,CoreStudioCoordinator,gsdCoordinator GSD Studio Template Fall 2013-OLD_OLD,CountryNation,country GSD Studio Template Fall 2013-OLD_OLD,CourseName,gsdCourseName GSD Studio Template Fall 2013-OLD_OLD,DataUploadedBy,depositor GSD Studio Template Fall 2013-OLD_OLD,FacultyName,gsdFacultyName GSD Studio Template Fall 2013-OLD_OLD,FacultyRecommendation,gsdRecommendation GSD Studio Template Fall 2013-OLD_OLD,Notes,notesText GSD Studio Template Fall 2013-OLD_OLD,ProgramBrief,gsdProgramBrief GSD Studio Template Fall 2013-OLD_OLD,SemesterYear,gsdSemester GSD Studio Template Fall 2013-OLD_OLD,SiteType,gsdSiteType GSD Studio Template Fall 2013-OLD_OLD,StateProvince,state GSD Studio Template Fall 2013-OLD_OLD,StudentName,gsdStudentName GSD Studio Template Fall 2013-OLD_OLD,StudentNameFirstName,#IGNORE GSD Studio Template Fall 2013-OLD_OLD,StudentNameLastName,#IGNORE GSD Studio Template Fall 2013-OLD_OLD,StudentsProgramofStudy,gsdStudentProgram GSD Studio Template Fall 2013-OLD_OLD,Tags,gsdTags GSD Studio Template Fall 2013-OLD_OLD,TypesofRepresentationMediumFormat,gsdTypes GSD Studio Template Fall 2013_OLD,City,city GSD Studio Template Fall 2013_OLD,CoreStudioCoordinator,gsdCoordinator GSD Studio Template Fall 2013_OLD,CountryNation,country GSD Studio Template Fall 2013_OLD,CourseName,gsdCourseName GSD Studio Template Fall 2013_OLD,DataUploadedBy,depositor GSD Studio Template Fall 2013_OLD,FacultyName,gsdFacultyName GSD Studio Template Fall 2013_OLD,FacultyRecommendation,gsdRecommendation GSD Studio Template Fall 2013_OLD,Notes,notesText GSD Studio Template Fall 2013_OLD,ProgramBrief,gsdProgramBrief GSD Studio Template Fall 2013_OLD,SemesterYear,gsdSemester GSD Studio Template Fall 
2013_OLD,SiteType,gsdSiteType GSD Studio Template Fall 2013_OLD,StateProvince,state GSD Studio Template Fall 2013_OLD,StudentName,gsdStudentName GSD Studio Template Fall 2013_OLD,StudentNameFirstName,#IGNORE GSD Studio Template Fall 2013_OLD,StudentNameLastName,#IGNORE GSD Studio Template Fall 2013_OLD,StudentsProgramofStudy,gsdStudentProgram GSD Studio Template Fall 2013_OLD,Tags,gsdTags GSD Studio Template Fall 2013_OLD,TypesofRepresentationMediumFormat,gsdTypes Syllabus Template,City,city Syllabus Template,CoreStudioCoordinator,gsdCoordinator Syllabus Template,CountryNation,country Syllabus Template,CourseName,gsdCourseName Syllabus Template,DataUploadedBy,depositor Syllabus Template,FacultyName,gsdFacultyName Syllabus Template,FacultyRecommendation,gsdRecommendation Syllabus Template,Notes,notesText Syllabus Template,ProgramBrief,gsdProgramBrief Syllabus Template,SemesterYear,gsdSemester Syllabus Template,SiteType,gsdSiteType Syllabus Template,StateProvince,state Syllabus Template,StudentName,gsdStudentName Syllabus Template,StudentNameFirstName,#IGNORE Syllabus Template,StudentNameLastName,#IGNORE Syllabus Template,StudentsProgramofStudy,gsdStudentProgram Syllabus Template,Tags,gsdTags Syllabus Template,TypesofRepresentationMediumFormat,gsdTypes GSD Studio Template Fall 2013_OLD_Dec12,Accreditation,gsdAccreditation GSD Studio Template Fall 2013_OLD_Dec12,City,city GSD Studio Template Fall 2013_OLD_Dec12,CoreStudioCoordinator,gsdCoordinator GSD Studio Template Fall 2013_OLD_Dec12,CountryNation,country GSD Studio Template Fall 2013_OLD_Dec12,CourseName,gsdCourseName GSD Studio Template Fall 2013_OLD_Dec12,DataUploadedBy,depositor GSD Studio Template Fall 2013_OLD_Dec12,FacultyName,gsdFacultyName GSD Studio Template Fall 2013_OLD_Dec12,FacultyRecommendation,gsdRecommendation GSD Studio Template Fall 2013_OLD_Dec12,Notes,notesText GSD Studio Template Fall 2013_OLD_Dec12,ProgramBrief,gsdProgramBrief GSD Studio Template Fall 2013_OLD_Dec12,SemesterYear,gsdSemester 
GSD Studio Template Fall 2013_OLD_Dec12,SiteType,gsdSiteType GSD Studio Template Fall 2013_OLD_Dec12,StateProvince,state GSD Studio Template Fall 2013_OLD_Dec12,StudentName,gsdStudentName GSD Studio Template Fall 2013_OLD_Dec12,StudentNameFirstName,#IGNORE GSD Studio Template Fall 2013_OLD_Dec12,StudentNameLastName,#IGNORE GSD Studio Template Fall 2013_OLD_Dec12,StudentsProgramofStudy,gsdStudentProgram GSD Studio Template Fall 2013_OLD_Dec12,Tags,gsdTags GSD Studio Template Fall 2013_OLD_Dec12,TypesofRepresentationMediumFormat,gsdTypes GSD Studio Template Fall 2013,Accreditation,gsdAccreditation GSD Studio Template Fall 2013,City,city GSD Studio Template Fall 2013,CoreStudioCoordinator,gsdCoordinator GSD Studio Template Fall 2013,CountryNation,country GSD Studio Template Fall 2013,CourseName,gsdCourseName GSD Studio Template Fall 2013,DataUploadedBy,depositor GSD Studio Template Fall 2013,FacultyName,gsdFacultyName GSD Studio Template Fall 2013,FacultyRecommendation,gsdRecommendation GSD Studio Template Fall 2013,Notes,notesText GSD Studio Template Fall 2013,ProgramBrief,gsdProgramBrief GSD Studio Template Fall 2013,SemesterYear,gsdSemester GSD Studio Template Fall 2013,SiteType,gsdSiteType GSD Studio Template Fall 2013,StateProvince,state GSD Studio Template Fall 2013,StudentName,gsdStudentName GSD Studio Template Fall 2013,StudentNameFirstName,#IGNORE GSD Studio Template Fall 2013,StudentNameLastName,#IGNORE GSD Studio Template Fall 2013,StudentsProgramofStudy,gsdStudentProgram GSD Studio Template Fall 2013,Tags,gsdTags GSD Studio Template Fall 2013,TypesofRepresentationMediumFormat,gsdTypes GSSP Student Data,Accreditation,gsdAccreditation GSSP Student Data,City,city GSSP Student Data,CoreStudioCoordinator,gsdCoordinator GSSP Student Data,CountryNation,country GSSP Student Data,CourseName,gsdCourseName GSSP Student Data,DataUploadedBy,depositor GSSP Student Data,FacultyName,gsdFacultyName GSSP Student Data,FacultyRecommendation,gsdRecommendation GSSP Student 
Data,Notes,notesText GSSP Student Data,ProgramBrief,gsdProgramBrief GSSP Student Data,SemesterYear,gsdSemester GSSP Student Data,SiteType,gsdSiteType GSSP Student Data,StateProvince,state GSSP Student Data,StudentName,gsdStudentName GSSP Student Data,StudentNameFirstName,#IGNORE GSSP Student Data,StudentNameLastName,#IGNORE GSSP Student Data,StudentsProgramofStudy,gsdStudentProgram GSSP Student Data,Tags,gsdTags GSSP Student Data,TypesofRepresentationMediumFormat,gsdTypes Student Data,Accreditation,gsdAccreditation Student Data,City,city Student Data,CoreStudioCoordinator,gsdCoordinator Student Data,CountryNation,country Student Data,CourseName,gsdCourseName Student Data,DataUploadedBy,depositor Student Data,FacultyName,gsdFacultyName Student Data,FacultyRecommendation,gsdRecommendation Student Data,Notes,notesText Student Data,ProgramBrief,gsdProgramBrief Student Data,SemesterYear,gsdSemester Student Data,SiteType,gsdSiteType Student Data,StateProvince,state Student Data,StudentName,gsdStudentName Student Data,StudentNameFirstName,#IGNORE Student Data,StudentNameLastName,#IGNORE Student Data,StudentsProgramofStudy,gsdStudentProgram Student Data,Tags,gsdTags Student Data,TypesofRepresentationMediumFormat,gsdTypes Political Science Replication Initiative Draft Template,Aretheoriginaldatapubliclyavailable,PSRI1 Political Science Replication Initiative Draft Template,Didanotherstudentattempttoreplicatethereplicationintheclass,PSRI9 Political Science Replication Initiative Draft Template,Didanotherstudentreplicatethisreplicationsuccessfully,PSRI10 Political Science Replication Initiative Draft Template,Didaprofessorreadreviewadraftbeforethefinalversion,PSRI11 Political Science Replication Initiative Draft Template,DidyousendthereplicationmaterialstotheoriginalauthorsandnotifythemthatyoudbepostingyourreplicationonPSRI,PSRI7 Political Science Replication Initiative Draft Template,Istheoriginalcodeavailable,PSRI2 Political Science Replication Initiative Draft 
Template,WasthereplicationdoneinacourseIfsopleasecontinuetoanswerthesubsequentquestionsandifnotselectNA,PSRI8 Political Science Replication Initiative Draft Template,Wherearetheoriginaldataarchivednameandurl,PSRI3 Political Science Replication Initiative Draft Template,Whereistheoriginalcodepubliclyarchivednameandurl,PSRI4 Political Science Replication Initiative Draft Template,WillyousubmityourreplicationcodetothisDataverseThisisaPSRIrequirement,PSRI5 Political Science Replication Initiative Draft Template,WillyousubmityourreplicationwriteuptothisDataverseThisisaPSRIrequirement,PSRI6 PSRI dataverse,Aretheoriginaldatapubliclyavailable,PSRI1 PSRI dataverse,Didanotherstudentattempttoreplicatethereplication,PSRI9 PSRI dataverse,Didanotherstudentreplicatethisreplicationsuccessfully,PSRI10 PSRI dataverse,Didaprofessorreadreviewadraftbeforethefinalversion,PSRI11 PSRI dataverse,DidyousendthereplicationmaterialstotheoriginalauthorsandnotifythemthatyoudbepostingyourreplicationonPSRI,PSRI7 PSRI dataverse,Istheoriginalcodeavailable,PSRI2 PSRI dataverse,WasthereplicationdoneinacourseIfsopleasecontinuetoanswerthesubsequentquestionsandifnotselectNA,PSRI8 PSRI dataverse,Wherearetheoriginaldataarchivednameandurl,PSRI3 PSRI dataverse,Whereistheoriginalcodepubliclyarchivednameandurl,PSRI4 PSRI dataverse,WillyousubmityourreplicationcodetothisDataverseThisisaPSRIrequirement,PSRI5 PSRI dataverse,WillyousubmityourreplicationwriteuptothisDataverseThisisaPSRIrequirement,PSRI6 GSD 2013 2014,Accreditation,gsdAccreditation GSD 2013 2014,City,city GSD 2013 2014,CoreStudioCoordinator,gsdCoordinator GSD 2013 2014,CountryNation,country GSD 2013 2014,CourseName,gsdCourseName GSD 2013 2014,DataUploadedBy,depositor GSD 2013 2014,FacultyName,gsdFacultyName GSD 2013 2014,FacultyRecommendation,gsdRecommendation GSD 2013 2014,Notes,notesText GSD 2013 2014,ProgramBrief,gsdProgramBrief GSD 2013 2014,SemesterYear,gsdSemester GSD 2013 2014,SiteType,gsdSiteType GSD 2013 2014,StateProvince,state GSD 2013 
2014,StudentName,gsdStudentName GSD 2013 2014,StudentNameFirstName,#IGNORE GSD 2013 2014,StudentNameLastName,#IGNORE GSD 2013 2014,StudentsProgramofStudy,gsdStudentProgram GSD 2013 2014,Tags,gsdTags GSD 2013 2014,TypesofRepresentationMediumFormat,gsdTypes Muthyamfirst,1Wereanyofthesedatasetsapurchasedbobtainedthroughlicenseddatabasesorcprovidedbyanorganizationunderanondisclosureorotheragreement,ARCS1 Muthyamfirst,2IfyourespondedYestoQ1haveyouensuredthatsharingthedatadoesnotviolatetermsoftheagreementIfyourespondedNotoQ1pleaseenterNAhere,ARCS2 Muthyamfirst,3DoanyofthesedatasetsincludeindividualleveldataeithercollectedorpreexistinginthedatasetthatmightmakethemsubjecttoUSorinternationalhumansubjectsconsiderations,ARCS3 Muthyamfirst,4IfyourespondedYestoQ3arethesedatasetstotallydeidentifiedorwassharingapprovedbyyourinstitutionalreviewboardIRBIfyourespondedNotoQ3pleaseenterNAhere,ARCS4 Muthyamfirst,5DothesedatasetscontainsensitiveorpersonallyidentifiableprivateinformationHarvardResearchDataSecurityPolicywwwsecurityharvardeduresearchdatasecuritypolicymayapplybecausethisDataverseishostedbyHarvardUniversity,ARCS5 PSRI Dataverse Template v2,Aretheoriginaldatapubliclyavailable,PSRI1 PSRI Dataverse Template v2,Didanotherstudentattempttoreplicatethereplicationintheclass,PSRI9 PSRI Dataverse Template v2,Didanotherstudentreplicatethisreplicationsuccessfully,PSRI10 PSRI Dataverse Template v2,Didaprofessorreadreviewadraftbeforethefinalversion,PSRI11 PSRI Dataverse Template v2,DidyousendthereplicationmaterialstotheoriginalauthorsandnotifythemthatyoudbepostingyourreplicationonPSRI,PSRI7 PSRI Dataverse Template v2,Istheoriginalcodeavailable,PSRI2 PSRI Dataverse Template v2,WasthereplicationdoneinacourseIfsopleasecontinuetoanswerthesubsequentquestionsandifnotselectNA,PSRI8 PSRI Dataverse Template v2,Wherearetheoriginaldataarchivednameandurl,PSRI3 PSRI Dataverse Template v2,Whereistheoriginalcodepubliclyarchivednameandurl,PSRI4 PSRI Dataverse Template 
v2,WillyousubmityourreplicationcodetothisDataverseThisisaPSRIrequirement,PSRI5 PSRI Dataverse Template v2,WillyousubmityourreplicationwriteuptothisDataverseThisisaPSRIrequirement,PSRI6 Local Monograph Template,Aretheoriginaldatapubliclyavailable,PSRI1 Local Monograph Template,Didanotherstudentattempttoreplicatethereplicationintheclass,PSRI9 Local Monograph Template,Didanotherstudentreplicatethisreplicationsuccessfully,PSRI10 Local Monograph Template,Didaprofessorreadreviewadraftbeforethefinalversion,PSRI11 Local Monograph Template,DidyousendthereplicationmaterialstotheoriginalauthorsandnotifythemthatyoudbepostingyourreplicationonPSRI,PSRI7 Local Monograph Template,Istheoriginalcodeavailable,PSRI2 Local Monograph Template,WasthereplicationdoneinacourseIfsopleasecontinuetoanswerthesubsequentquestionsandifnotselectNA,PSRI8 Local Monograph Template,Wherearetheoriginaldataarchivednameandurl,PSRI3 Local Monograph Template,Whereistheoriginalcodepubliclyarchivednameandurl,PSRI4 Local Monograph Template,WillyousubmityourreplicationcodetothisDataverseThisisaPSRIrequirement,PSRI5 Local Monograph Template,WillyousubmityourreplicationwriteuptothisDataverseThisisaPSRIrequirement,PSRI6 Journal of Human Rights (JHR),Aretheoriginaldatapubliclyavailable,PSRI1 Journal of Human Rights (JHR),Didanotherstudentattempttoreplicatethereplicationintheclass,PSRI9 Journal of Human Rights (JHR),Didanotherstudentreplicatethisreplicationsuccessfully,PSRI10 Journal of Human Rights (JHR),Didaprofessorreadreviewadraftbeforethefinalversion,PSRI11 Journal of Human Rights (JHR),DidyousendthereplicationmaterialstotheoriginalauthorsandnotifythemthatyoudbepostingyourreplicationonPSRI,PSRI7 Journal of Human Rights (JHR),Istheoriginalcodeavailable,PSRI2 Journal of Human Rights (JHR),WasthereplicationdoneinacourseIfsopleasecontinuetoanswerthesubsequentquestionsandifnotselectNA,PSRI8 Journal of Human Rights (JHR),Wherearetheoriginaldataarchivednameandurl,PSRI3 Journal of Human Rights 
(JHR),Whereistheoriginalcodepubliclyarchivednameandurl,PSRI4 Journal of Human Rights (JHR),WillyousubmityourreplicationcodetothisDataverseThisisaPSRIrequirement,PSRI5 Journal of Human Rights (JHR),WillyousubmityourreplicationwriteuptothisDataverseThisisaPSRIrequirement,PSRI6 GSD Fall 2014,Accreditation,gsdAccreditation GSD Fall 2014,City,city GSD Fall 2014,CoreStudioCoordinator,gsdCoordinator GSD Fall 2014,CountryNation,country GSD Fall 2014,CourseName,gsdCourseName GSD Fall 2014,DataUploadedBy,depositor GSD Fall 2014,FacultyName,gsdFacultyName GSD Fall 2014,FacultyRecommendation,gsdRecommendation GSD Fall 2014,Notes,notesText GSD Fall 2014,ProgramBrief,gsdProgramBrief GSD Fall 2014,SemesterYear,gsdSemester GSD Fall 2014,SiteType,gsdSiteType GSD Fall 2014,StateProvince,state GSD Fall 2014,StudentName,gsdStudentName GSD Fall 2014,StudentNameFirstName,#IGNORE GSD Fall 2014,StudentNameLastName,#IGNORE GSD Fall 2014,StudentsProgramofStudy,gsdStudentProgram GSD Fall 2014,Tags,gsdTags GSD Fall 2014,TypesofRepresentationMediumFormat,gsdTypes GSD_Fall2014_Platform,Accreditation,gsdAccreditation GSD_Fall2014_Platform,City,city GSD_Fall2014_Platform,CoreStudioCoordinator,gsdCoordinator GSD_Fall2014_Platform,CountryNation,country GSD_Fall2014_Platform,CourseName,gsdCourseName GSD_Fall2014_Platform,DataUploadedBy,depositor GSD_Fall2014_Platform,FacultyName,gsdFacultyName GSD_Fall2014_Platform,FacultyRecommendation,gsdRecommendation GSD_Fall2014_Platform,Notes,notesText GSD_Fall2014_Platform,ProgramBrief,gsdProgramBrief GSD_Fall2014_Platform,SemesterYear,gsdSemester GSD_Fall2014_Platform,SiteType,gsdSiteType GSD_Fall2014_Platform,StateProvince,state GSD_Fall2014_Platform,StudentName,gsdStudentName GSD_Fall2014_Platform,StudentNameFirstName,#IGNORE GSD_Fall2014_Platform,StudentNameLastName,#IGNORE GSD_Fall2014_Platform,StudentsProgramofStudy,gsdStudentProgram GSD_Fall2014_Platform,Tags,gsdTags GSD_Fall2014_Platform,TypesofRepresentationMediumFormat,gsdTypes Digaai Dataverse 
Jornais e Revistas,DatadePublicao,datadePublicao Digaai Dataverse Jornais e Revistas,LocaldePublicao,localdePublicao Digaai Dataverse Jornais e Revistas,Nmero,numero Digaai Dataverse Jornais e Revistas,Proprietrio,proprietrio Digaai Dataverse Jornais e Revistas,Ttulo,titulo FaceBrasil,DatadePublicao,datadePublicao FaceBrasil,LocaldePublicao,localdePublicao FaceBrasil,Nmero,numero FaceBrasil,Proprietrio,proprietrio FaceBrasil,Ttulo,titulo FaceBrasil 32,DatadePublicao,datadePublicao FaceBrasil 32,LocaldePublicao,localdePublicao FaceBrasil 32,Nmero,numero FaceBrasil 32,Proprietrio,proprietrio FaceBrasil 32,Ttulo,titulo CHIA World Historical Default Template,ClassificationSchema,classificationSchemaCHIA CHIA World Historical Default Template,Contributor,contributorName CHIA World Historical Default Template,DatesAdditionalInformation,datesAdditionalInformationCHIA CHIA World Historical Default Template,GeographicCoverageAdditionalInformation,otherGeographicCoverage CHIA World Historical Default Template,Language,language CHIA World Historical Default Template,Provenance,provenanceCHIA CHIA World Historical Default Template,RightsAvailability,rightsAvailabilityCHIA CHIA World Historical Default Template,Source,sourceCHIA CHIA World Historical Default Template,Variables,variablesCHIA Italian Political Science Review,Aretheoriginaldatapubliclyavailable,PSRI1 Italian Political Science Review,Didanotherstudentattempttoreplicatethereplicationintheclass,PSRI9 Italian Political Science Review,Didanotherstudentreplicatethisreplicationsuccessfully,PSRI10 Italian Political Science Review,Didaprofessorreadreviewadraftbeforethefinalversion,PSRI11 Italian Political Science Review,DidyousendthereplicationmaterialstotheoriginalauthorsandnotifythemthatyoudbepostingyourreplicationonPSRI,PSRI7 Italian Political Science Review,Istheoriginalcodeavailable,PSRI2 Italian Political Science 
Review,WasthereplicationdoneinacourseIfsopleasecontinuetoanswerthesubsequentquestionsandifnotselectNA,PSRI8 Italian Political Science Review,Wherearetheoriginaldataarchivednameandurl,PSRI3 Italian Political Science Review,Whereistheoriginalcodepubliclyarchivednameandurl,PSRI4 Italian Political Science Review,WillyousubmityourreplicationcodetothisDataverseThisisaPSRIrequirement,PSRI5 Italian Political Science Review,WillyousubmityourreplicationwriteuptothisDataverseThisisaPSRIrequirement,PSRI6 \ No newline at end of file diff --git a/postgresql/testdata/scripts/migration/HarvardPreMigrationDataScrub.sql b/postgresql/testdata/scripts/migration/HarvardPreMigrationDataScrub.sql new file mode 100644 index 0000000..af6b31b --- /dev/null +++ b/postgresql/testdata/scripts/migration/HarvardPreMigrationDataScrub.sql @@ -0,0 +1,32 @@ +select m.id, m.TimePeriodCoveredEnd, v.study_id from metadata m, studyversion v where v.study_id = 121855 and m.id = v.metadata_id and TimePeriodCoveredEnd = '[17820000]'; +select m.id, m.DistributionDate, v.study_id from metadata m, studyversion v where v.study_id = 117326 and m.id = v.metadata_id and DistributionDate = '2O14'; +select m.id, a.date, v.study_id from metadata m, studyversion v, studyabstract a where v.study_id=47799 and m.id=v.metadata_id and m.id=a.metadata_id and a.date='201-'; +select m.id, m.TimePeriodCoveredEnd, v.study_id from metadata m, studyversion v where v.study_id = 88283 and m.id = v.metadata_id and TimePeriodCoveredEnd = '198x'; +select m.id, m.TimePeriodCoveredStart, v.study_id from metadata m, studyversion v where v.study_id = 215 and m.id = v.metadata_id and TimePeriodCoveredStart = '70s'; --should return 3 records +select m.id, a.date, v.study_id from metadata m, studyversion v, studyabstract a where v.study_id=91709 and m.id=v.metadata_id and m.id=a.metadata_id and a.date='2-13'; --should return 3 records +select m.id, a.date, v.study_id from metadata m, studyversion v, studyabstract a where v.study_id=114372 
and m.id=v.metadata_id and m.id=a.metadata_id and a.date='2-14'; +select m.id, m.DateOfCollectionStart, m.DateOfCollectionEnd, v.study_id from metadata m, studyversion v where v.study_id = 155 and m.id = v.metadata_id and DateOfCollectionStart = '2004-01-01 to 2004-12-31' and m.DateOfCollectionEnd = '' ; -- should return 10 records + +update metadata set TimePeriodCoveredEnd = '1782' from studyversion v where v.study_id = 121855 and metadata.id = v.metadata_id and TimePeriodCoveredEnd = '[17820000]'; +update metadata set DistributionDate = '2014' from studyversion v where v.study_id = 117326 and metadata.id = v.metadata_id and DistributionDate = '2O14'; +update studyabstract set date = '2010' from metadata m, studyversion v where v.study_id=47799 and m.id=v.metadata_id and m.id=studyabstract.metadata_id and studyabstract.date='201-'; +update metadata set TimePeriodCoveredEnd = '198?' from studyversion v where v.study_id = 88283 and metadata.id = v.metadata_id and TimePeriodCoveredEnd = '198x'; +update metadata set TimePeriodCoveredStart = '197?' 
from studyversion v where v.study_id = 215 and metadata.id = v.metadata_id and TimePeriodCoveredStart = '70s'; --should update 3 records +update studyabstract set date = '2014' from metadata m, studyversion v where v.study_id=114372 and m.id=v.metadata_id and m.id=studyabstract.metadata_id and studyabstract.date='2-14'; +update studyabstract set date = '2013' from metadata m, studyversion v where v.study_id=91709 and m.id=v.metadata_id and m.id=studyabstract.metadata_id and studyabstract.date='2-13'; --should update 3 records +update metadata set DateOfCollectionStart = '2004-01-01', DateOfCollectionEnd = '2004-12-31' from studyversion v where v.study_id = 155 and metadata.id = v.metadata_id and DateOfCollectionStart = '2004-01-01 to 2004-12-31' and DateOfCollectionEnd = ''; -- should update 10 records + + +update studyfieldvalue set strvalue='English' where metadata_id=273999 and studyfield_id=218 and strValue='English and Dutch'; +insert into studyfieldvalue (strvalue, metadata_id, studyfield_id, displayorder) values ('Dutch', 273999,218,1); + +--Added for datasets with multiple failues 3/30 +select m.id, m.TimePeriodCoveredStart, v.study_id from metadata m, studyversion v where v.study_id = 88283 and m.id = v.metadata_id and TimePeriodCoveredStart = '198x'; +select m.id, m.TimePeriodCoveredStart, v.study_id from metadata m, studyversion v where v.study_id = 121855 and m.id = v.metadata_id and TimePeriodCoveredStart = '[17820000]'; +select m.id, m.ProductionDate, v.study_id from metadata m, studyversion v where v.study_id = 121855 and m.id = v.metadata_id and ProductionDate = '[17820000]'; +select m.id, m.dateofdeposit, v.study_id from metadata m, studyversion v where v.study_id = 74738 and m.id = v.metadata_id and dateofdeposit = '\'; + +update metadata set TimePeriodCoveredStart = '198?' 
from studyversion v where v.study_id = 88283 and metadata.id = v.metadata_id and TimePeriodCoveredStart = '198x'; +update metadata set ProductionDate = '1782' from studyversion v where v.study_id = 121855 and metadata.id = v.metadata_id and ProductionDate = '[17820000]'; +update metadata set TimePeriodCoveredStart = '1782' from studyversion v where v.study_id = 121855 and metadata.id = v.metadata_id and TimePeriodCoveredStart = '[17820000]'; +update metadata set dateofdeposit = '' from studyversion v where v.study_id = 74738 and metadata.id = v.metadata_id and dateofdeposit = '\'; diff --git a/postgresql/testdata/scripts/migration/custom_field_map.sql b/postgresql/testdata/scripts/migration/custom_field_map.sql new file mode 100644 index 0000000..515c035 --- /dev/null +++ b/postgresql/testdata/scripts/migration/custom_field_map.sql @@ -0,0 +1,6 @@ + +delete from customfieldmap; + +COPY customfieldmap( sourcetemplate, sourcedatasetfield, targetdatasetfield) FROM '/scripts/migration/HarvardCustomFields.csv' DELIMITER ',' CSV HEADER; + + diff --git a/postgresql/testdata/scripts/migration/datafile_pub_date.sql b/postgresql/testdata/scripts/migration/datafile_pub_date.sql new file mode 100644 index 0000000..eb669a9 --- /dev/null +++ b/postgresql/testdata/scripts/migration/datafile_pub_date.sql @@ -0,0 +1,12 @@ +UPDATE dvobject +SET publicationdate = x.releasetime +FROM (SELECT f.id, f.filesystemname, min(v.releasetime) as releasetime +FROM datafile f, dvobject d, datasetversion v, filemetadata m +WHERE f.id = d.id +AND d.publicationdate IS null +AND m.datafile_id = f.id +AND m.datasetversion_id = v.id +AND v.versionstate = 'RELEASED' +-- AND (NOT f.filesystemname IS null AND NOT f.filesystemname LIKE 'http%') +GROUP BY f.id, f.filesystemname) x WHERE x.id = dvobject.id; + diff --git a/postgresql/testdata/scripts/migration/files_destination_step1_ b/postgresql/testdata/scripts/migration/files_destination_step1_ new file mode 100755 index 0000000..d8147f6 --- /dev/null 
+++ b/postgresql/testdata/scripts/migration/files_destination_step1_ @@ -0,0 +1,36 @@ +#!/usr/bin/perl + +use DBI; + +my $host = "localhost"; +my $username = "xxxxx"; +my $password = 'xxxxx'; +my $database = "xxxxx"; + +my $dbh = DBI->connect("DBI:Pg:dbname=$database;host=$host",$username,$password); +my $sth = $dbh->prepare(qq {SELECT d.protocol, d.authority, d.identifier, d.id, v.id, v.versionnumber FROM dataset d, datasetversion v WHERE v.dataset_id = d.id ORDER BY d.id, v.versionnumber}); +$sth->execute(); + +my $offset= 0; + +while ( @_ = $sth->fetchrow() ) +{ + $protocol = $_[0]; + $authority = $_[1]; + $identifier = $_[2]; + $id = $_[3]; + $vid = $_[4]; + $vnum = $_[5]; + + print $protocol . ":" . $authority . "/" . $identifier . "\t" . $id . "\t" . $vid . "\t" . $vnum . "\n"; + + $offset = $id; +} + +$sth->finish; +$dbh->disconnect; + +print STDERR "last ID in DVOBJECT table: " . $offset . "\n"; + +exit 0; + diff --git a/postgresql/testdata/scripts/migration/files_source_ b/postgresql/testdata/scripts/migration/files_source_ new file mode 100755 index 0000000..ad8b7ce --- /dev/null +++ b/postgresql/testdata/scripts/migration/files_source_ @@ -0,0 +1,624 @@ +#!/usr/bin/perl + +my $host = "localhost"; +my $username = "xxxxx"; +my $database = "xxxxx"; +my $password = "xxxxx"; + +my $dvobjectoffset = shift @ARGV; +my $filecatoffset = shift @ARGV; + +unless ($dvobjectoffset > 0) +{ + print STDERR "Usage: ./files_source_ \n"; + exit 1; +} + +unless ($filecatoffset) +{ + print STDERR "WARNING! 
file category offset is set to ZERO.\n"; +} + +my $filecatid = $filecatoffset; # file categories (this is a new object in 4.0, so there are no 3.6 IDs to reuse) + +use DBI; + +my $dbh = DBI->connect("DBI:Pg:dbname=$database;host=$host",$username,$password); + +open PL, ">packlist.txt"; + +%STUDYMAP = {}; +%STUDYFILEMAP = {}; +%VERSIONMAP = {}; + +while ( <> ) +{ + chop; + my ($globalid, $dsid, $dsvid, $dsvnum) = split("\t", $_); + $STUDYMAP{$globalid} = $dsid; + $VERSIONMAP{$globalid . "+++" . $dsvnum} = $dsid . "-" . $dsvid; + + %FILECATEGORIES = {}; # file categories for this dataset. + + if ($globalid =~/^([a-z]*):(.*)\/([^\/]*)$/) + { + $protocol = $1; + $authority = $2; + $identifier = $3; + +# print $protocol . " " . $authority . " " . $identifier . "\n"; + } + else + { + print STDERR "WARNING! illegal global id: " . $globalid . "\n"; + next; + } + + my $sth; + + $sth = $dbh->prepare(qq {SELECT s.id, v.id FROM study s, studyversion v WHERE v.study_id = s.id AND s.protocol = '$protocol' AND s.authority='$authority' AND s.studyid = '$identifier' AND v.versionnumber = $dsvnum}); + $sth->execute(); + + my $vercount = 0; + + my $sid; + my $svid; + + while ( @foo = $sth->fetchrow() ) + { + $sid = $foo[0]; + $svid = $foo[1]; + + $vercount++; + } + + $sth->finish; + + unless ($vercount == 1) + { + print STDERR "WARNING: invalid number of versions for study " . $globalid . ", with version number " . $dsvnum . " (" . $vercount . 
")!\n"; + next; + } + + $sth = $dbh->prepare(qq {SELECT fm.label, fm.category, fm.description, sf.filetype, sf.filesystemlocation, sf.md5, sf.restricted, sf.originalfiletype, sf.unf, sf.id, sf.fileclass, fm.id FROM filemetadata fm, studyfile sf WHERE fm.studyfile_id = sf.id AND fm.studyversion_id = $svid}); + + $sth->execute(); + + my $newfile = 0; + + while ( @foo = $sth->fetchrow() ) + { + # new filemetadata fields: + $label = $foo[0]; + $description = $foo[2]; + $description =~s/\n/ /g; + $description = $dbh->quote($description); + # category: + $category = $foo[1]; + # new datafile fields: + $type = $foo[3]; + unless ($type =~m:/:) + { + $type = "application/octet-stream"; + } + $md5 = $foo[5]; + $restricted = $foo[6]; + + # "restricted" is a boolean: + + $restricted = 'TRUE' if $restricted; + $restricted = 'FALSE' unless $restricted; + + # location of the file, on the old filesystem: + $fslocation = $foo[4]; + $fslocation = "" unless $fslocation; + + # additional info for subsettable files: + # (will go into the new datatable) + $originalfiletype = $foo[7]; + $unf = $foo[8]; + # id of the existing studyfile: + $sfid = $foo[9]; + # "class" of the existing studyfile: + # (tabular, "other", etc.) + $fileclass = $foo[10]; + $fmid = $foo[11]; + + if ($label =~/[\\\/:\*\?\"\<\>\|;\#]/) + { + $preservedlabel = $label; + $label=~s/[\\\/:\*\?\"\<\>\|;\#]//g; + + print STDERR "LABEL REPLACED: (FILEMETA: " . $fmid . ", FILE: " . $sfid . ", STUDY: " . $sid . ", VERSION: " . $svid . ", GLOBALID: " . $globalid . ") OLD: \"" . $preservedlabel . "\", NEW: \"" . $label . "\"\n"; + } + + if ($label eq '') + { + $label = "UNKNOWN"; + } + + $label = $dbh->quote($label); + + + unless ($STUDYFILEMAP{$sfid}) + { + $newfile = 1; + # Certain things only need to be done once per file - + # namely, each file needs one dvobject and datafile each; + # same for the datatables and variables. + # Other things, like filemetadatas, need to be created one + # per version. 
+ + $newdatafileid = ($dvobjectoffset+$sfid); + $STUDYFILEMAP{$sfid} = $newdatafileid; + + $fsname = $fslocation; + + if ($fslocation =~/^http/ ) + { + $fsize = 0; + $fmtime = &formatTimeStamp(time); + } + else + { + if ( -f $fslocation ) + { + @fstats = stat($fslocation); + $fsize = $fstats[7]; + $mtime = $fstats[9]; + + $fmtime = &formatTimeStamp($mtime); + $packlistentry = $fslocation; + $packlistentry =~s/.*\/DVN\/data\///; + print PL $packlistentry . "\n"; + } + else + { + print STDERR "WARNING: file " . $fslocation . " not found!\n"; + $fsize = 0; + $fmtime = &formatTimeStamp(time); + } + + $fsname =~s/^.*\///g; + } + + # dvobject: + + print qq {INSERT INTO dvobject (id, dtype, owner_id, createdate, modificationtime) VALUES ($newdatafileid, 'DataFile', $dsid, '$fmtime', '$fmtime');} . "\n"; + + # datafile object: + $fsname = $dbh->quote($fsname); + print qq {INSERT INTO datafile (id, contenttype, filesystemname, filesize, md5, restricted) VALUES ($newdatafileid, '$type', $fsname, $fsize, '$md5', $restricted);} . "\n"; + # Use the below line instead of the above if you are using 4.6 or above + # print qq {INSERT INTO datafile (id, contenttype, filesystemname, filesize, checksumtype, restricted,checksumvalue,rootdatafileid) VALUES ($newdatafileid, '$type', $fsname, $fsize, 'MD5', $restricted,'',-1);} . "\n"; + } + else + { + $newdatafileid = $STUDYFILEMAP{$sfid}; + $newfile = 0; + } + + # file metadata object: + print qq {INSERT INTO filemetadata (id, description, label, restricted, version, datasetversion_id, datafile_id) VALUES ($fmid, $description, $label, $restricted, 1, $dsvid, $newdatafileid);} . 
"\n"; + + # and the category, if exists: + + if ($category && $category ne "") + { + $category = $dbh->quote($category); + unless ($FILECATEGORIES{$category}) + { + # this is a new category (for this dataset), + # so it needs to be created: + + $filecatid++; + + print qq{INSERT INTO datafilecategory (id, name, dataset_id) VALUES ($filecatid, $category, $newdatafileid);} . "\n"; + + $FILECATEGORIES{$category} = $filecatid; + } + + my $fcid = $FILECATEGORIES{$category}; + print qq{INSERT INTO filemetadata_datafilecategory (filecategories_id, filemetadatas_id) VALUES ($fcid, $fmid);} . "\n"; + + } + + + # subsettable files: + # (again, this only needs to be done once per file!) + + + if ($newfile && ($fileclass eq "TabularDataFile")) + { + #print STDERR "this is a subsettable file.\n"; + + # NOTE: + # there's only one datatable per file - make sure to only run this once! + # (i.e., not for every version!) + + $sth1 = $dbh->prepare(qq {SELECT id, varquantity, casequantity, unf, recordspercase FROM datatable WHERE studyfile_id = $sfid}); + + $sth1->execute(); + + $count = 0; + + while ( @dt = $sth1->fetchrow() ) + { + $dtid = $dt[0]; + $varquantity = $dt[1]; + $casequantity = $dt[2]; + $dtunf = $dt[3]; + $recordspercase = $dt[4]; + + $count++; + + unless ($unf eq $dtunf) + { + print STDERR "WARNING: unf mismatch, between studyfile and datatable: " . $unf . ":" . $dtunf . "\n"; + } + + # datatable object: + + + if ($recordspercase) + { + print qq {INSERT INTO datatable (id, varquantity, casequantity, unf, originalfileformat, recordspercase, datafile_id) VALUES ($dtid, $varquantity, $casequantity, '$unf', '$originalfiletype', $recordspercase, $newdatafileid);} . "\n"; + } + else + { + print qq {INSERT INTO datatable (id, varquantity, casequantity, unf, originalfileformat, datafile_id) VALUES ($dtid, $varquantity, $casequantity, '$unf', '$originalfiletype', $newdatafileid);} . 
"\n"; + } + } + + $sth1->finish; + + unless ($count == 1) + { + print STDERR "WARNING: invalid number of datatables: " . $count . ".\n"; + } + else + { + # variables: + $sth1 = $dbh->prepare(qq {SELECT name, label, variableformattype_id, variableintervaltype_id, formatcategory, formatschema, formatschemaname, unf, fileorder, weighted, orderedfactor, numberofdecimalpoints, universe, filestartposition, fileendposition, recordsegmentnumber, id FROM datavariable WHERE datatable_id = $dtid}); + + + $sth1->execute(); + + while ( @dv = $sth1->fetchrow() ) + { + $varname = $dv[0]; + $varname = $dbh->quote($varname); + $varlabel = $dv[1]; + $varlabel = $dbh->quote($varlabel); + $variableformattype_id = $dv[2]; + # the old school formattype_id and + # intervaltype_id need to be adjusted by 1, + # to match the new enum values used in the + # 4.0 datavariables: + $variableformattype_id--; + $variableintervaltype_id = $dv[3]; + $variableintervaltype_id--; + $varformatcategory = $dv[4]; + $varformatschema = $dv[5]; + $varformatschemaname = $dv[6]; + $varunf = $dv[7]; + $varfileorder = $dv[8]; + $varweighted = $dv[9]; + if ($varweighted) + { + $varweighted = "TRUE"; + } + else + { + $varweighted = "FALSE"; + } + $varorderedfactor = $dv[10]; + if ($varorderedfactor) + { + $varorderedfactor = "TRUE"; + } + else + { + $varorderedfactor = "FALSE"; + } + + $varnumberofdecimalpoints = $dv[11]; + $varuniverse = $dv[12]; + $varfilestartposition = $dv[13]; + $varfileendposition = $dv[14]; + $varrecordsegmentnumber = $dv[15]; + $varid = $dv[16]; + + + + # new datavariable object: + + $newdvfields = "id, name, label, interval, type, unf, fileorder, orderedfactor, weighted, datatable_id"; + $newdvvalues = qq {$varid, $varname, $varlabel, $variableintervaltype_id, $variableformattype_id, '$varunf', $varfileorder, $varorderedfactor, $varweighted, $dtid}; + + if ($varformatschemaname) + { + # becomes "format": + $newdvfields = $newdvfields . 
", format"; + $newdvvalues = qq{$newdvvalues, '$varformatschemaname'}; + } + + if ($varformatcategory) + { + $newdvfields = $newdvfields . ", formatcategory"; + $newdvvalues = qq{$newdvvalues, '$varformatcategory'}; + } + + if ($varfilestartposition) + { + $newdvfields = $newdvfields . ", filestartposition"; + $newdvvalues = qq{$newdvvalues, $varfilestartposition}; + } + + if ($varfileendposition) + { + $newdvfields = $newdvfields . ", fileendposition"; + $newdvvalues = qq{$newdvvalues, $varfileendposition}; + } + + if ($varrecordsegmentnumber) + { + $newdvfields = $newdvfields . ", recordsegmentnumber"; + $newdvvalues = qq{$newdvvalues, $varrecordsegmentnumber}; + } + + if ($varuniverse) + { + $newdvfields = $newdvfields . ", universe"; + $newdvvalues = qq{$newdvvalues, '$varuniverse'}; + } + + if ($varnumberofdecimalpoints) + { + $newdvfields = $newdvfields . ", numberofdecimalpoints"; + $newdvvalues = qq{$newdvvalues, $varnumberofdecimalpoints}; + } + + + print qq {INSERT INTO datavariable ($newdvfields) VALUES ($newdvvalues);} . 
"\n"; + + # variable categories: + $sth2 = $dbh->prepare(qq {SELECT id, label, value, missing, catorder, frequency FROM variablecategory WHERE datavariable_id = $varid}); + $sth2->execute(); + + while ( @vc = $sth2->fetchrow() ) + { + $varcatid = $vc[0]; + $varcatlabel = $vc[1]; + $varcatvalue = $vc[2]; + $varcatmissing = $vc[3]; + if ($varcatmissing) + { + $varcatmissing = "true"; + } + else + { + $varcatmissing = "false"; + } + $varcatorder = $vc[4]; + unless ($varcatorder) + { + if ($varcatorder eq "" || $varcatorder != 0) + { + $varcatorder = "null"; + } + } + $varcatfreq = $vc[5]; + unless ($varcatfreq) + { + if ($varcatfreq eq "" || $varcatfreq != 0) + { + $varcatfreq = "null"; + } + } + + + # only migrate the *real* categories: + if ($varcatlabel) + { + $varcatlabel = $dbh->quote($varcatlabel); + unless ($varcatvalue || ($varcatvalue eq "") || ($varcatvalue == 0)) + { + print STDERR qq {INSERT INTO variablecategory (id, label, value, missing, catorder, frequency, datavariable_id) VALUES ($varcatid, $varcatlabel, $varcatvalue, $varcatmissing, $varcatorder, $varcatfreq, $varid);} . "\n"; + } + else + { + $varcatvalue = $dbh->quote($varcatvalue); + print qq {INSERT INTO variablecategory (id, label, value, missing, catorder, frequency, datavariable_id) VALUES ($varcatid, $varcatlabel, $varcatvalue, $varcatmissing, $varcatorder, $varcatfreq, $varid);} . 
"\n"; + } + } + else + { + #print STDERR "empty var cat label.\n"; + } + } + + $sth2->finish; + } + + $sth1->finish; + } + + + + } + } + + $sth->finish; + +} + +# Now, the guestbooks/download activity etc.: + +# guest books from the old "studyfile activity" entries: + +$sth = $dbh->prepare(qq {SELECT a.downloadcount, a.lastdownloadtime, a.studyfile_id, s.authority, s.studyid, s.protocol FROM studyfileactivity a, study s WHERE a.study_id = s.id AND a.downloadcount > 0}); +$sth->execute(); + +$id = 1000; + +while ( @foo = $sth->fetchrow() ) +{ + my $acount = $foo[0]; + my $adownloadtime = $foo[1]; + $adownloadtime = "TIMESTAMP " . $dbh->quote($adownloadtime) if $adownloadtime; + $adownloadtime = "NULL" unless $adownloadtime; + + my $astudyfile_id = $foo[2]; + my $sauthority = $foo[3]; + my $sidentifier = $foo[4]; + my $sprotocol = $foo[5]; + + next unless $astudyfile_id; + $astudyfile_id += $dvobjectoffset; + + my $globalid = $sprotocol . ":" . $sauthority . "/" . $sidentifier; + + unless ($STUDYMAP{$globalid}) + { + next; + } + + ($dataset_id) = $STUDYMAP{$globalid}; + + for ($i = 0; $i < $acount; $i++) + { + $id++; + + if ($i == $acount - 1) + { + $downloadtime = $adownloadtime; + } + else + { + $downloadtime = "NULL"; + } + + print qq {INSERT INTO guestbookresponse (id, email, name, institution, position, responsetime, guestbook_id, datafile_id, authenticateduser_id, downloadtype, sessionid, dataset_id, datasetversion_id) VALUES ($id, NULL, 'unknown', NULL, NULL, $downloadtime, 1, $astudyfile_id, NULL, 'download', NULL, $dataset_id, NULL);} . 
"\n"; + } + +} + +$guestbook_response_id_offset = $id; + +# Migrating guestbooks: + +$sth = $dbh->prepare(qq {SELECT id, emailrequired, enabled, firstnamerequired, lastnamerequired, institutionrequired, positionrequired, vdc_id FROM guestbookquestionnaire}); +$sth->execute(); + +while ( @foo = $sth->fetchrow() ) +{ + my $gid = $foo[0]; + + if ($gid == 1) + { + # print STDERR "found guestbook with id=1!\n"; + # This is the default guestbook; we don't need to migrate it, as the + # new Dataverse 4.0 will have its own default guestbook. + next; + } + + my $gemailrequired = $foo[1] ? "TRUE" : "FALSE"; + my $genabled = $foo[2] ? "TRUE" : "FALSE"; + + my $gnamerequired = ($foo[3] || $foo[4]) ? "TRUE" : "FALSE"; + + my $ginstitutionrequired = $foo[5] ? "TRUE" : "FALSE"; + my $gpositionrequired = $foo[6] ? "TRUE" : "FALSE"; + + my $gdataverse_id = $foo[7] + 9; + + print qq {INSERT INTO guestbook (id, createtime, emailrequired, enabled, institutionrequired, name, namerequired, positionrequired, dataverse_id) VALUES ($gid, TIMESTAMP '1970-01-01 00:00:00', $gemailrequired, $genabled, $ginstitutionrequired, '', $gnamerequired, $gpositionrequired, $gdataverse_id);} . "\n"; + +} + +# Finally, migrating guestbook responses: + +$sth = $dbh->prepare(qq {SELECT r.id, r.email, r.firstname, r.institution, r.lastname, r.position, r.responsetime, r.guestbookquestionnaire_id, r.studyfile_id, r.vdcuser_id, r.downloadtype, r.sessionid, s.authority, s.studyid, s.protocol, v.versionnumber FROM guestbookresponse r, studyversion v, study s WHERE r.study_id = s.id AND r.studyversion_id = v.id}); +$sth->execute(); + + +while ( @foo = $sth->fetchrow() ) +{ + my $rid = $foo[0]; + + $rid += $guestbook_response_id_offset; + + my $remail = $dbh->quote($foo[1]); + my $rfirstname = $foo[2]; + my $rinstitution = $dbh->quote($foo[3]); + my $rlastname = $foo[4]; + my $rposition = $dbh->quote($foo[5]); + my $rresponsetime = $foo[6]; + $rresponsetime = "TIMESTAMP " . 
$dbh->quote($rresponsetime) if $rresponsetime; + $rresponsetime = "NULL" unless $rresponsetime; + my $rgbqid = $foo[7]; + my $rstudyfileid = $foo[8]; + $rstudyfileid+=$dvobjectoffset; + my $rvdcuserid = $foo[9] ? $foo[9] : "NULL"; + my $rdownloadtype = $dbh->quote($foo[10]); + my $rsessionid = $dbh->quote($foo[11]); + + my $sauthority = $foo[12]; + my $sidentifier = $foo[13]; + my $sprotocol = $foo[14]; + my $vversionnumber = $foo[15]; + + + my $globalid = $sprotocol . ":" . $sauthority . "/" . $sidentifier; + + unless ($VERSIONMAP{$globalid . "+++" . $vversionnumber}) + { + print STDERR "WARNING: No entry for " . $globalid . "+++" . $vversionnumber. "!\n"; + next; + } + + ($dataset_id, $datasetversion_id) = split ("\-", $VERSIONMAP{$globalid . "+++" . $vversionnumber}); + + unless ($dataset_id > 0 && $datasetversion_id > 0) + { + print STDERR "Invalid entry for " . $globalid . "+++" . $vversionnumber. ": " . $VERSIONMAP{$globalid . "+++" . $vversionnumber} . "!\n"; + next; + } + + my $name = ""; + $name = $rfirstname . " " if $rfirstname; + $name .= $rlastname if $rlastname; + $name = $dbh->quote($name) if $name; + $name = "NULL" unless $name; + + print qq {INSERT INTO guestbookresponse (id, email, name, institution, position, responsetime, guestbook_id, datafile_id, authenticateduser_id, downloadtype, sessionid, dataset_id, datasetversion_id) VALUES ($rid, $remail, $name, $rinstitution, $rposition, $rresponsetime, $rgbqid, $rstudyfileid, $rvdcuserid, $rdownloadtype, $rsessionid, $dataset_id, $datasetversion_id);} . "\n"; + +} + + + +$dbh->disconnect; + +close PL; + +exit 0; + +sub formatTimeStamp () { + my ($mtime) = (@_); + my ($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) = localtime($mtime); + + $year+=1900; + $mon++; + + $fmt = $year . "-" . sprintf("%02d",$mon) . "-" . sprintf("%02d",$mday) . " " . + sprintf("%02d", $hour) . ":" . sprintf("%02d",$min) . ":" . 
sprintf("%02d",$sec); + + return $fmt; +} + + + + + + diff --git a/postgresql/testdata/scripts/migration/migrate_datasets.sql b/postgresql/testdata/scripts/migration/migrate_datasets.sql new file mode 100644 index 0000000..e5b265e --- /dev/null +++ b/postgresql/testdata/scripts/migration/migrate_datasets.sql @@ -0,0 +1,74 @@ +--copy studyversion fields to datasetversion +update datasetversion + set createtime = sv.createtime, + lastupdatetime = sv.lastupdatetime, + archivetime= sv.archivetime, + archivenote = sv.archivenote, + deaccessionlink = sv.deaccessionlink, + versionnote = sv.versionnote +from _dvn3_studyversion sv, dataset d, _dvn3_study s +where d.authority = s.authority +and d.protocol = s.protocol +and d.identifier = s.studyid +and datasetversion.dataset_id = d.id +and datasetversion.versionnumber = sv.versionnumber +and sv.study_id = s.id; + +-- set dataset.publication date to the releasetime of the earliest released studyversion +update dvobject +set publicationdate = m.releasetime +from (select dvobject.id, sv.study_id, min(sv.releasetime) as releasetime +from _dvn3_studyversion sv, dataset d, _dvn3_study s, dvobject +where d.authority = s.authority +and d.protocol = s.protocol +and d.identifier = s.studyid +and dvobject.id = d.id +and sv.study_id = s.id +and sv.versionstate!='DRAFT' group by sv.study_id, dvobject.id) m where m.id = dvobject.id; + +-- set dvobject creator_id for each dataset to study.creator_id +update dvobject +set creator_id = s.creator_id, createdate = s.createtime +from _dvn3_study s, dataset d +where d.authority = s.authority +and d.protocol = s.protocol +and d.identifier = s.studyid +and dvobject.id = d.id; + +-- migrate data from _dvn3_versioncontributor to datasetversionuser +insert into datasetversionuser ( lastupdatedate, authenticateduser_id, datasetversion_id ) ( +select vc.lastupdatetime, vc.contributor_id, dv.id +from _dvn3_versioncontributor vc, +_dvn3_studyversion sv, +_dvn3_study s, +dataset d, +datasetversion dv, 
+authenticateduser au +where vc.studyversion_id = sv.id +and sv.study_id = s.id +and d.authority = s.authority +and d.protocol = s.protocol +and d.identifier = s.studyid +and dv.dataset_id = d.id +and dv.versionnumber = sv.versionnumber +and au.id = vc.contributor_id); + +-- modify versionstate for older versions of deaccessioned studies +update datasetversion +set versionstate = 'DEACCESSIONED' +where id in ( +select dv1.id from datasetversion dv1, datasetversion dv2 +where dv1.dataset_id = dv2.dataset_id +and dv1.versionnumber < dv2.versionnumber +and dv2.versionstate = 'DEACCESSIONED'); + +-- update the globalidcreatetime to be equal to the createdate, +-- as it should have been registered when the draft was created in 3.6 +update dataset set globalidcreatetime = createdate +from dvobject dvo +where dataset.id = dvo.id; + +-- set the license for all versions to be NONE by default +-- TODO: once create commands are done, this can be done in the code. +update termsofuseandaccess set license = 'NONE'; + diff --git a/postgresql/testdata/scripts/migration/migrate_dataverses.sql b/postgresql/testdata/scripts/migration/migrate_dataverses.sql new file mode 100644 index 0000000..cc415d9 --- /dev/null +++ b/postgresql/testdata/scripts/migration/migrate_dataverses.sql @@ -0,0 +1,47 @@ +---------------------- +-- subnetworks +----------------------- + +insert into dvobject ( id, owner_id, dtype, createdate, publicationdate, modificationtime, permissionmodificationtime, creator_id, releaseuser_id) + select id, 1, 'Dataverse', networkcreated, networkcreated, now(), now(), creator_id, creator_id + from _dvn3_vdcnetwork; + +insert into dataverse ( id, affiliation, alias, dataversetype, description, name, defaultcontributorrole_id, + facetroot, metadatablockroot, templateroot, guestbookroot, permissionroot, themeroot ) + select vdcn.id, affiliation, urlalias, 'UNCATEGORIZED', announcements, vdcn.name, dr.id, + false, false, false, false, true, true + from _dvn3_vdcnetwork vdcn, 
dataverserole dr + where dr.alias = 'editor'; + +-- subnetworks use the same contact e-mails as the Dataverse 4.0 root +insert into dataversecontact ( contactemail, displayorder, dataverse_id) + select dc.contactemail, dc.displayorder, _dvn3_vdcnetwork.id from dataversecontact dc, _dvn3_vdcnetwork + where dc.dataverse_id=1; + +----------------------- +-- dataverses +----------------------- + + +insert into dvobject ( id, owner_id, dtype, createdate, publicationdate, modificationtime, permissionmodificationtime, creator_id, releaseuser_id) + select id, vdcnetwork_id + 1, 'Dataverse', createddate, releasedate, now(), now(), creator_id, creator_id + from _dvn3_vdc; + +insert into dataverse ( id, affiliation, alias, dataversetype, description, name, defaultcontributorrole_id, + facetroot, metadatablockroot, templateroot, guestbookroot, permissionroot, themeroot ) + select vdc.id, affiliation, vdc.alias, 'UNCATEGORIZED', announcements, vdc.name, dr.id, + false, false, false, false, true, true + from _dvn3_vdc vdc, dataverserole dr + where dr.alias = 'editor'; + +-- this query splits the contact e-mail by , and trims both sides +insert into dataversecontact ( contactemail, displayorder, dataverse_id) + select trim(unnest(string_to_array(contactemail, ','))), 0, id from _dvn3_vdc; + + + +----------------------- +-- reset sequences +----------------------- + +SELECT setval('dvobject_id_seq', (SELECT MAX(id) FROM dvobject)); \ No newline at end of file diff --git a/postgresql/testdata/scripts/migration/migrate_links.sql b/postgresql/testdata/scripts/migration/migrate_links.sql new file mode 100644 index 0000000..9685b81 --- /dev/null +++ b/postgresql/testdata/scripts/migration/migrate_links.sql @@ -0,0 +1,44 @@ + +-- links to datasets +insert into datasetlinkingdataverse (linkingdataverse_id, dataset_id, linkcreatetime) +select c.owner_id, ds.id, now() +from _dvn3_coll_studies link, _dvn3_vdccollection c, _dvn3_study s, dataset ds +where link.vdc_collection_id=c.id +and 
link.study_id=s.id +and s.owner_id != c.owner_id --don't include if already part of this dataverse +and ds.authority = s.authority +and ds.protocol = s.protocol +and ds.identifier = s.studyid; + + +-- links to root collections (now linked to dataverses) +insert into dataverselinkingdataverse (linkingdataverse_id, dataverse_id, linkcreatetime) +select vdc_id, owner_id, now() +from _dvn3_vdc_linked_collections link, _dvn3_vdccollection c +where link.linked_collection_id=c.id +and c.parentcollection_id is null; + +-- links to other, static collections (now linked to just the studies from them) +insert into datasetlinkingdataverse (linkingdataverse_id, dataset_id, linkcreatetime) +select vdc_id, ds.id, now() +from _dvn3_vdc_linked_collections link, _dvn3_coll_studies contents, _dvn3_vdccollection c, _dvn3_study s, dataset ds +where link.linked_collection_id=c.id +and c.parentcollection_id is not null +and c.type='static' +and c.id = contents.vdc_collection_id +and contents.study_id=s.id +and s.owner_id != vdc_id -- don't include if already part of this dataverse +and ds.authority = s.authority +and ds.protocol = s.protocol +and ds.identifier = s.studyid; + + + +----------------------- +-- reset sequences +----------------------- + +SELECT setval('datasetlinkingdataverse_id_seq', (SELECT MAX(id) FROM datasetlinkingdataverse)); +SELECT setval('dataverselinkingdataverse_id_seq', (SELECT MAX(id) FROM dataverselinkingdataverse)); + + diff --git a/postgresql/testdata/scripts/migration/migrate_passwords.sql b/postgresql/testdata/scripts/migration/migrate_passwords.sql new file mode 100644 index 0000000..ec2176e --- /dev/null +++ b/postgresql/testdata/scripts/migration/migrate_passwords.sql @@ -0,0 +1,5 @@ +update builtinuser +set passwordencryptionversion = 0, +encryptedpassword= _dvn3_vdcuser.encryptedpassword +from _dvn3_vdcuser +where _dvn3_vdcuser.username=builtinuser.username; diff --git a/postgresql/testdata/scripts/migration/migrate_permissions.sql 
b/postgresql/testdata/scripts/migration/migrate_permissions.sql new file mode 100644 index 0000000..4a98d87 --- /dev/null +++ b/postgresql/testdata/scripts/migration/migrate_permissions.sql @@ -0,0 +1,122 @@ +-- reference queries for duplicate roles in vdc_role +-- (created when user accounts where merged) +-- NOTE: may need to run multiple times + +/* +select * from _dvn3_vdcrole +where vdcuser_id || '|' || vdc_id || '|' || role_id in +( +select vdcuser_id || '|' || vdc_id || '|' || role_id from _dvn3_vdcrole +group by vdcuser_id, vdc_id, role_id +having count(*) > 1 +) +order by vdcuser_id, vdc_id, role_id + + +delete from _dvn3_vdcrole where id in +( +select max(id) from _dvn3_vdcrole +group by vdcuser_id, vdc_id, role_id +having count(*) >1 +order by max(id) +) +*/ + +----------------------- +-- dataverses role assignments +----------------------- + +-- admin (from the vdcnetwork creator) +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '@'|| useridentifier, vdcn.id, dr.id + from _dvn3_vdcnetwork vdcn, authenticateduser, dataverserole dr + where vdcn.creator_id = authenticateduser.id + and dr.alias='admin'; + +-- admin +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '@'|| useridentifier, vdc_id, dr.id + from _dvn3_vdcrole, authenticateduser, dataverserole dr + where _dvn3_vdcrole.vdcuser_id = authenticateduser.id + and role_id=3 and dr.alias='admin'; +-- curator +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '@'|| useridentifier, vdc_id, dr.id + from _dvn3_vdcrole, authenticateduser, dataverserole dr + where _dvn3_vdcrole.vdcuser_id = authenticateduser.id + and role_id=2 and dr.alias='curator'; +-- contributor +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '@'|| useridentifier, vdc_id, dr.id + from _dvn3_vdcrole, authenticateduser, dataverserole dr + where _dvn3_vdcrole.vdcuser_id = authenticateduser.id 
+ and role_id=1 and dr.alias='dsContributor'; +-- member +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '@'|| useridentifier, vdc_id, dr.id + from _dvn3_vdcrole, authenticateduser, dataverserole dr + where _dvn3_vdcrole.vdcuser_id = authenticateduser.id + and role_id=4 and dr.alias='member'; + +-- groups (as members) +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '&'|| groupalias, vdcs_id, dr.id + from _dvn3_vdc_usergroup, explicitgroup, dataverserole dr + where _dvn3_vdc_usergroup.allowedgroups_id = explicitgroup.id + and dr.alias='member'; + +----------------------- +-- dataset role assignments +----------------------- + +-- contributor (from the study creator) +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '@'|| useridentifier, ds.id, dr.id + from _dvn3_study s, authenticateduser, dataverserole dr, dataset ds + where s.creator_id = authenticateduser.id + and ds.authority = s.authority + and ds.protocol = s.protocol + and ds.identifier = s.studyid + and dr.alias='editor'; + +-- member +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '@'|| useridentifier, ds.id, dr.id + from _dvn3_study_vdcuser, _dvn3_study s, authenticateduser, dataverserole dr, dataset ds + where _dvn3_study_vdcuser.allowedusers_id = authenticateduser.id + and _dvn3_study_vdcuser.studies_id = s.id + and ds.authority = s.authority + and ds.protocol = s.protocol + and ds.identifier = s.studyid + and dr.alias='member'; + +-- groups (as members) +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '&'|| groupalias, ds.id, dr.id + from _dvn3_study_usergroup, _dvn3_study s, explicitgroup, dataverserole dr, dataset ds + where _dvn3_study_usergroup.allowedgroups_id = explicitgroup.id + and _dvn3_study_usergroup.studies_id = s.id + and ds.authority = s.authority + and ds.protocol = s.protocol + and 
ds.identifier = s.studyid + and dr.alias='member'; + +----------------------- +-- file role assignments +----------------------- + +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '@'|| useridentifier, studyfiles_id, dr.id + from _dvn3_studyfile_vdcuser, authenticateduser, dataverserole dr + where _dvn3_studyfile_vdcuser.allowedusers_id = authenticateduser.id + and _dvn3_studyfile_vdcuser.studyfiles_id in (select id from datafile) + and dr.alias='fileDownloader'; + +insert into roleassignment ( assigneeidentifier, definitionpoint_id, role_id) + select '&'|| groupalias, studyfiles_id, dr.id + from _dvn3_studyfile_usergroup, explicitgroup, dataverserole dr + where _dvn3_studyfile_usergroup.allowedgroups_id = explicitgroup.id + and _dvn3_studyfile_usergroup.studyfiles_id in (select id from datafile) + and dr.alias='fileDownloader'; + + diff --git a/postgresql/testdata/scripts/migration/migrate_to_workflows.sql b/postgresql/testdata/scripts/migration/migrate_to_workflows.sql new file mode 100644 index 0000000..e1590f3 --- /dev/null +++ b/postgresql/testdata/scripts/migration/migrate_to_workflows.sql @@ -0,0 +1,76 @@ +------------ +-- Migrate the database to the workflow-enabled version +------------ + +------------ +-- Add new workflows-related tables +------------ + +CREATE TABLE WORKFLOW (ID SERIAL NOT NULL, NAME VARCHAR(255), PRIMARY KEY (ID)); + +CREATE TABLE WORKFLOWSTEPDATA (ID SERIAL NOT NULL, + PROVIDERID VARCHAR(255), + STEPTYPE VARCHAR(255), + PARENT_ID BIGINT, + index INTEGER, PRIMARY KEY (ID)); + +CREATE TABLE PENDINGWORKFLOWINVOCATION ( INVOCATIONID VARCHAR(255) NOT NULL, + DOIPROVIDER VARCHAR(255), + IPADDRESS VARCHAR(255), + NEXTMINORVERSIONNUMBER BIGINT, + NEXTVERSIONNUMBER BIGINT, + PENDINGSTEPIDX INTEGER, + TYPEORDINAL INTEGER, + USERID VARCHAR(255), + WORKFLOW_ID BIGINT, + DATASET_ID BIGINT, + PRIMARY KEY (INVOCATIONID)); + +CREATE TABLE WorkflowStepData_STEPPARAMETERS (WorkflowStepData_ID BIGINT, + 
STEPPARAMETERS VARCHAR(2048), + STEPPARAMETERS_KEY VARCHAR(255)); + +CREATE TABLE PendingWorkflowInvocation_LOCALDATA (PendingWorkflowInvocation_INVOCATIONID VARCHAR(255), + LOCALDATA VARCHAR(255), + LOCALDATA_KEY VARCHAR(255)); + +ALTER TABLE WORKFLOWSTEPDATA + ADD CONSTRAINT FK_WORKFLOWSTEPDATA_PARENT_ID + FOREIGN KEY (PARENT_ID) REFERENCES WORKFLOW (ID); + +ALTER TABLE PENDINGWORKFLOWINVOCATION + ADD CONSTRAINT FK_PENDINGWORKFLOWINVOCATION_WORKFLOW_ID + FOREIGN KEY (WORKFLOW_ID) REFERENCES WORKFLOW (ID); + +ALTER TABLE PENDINGWORKFLOWINVOCATION + ADD CONSTRAINT FK_PENDINGWORKFLOWINVOCATION_DATASET_ID + FOREIGN KEY (DATASET_ID) REFERENCES DVOBJECT (ID); + +ALTER TABLE WorkflowStepData_STEPPARAMETERS + ADD CONSTRAINT FK_WorkflowStepData_STEPPARAMETERS_WorkflowStepData_ID + FOREIGN KEY (WorkflowStepData_ID) REFERENCES WORKFLOWSTEPDATA (ID); + +ALTER TABLE PendingWorkflowInvocation_LOCALDATA + ADD CONSTRAINT PndngWrkflwInvocationLOCALDATAPndngWrkflwInvocationINVOCATIONID + FOREIGN KEY (PendingWorkflowInvocation_INVOCATIONID) REFERENCES PENDINGWORKFLOWINVOCATION (INVOCATIONID); + + +------------ +-- Add lockReason field to Dataset/DatasetVersion +------------ +TBC + +------------ +-- Validate there are no double-reason locks (??) 
+------------ +TBC + +------------ +-- Convert from boolean lock reasons to the enum-based one +------------ +TBC + +------------ +-- Delete lock reasons columns +------------ +TBC diff --git a/postgresql/testdata/scripts/migration/migrate_users.sql b/postgresql/testdata/scripts/migration/migrate_users.sql new file mode 100644 index 0000000..965675f --- /dev/null +++ b/postgresql/testdata/scripts/migration/migrate_users.sql @@ -0,0 +1,44 @@ +----------------------- +-- users +----------------------- + +insert into builtinuser( id, affiliation, email, firstname, lastname, position, username) + select id, institution, email, firstname, lastname, position, username + from _dvn3_vdcuser; + +insert into authenticateduser( id, affiliation, email, firstname, lastname, position, useridentifier, superuser) + select id, institution, email, firstname, lastname, position, username, false + from _dvn3_vdcuser; + +---------------------- +--use the below instead of the above query for migrating to 4.7.1 and above +--------------------- +--insert into authenticateduser(id, affiliation, email, firstname, lastname, position, useridentifier, superuser,createdtime) +-- select id, institution, email, firstname, lastname, position, username, false, '01-01-2000 00:00:00' +-- from _dvn3_vdcuser; + +insert into authenticateduserlookup( authenticationproviderid, persistentuserid, authenticateduser_id) + select 'builtin', username, id + from _dvn3_vdcuser; + +----------------------- +-- groups +----------------------- + +-- only copy over groups that have users +insert into explicitgroup( id, description, displayname, groupalias, groupaliasinowner, owner_id) + select id, friendlyname, friendlyname, '1-'||name, name, 1 + from _dvn3_usergroup + where id in (select usergroups_id from _dvn3_vdcuser_usergroup); + +insert into explicitgroup_authenticateduser( explicitgroup_id, containedauthenticatedusers_id) + select usergroups_id, users_id + from _dvn3_vdcuser_usergroup; + 
+----------------------- +-- reset sequences +----------------------- + +SELECT setval('builtinuser_id_seq', (SELECT MAX(id) FROM builtinuser)); +SELECT setval('authenticateduser_id_seq', (SELECT MAX(id) FROM authenticateduser)); +SELECT setval('explicitgroup_id_seq', (SELECT MAX(id) FROM explicitgroup)); diff --git a/postgresql/testdata/scripts/migration/migration_instructions.txt b/postgresql/testdata/scripts/migration/migration_instructions.txt new file mode 100644 index 0000000..e352020 --- /dev/null +++ b/postgresql/testdata/scripts/migration/migration_instructions.txt @@ -0,0 +1,167 @@ +Migration steps: + +Assumptions: + +- DVN 3.6 networkAdmin has id = 1 +- Dataverse 4.0 admin has id = 1 (created by setup-all.sh script) + + +Pre steps (contained in the migration_presteps document): + +-7. Make a copy of the production db, and point an app server to it +-6. (if there is any data that will fail validation, run scrubbing script - this will need to be custom per installation) +-5.9 run duplicate user scrubbing scripts +-5.8 run users as emails scripts +-5. Export DDI files from 3.6 copy for all datasets to be migrated + (this now includes exporting non-released versions - presteps doc. updated) +-4. Create copies of tables in 3.6 database for migrated data +-3. Run pg dump to extract tables copies +-2. Import copied tables into 4.0 database +-1. Run offsets on _dvn3_tables in the 4.0 DB + +Migration: + +1. run migrate_users.sql script + If you are migrating to 4.7.1 or above check the comment around line 14 of the migrate_users.sql script +2. run migrate_dataverses.sql script +2a. migrate preloaded customizations +3. run custom_field_map.sql script (this must be updated to contain the custom field mappings specific to + the migration source installation.) +4. 
run dataset APIs: execute the following HTTP request on the Dataverse 4.0 application to initiate dataset migration: + + http:///api/batch/migrate?path=&key= + + This will return a success message and begin an asynchronous migration job - the status of the job is viewable in the import-log file + in the Glassfish logs directory. + +5. run migrate_datasets.sql script (post migration scrubbing) + +6. Run file migration scripts: + +Before you can run these scripts, edit the files +files_destination_step1_ and files_source, and modify the following +lines at the top to be able to access your new (4.*) and old (3.*) +databases, respectively: + +my $host = "localhost"; +my $username = "xxxxx"; +my $database = "xxxxx"; +my $password = "xxxxx"; + +a. On the *destination* (4.0) server, step 1 +run the script, and save the output: + +./files_destination_step1_ > migrated_datasets.txt + +The script will also print the following message on +the stderr output (for example): + +last ID in DVOBJECT table: 12345 + +- you will need to use this number as a parameter in the +next step, below. + +b. On the *source* (3.6) server - +run the script on the input produced in a., +save the sql output: + +cat migrated_datasets.txt | ./files_source_ > files_import.sql + +where is the "last ID ..." from step a. + +If you are migrating to 4.6 or above check the comment around line 192 of files_source_ script +This script may produce a lot of stderr output, that you may want to save. +You can do that by running it as + +cat migrated_datasets.txt | ./files_source_ > files_import.sql 2>datafiles.sql.out + +(bash shell assumed) + +The script will also produce the output file packlist.txt, +that you *may* need to use in step d., below. + +c. On the destination server, import the sql produced in b.: + +psql -d -U -f files_import.sql + +d. [OPTIONAL] You can continue using your existing, DVN 3* files +driectory. In this case, this step can be omitted. 
But if you want to +preserve the DVN 3* directory and copy the files to the new Dataverse +4 server, you'll need to package the files on the source server, using +the files packlist.txt created in the step b.: + +tar cvzf packedfiles.tgz `cat packlist.txt` + +e. [OPTIONAL] If you are moving the files, unpack the files packaged +in the step d. on the destination server: + +cd +tar xvzf packedfiles.tgz + +7. run migrate_permissions.sql script (may need to delete some duplicates) + +8. run migrate_links.sql script + +10. reset sequences: + +sequence_script.sql + +11. Add publication dates to the migrated datafiles: + +datafile_pub_date.sql + +12. (when ready for users to log in) add user passwords + +migrate_passwords.sql + +__________________________________________________ + +Not being migrated (verify?): +-- Study Comments +-- File Access requests +-- Classifications +-- Study locks +-- VDCNetworkStats (generated data) + + +Post-migration tasks. +==================== + +If you have global IDs (handles or DOIs) registered, you may need to +re-register them. (Even if your Dataverse 4.0 is staying on the same +server as your old DVN 3* installation, the URLs of the study pages +have changed: what used to be /dvn/study?globalId=... is now +/dataset.xhtml?persistentId=...; this can be taken care of with URL +rewrite rules, but it may be cleaner to just update the registered +URLs for all your global identifiers). + +To update your registered handles: + +Generate the list of the database IDs of all your *released* datasets +with Handle global ids, and/or *all* your datasets with DOI ids. +(exercise for the reader). + +Use the modifyRegistration API call to update the registration for these datasets. +You can do something like + +cat list_of_db_ids.txt | while read dbid +do + curl 'http://localhost:8080/api/datasets/'$dbid'/modifyRegistration?key=' + echo +done + +TODO: + +script the above; make it less of an exercise for the reader. 
+ +TODO: + +explain how to transfer the Handles configuration from DVN 3 to Dataverse 4. + +TODO: + +check with Steve and Raman if the above is actually going to work for DOIs. +(or if anything special needs to be done first...) + + + diff --git a/postgresql/testdata/scripts/migration/migration_presteps.txt b/postgresql/testdata/scripts/migration/migration_presteps.txt new file mode 100644 index 0000000..df2be49 --- /dev/null +++ b/postgresql/testdata/scripts/migration/migration_presteps.txt @@ -0,0 +1,144 @@ +---------------------------------------------- +-- Preparing the DDIs of production studies: +---------------------------------------------- + +All the existing DVN studies need to be re-exported *using the +specially modified version of the DVN3 DDI Export Servlet*. (This +version of the servlet exports not just the published, but all the +versions of each study). This servlet is NOT available in the DVN +v3.6.2, the last officially-released version. So please download the +specially-patched version of the DVN 3.6 war file: + +http://sourceforge.net/projects/dvn/files/dvn/3.6.2/DVN-web_v3_6_3_MIGRATION.war/download + +and deploy it on your DVN server (instead of the version 3.6.2 you are currently running). + +IMPORTANT: +---------- + +Remote access to the DDI Export servlet is restricted by default. +Access on the localhost interface is open however. So the easiest way +to perform the export is to run the script in the next step on the +same host where the DVN 3.* application is running. (And use +"http://localhost/dvn/ddi" for the export servlet URL parameter +there). + +If you must run the script on a different system, you can grant that +host access to the servlet by setting the following JVM option in +Glassfish 3 where the DVN app is running: + +-Dvdc.dsb.host= + +and restart glassfish. + + + +2. Run the script ./versions_source_. + +It will go through the list of the studies in the prod. db and issue a +call to the export servlet. 
The resulting DDIs will be saved in the +directory ./ddi. You will need to give the complete path of this +directory to the Dataverse 4 import process. + +Before you run the script, modify the following 3 lines at the top: + +my $host = "xxxxx"; +my $database = "xxxxx"; +my $username = "xxxxx"; +my $password = 'xxxxx'; + +To reflect your DVN 3 database location and credentials. Make sure you +can access the database from the host on which you'll be running this +script. + +Run it as follows: + +./versions_source_ "http:///dvn/ddi" "" + +The 2 arguments the script takes: + + - the URL of the DVN 3 Export Servlet; + - your local name space. + +For example: + +./versions_source_ http://localhost/dvn/ddi 1902.1 + +---------------------------------------------- +-- On 3.6 database, run the following to create copies of needed tables +---------------------------------------------- + +-- users / groups +-- ignore network admin (assumes id of 1) +create table _dvn3_vdcuser as select * from vdcuser where id != 1; +create table _dvn3_usergroup as select * from usergroup; +create table _dvn3_vdcuser_usergroup as select * from vdcuser_usergroup; + +-- dataverse networks / dataverses +-- ignore the root network +create table _dvn3_vdcnetwork as select * from vdcnetwork where id != 0; +create table _dvn3_vdc as select * from vdc; + +-- studies (for reference) +create table _dvn3_study as select * from study +-- where owner_id in (select id from _dvn3_vdc) +; + +create table _dvn3_studyversion as select * from studyversion +-- where study_id in (select id from _dvn3_study) +; + +create table _dvn3_versioncontributor as select * from versioncontributor +-- where studyversion_id in (select id from _dvn3_studyversion) +; + +-- collections (for reference) +create table _dvn3_vdccollection as select * from vdccollection; + +-- permissions +create table _dvn3_vdcrole as select * from vdcrole; +create table _dvn3_vdc_usergroup as select * from vdc_usergroup; + +create table 
_dvn3_study_vdcuser as select * from study_vdcuser; +create table _dvn3_study_usergroup as select * from study_usergroup; + +create table _dvn3_studyfile_vdcuser as select * from studyfile_vdcuser; +create table _dvn3_studyfile_usergroup as select * from studyfile_usergroup; + +-- links +create table _dvn3_coll_studies as select * from coll_studies; +create table _dvn3_vdc_linked_collections as select * from vdc_linked_collections; + + +---------------------------------------------- +-- run pg_dump to extract temp tables +---------------------------------------------- + +pg_dump -h localhost -U postgres <3.6 database name> -t _dvn3_* -f /tmp/dvn3_data.sql + +---------------------------------------------- +-- import temp tables into 4.0 db +---------------------------------------------- + +psql -h localhost -U postgres <4.0 database name> -f /tmp/dvn3_data.sql + +---------------------------------------------- +-- Run offsets on _dvn3_tables in the 4.0 DB +---------------------------------------------- + +-- offsets +update _dvn3_vdcnetwork set id = id + (select coalesce(max(id), 0) from dvobject); +update _dvn3_vdc set id = id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork); +update _dvn3_vdcrole set vdc_id = vdc_id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork); +update _dvn3_vdc_usergroup set vdcs_id = vdcs_id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork); +update _dvn3_vdc_linked_collections set vdc_id = vdc_id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork); +update _dvn3_study set owner_id = owner_id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork); +update _dvn3_vdccollection set owner_id = owner_id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork); + +-- note: need to determine what offset to use, based on the file scripts +--update _dvn3_studyfile_vdcuser set studyfiles_id = studyfiles_id +100000; +--update _dvn3_studyfile_usergroup set studyfiles_id = studyfiles_id + 100000; + + + + diff --git 
a/postgresql/testdata/scripts/migration/scrub_duplicate_emails.sql b/postgresql/testdata/scripts/migration/scrub_duplicate_emails.sql new file mode 100644 index 0000000..0599c20 --- /dev/null +++ b/postgresql/testdata/scripts/migration/scrub_duplicate_emails.sql @@ -0,0 +1,497 @@ +-------------------- +--REFERENCE QUERIES +-------------------- +/* +-- Query to list all user acocunts with duplicate e-mails +select id, username, lower(email) from vdcuser +where lower(email) in ( +select lower(email) from vdcuser +group by lower(email) +having count(*) > 1 +) +order by email + +-- Query to list all e-mails that have are duplicated (total = # of actual users, without duplicates) +select lower(email), count(*) from vdcuser +group by lower(email) +having count(*) > 1 +order by count(*) desc + +-- Query to list all e-mails that have are duplicated and reference to original account (account with lowest id) +select u1.id, u1.username, u1.active,u1.email, u2.id, u2.username, u2.active +from vdcuser u1, vdcuser u2 +where 1=1 +and u1.id != u2.id +and lower(u1.email) = lower(u2.email) +and lower(u1.email) in ( +select lower(email) from vdcuser +group by lower(email) +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by lower(email) +having count(*) > 1 +) +order by lower(u1.email) + +-- Delete query, to be run after all the updates +delete from vdcuser where id in ( +select u1.id +from vdcuser u1, vdcuser u2 +where 1=1 +and u1.id != u2.id +and lower(u1.email) = lower(u2.email) +and lower(u1.email) in ( +select lower(email) from vdcuser +group by lower(email) +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by lower(email) +having count(*) > 1 +) +) + +*/ +-------------------- +--UPDATE QUERIES +-------------------- +-- these queries will update the foreign key references in all (relevant) tables to the orignal account +-- +-- Generated by +/* +SELECT tc.table_schema, tc.constraint_name, tc.table_name, kcu.column_name, 
ccu.table_name AS foreign_table_name,ccu.column_name AS foreign_column_name, +'update ' || tc.table_name || ' ref set ' || kcu.column_name || ' = u2.id +from vdcuser u1, vdcuser u2 +where ref.' || kcu.column_name || ' = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +);' as query +FROM information_schema.table_constraints tc +JOIN information_schema.key_column_usage kcu ON tc.constraint_name = kcu.constraint_name +JOIN information_schema.constraint_column_usage ccu ON ccu.constraint_name = tc.constraint_name +WHERE constraint_type = 'FOREIGN KEY' AND ccu.table_name='vdcuser'; +*/ +-- +-- +-- if any of the below fail because of duplicate constraints, you will need to first delete the duplicates +-- here is a sample query for deleting the duplicate entries from studyfile_vdcuser (the most likey to fail)) +/* +delete from studyfile_vdcuser +where allowedusers_id || '_' || studyfiles_id in ( +select u1.id || '_' || fu1.studyfiles_id +from vdcuser u1, vdcuser u2, studyfile_vdcuser fu1, studyfile_vdcuser fu2 +where 1=1 +and fu1.studyfiles_id = fu2.studyfiles_id +and fu1.allowedusers_id = u1.id +and fu2.allowedusers_id = u2.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser -- also may need to run with max(id) or some other combinations! 
+group by email +having count(*) > 1 +) +) +*/ + + +update flagged_study_comments ref set user_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.user_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update guestbookresponse ref set vdcuser_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.vdcuser_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update harvestingdataverse_vdcuser ref set allowedfileusers_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.allowedfileusers_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update networkrolerequest ref set vdcuser_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.vdcuser_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update rolerequest ref set vdcuser_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.vdcuser_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update studyaccessrequest ref set vdcuser_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.vdcuser_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select 
min(id) from vdcuser +group by email +having count(*) > 1 +); + +update studycomment ref set commentcreator_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.commentcreator_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update study ref set creator_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.creator_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update studyfile_vdcuser ref set allowedusers_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.allowedusers_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update study ref set lastupdater_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.lastupdater_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update studylock ref set user_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.user_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update studyrequest ref set vdcuser_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.vdcuser_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group 
by email +having count(*) > 1 +); + +update study ref set reviewer_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.reviewer_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update study_vdcuser ref set allowedusers_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.allowedusers_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update vdc ref set creator_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.creator_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update vdc_fileuser ref set allowedfileusers_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.allowedfileusers_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update vdcnetwork ref set creator_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.creator_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update vdcnetwork ref set defaultnetworkadmin_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.defaultnetworkadmin_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email 
+having count(*) > 1 +); + +update vdc ref set reviewer_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.reviewer_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update vdcrole ref set vdcuser_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.vdcuser_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update vdcuser_studycomment ref set vdcuser_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.vdcuser_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update vdcuser_usergroup ref set users_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.users_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update vdc_fileuser ref set allowedfileusers_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.allowedfileusers_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having count(*) > 1 +); + +update versioncontributor ref set contributor_id = u2.id +from vdcuser u1, vdcuser u2 +where ref.contributor_id = u1.id +and u1.id != u2.id +and u1.email = u2.email +and u1.email in ( +select email from vdcuser +group by email +having count(*) > 1 +) +and u2.id in ( +select min(id) from vdcuser +group by email +having 
count(*) > 1 +); diff --git a/postgresql/testdata/scripts/migration/scrub_email_usernames.sql b/postgresql/testdata/scripts/migration/scrub_email_usernames.sql new file mode 100644 index 0000000..04ed83d --- /dev/null +++ b/postgresql/testdata/scripts/migration/scrub_email_usernames.sql @@ -0,0 +1,28 @@ +-- first, find users with e-mails as usernames +select id, username, email from vdcuser where username like '%@%' +--and username != email; + +-- then find which those which would create duplicates after truncating +-- (verify that adding 1 would be OK; if not, you may need to update some individually) +select u1.id, u1.username, u2.id, u2.username from vdcuser u1, vdcuser u2 +where u1.id != u2.id +and u1.username like '%@%' +and split_part (u1.username, '@', 1) = u2.username + +-- for those usernames, truncate and add 1, so no duplicates +update vdcuser set username = split_part (username, '@', 1) ||'1' +where id in ( +select u1.id from vdcuser u1, vdcuser u2 +where u1.id != u2.id +and u1.username like '%@%' +and split_part (u1.username, '@', 1) = u2.username +) + +-- now truncate the rest +update vdcuser set username = split_part (username, '@', 1) where username like '%@%' + +-- confirm no duplicates +select id, username, email from vdcuser where username in ( +select username from vdcuser +group by username having count(*) > 1 +) \ No newline at end of file diff --git a/postgresql/testdata/scripts/migration/sequence_script.sql b/postgresql/testdata/scripts/migration/sequence_script.sql new file mode 100644 index 0000000..73f7661 --- /dev/null +++ b/postgresql/testdata/scripts/migration/sequence_script.sql @@ -0,0 +1,9 @@ +SELECT setval('datafile_id_seq', (SELECT MAX(id) FROM datafile)); +SELECT setval('datafilecategory_id_seq', (SELECT MAX(id) FROM datafilecategory)); +SELECT setval('datatable_id_seq', (SELECT MAX(id) FROM datatable)); +SELECT setval('datavariable_id_seq', (SELECT MAX(id) FROM datavariable)); +SELECT setval('dvobject_id_seq', (SELECT MAX(id) 
FROM dvobject)); +SELECT setval('filemetadata_id_seq', (SELECT MAX(id) FROM filemetadata)); +SELECT setval('variablecategory_id_seq', (SELECT MAX(id) FROM variablecategory)); +SELECT setval('guestbook_id_seq', (SELECT MAX(id) FROM guestbook)); +SELECT setval('guestbookresponse_id_seq', (SELECT MAX(id) FROM guestbookresponse)); diff --git a/postgresql/testdata/scripts/migration/versions_source_ b/postgresql/testdata/scripts/migration/versions_source_ new file mode 100755 index 0000000..872c7e5 --- /dev/null +++ b/postgresql/testdata/scripts/migration/versions_source_ @@ -0,0 +1,115 @@ +#!/usr/bin/perl + +my $url = shift @ARGV; +my $local_namespace = shift @ARGV; + +unless ( $url =~ /^http:/ && $local_namespace ne "" ) +{ + print STDERR "Usage: ./versions_source_ \n"; + exit 1; +} + +use DBI; + +my $host = "xxxxx"; +my $database = "xxxxx"; +my $username = "xxxxx"; +my $password = 'xxxxx'; + +my $dbh = DBI->connect("DBI:Pg:dbname=$database;host=$host",$username,$password); + +unless ( -d "ddi" ) +{ + mkdir "ddi"; +} + +my $sth; + +$sth = $dbh->prepare(qq {SELECT s.id, s.authority, s.studyid, v.alias FROM study s, vdc v WHERE s.owner_id = v.id}); + +$sth->execute(); + +my $sid; + +$studycounter = 0; +$releasedcounter = 0; +$extraversioncounter = 0; +$versioncounter = 0; + +while ( @foo = $sth->fetchrow() ) +{ + $sid = $foo[0]; + $authority = $foo[1]; + $studyid = $foo[2]; + $dvalias = $foo[3]; + + + unless ($authority eq $local_namespace ) { + next; + } + + unless ( -d "ddi/$dvalias" ) + { + mkdir "ddi/$dvalias"; + } + + + my $sth1; + + $sth1 = $dbh->prepare(qq {SELECT versionnumber,versionstate FROM studyversion v WHERE study_id=$sid}); + $sth1->execute(); + + my $vn; + + $localcounter = 0; + + while ( @bar = $sth1->fetchrow() ) + { + $vn = $bar[0]; + $vstate = $bar[1]; + + if ($vstate eq "RELEASED") + { + print STDERR "executing: wget -O ddi/" . $dvalias . "/" . $sid . ".xml '" . $url . "?studyId=" . $sid . "&versionNumber=" . $vn . "'\n"; + system "wget -O ddi/" . 
$dvalias . "/" . $sid . ".xml '" . $url . "?studyId=" . $sid . "&versionNumber=" . $vn . "'\n"; + $versioncounter++; + $releasedcounter++; + + } + else + { + print STDERR "executing: wget -O ddi/" . $dvalias . "/" . $sid . "-" . $vn . ".xml '" . $url . "?studyId=" . $sid . "&versionNumber=" . $vn . "'\n"; + system "wget -O ddi/" . $dvalias . "/" . $sid . "-" . $vn . ".xml '" . $url . "?studyId=" . $sid . "&versionNumber=" . $vn . "'\n"; + $versioncounter++; + $localcounter++; + } + } + $sth1->finish; + $studycounter++; + $extraversioncounter++ if $localcounter; + + unless ($studycounter % 1000) + { + print STDERR "\n" . $studycounter . " studies processed.\n"; + print STDERR $releasedcounter . " released versions;\n"; + print STDERR $extraversioncounter . " had versions other than released;\n"; + print STDERR "Total " . $versioncounter . " versions processed.\n"; + } +} + +print STDERR "\n" . $studycounter . " studies processed.\n"; +print STDERR $releasedcounter . " released versions;\n"; +print STDERR $extraversioncounter . " had versions other than released;\n"; +print STDERR "Total " . $versioncounter . " versions processed.\n"; + + +$sth->finish; + +$dbh->disconnect; + +exit 0; + + + + + diff --git a/postgresql/testdata/scripts/migration/versions_source_step2_ b/postgresql/testdata/scripts/migration/versions_source_step2_ new file mode 100755 index 0000000..9e15243 --- /dev/null +++ b/postgresql/testdata/scripts/migration/versions_source_step2_ @@ -0,0 +1,40 @@ +#!/usr/bin/perl + + +unless ( -d "/tmp/ddi" ) +{ + mkdir "/tmp/ddi"; +} + +while (<>) +{ + chop; + @_ = split ("\t"); + $alias = $_[0]; + $studyid = $_[1]; + $ddifile = $_[2]; + + $ddifile = "/nfs/iqss/DVN/data/" . $ddifile; + + if ( -f $ddifile ) + { + $total += (stat($study))[7]; + $tmpdir = "/tmp/ddi/" . $alias; + unless ( -d $tmpdir ) + { + mkdir $tmpdir; + } + + $tmpfile = $tmpdir . "/" . $studyid . 
"\.xml"; + system "cp $ddifile $tmpfile"; + + } + else + { + print STDERR "warning: missing ddi file! (" . $ddifile . ")\n"; + } +} + +print "Total of " . $total . " bytes copied.\n"; + + diff --git a/postgresql/testdata/scripts/rapache/build.sh b/postgresql/testdata/scripts/rapache/build.sh new file mode 100755 index 0000000..fc48237 --- /dev/null +++ b/postgresql/testdata/scripts/rapache/build.sh @@ -0,0 +1,10 @@ +#!/bin/sh +mkdir -p ~/rpmbuild/SOURCES +mkdir -p ~/rpmbuild/SPECS +wget https://github.com/jeffreyhorner/rapache/archive/v1.2.7.tar.gz -O rapache-1.2.7.tar.gz +tar xzvf rapache-1.2.7.tar.gz rapache-1.2.7/rpm/rapache.spec --strip-components 2 +# Move to build dirs +cp -f rapache-1.2.7.tar.gz ~/rpmbuild/SOURCES/ +cp -f rapache.spec ~/rpmbuild/SPECS/ +cd ~ +rpmbuild -ba ~/rpmbuild/SPECS/rapache.spec diff --git a/postgresql/testdata/scripts/search/.gitignore b/postgresql/testdata/scripts/search/.gitignore new file mode 100644 index 0000000..2360214 --- /dev/null +++ b/postgresql/testdata/scripts/search/.gitignore @@ -0,0 +1,4 @@ +data/in/users +data/in/dv-birds1 +data/in/dv-trees1 +data/in/dv-psi diff --git a/postgresql/testdata/scripts/search/add b/postgresql/testdata/scripts/search/add new file mode 100755 index 0000000..ec6355a --- /dev/null +++ b/postgresql/testdata/scripts/search/add @@ -0,0 +1,23 @@ +#!/bin/sh +# need the following in solr/collection1/conf/schema.xml +# +# +mkdir -p data +#echo "adding to solr..." +curl -s http://localhost:8080/api/dataverses > data/dataverses.json +#curl http://localhost:8983/solr/update/json?commit=true -H 'Content-type:application/json' --data-binary @data/dataverses.json + +curl -s http://localhost:8080/api/datasets > data/datasets.json +#curl http://localhost:8983/solr/update/json?commit=true -H 'Content-type:application/json' --data-binary @data/datasets.json + +echo "adding to elasticsearch..." 
+#curl -XPOST http://localhost:9200/dataverse/datasets/1 --data-binary @data/datasets/1.dump +for type in dataverses datasets; do + mkdir -p data/$type + for i in `./json2ids data/$type.json`; do + #echo "adding $i from $type..." + curl -s http://localhost:8080/api/$type/$i/dump > data/$type/$i.dump + curl -XPOST "http://localhost:9200/dataverse/$type/$i" --data-binary @data/$type/$i.dump + echo + done; +done diff --git a/postgresql/testdata/scripts/search/assumptions b/postgresql/testdata/scripts/search/assumptions new file mode 100755 index 0000000..7ac655c --- /dev/null +++ b/postgresql/testdata/scripts/search/assumptions @@ -0,0 +1,24 @@ +#!/bin/bash +# `source path/to/this/file` to get the same assumptions :) +export ADMIN_ROLE=1 +export ROOT_DATAVERSE=1 +export DV_CONTRIBUTOR_ROLE=4 +export BIRDS_DATAVERSE=`grep '"alias":"birds"' /tmp/bird-dvs1 | jq .data.id` +export SPRUCE_DATAVERSE=`grep '"alias":"spruce"' /tmp/tree-dvs1 | jq .data.id` +export SPRUCE_USERNAME='@spruce' +export FINCH_USERNAME='@finch' + +export SPRUCE_ADMIN_ON_BIRDS=`curl -s "http://localhost:8080/api/dataverses/$BIRDS_DATAVERSE/assignments?key=$FINCHKEY" | jq ".data[] | select(.assignee==\"$SPRUCE_USERNAME\") | .id"` + +export FINCH_ADMIN_ON_SPRUCE=`curl -s "http://localhost:8080/api/dataverses/$SPRUCE_DATAVERSE/assignments?key=$SPRUCEKEY" | jq .data[1].id` + +export FIRST_SPRUCE_DOI=`curl --insecure -s -u spruce:spruce https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/collection/dataverse/spruce | xmllint -format - | xmlstarlet sel -t -v '//_:id' 2>/dev/null | cut -d'/' -f11,12,13` + +export FIRST_SPRUCE_DATASET_ID=`curl -s "http://localhost:8080/api/dataverses/spruce/contents?key=$SPRUCEKEY" | jq '.data[0].id'` + +export FIRST_SPRUCE_DOI=doi:10.5072/FK2/`curl -s "http://localhost:8080/api/datasets/$FIRST_SPRUCE_DATASET_ID?key=$SPRUCEKEY" | jq .data.identifier | tr -d \"` + +export FIRST_SPRUCE_FILE=`scripts/api/data-deposit/show-statement $FIRST_SPRUCE_DOI 2>/dev/null | xmlstarlet 
sel -t -v '//_:feed/_:entry/_:id' 2>/dev/null | cut -d '/' -f11` + +export FIRST_FINCH_DOI=`curl --insecure -s -u finch:finch https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/collection/dataverse/finches | xmllint -format - | xmlstarlet sel -t -v '//_:id' 2>/dev/null | cut -d'/' -f11,12,13` +export FIRST_FINCH_DATASET_ID=`curl -s "http://localhost:8080/api/dataverses/finches/contents?key=$FINCHKEY" | jq '.data[0].id'` diff --git a/postgresql/testdata/scripts/search/clear b/postgresql/testdata/scripts/search/clear new file mode 100755 index 0000000..5908e9f --- /dev/null +++ b/postgresql/testdata/scripts/search/clear @@ -0,0 +1,5 @@ +#!/bin/sh +echo "deleting all data from Solr" +curl http://localhost:8983/solr/update/json?commit=true -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"*:*\"}}" +# this was for elasticsearch +#curl -XDELETE http://localhost:9200/dataverse/ diff --git a/postgresql/testdata/scripts/search/compare b/postgresql/testdata/scripts/search/compare new file mode 100755 index 0000000..9dc8cc6 --- /dev/null +++ b/postgresql/testdata/scripts/search/compare @@ -0,0 +1,19 @@ +#!/bin/bash +DIR=/tmp/searchusers +FINCHKEY=`cat $DIR/1 | jq .data.apiToken | tr -d \"` +SPRUCKEY=`cat $DIR/4 | jq .data.apiToken | tr -d \"` +echo "Search API:" +curl -s "http://localhost:8080/api/search?q=*&key=$FINCHKEY" | jq '.data.fq_actual' +echo "Database:" +scripts/search/dbperms $1 | grep '|' +echo "Solr per group docs (old):" +curl -s "http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q=entityid%3A$1" | jq '.response.docs[] | {id, name_sort, perms_ss}' +echo "Solr permission docs (new):" +curl -s "http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q=definition_point_dvobject_id_s%3A$1" | jq '.response.docs[] | {definition_point_s,discoverable_by_ss}' +echo "Java:" +curl -s "http://localhost:8080/api/search/perms?q=*&key=$FINCHKEY&id=$1" | jq '.data[]' 2>/dev/null +exit +echo "Search API 
perms, items (finch):" +curl -s "http://localhost:8080/api/search?q=*&key=$FINCHKEY" | jq '.data | {fq_actual, items}' +echo "Search API perms, items (spruce):" +curl -s "http://localhost:8080/api/search?q=*&key=$SPRUCKEY" | jq '.data | {fq_actual, items}' diff --git a/postgresql/testdata/scripts/search/create b/postgresql/testdata/scripts/search/create new file mode 100755 index 0000000..1ee9648 --- /dev/null +++ b/postgresql/testdata/scripts/search/create @@ -0,0 +1,31 @@ +#!/bin/sh +DVDIR_ROOT='data/in/dataverses.root' +DVDIR_BIRDS='data/in/dataverses.birds' +DVDIR_TREES='data/in/dataverses.trees' +DSDIR='data/in/datasets' +FILESDIR='data/in/files' + +#rm data/in/dataverses/1 +for i in `ls $DVDIR_ROOT`; do + curl -s -H "Content-type:application/json" -X POST -d @$DVDIR_ROOT/$i "http://localhost:8080/api/dataverses/root?key=$PETEKEY" +done + +for i in `ls $DVDIR_BIRDS`; do + curl -s -H "Content-type:application/json" -X POST -d @$DVDIR_BIRDS/$i "http://localhost:8080/api/dataverses/birds?key=$PETEKEY" +done + +for i in `ls $DVDIR_TREES`; do + curl -s -H "Content-type:application/json" -X POST -d @$DVDIR_TREES/$i "http://localhost:8080/api/dataverses/trees?key=$PETEKEY" +done + +# 9 is "sparrows" +#curl -H "Content-type:application/json" -X POST -d @data/in/dataverses.misc/8 "http://localhost:8080/api/dataverses/9?key=pete" +#curl -H "Content-type:application/json" -X POST -d @data/in/dataverses.misc/9 "http://localhost:8080/api/dataverses/trees?key=pete" + +#for i in `ls $DSDIR`; do +# curl http://localhost:8080/api/datasets -H 'Content-type:application/json' --data-binary @$DSDIR/$i +#done + +#for i in `ls $FILESDIR`; do +# curl http://localhost:8080/api/files -H 'Content-type:application/json' --data-binary @$FILESDIR/$i +#done diff --git a/postgresql/testdata/scripts/search/create-bird-dvs1 b/postgresql/testdata/scripts/search/create-bird-dvs1 new file mode 100755 index 0000000..ba71e01 --- /dev/null +++ b/postgresql/testdata/scripts/search/create-bird-dvs1 @@ 
-0,0 +1,21 @@ +#!/bin/sh +DIR='scripts/search/data/in/dv-birds1' +USERDIR=/tmp/searchusers +ROOT_DV=root +FINCHKEY=`cat $USERDIR/1 | jq .data.apiToken | tr -d \"` +curl -s -H "Content-type:application/json" -X POST -d @$DIR/1 "http://localhost:8080/api/dataverses/$ROOT_DV?key=$FINCHKEY" +echo + +#PARENT=`xsltproc scripts/search/data/mkpaths.xsl scripts/search/data/nodes.xml | grep '/sparrows$' | tr / " " | awk '{print $(NF-1)}'` +PARENT=birds +curl -s -H "Content-type:application/json" -X POST -d @$DIR/2 "http://localhost:8080/api/dataverses/$PARENT?key=$FINCHKEY" +echo + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/3 "http://localhost:8080/api/dataverses/birds?key=$FINCHKEY" +echo + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/4 "http://localhost:8080/api/dataverses/birds?key=$FINCHKEY" +echo + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/5 "http://localhost:8080/api/dataverses/sparrows?key=$FINCHKEY" +echo diff --git a/postgresql/testdata/scripts/search/create-psi-dvs b/postgresql/testdata/scripts/search/create-psi-dvs new file mode 100755 index 0000000..150f51c --- /dev/null +++ b/postgresql/testdata/scripts/search/create-psi-dvs @@ -0,0 +1,24 @@ +#!/bin/sh +. 
scripts/search/export-keys +DIR='scripts/search/data/in/dv-psi' +USERDIR=/tmp/searchusers +curl -s -H "Content-type:application/json" -X POST -d @$DIR/1 "http://localhost:8080/api/dataverses/root?key=$PSIADMINKEY" +echo + +PARENT=psi +for i in {2..9}; do + curl -s -H "Content-type:application/json" -X POST -d @$DIR/$i "http://localhost:8080/api/dataverses/$PARENT?key=$PSIADMINKEY" + echo +done + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/10 "http://localhost:8080/api/dataverses/psimali?key=$PSIADMINKEY" +echo + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/11 "http://localhost:8080/api/dataverses/psimali?key=$PSIADMINKEY" +echo + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/12 "http://localhost:8080/api/dataverses/psimalihealth?key=$PSIADMINKEY" +echo + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/13 "http://localhost:8080/api/dataverses/psimalihealthchild?key=$PSIADMINKEY" +echo diff --git a/postgresql/testdata/scripts/search/create-tree-dvs1 b/postgresql/testdata/scripts/search/create-tree-dvs1 new file mode 100755 index 0000000..b5ba864 --- /dev/null +++ b/postgresql/testdata/scripts/search/create-tree-dvs1 @@ -0,0 +1,13 @@ +#!/bin/sh +DIR='scripts/search/data/in/dv-trees1' +USERDIR=/tmp/searchusers +ROOT_DV=root +SPRUCEKEY=`cat $USERDIR/4 | jq .data.apiToken | tr -d \"` +curl -s -H "Content-type:application/json" -X POST -d @$DIR/1 "http://localhost:8080/api/dataverses/$ROOT_DV?key=$SPRUCEKEY" +echo + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/2 "http://localhost:8080/api/dataverses/trees?key=$SPRUCEKEY" +echo + +curl -s -H "Content-type:application/json" -X POST -d @$DIR/3 "http://localhost:8080/api/dataverses/trees?key=$SPRUCEKEY" +echo diff --git a/postgresql/testdata/scripts/search/create-users b/postgresql/testdata/scripts/search/create-users new file mode 100755 index 0000000..bdee0c6 --- /dev/null +++ b/postgresql/testdata/scripts/search/create-users @@ -0,0 +1,19 @@ 
+#!/bin/bash +SERVER='http://localhost:8080/api' +BURRITO='burrito' +USERDIR='scripts/search/data/in/users' +OUTDIR='/tmp/searchusers' +rm -rf $OUTDIR +mkdir -p $OUTDIR + +create () { + pass=`cat $1 | jq .userName | tr -d \"` + echo $pass + resp=$(curl -s -H "Content-type:application/json" -X POST -d @$1 "$SERVER/builtin-users?password=$pass&key=$BURRITO") + echo $resp | jq . > $OUTDIR/$1 +} + +cd $USERDIR +for i in `ls`; do + create $i +done diff --git a/postgresql/testdata/scripts/search/data/binary/1000files.zip b/postgresql/testdata/scripts/search/data/binary/1000files.zip new file mode 100644 index 0000000..64d4cdd Binary files /dev/null and b/postgresql/testdata/scripts/search/data/binary/1000files.zip differ diff --git a/postgresql/testdata/scripts/search/data/binary/100files.zip b/postgresql/testdata/scripts/search/data/binary/100files.zip new file mode 100644 index 0000000..11dd6a1 Binary files /dev/null and b/postgresql/testdata/scripts/search/data/binary/100files.zip differ diff --git a/postgresql/testdata/scripts/search/data/binary/3files.zip b/postgresql/testdata/scripts/search/data/binary/3files.zip new file mode 100644 index 0000000..a4cd394 Binary files /dev/null and b/postgresql/testdata/scripts/search/data/binary/3files.zip differ diff --git a/postgresql/testdata/scripts/search/data/binary/health.zip b/postgresql/testdata/scripts/search/data/binary/health.zip new file mode 100644 index 0000000..2a81248 Binary files /dev/null and b/postgresql/testdata/scripts/search/data/binary/health.zip differ diff --git a/postgresql/testdata/scripts/search/data/binary/trees.png b/postgresql/testdata/scripts/search/data/binary/trees.png new file mode 100644 index 0000000..7aa5223 Binary files /dev/null and b/postgresql/testdata/scripts/search/data/binary/trees.png differ diff --git a/postgresql/testdata/scripts/search/data/binary/trees.zip b/postgresql/testdata/scripts/search/data/binary/trees.zip new file mode 100644 index 0000000..170c2d3 Binary files /dev/null 
and b/postgresql/testdata/scripts/search/data/binary/trees.zip differ diff --git a/postgresql/testdata/scripts/search/data/dv-birds1.tsv b/postgresql/testdata/scripts/search/data/dv-birds1.tsv new file mode 100755 index 0000000..e9af3e5 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/dv-birds1.tsv @@ -0,0 +1,6 @@ +name alias permissionRoot subject contactEmail description affiliation +Birds birds true Arts and Humanities birds@birds.com A bird dataverse with some trees Birds Inc. +Finches finches false Chemistry finches@birds.com A dataverse with finches Birds Inc. +Sparrows sparrows false Law sparrows@birds.com A dataverse featuring sparrows Birds Inc. +Wrens wrens false Medicine, Health and Life Sciences wrens@birds.com A dataverse full of wrens Birds Inc. +Chestnut Sparrows chestnutsparrows false Other chestnutsparrows@birds.com A dataverse with chestnut sparrows Birds Inc. diff --git a/postgresql/testdata/scripts/search/data/dv-psi.tsv b/postgresql/testdata/scripts/search/data/dv-psi.tsv new file mode 100755 index 0000000..b3d39c0 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/dv-psi.tsv @@ -0,0 +1,14 @@ +name alias permissionRoot subject contactEmail description affiliation +PSI psi true Social Science psi@mailinator.com PSI PSI +China psichina true Social Science psi@mailinator.com PSI PSI +Russia psirussia true Social Science psi@mailinator.com PSI PSI +India psiindia true Social Science psi@mailinator.com PSI PSI +Haiti psihaiti true Social Science psi@mailinator.com PSI PSI +Laos psilaos true Social Science psi@mailinator.com PSI PSI +Nepal psinepal true Social Science psi@mailinator.com PSI PSI +Togo psitogo true Social Science psi@mailinator.com PSI PSI +Mali psimali true Social Science psi@mailinator.com PSI PSI +Mali Health psimalihealth true Social Science psi@mailinator.com PSI PSI +Women in Mali psimaliwomen true Social Science psi@mailinator.com PSI PSI +Child of Mali Health psimalihealthchild true Social Science 
psi@mailinator.com PSI PSI +Grandchild of Mali Health psimalihealthgrandchild true Social Science psi@mailinator.com PSI PSI diff --git a/postgresql/testdata/scripts/search/data/dv-trees1.tsv b/postgresql/testdata/scripts/search/data/dv-trees1.tsv new file mode 100755 index 0000000..b0ac7e2 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/dv-trees1.tsv @@ -0,0 +1,4 @@ +name alias permissionRoot subject contactEmail description affiliation +Trees trees true Other trees@trees.com A tree dataverse with some birds Trees Inc. +Spruce spruce false Other spruce@trees.com A spruce with some birds Trees Inc. +Chestnut Trees chestnuttrees false Other chestnuttrees@trees.com A dataverse with chestnut trees and an oriole Trees Inc. diff --git a/postgresql/testdata/scripts/search/data/group-explicit-trees.json b/postgresql/testdata/scripts/search/data/group-explicit-trees.json new file mode 100644 index 0000000..b518edc --- /dev/null +++ b/postgresql/testdata/scripts/search/data/group-explicit-trees.json @@ -0,0 +1,5 @@ +{ + "aliasInOwner": "trees", + "displayName": "Trees Dataverse Contributors", + "description": "Contributors to the Trees Dataverse." 
+} diff --git a/postgresql/testdata/scripts/search/data/in/dataverses.birds/4 b/postgresql/testdata/scripts/search/data/in/dataverses.birds/4 new file mode 100644 index 0000000..a54ea08 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/in/dataverses.birds/4 @@ -0,0 +1,8 @@ +{ + "affiliation": "Birds Inc.", + "alias": "finches", + "contactEmail": "finches@birds.com", + "description": "A dataverse with finches", + "name": "Finches", + "permissionRoot": "false" +} diff --git a/postgresql/testdata/scripts/search/data/in/dataverses.birds/5 b/postgresql/testdata/scripts/search/data/in/dataverses.birds/5 new file mode 100644 index 0000000..2207109 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/in/dataverses.birds/5 @@ -0,0 +1,8 @@ +{ + "affiliation": "Birds Inc.", + "alias": "sparrows", + "contactEmail": "sparrows@birds.com", + "description": "A dataverse featuring sparrows", + "name": "Sparrows", + "permissionRoot": "false" +} diff --git a/postgresql/testdata/scripts/search/data/in/dataverses.birds/6 b/postgresql/testdata/scripts/search/data/in/dataverses.birds/6 new file mode 100644 index 0000000..37a8627 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/in/dataverses.birds/6 @@ -0,0 +1,8 @@ +{ + "affiliation": "Birds Inc.", + "alias": "wrens", + "contactEmail": "wrens@birds.com", + "description": "A dataverse full of wrens", + "name": "Wrens", + "permissionRoot": "false" +} diff --git a/postgresql/testdata/scripts/search/data/in/dataverses.root/2 b/postgresql/testdata/scripts/search/data/in/dataverses.root/2 new file mode 100644 index 0000000..c2b1ac0 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/in/dataverses.root/2 @@ -0,0 +1,8 @@ +{ + "affiliation": "Birds Inc.", + "alias": "birds", + "contactEmail": "birds@birds.com", + "description": "A bird dataverse with some trees", + "name": "Birds", + "permissionRoot": "false" +} diff --git a/postgresql/testdata/scripts/search/data/in/dataverses.root/3 
b/postgresql/testdata/scripts/search/data/in/dataverses.root/3 new file mode 100644 index 0000000..eef8f99 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/in/dataverses.root/3 @@ -0,0 +1,8 @@ +{ + "affiliation": "Trees Inc.", + "alias": "trees", + "contactEmail": "trees@trees.com", + "description": "A tree dataverse with some birds", + "name": "Trees", + "permissionRoot": "false" +} diff --git a/postgresql/testdata/scripts/search/data/in/dataverses.trees/7 b/postgresql/testdata/scripts/search/data/in/dataverses.trees/7 new file mode 100644 index 0000000..7e8026f --- /dev/null +++ b/postgresql/testdata/scripts/search/data/in/dataverses.trees/7 @@ -0,0 +1,8 @@ +{ + "affiliation": "Trees Inc.", + "alias": "spruce", + "contactEmail": "spruce@trees.com", + "description": "A spruce with some birds", + "name": "Spruce", + "permissionRoot": "false" +} diff --git a/postgresql/testdata/scripts/search/data/in/dataverses.trees/9 b/postgresql/testdata/scripts/search/data/in/dataverses.trees/9 new file mode 100644 index 0000000..2410260 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/in/dataverses.trees/9 @@ -0,0 +1,8 @@ +{ + "affiliation": "Trees Inc.", + "alias": "chestnuttrees", + "contactEmail": "chestnuttrees@trees.com", + "description": "A dataverse with chestnut trees and an oriole", + "name": "Chestnut Trees", + "permissionRoot": "false" +} diff --git a/postgresql/testdata/scripts/search/data/mkpaths.xsl b/postgresql/testdata/scripts/search/data/mkpaths.xsl new file mode 100644 index 0000000..c14d9f7 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/mkpaths.xsl @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/postgresql/testdata/scripts/search/data/nodes.xml b/postgresql/testdata/scripts/search/data/nodes.xml new file mode 100644 index 0000000..a635b2b --- /dev/null +++ b/postgresql/testdata/scripts/search/data/nodes.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + diff --git 
a/postgresql/testdata/scripts/search/data/replace_test/003.txt b/postgresql/testdata/scripts/search/data/replace_test/003.txt new file mode 100644 index 0000000..e440e5c --- /dev/null +++ b/postgresql/testdata/scripts/search/data/replace_test/003.txt @@ -0,0 +1 @@ +3 \ No newline at end of file diff --git a/postgresql/testdata/scripts/search/data/replace_test/004.txt b/postgresql/testdata/scripts/search/data/replace_test/004.txt new file mode 100644 index 0000000..bf0d87a --- /dev/null +++ b/postgresql/testdata/scripts/search/data/replace_test/004.txt @@ -0,0 +1 @@ +4 \ No newline at end of file diff --git a/postgresql/testdata/scripts/search/data/replace_test/005.txt b/postgresql/testdata/scripts/search/data/replace_test/005.txt new file mode 100644 index 0000000..7813681 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/replace_test/005.txt @@ -0,0 +1 @@ +5 \ No newline at end of file diff --git a/postgresql/testdata/scripts/search/data/replace_test/growing_file/2016-01/data.tsv b/postgresql/testdata/scripts/search/data/replace_test/growing_file/2016-01/data.tsv new file mode 100644 index 0000000..4d75a0a --- /dev/null +++ b/postgresql/testdata/scripts/search/data/replace_test/growing_file/2016-01/data.tsv @@ -0,0 +1 @@ +2016-01 7 diff --git a/postgresql/testdata/scripts/search/data/replace_test/growing_file/2016-02/data.tsv b/postgresql/testdata/scripts/search/data/replace_test/growing_file/2016-02/data.tsv new file mode 100644 index 0000000..7a1f0a8 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/replace_test/growing_file/2016-02/data.tsv @@ -0,0 +1,2 @@ +2016-01 7 +2016-02 9 diff --git a/postgresql/testdata/scripts/search/data/replace_test/growing_file/2016-03/data.tsv b/postgresql/testdata/scripts/search/data/replace_test/growing_file/2016-03/data.tsv new file mode 100644 index 0000000..7d7619a --- /dev/null +++ b/postgresql/testdata/scripts/search/data/replace_test/growing_file/2016-03/data.tsv @@ -0,0 +1,3 @@ +2016-01 7 +2016-02 9 
+2016-03 8 diff --git a/postgresql/testdata/scripts/search/data/savedSearchAdvanced.json b/postgresql/testdata/scripts/search/data/savedSearchAdvanced.json new file mode 100644 index 0000000..00b8244 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/savedSearchAdvanced.json @@ -0,0 +1,7 @@ +{ + "query": "*", + "definitionPointId": 2, + "filterQueries": [ + "date:2015" + ] +} diff --git a/postgresql/testdata/scripts/search/data/savedSearchBasic.json b/postgresql/testdata/scripts/search/data/savedSearchBasic.json new file mode 100644 index 0000000..26d04d8 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/savedSearchBasic.json @@ -0,0 +1,4 @@ +{ + "query": "png", + "definitionPointId": 2 +} diff --git a/postgresql/testdata/scripts/search/data/savedSearchInvalidJson.json b/postgresql/testdata/scripts/search/data/savedSearchInvalidJson.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/savedSearchInvalidJson.json @@ -0,0 +1 @@ +[] diff --git a/postgresql/testdata/scripts/search/data/savedSearchInvalidJsonNoQuery.json b/postgresql/testdata/scripts/search/data/savedSearchInvalidJsonNoQuery.json new file mode 100644 index 0000000..7d8433a --- /dev/null +++ b/postgresql/testdata/scripts/search/data/savedSearchInvalidJsonNoQuery.json @@ -0,0 +1,3 @@ +{ + "quarry": "can't spell" +} diff --git a/postgresql/testdata/scripts/search/data/savedSearchMaliBasicHealth.json b/postgresql/testdata/scripts/search/data/savedSearchMaliBasicHealth.json new file mode 100644 index 0000000..ca217d3 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/savedSearchMaliBasicHealth.json @@ -0,0 +1,9 @@ +{ + "definitionPointId": 22, + "query": "health", + "filterQueries": [ + "dvObjectType:(dataverses OR datasets OR files)", + "subtreePaths:\"/13/21\"" + ], + "creatorId": 1 +} diff --git a/postgresql/testdata/scripts/search/data/tabular/120745.dta b/postgresql/testdata/scripts/search/data/tabular/120745.dta new 
file mode 100644 index 0000000..279cbfa Binary files /dev/null and b/postgresql/testdata/scripts/search/data/tabular/120745.dta differ diff --git a/postgresql/testdata/scripts/search/data/tabular/1char b/postgresql/testdata/scripts/search/data/tabular/1char new file mode 100644 index 0000000..7898192 --- /dev/null +++ b/postgresql/testdata/scripts/search/data/tabular/1char @@ -0,0 +1 @@ +a diff --git a/postgresql/testdata/scripts/search/data/tabular/50by1000.dta b/postgresql/testdata/scripts/search/data/tabular/50by1000.dta new file mode 100644 index 0000000..2cbadda Binary files /dev/null and b/postgresql/testdata/scripts/search/data/tabular/50by1000.dta differ diff --git a/postgresql/testdata/scripts/search/data/tabular/50by1000.dta.zip b/postgresql/testdata/scripts/search/data/tabular/50by1000.dta.zip new file mode 100644 index 0000000..4280a06 Binary files /dev/null and b/postgresql/testdata/scripts/search/data/tabular/50by1000.dta.zip differ diff --git a/postgresql/testdata/scripts/search/dataset-add b/postgresql/testdata/scripts/search/dataset-add new file mode 100755 index 0000000..2b222dc --- /dev/null +++ b/postgresql/testdata/scripts/search/dataset-add @@ -0,0 +1,2 @@ +#!/bin/sh +curl http://localhost:8080/api/datasets?owner=birds -H 'Content-type:application/json' --data-binary @data/in/datasets/1 diff --git a/postgresql/testdata/scripts/search/dbbuiltin2shib b/postgresql/testdata/scripts/search/dbbuiltin2shib new file mode 100755 index 0000000..1b548f4 --- /dev/null +++ b/postgresql/testdata/scripts/search/dbbuiltin2shib @@ -0,0 +1,11 @@ +#!/bin/bash -x +#psql -c "select id,name,useridentifier from authenticateduser order by id;" dataverse_db +psql -c "select * from authenticateduser order by id;" dataverse_db +psql -c "select * from authenticateduserlookup order by id;" dataverse_db +psql -c "select * from builtinuser order by id;" dataverse_db +#psql -c "select id,encryptedpassword,firstname,lastname,username from builtinuser order by id;" 
dataverse_db +exit +psql -c "select * from roleassignment;" dataverse_db +psql -c "select datasetversionid,useridentifier from datasetversion_dataverseuser;" dataverse_db +exit +psql -c "select * from explicitgroup;" dataverse_db diff --git a/postgresql/testdata/scripts/search/dbdatasetversion b/postgresql/testdata/scripts/search/dbdatasetversion new file mode 100755 index 0000000..eb0f04a --- /dev/null +++ b/postgresql/testdata/scripts/search/dbdatasetversion @@ -0,0 +1,5 @@ +#!/bin/sh +~/.homebrew/bin/psql -c " +select id,dataset_id,versionstate,license,termsofuse from datasetversion; +---select * from datasetversion; +" dataverse_db diff --git a/postgresql/testdata/scripts/search/dbdbobject b/postgresql/testdata/scripts/search/dbdbobject new file mode 100755 index 0000000..d69604c --- /dev/null +++ b/postgresql/testdata/scripts/search/dbdbobject @@ -0,0 +1,2 @@ +#!/bin/bash -x +~/.homebrew/bin/psql -c "select id, dtype, modificationtime, indextime, permissionmodificationtime, permissionindextime from dvobject order by id;" dataverse_db diff --git a/postgresql/testdata/scripts/search/dblinks b/postgresql/testdata/scripts/search/dblinks new file mode 100755 index 0000000..7d7c089 --- /dev/null +++ b/postgresql/testdata/scripts/search/dblinks @@ -0,0 +1,5 @@ +#!/bin/bash -x +~/.homebrew/bin/psql -c "select * from dataverselinkingdataverse order by id;" dataverse_db +~/.homebrew/bin/psql -c "select * from datasetlinkingdataverse order by id;" dataverse_db +exit +~/.homebrew/bin/psql -c "select id, alias from dataverse order by id;" dataverse_db diff --git a/postgresql/testdata/scripts/search/dblinks-delete b/postgresql/testdata/scripts/search/dblinks-delete new file mode 100755 index 0000000..1d9cd3e --- /dev/null +++ b/postgresql/testdata/scripts/search/dblinks-delete @@ -0,0 +1,3 @@ +#!/bin/bash -x +~/.homebrew/bin/psql -c "delete from dataverselinkingdataverse;" dataverse_db +~/.homebrew/bin/psql -c "delete from datasetlinkingdataverse;" dataverse_db diff --git 
a/postgresql/testdata/scripts/search/dbperms b/postgresql/testdata/scripts/search/dbperms new file mode 100755 index 0000000..c54a133 --- /dev/null +++ b/postgresql/testdata/scripts/search/dbperms @@ -0,0 +1,9 @@ +#!/bin/sh +~/.homebrew/bin/psql -c " +select dv.id as dvObject, au.id as user +from dvobject dv, roleassignment ra, authenticateduser au +where 1=1 +and dv.id = $1 +and dv.id = ra.definitionpoint_id +and '@'|| au.useridentifier = ra.assigneeidentifier; +" dataverse_db diff --git a/postgresql/testdata/scripts/search/dbsavedsearch b/postgresql/testdata/scripts/search/dbsavedsearch new file mode 100755 index 0000000..2ffb977 --- /dev/null +++ b/postgresql/testdata/scripts/search/dbsavedsearch @@ -0,0 +1,6 @@ +#!/bin/bash -x +~/.homebrew/bin/psql -c "select * from savedsearch order by id;" dataverse_db +~/.homebrew/bin/psql -c "select * from savedsearchfilterquery order by id;" dataverse_db +exit +~/.homebrew/bin/psql -c "drop table savedsearch cascade;" dataverse_db +~/.homebrew/bin/psql -c "drop table savedsearchfilterquery cascade;" dataverse_db diff --git a/postgresql/testdata/scripts/search/dbsavedsearch-delete b/postgresql/testdata/scripts/search/dbsavedsearch-delete new file mode 100755 index 0000000..6d0642f --- /dev/null +++ b/postgresql/testdata/scripts/search/dbsavedsearch-delete @@ -0,0 +1,6 @@ +#!/bin/bash -x +~/.homebrew/bin/psql -c "delete from savedsearchfilterquery;" dataverse_db +~/.homebrew/bin/psql -c "delete from savedsearch cascade;" dataverse_db +exit +~/.homebrew/bin/psql -c "drop table savedsearch cascade;" dataverse_db +~/.homebrew/bin/psql -c "drop table savedsearchfilterquery cascade;" dataverse_db diff --git a/postgresql/testdata/scripts/search/dbshibgroups b/postgresql/testdata/scripts/search/dbshibgroups new file mode 100755 index 0000000..93c93cc --- /dev/null +++ b/postgresql/testdata/scripts/search/dbshibgroups @@ -0,0 +1,5 @@ +#!/bin/bash -x +psql -c "select * from shibgroup;" dataverse_db +psql -c "select * from 
authenticateduser;" dataverse_db +psql -c "select * from persistedglobalgroup;" dataverse_db +psql -c "select * from roleassignment;" dataverse_db diff --git a/postgresql/testdata/scripts/search/dbusers b/postgresql/testdata/scripts/search/dbusers new file mode 100755 index 0000000..283aa99 --- /dev/null +++ b/postgresql/testdata/scripts/search/dbusers @@ -0,0 +1,10 @@ +#!/bin/sh +~/.homebrew/bin/psql -c " +select * from builtinuser; +" dataverse_db +~/.homebrew/bin/psql -c " +select * from authenticateduser; +" dataverse_db +~/.homebrew/bin/psql -c " +select * from authenticateduserlookup; +" dataverse_db diff --git a/postgresql/testdata/scripts/search/ds.tsv b/postgresql/testdata/scripts/search/ds.tsv new file mode 100644 index 0000000..de48427 --- /dev/null +++ b/postgresql/testdata/scripts/search/ds.tsv @@ -0,0 +1,8 @@ +id title author owner description citationDate distributor +1 general dataset Dr. Doctor 1 About birds 2013-12-11 For All +2 bird dataset Dr. Bird 2 bird study 1 2003-12-11 For the Birds +3 bird dataset Dr. Bird 2 bird study 2 2003-12-11 For the Birds +4 finch dataset Dr. Bird 3 bird study 2 2003-12-11 For the Birds +5 goldfinch dataset Dr. Bird 5 bird study 2 2003-12-11 For the Birds +6 tree dataset Dr. Tree 4 tree study 2 2003-12-11 For the Trees +7 chestnut dataset Dr. Tree 6 tree study 2003-12-11 For the Trees diff --git a/postgresql/testdata/scripts/search/dv.tsv b/postgresql/testdata/scripts/search/dv.tsv new file mode 100755 index 0000000..3480924 --- /dev/null +++ b/postgresql/testdata/scripts/search/dv.tsv @@ -0,0 +1,10 @@ +id name alias owner contactEmail description affiliation +1 Nature nature root@nature.com (not used) Earth Inc. +2 Birds birds 1 birds@birds.com A bird dataverse with some trees Birds Inc. +3 Trees trees 1 trees@trees.com A tree dataverse with some birds Trees Inc. +4 Finches finches 2 finches@birds.com A dataverse with finches Birds Inc. 
+5 Sparrows sparrows 2 sparrows@birds.com A dataverse featuring sparrows Birds Inc. +6 Wrens wrens 2 wrens@birds.com A dataverse full of wrens Birds Inc. +7 Spruce spruce 3 spruce@trees.com A spruce with some birds Trees Inc. +8 Chestnut Sparrows chestnutsparrows 5 chestnutsparrows@birds.com A dataverse with chestnut sparrows Birds Inc. +9 Chestnut Trees chestnuttrees 3 chestnuttrees@trees.com A dataverse with chestnut trees and an oriole Trees Inc. diff --git a/postgresql/testdata/scripts/search/empty-entityid-check b/postgresql/testdata/scripts/search/empty-entityid-check new file mode 100755 index 0000000..e9ea02b --- /dev/null +++ b/postgresql/testdata/scripts/search/empty-entityid-check @@ -0,0 +1,3 @@ +#!/bin/sh +# see also https://redmine.hmdc.harvard.edu/issues/3809 +curl 'http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q=-entityid:*' diff --git a/postgresql/testdata/scripts/search/export-keys b/postgresql/testdata/scripts/search/export-keys new file mode 100755 index 0000000..b568879 --- /dev/null +++ b/postgresql/testdata/scripts/search/export-keys @@ -0,0 +1,10 @@ +#!/bin/bash +# `source path/to/this/file` so you can use these keys elsewhere +export ADMINKEY=`cat /tmp/setup-all.sh.out | grep apiToken| jq .data.apiToken | tr -d \"` +export SEARCH_USER_DIR=/tmp/searchusers +export FINCHKEY=`cat $SEARCH_USER_DIR/1 | jq .data.apiToken | tr -d \"` +export SPARROWKEY=`cat $SEARCH_USER_DIR/2 | jq .data.apiToken | tr -d \"` +export WRENKEY=`cat $SEARCH_USER_DIR/3 | jq .data.apiToken | tr -d \"` +export SPRUCEKEY=`cat $SEARCH_USER_DIR/4 | jq .data.apiToken | tr -d \"` +export CHESTNUTKEY=`cat $SEARCH_USER_DIR/5 | jq .data.apiToken | tr -d \"` +export PSIADMINKEY=`cat $SEARCH_USER_DIR/6 | jq .data.apiToken | tr -d \"` diff --git a/postgresql/testdata/scripts/search/files b/postgresql/testdata/scripts/search/files new file mode 100755 index 0000000..361c984 --- /dev/null +++ b/postgresql/testdata/scripts/search/files @@ -0,0 +1,3 @@ 
+#!/bin/sh +curl http://localhost:8080/api/index +curl -s 'http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q=*&fq=dvtype:files' | jq '.response.docs[] | {name_sort, id, parentid}' diff --git a/postgresql/testdata/scripts/search/go b/postgresql/testdata/scripts/search/go new file mode 100755 index 0000000..e13a5cf --- /dev/null +++ b/postgresql/testdata/scripts/search/go @@ -0,0 +1,10 @@ +#!/bin/bash -x +./clear +sleep .5 +#./populate +#./create +./add +# elasticsearch might need more time before query +sleep 1 +./query +./search diff --git a/postgresql/testdata/scripts/search/index b/postgresql/testdata/scripts/search/index new file mode 100755 index 0000000..448c7a3 --- /dev/null +++ b/postgresql/testdata/scripts/search/index @@ -0,0 +1,6 @@ +#!/bin/bash +# curl -s "http://localhost:8080/api/admin/index?numPartitions=$1&partitionIdToProcess=$2&previewOnly=$3" +scripts/search/clear +curl -s -X DELETE http://localhost:8080/api/admin/index/timestamps +curl -s "http://localhost:8080/api/admin/index/continue?numPartitions=1&partitionIdToProcess=0&previewOnly=true" | jq .data.previewOfPartitionWorkload.dvContainerIds.dataverses[] | while read j; do curl http://localhost:8080/api/admin/index/dataverses/$j; done +curl -s "http://localhost:8080/api/admin/index/continue?numPartitions=1&partitionIdToProcess=0&previewOnly=true" | jq .data.previewOfPartitionWorkload.dvContainerIds.datasets[] | while read i; do curl http://localhost:8080/api/admin/index/datasets/$i; done diff --git a/postgresql/testdata/scripts/search/index-status b/postgresql/testdata/scripts/search/index-status new file mode 100755 index 0000000..8575223 --- /dev/null +++ b/postgresql/testdata/scripts/search/index-status @@ -0,0 +1,2 @@ +#!/bin/sh +curl -s http://localhost:8080/api/admin/index/status | jq . 
diff --git a/postgresql/testdata/scripts/search/json2ids b/postgresql/testdata/scripts/search/json2ids new file mode 100755 index 0000000..7afa9a5 --- /dev/null +++ b/postgresql/testdata/scripts/search/json2ids @@ -0,0 +1,23 @@ +#!/usr/bin/python +"""Find ids in JSON document""" +import sys +try: + import json +except ImportError: + import simplejson as json +import optparse +parser = optparse.OptionParser(description=__doc__) +options, args = parser.parse_args() + +if not args: + print "Please supply a filename to process" + sys.exit(1) + +json_data=open(args[0]) +data = json.load(json_data) +ids=[] +for i in data: + id = i["entityid_l"] + ids.append(str(id)) +print ' '.join(ids) +json_data.close() diff --git a/postgresql/testdata/scripts/search/populate b/postgresql/testdata/scripts/search/populate new file mode 100755 index 0000000..77b0a0f --- /dev/null +++ b/postgresql/testdata/scripts/search/populate @@ -0,0 +1,27 @@ +#!/bin/bash +DVDIR='data/in/dataverses' +DVDIR_ROOT='data/in/dataverses.root' +DVDIR_BIRDS='data/in/dataverses.birds' +DVDIR_TREES='data/in/dataverses.trees' +#DSDIR='data/in/datasets' +#FILESDIR='data/in/files' +#mkdir -p $DSDIR +#mkdir -p $FILESDIR +rm -rf data/in +mkdir -p $DVDIR +mkdir -p $DVDIR_ROOT +mkdir -p $DVDIR_BIRDS +mkdir -p $DVDIR_TREES +count=1; ./tab2json dv.tsv | while read i; do echo $i | python -m json.tool > $DVDIR/$count; let count++; done +rm $DVDIR/1 +mv $DVDIR/2 $DVDIR_ROOT/2 +mv $DVDIR/3 $DVDIR_ROOT/3 +mv $DVDIR/4 $DVDIR_BIRDS/4 +mv $DVDIR/5 $DVDIR_BIRDS/5 +mv $DVDIR/6 $DVDIR_BIRDS/6 +mv $DVDIR/7 $DVDIR_TREES/7 +rm $DVDIR/8 +mv $DVDIR/9 $DVDIR_TREES/9 +rmdir $DVDIR +#count=1; ./tab2json ds.tsv | while read i; do echo $i | python -m json.tool > $DSDIR/$count; let count++; done +#count=1; ./tab2json files.tsv | while read i; do echo $i | python -m json.tool > $FILESDIR/$count; let count++; done diff --git a/postgresql/testdata/scripts/search/populate-bird-dvs1 b/postgresql/testdata/scripts/search/populate-bird-dvs1 new file 
mode 100755 index 0000000..b7a9d6a --- /dev/null +++ b/postgresql/testdata/scripts/search/populate-bird-dvs1 @@ -0,0 +1,8 @@ +#!/bin/bash +BASEDIR='scripts/search' +OUTDIR='data/in/dv-birds1' +FULL_OUTDIR="$BASEDIR/$OUTDIR" +rm -rf $FULL_OUTDIR +mkdir -p $FULL_OUTDIR +cd $BASEDIR +count=1; ./tab2json-dvs data/dv-birds1.tsv | while read i; do echo $i | python -m json.tool > $OUTDIR/$count; let count++; done diff --git a/postgresql/testdata/scripts/search/populate-psi-dvs b/postgresql/testdata/scripts/search/populate-psi-dvs new file mode 100755 index 0000000..ec966cc --- /dev/null +++ b/postgresql/testdata/scripts/search/populate-psi-dvs @@ -0,0 +1,8 @@ +#!/bin/bash +BASEDIR='scripts/search' +OUTDIR='data/in/dv-psi' +FULL_OUTDIR="$BASEDIR/$OUTDIR" +rm -rf $FULL_OUTDIR +mkdir -p $FULL_OUTDIR +cd $BASEDIR +count=1; ./tab2json-dvs data/dv-psi.tsv | while read i; do echo $i | python -m json.tool > $OUTDIR/$count; let count++; done diff --git a/postgresql/testdata/scripts/search/populate-tree-dvs1 b/postgresql/testdata/scripts/search/populate-tree-dvs1 new file mode 100755 index 0000000..27473bf --- /dev/null +++ b/postgresql/testdata/scripts/search/populate-tree-dvs1 @@ -0,0 +1,8 @@ +#!/bin/bash +BASEDIR='scripts/search' +OUTDIR='data/in/dv-trees1' +FULL_OUTDIR="$BASEDIR/$OUTDIR" +rm -rf $FULL_OUTDIR +mkdir -p $FULL_OUTDIR +cd $BASEDIR +count=1; ./tab2json-dvs data/dv-trees1.tsv | while read i; do echo $i | python -m json.tool > $OUTDIR/$count; let count++; done diff --git a/postgresql/testdata/scripts/search/populate-users b/postgresql/testdata/scripts/search/populate-users new file mode 100755 index 0000000..c24ef96 --- /dev/null +++ b/postgresql/testdata/scripts/search/populate-users @@ -0,0 +1,8 @@ +#!/bin/bash +BASEDIR='scripts/search' +USERDIR='data/in/users' +FULL_USERDIR="$BASEDIR/$USERDIR" +rm -rf $FULL_USERDIR +mkdir -p $FULL_USERDIR +cd $BASEDIR +count=1; ./tab2json-users users.tsv | while read i; do echo $i | python -m json.tool > $USERDIR/$count; let 
count++; done diff --git a/postgresql/testdata/scripts/search/query b/postgresql/testdata/scripts/search/query new file mode 100755 index 0000000..bd13c3e --- /dev/null +++ b/postgresql/testdata/scripts/search/query @@ -0,0 +1,8 @@ +#!/bin/sh +curl -s 'http://localhost:8983/solr/collection1/select?rows=1000000&wt=json&indent=true&q=*%3A*' +# show combination of public stuff OR pete's private stuff +# curl -s --globoff 'http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q=*&fq=({!join+from=groups_s+to=perms_ss}id:group_public+OR+{!join+from=groups_s+to=perms_ss}id:group_user2)' | jq '.response.docs[] | {name_sort}' +# https://github.com/IQSS/dataverse/issues/1262 +# curl 'http://localhost:8983/solr/collection1/select?rows=1000000&wt=json&indent=true&hl=true&hl.fl=*&q=wright&hl.snippets=10' +# remember elasticsearch? :) +#curl 'http://localhost:9200/_search?pretty=true&q=*' diff --git a/postgresql/testdata/scripts/search/saved-search b/postgresql/testdata/scripts/search/saved-search new file mode 100755 index 0000000..a483930 --- /dev/null +++ b/postgresql/testdata/scripts/search/saved-search @@ -0,0 +1,15 @@ +#!/bin/bash +. scripts/search/export-keys +# 2015-03-26 11:48:50.43 +curl -s http://localhost:8080/api/admin/savedsearches/list?key=$ADMINKEY | jq . +if [ ! -z "$1" ]; then + curl -s http://localhost:8080/api/dataverses/$1/links?key=$ADMINKEY | jq . +fi +if [ ! -z "$2" ]; then + curl -s http://localhost:8080/api/datasets/$2/links?key=$ADMINKEY | jq . +fi +exit +curl -s http://localhost:8080/api/admin/savedsearches -X POST -H 'Content-type:application/json' --upload-file scripts/search/data/savedSearchBasic.json | jq . +curl -s http://localhost:8080/api/admin/savedsearches -X POST -H 'Content-type:application/json' --upload-file scripts/search/data/savedSearchAdvanced.json | jq . 
+# curl -s -X DELETE http://localhost:8080/api/admin/savedsearches/999 +scripts/search/dbsavedsearch diff --git a/postgresql/testdata/scripts/search/saved-search-setup b/postgresql/testdata/scripts/search/saved-search-setup new file mode 100755 index 0000000..d99c2c2 --- /dev/null +++ b/postgresql/testdata/scripts/search/saved-search-setup @@ -0,0 +1,4 @@ +#!/bin/bash +curl -X PUT -d true http://localhost:8080/api/admin/settings/:SearchApiNonPublicAllowed +echo +curl -s http://localhost:8080/api/admin/savedsearches -X POST -H 'Content-type:application/json' --upload-file scripts/search/data/savedSearchMaliBasicHealth.json | jq . diff --git a/postgresql/testdata/scripts/search/saved-search-test b/postgresql/testdata/scripts/search/saved-search-test new file mode 100755 index 0000000..96def1b --- /dev/null +++ b/postgresql/testdata/scripts/search/saved-search-test @@ -0,0 +1,5 @@ +#!/bin/bash +. scripts/search/export-keys +#curl -s -X PUT http://localhost:8080/api/admin/savedsearches/makelinks/all | jq . +diff -u scripts/search/tests/expected/saved-search <(curl -s "http://localhost:8080/api/search?key=$ADMINKEY&sort=name&subtree=psimalihealth&q=*" | jq '.data.items[] | {name,type}') +diff -u scripts/search/tests/expected/saved-search-links <(curl -s http://localhost:8080/api/dataverses/psimalihealth/links?key=$ADMINKEY | jq .data) diff --git a/postgresql/testdata/scripts/search/search b/postgresql/testdata/scripts/search/search new file mode 100755 index 0000000..ac14596 --- /dev/null +++ b/postgresql/testdata/scripts/search/search @@ -0,0 +1,11 @@ +#!/bin/sh +if [ -z "$1" ]; then + curl -H "X-Dataverse-key: $API_TOKEN" -s 'http://localhost:8080/api/search?q=*' + #curl -s 'http://localhost:8080/api/search?q=*&key=pete' +else + # i.e. ./search 'q=*&fq=filetype_s:"image"&fq=dvtype:files' + # i.e. ./search 'q=*&start=10' + # i.e. ./search 'q=*&sort=name_sort&order=asc' + # i.e. 
./search 'q=*&sort=name_sort&order=asc' | jq '.itemsJson[] | {name_sort}' + curl -H "X-Dataverse-key: $API_TOKEN" -s "http://localhost:8080/api/search?$1" +fi diff --git a/postgresql/testdata/scripts/search/solr-delete-id b/postgresql/testdata/scripts/search/solr-delete-id new file mode 100755 index 0000000..302a84e --- /dev/null +++ b/postgresql/testdata/scripts/search/solr-delete-id @@ -0,0 +1,12 @@ +#!/bin/bash +if [ -z "$1" ]; then + echo "No Solr ID provided." + exit 1 +else + echo "Deleting Solr id $1" + OUTPUT=`curl -s http://localhost:8983/solr/update/json?commit=true -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"id:$1\"}}"` + # exit code 7 is expected when Solr is down + EXIT_CODE=$? + #echo $EXIT_CODE + #echo $OUTPUT +fi diff --git a/postgresql/testdata/scripts/search/spellcheck b/postgresql/testdata/scripts/search/spellcheck new file mode 100755 index 0000000..6ae8fee --- /dev/null +++ b/postgresql/testdata/scripts/search/spellcheck @@ -0,0 +1,5 @@ +#!/bin/sh +# output: +# "hits",1, +# "misspellingsAndCorrections",["datvrse","dataverse"] +curl -s 'http://localhost:8983/solr/spell?spellcheck=true&wt=json&indent=true&q=datvrse' diff --git a/postgresql/testdata/scripts/search/tab2json b/postgresql/testdata/scripts/search/tab2json new file mode 100755 index 0000000..a4cdc3d --- /dev/null +++ b/postgresql/testdata/scripts/search/tab2json @@ -0,0 +1,53 @@ +#!/usr/bin/env python +import sys +from optparse import OptionParser +import csv +try: + import json +except ImportError: + import simplejson as json + +parser = OptionParser() +options, args = parser.parse_args() + +if args: + csv_file = open(args[0]) +else: + csv_file = sys.stdin + +reader = csv.DictReader(csv_file, delimiter="\t") +rows = [row for row in reader] +for row in rows: + if "name" in row and "alias" in row and row["id"] == "1": + del row["id"] + del row["owner"] + row["permissionRoot"] = "false" + elif "title" in row: + row["@type"] = "dataset" + row["files"] = [] + 
row["versions"] = [] + del row["id"] + del row["owner"] + del row["title"] + del row["author"] + del row["citationDate"] + del row["distributor"] + elif "contentType" in row: + del row["id"] + row["permissionRoot"] = "false" + #print "must be a file..." + dataset_id = row["dataset"] + #row["dataset"] = {"id": dataset_id} + else: + del row["id"] + row["permissionRoot"] = "false" + del row["owner"] + #if row["id"] == "1": + print json.dumps(row) +csv_file.close() +# sample dataverse file: +#id name alias owner contactEmail description affiliation +#2 Birds birds 1 birds@birds.com A birds dataverse Birds Inc. +# sample dataset file: +#id title author owner description citationDate distributor +#1 birdstudy1 Dr. Finch 1 About birds 2013-12-11 For the Birds diff --git a/postgresql/testdata/scripts/search/tab2json-dvs b/postgresql/testdata/scripts/search/tab2json-dvs new file mode 100755 index 0000000..1864532 --- /dev/null +++ b/postgresql/testdata/scripts/search/tab2json-dvs @@ -0,0 +1,34 @@ +#!/usr/bin/env python +import sys +from optparse import OptionParser +import csv +try: + import json +except ImportError: + import simplejson as json + +parser = OptionParser() +options, args = parser.parse_args() + +if args: + csv_file = open(args[0]) +else: + csv_file = sys.stdin + +reader = csv.DictReader(csv_file, delimiter="\t") +rows = [row for row in reader] +for row in rows: + if "contactEmail" in row: + contactArray = [] + contactHash = {} + contactHash["contactEmail"] = row["contactEmail"] + contactArray.append(contactHash) + row["dataverseContacts"] = contactArray + del row["contactEmail"] + if "subject" in row: + subjectsArray = [] + subjectsArray.append(row["subject"]) + row["dataverseSubjects"] = subjectsArray + del row["subject"] + print json.dumps(row) +csv_file.close() diff --git a/postgresql/testdata/scripts/search/tab2json-users b/postgresql/testdata/scripts/search/tab2json-users new file mode 100755 index 0000000..388d54d --- /dev/null +++ 
b/postgresql/testdata/scripts/search/tab2json-users @@ -0,0 +1,22 @@ +#!/usr/bin/env python +import sys +from optparse import OptionParser +import csv +try: + import json +except ImportError: + import simplejson as json + +parser = OptionParser() +options, args = parser.parse_args() + +if args: + csv_file = open(args[0]) +else: + csv_file = sys.stdin + +reader = csv.DictReader(csv_file, delimiter="\t") +rows = [row for row in reader] +for row in rows: + print json.dumps(row) +csv_file.close() diff --git a/postgresql/testdata/scripts/search/tests/add-members-to-trees-group b/postgresql/testdata/scripts/search/tests/add-members-to-trees-group new file mode 100755 index 0000000..20d9e95 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/add-members-to-trees-group @@ -0,0 +1,3 @@ +#!/bin/sh +curl -X PUT "http://localhost:8080/api/dataverses/root/groups/trees/roleAssignees/@chestnut?key=$ADMINKEY" +curl -X PUT "http://localhost:8080/api/dataverses/root/groups/trees/roleAssignees/@spruce?key=$ADMINKEY" diff --git a/postgresql/testdata/scripts/search/tests/create-all-and-test b/postgresql/testdata/scripts/search/tests/create-all-and-test new file mode 100755 index 0000000..a49b0e2 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/create-all-and-test @@ -0,0 +1,38 @@ +#!/bin/sh +. 
scripts/search/export-keys +echo "Creating bird and tree dataverses" +scripts/search/populate-bird-dvs1 +scripts/search/create-bird-dvs1 > /tmp/bird-dvs1 +scripts/search/populate-tree-dvs1 +scripts/search/create-tree-dvs1 > /tmp/tree-dvs1 +echo "Creating some datasets" +curl -s --insecure --data-binary @scripts/search/tests/data/dataset-trees1.xml -H 'Content-Type: application/atom+xml' -u $SPRUCEKEY: https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/collection/dataverse/spruce | xmllint -format - >/dev/null +curl -s -X POST -H "Content-type:application/json" -d @scripts/search/tests/data/dataset-finch1.json "http://localhost:8080/api/dataverses/finches/datasets/?key=$FINCHKEY" >/dev/null +echo "Uploading a file via the SWORD API" +. scripts/search/assumptions +curl -s --insecure --data-binary @scripts/search/data/binary/trees.zip -H 'Content-Disposition: filename=trees.zip' -H 'Content-Type: application/zip' -H 'Packaging: http://purl.org/net/sword/package/SimpleZip' -u $SPRUCEKEY: https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit-media/study/$FIRST_SPRUCE_DOI >/dev/null +echo "Uploading a file via the native API" +# echo $FIRST_FINCH_DOI # FIXME: Why is this empty? +STATUS_CODE_FROM_UPLOADING_FILE_VIA_NATIVE=$(curl -H "X-Dataverse-key:$FINCHKEY" --insecure --write-out %{http_code} --silent --output /dev/null -X POST -F "file=@scripts/search/data/replace_test/growing_file/2016-01/data.tsv" -F 'jsonData={"description":"My description.","categories":["Data"]}' "http://localhost:8080/api/v1/datasets/$FIRST_FINCH_DATASET_ID/add") +if [[ "$STATUS_CODE_FROM_UPLOADING_FILE_VIA_NATIVE" != 200 ]]; then + echo "Couldn't upload file to dataset $FIRST_FINCH_DATASET_ID via native API!" + exit 1 +fi +# give the file a little time to ingest +sleep 2 +echo "Everything in draft, checking permissions. Silence is golden." +scripts/search/tests/permissions1 +echo "Done." +. 
scripts/search/assumptions +echo "Giving $SPRUCE_USERNAME "admin" on Birds dataverse" +scripts/search/tests/grant-spruce-admin-on-birds +echo Re-testing permissions. Silence is golden +scripts/search/tests/permissions2 +echo Done +. scripts/search/assumptions +echo "Revoking that role" +#curl -s -X DELETE "http://localhost:8080/api/dataverses/$BIRDS_DATAVERSE/assignments/$SPRUCE_ADMIN_ON_BIRDS?key=$FINCHKEY" >/dev/null +scripts/search/tests/revoke-spruce-admin-on-birds +echo "Making sure original permissions are back. Silence is golden." +scripts/search/tests/permissions1 +echo "Done" diff --git a/postgresql/testdata/scripts/search/tests/create-saved-search-and-test b/postgresql/testdata/scripts/search/tests/create-saved-search-and-test new file mode 100755 index 0000000..ac54092 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/create-saved-search-and-test @@ -0,0 +1,15 @@ +#!/bin/sh +. scripts/search/export-keys +curl -X PUT -d true http://localhost:8080/api/admin/settings/:SearchApiNonPublicAllowed +echo +scripts/search/populate-psi-dvs +scripts/search/create-psi-dvs > /tmp/psi-dvs1 +curl -s -X POST -H "Content-type:application/json" -d @scripts/search/tests/data/dataset-mali1.json "http://localhost:8080/api/dataverses/psimali/datasets/?key=$PSIADMINKEY" >/dev/null +curl -s -X POST -H "Content-type:application/json" -d @scripts/search/tests/data/dataset-mali2.json "http://localhost:8080/api/dataverses/psimali/datasets/?key=$PSIADMINKEY" >/dev/null +WOMEN_IN_MALI_DOI=`curl -s --globoff "http://localhost:8080/api/search?key=$ADMINKEY&q=title:\"Women+in+Mali+dataset+1\"" | jq '.data.items[].global_id' | sed 's/"//g'` +curl -s --insecure --data-binary @scripts/search/data/binary/health.zip -H 'Content-Disposition: filename=health.zip' -H 'Content-Type: application/zip' -H 'Packaging: http://purl.org/net/sword/package/SimpleZip' -u $PSIADMINKEY: https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit-media/study/$WOMEN_IN_MALI_DOI >/dev/null 
+scripts/search/saved-search-setup +curl -s -X PUT http://localhost:8080/api/admin/savedsearches/makelinks/all | jq . +echo "Running verification tests (silence is golden)" +scripts/search/saved-search-test +echo "Done" diff --git a/postgresql/testdata/scripts/search/tests/data/dataset-finch1.json b/postgresql/testdata/scripts/search/tests/data/dataset-finch1.json new file mode 100644 index 0000000..ec0856a --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/data/dataset-finch1.json @@ -0,0 +1,77 @@ +{ + "datasetVersion": { + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "Darwin's Finches", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorName": { + "value": "Finch, Fiona", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + }, + "authorAffiliation": { + "value": "Birds Inc.", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": [ + { "datasetContactEmail" : { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactEmail", + "value" : "finch@mailinator.com" + }, + "datasetContactName" : { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactName", + "value": "Finch, Fiona" + } + }], + "typeClass": "compound", + "multiple": true, + "typeName": "datasetContact" + }, + { + "value": [ { + "dsDescriptionValue":{ + "value": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds.", + "multiple":false, + "typeClass": "primitive", + "typeName": "dsDescriptionValue" + }}], + "typeClass": "compound", + "multiple": true, + "typeName": "dsDescription" + }, + { + "value": [ + "Medicine, Health and Life Sciences" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + } + ], + "displayName": "Citation 
Metadata" + } + } + } +} diff --git a/postgresql/testdata/scripts/search/tests/data/dataset-finch2.json b/postgresql/testdata/scripts/search/tests/data/dataset-finch2.json new file mode 100644 index 0000000..d20f835 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/data/dataset-finch2.json @@ -0,0 +1,82 @@ +{ + "datasetVersion": { + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "HTML & More", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorName": { + "value": "Markup, Marty", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + }, + "authorAffiliation": { + "value": "W4C", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": [ + { + "datasetContactEmail": { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactEmail", + "value": "markup@mailinator.com" + }, + "datasetContactName": { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactName", + "value": "Markup, Marty" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "datasetContact" + }, + { + "value": [ + { + "dsDescriptionValue": { + "value": "BEGIN

                            END", + "multiple": false, + "typeClass": "primitive", + "typeName": "dsDescriptionValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "dsDescription" + }, + { + "value": [ + "Medicine, Health and Life Sciences" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + } + ], + "displayName": "Citation Metadata" + } + } + } +} diff --git a/postgresql/testdata/scripts/search/tests/data/dataset-mali1.json b/postgresql/testdata/scripts/search/tests/data/dataset-mali1.json new file mode 100644 index 0000000..372a4a9 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/data/dataset-mali1.json @@ -0,0 +1,71 @@ +{ + "datasetVersion": { + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "Mali health dataset 1", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorName": { + "value": "Admin, PSI", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + }, + "authorAffiliation": { + "value": "PSI", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": [ + { "datasetContactEmail" : { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactEmail", + "value" : "psiadmin@mailinator.com" + } + }], + "typeClass": "compound", + "multiple": true, + "typeName": "datasetContact" + }, + { + "value": [ { + "dsDescriptionValue":{ + "value": "Sample dataset about health in Mali used for saved search testing.", + "multiple":false, + "typeClass": "primitive", + "typeName": "dsDescriptionValue" + }}], + "typeClass": "compound", + "multiple": true, + "typeName": "dsDescription" + }, + { + "value": [ + "Social Sciences" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + } + ], + "displayName": "Citation Metadata" + } + 
} + } +} diff --git a/postgresql/testdata/scripts/search/tests/data/dataset-mali2.json b/postgresql/testdata/scripts/search/tests/data/dataset-mali2.json new file mode 100644 index 0000000..e9c3286 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/data/dataset-mali2.json @@ -0,0 +1,71 @@ +{ + "datasetVersion": { + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "Women in Mali dataset 1", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorName": { + "value": "Admin, PSI", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + }, + "authorAffiliation": { + "value": "PSI", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": [ + { "datasetContactEmail" : { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactEmail", + "value" : "psiadmin@mailinator.com" + } + }], + "typeClass": "compound", + "multiple": true, + "typeName": "datasetContact" + }, + { + "value": [ { + "dsDescriptionValue":{ + "value": "Sample dataset about women in Mali used for saved search testing.", + "multiple":false, + "typeClass": "primitive", + "typeName": "dsDescriptionValue" + }}], + "typeClass": "compound", + "multiple": true, + "typeName": "dsDescription" + }, + { + "value": [ + "Social Sciences" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + } + ], + "displayName": "Citation Metadata" + } + } + } +} diff --git a/postgresql/testdata/scripts/search/tests/data/dataset-trees1-edit-subject.xml b/postgresql/testdata/scripts/search/tests/data/dataset-trees1-edit-subject.xml new file mode 100644 index 0000000..d5db66c --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/data/dataset-trees1-edit-subject.xml @@ -0,0 +1,14 @@ + + + Spruce Goose + Spruce, Sabrina + What the Spruce Goose was 
really made of. + Creative Commons CC-BY 3.0 (unported) http://creativecommons.org/licenses/by/3.0/ + + + Engineering + diff --git a/postgresql/testdata/scripts/search/tests/data/dataset-trees1-edit.xml b/postgresql/testdata/scripts/search/tests/data/dataset-trees1-edit.xml new file mode 100644 index 0000000..98cfa40 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/data/dataset-trees1-edit.xml @@ -0,0 +1,12 @@ + + + Spruce Goose + Spruce, Sabrina + What the Spruce Goose was *really* made of. + NONE + + diff --git a/postgresql/testdata/scripts/search/tests/data/dataset-trees1.xml b/postgresql/testdata/scripts/search/tests/data/dataset-trees1.xml new file mode 100644 index 0000000..ab2a610 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/data/dataset-trees1.xml @@ -0,0 +1,18 @@ + + + Spruce Goose + Spruce, Sabrina + What the Spruce Goose was really made of. + Downloader will not use the Materials in any way prohibited by applicable laws. + + + diff --git a/postgresql/testdata/scripts/search/tests/data/dv-dash.json b/postgresql/testdata/scripts/search/tests/data/dv-dash.json new file mode 100644 index 0000000..4d97418 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/data/dv-dash.json @@ -0,0 +1,8 @@ + { + "alias":"dash", + "name":"Titanic - 1999", + "affiliation":"Affiliation value", + "contactEmail":"pete@mailinator.com", + "permissionRoot":false, + "description":"A dataverse with a - (a dash) in the description" +} diff --git a/postgresql/testdata/scripts/search/tests/delete-all-and-test b/postgresql/testdata/scripts/search/tests/delete-all-and-test new file mode 100755 index 0000000..8b3c5a6 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/delete-all-and-test @@ -0,0 +1,27 @@ +#!/bin/bash +. scripts/search/export-keys +. 
scripts/search/assumptions + +# delete spruce file +curl -s --insecure -X DELETE -u $SPRUCEKEY: https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit-media/file/$FIRST_SPRUCE_FILE >/dev/null +# delete spruce dataset +curl -s --insecure -X DELETE -u $SPRUCEKEY: https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/study/$FIRST_SPRUCE_DOI >/dev/null +# delete finch dataset +curl -s -X DELETE "http://localhost:8080/api/datasets/$FIRST_FINCH_DATASET_ID?key=$FINCHKEY" >/dev/null + +# delete all dataverses +curl -s -X DELETE "http://localhost:8080/api/dataverses/chestnutsparrows?key=$FINCHKEY" >/dev/null +curl -s -X DELETE "http://localhost:8080/api/dataverses/sparrows?key=$FINCHKEY" >/dev/null +curl -s -X DELETE "http://localhost:8080/api/dataverses/finches?key=$FINCHKEY" >/dev/null +curl -s -X DELETE "http://localhost:8080/api/dataverses/wrens?key=$FINCHKEY" >/dev/null +curl -s -X DELETE "http://localhost:8080/api/dataverses/birds?key=$FINCHKEY" >/dev/null +curl -s -X DELETE "http://localhost:8080/api/dataverses/spruce?key=$SPRUCEKEY" >/dev/null +curl -s -X DELETE "http://localhost:8080/api/dataverses/chestnuttrees?key=$SPRUCEKEY" >/dev/null +curl -s -X DELETE "http://localhost:8080/api/dataverses/trees?key=$SPRUCEKEY" >/dev/null + +echo "Making sure finch can't see anything (silence is golden)" +diff <(curl -s "http://localhost:8080/api/search?q=*&key=$FINCHKEY" | jq '.data.total_count') scripts/search/tests/expected/zero +echo Done +echo "Making sure spruce can't see anything (silence is golden)" +diff <(curl -s "http://localhost:8080/api/search?q=*&key=$SPRUCEKEY" | jq '.data.total_count') scripts/search/tests/expected/zero +echo Done diff --git a/postgresql/testdata/scripts/search/tests/destroy-dataset-finch1 b/postgresql/testdata/scripts/search/tests/destroy-dataset-finch1 new file mode 100755 index 0000000..21c5574 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/destroy-dataset-finch1 @@ -0,0 +1,7 @@ +#!/bin/bash +. 
scripts/search/export-keys +. scripts/search/assumptions +echo $FIRST_FINCH_DATASET_ID +OUTPUT=`curl -s -X DELETE http://localhost:8080/api/datasets/$FIRST_FINCH_DATASET_ID/destroy?key=$FINCHKEY` +echo $OUTPUT +echo $OUTPUT | jq . diff --git a/postgresql/testdata/scripts/search/tests/destroy-dataset-spruce1 b/postgresql/testdata/scripts/search/tests/destroy-dataset-spruce1 new file mode 100755 index 0000000..55b72a4 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/destroy-dataset-spruce1 @@ -0,0 +1,9 @@ +#!/bin/bash +# destroying requires publishing so uncomment this if need be +# scripts/search/tests/publish-spruce1-and-test +sleep 2 +. scripts/search/export-keys +. scripts/search/assumptions +OUTPUT=`curl -s -X DELETE http://localhost:8080/api/datasets/$FIRST_SPRUCE_DATASET_ID/destroy?key=$ADMINKEY` +echo $OUTPUT +echo $OUTPUT | jq . diff --git a/postgresql/testdata/scripts/search/tests/edit-dataset-finch1 b/postgresql/testdata/scripts/search/tests/edit-dataset-finch1 new file mode 100755 index 0000000..3b794c9 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/edit-dataset-finch1 @@ -0,0 +1,10 @@ +#!/bin/bash +. scripts/search/export-keys +. scripts/search/assumptions +GET_VERSION_OUTPUT=`curl -s GET http://localhost:8080/api/datasets/$FIRST_FINCH_DATASET_ID/versions/:latest?key=$FINCHKEY` +echo $GET_VERSION_OUTPUT | jq .data > /tmp/old +cp /tmp/old /tmp/new +sed -i -e "s/Darwin's Finches/Darwin's Galápagos Finches/" /tmp/new +EDIT_OUTPUT=`curl -s -H "Content-type:application/json" -X PUT -d @/tmp/new http://localhost:8080/api/datasets/$FIRST_FINCH_DATASET_ID/versions/:draft?key=$FINCHKEY` +echo $EDIT_OUTPUT +echo $EDIT_OUTPUT | jq . 
diff --git a/postgresql/testdata/scripts/search/tests/expected/anon b/postgresql/testdata/scripts/search/tests/expected/anon new file mode 100644 index 0000000..9832b0d --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/expected/anon @@ -0,0 +1 @@ +"Please provide a key query parameter (?key=XXX) or via the HTTP header X-Dataverse-key" diff --git a/postgresql/testdata/scripts/search/tests/expected/anon-empty b/postgresql/testdata/scripts/search/tests/expected/anon-empty new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/expected/anon-empty @@ -0,0 +1 @@ +[] diff --git a/postgresql/testdata/scripts/search/tests/expected/anon3 b/postgresql/testdata/scripts/search/tests/expected/anon3 new file mode 100644 index 0000000..b8626c4 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/expected/anon3 @@ -0,0 +1 @@ +4 diff --git a/postgresql/testdata/scripts/search/tests/expected/anon3-full b/postgresql/testdata/scripts/search/tests/expected/anon3-full new file mode 100644 index 0000000..945a89c --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/expected/anon3-full @@ -0,0 +1,50 @@ +{ + "data": { + "count_in_response": 4, + "fq_provided": "[]", + "items": [ + { + "alias": "trees", + "description": "A tree dataverse with some birds", + "name": "Trees", + "published_at": "2015-01-08T03:27Z", + "type": "dataverses", + "url": "https://murphy.local/dataverse/trees" + }, + { + "authors": [ + "Spruce, Sabrina" + ], + "citation": "Spruce, Sabrina, 2015, \"Spruce Goose\", http://dx.doi.org/10.5072/FK2/I4VPEZ, Root Dataverse, V0", + "global_id": "doi:10.5072/FK2/I4VPEZ", + "name": "Spruce Goose", + "persistent_url": "http://dx.doi.org/10.5072/FK2/I4VPEZ", + "published_at": "2015-01-08T03:27Z", + "type": "datasets", + "url": "https://murphy.local/dataset.xhtml?globalId=doi:10.5072/FK2/I4VPEZ" + }, + { + "description": "", + "file_id": "12", + "file_type": "PNG Image", + "name": "trees.png", + 
"persistent_url": "http://dx.doi.org/10.5072/FK2/I4VPEZ", + "published_at": "2015-01-08T03:27Z", + "type": "files", + "url": "https://murphy.local/dataset.xhtml?globalId=doi:10.5072/FK2/I4VPEZ" + }, + { + "alias": "spruce", + "description": "A spruce with some birds", + "name": "Spruce", + "published_at": "2015-01-08T03:27Z", + "type": "dataverses", + "url": "https://murphy.local/dataverse/spruce" + } + ], + "q": "*", + "start": 0, + "total_count": 4 + }, + "status": "OK" +} diff --git a/postgresql/testdata/scripts/search/tests/expected/anontest3 b/postgresql/testdata/scripts/search/tests/expected/anontest3 new file mode 100644 index 0000000..325d80c --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/expected/anontest3 @@ -0,0 +1,6 @@ +[ + "files:trees.png", + "datasets:Spruce Goose", + "dataverses:Trees", + "dataverses:Spruce" +] diff --git a/postgresql/testdata/scripts/search/tests/expected/finch1 b/postgresql/testdata/scripts/search/tests/expected/finch1 new file mode 100644 index 0000000..f9fa17c --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/expected/finch1 @@ -0,0 +1,9 @@ +[ + "files:data.tsv", + "datasets:Darwin's Finches", + "dataverses:Birds", + "dataverses:Finches", + "dataverses:Sparrows", + "dataverses:Wrens", + "dataverses:Chestnut Sparrows" +] diff --git a/postgresql/testdata/scripts/search/tests/expected/finch3 b/postgresql/testdata/scripts/search/tests/expected/finch3 new file mode 100644 index 0000000..1d82dc5 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/expected/finch3 @@ -0,0 +1,12 @@ +[ + "files:trees.png", + "datasets:Spruce Goose", + "datasets:Darwin's Finches", + "dataverses:Birds", + "dataverses:Finches", + "dataverses:Sparrows", + "dataverses:Wrens", + "dataverses:Chestnut Sparrows", + "dataverses:Trees", + "dataverses:Spruce" +] diff --git a/postgresql/testdata/scripts/search/tests/expected/nosuchuser b/postgresql/testdata/scripts/search/tests/expected/nosuchuser new file mode 100644 index 
0000000..450d91a --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/expected/nosuchuser @@ -0,0 +1 @@ +"Bad api key 'nosuchuser'" diff --git a/postgresql/testdata/scripts/search/tests/expected/saved-search b/postgresql/testdata/scripts/search/tests/expected/saved-search new file mode 100644 index 0000000..ff494dc --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/expected/saved-search @@ -0,0 +1,12 @@ +{ + "type": "dataverse", + "name": "Child of Mali Health" +} +{ + "type": "dataverse", + "name": "Grandchild of Mali Health" +} +{ + "type": "dataset", + "name": "Mali health dataset 1" +} diff --git a/postgresql/testdata/scripts/search/tests/expected/saved-search-links b/postgresql/testdata/scripts/search/tests/expected/saved-search-links new file mode 100644 index 0000000..c80800d --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/expected/saved-search-links @@ -0,0 +1,7 @@ +{ + "datasets that the psimalihealth has linked to": [ + "Mali health dataset 1" + ], + "dataverses that link to the psimalihealth": [], + "dataverses that the psimalihealth dataverse has linked to": [] +} diff --git a/postgresql/testdata/scripts/search/tests/expected/solr-down b/postgresql/testdata/scripts/search/tests/expected/solr-down new file mode 100644 index 0000000..93ff7bd --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/expected/solr-down @@ -0,0 +1,4 @@ +{ + "message": "Exception running search for [*] with filterQueries [] and paginationStart [0]: edu.harvard.iq.dataverse.search.SearchException: Internal Dataverse Search Engine Error org.apache.solr.client.solrj.SolrServerException org.apache.solr.client.solrj.SolrServerException: Server refused connection at: http://localhost:8983/solr org.apache.http.conn.HttpHostConnectException org.apache.http.conn.HttpHostConnectException: Connection to http://localhost:8983 refused java.net.ConnectException java.net.ConnectException: Connection refused ", + "status": "ERROR" +} diff --git 
a/postgresql/testdata/scripts/search/tests/expected/spruce1 b/postgresql/testdata/scripts/search/tests/expected/spruce1 new file mode 100644 index 0000000..f11cd12 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/expected/spruce1 @@ -0,0 +1,7 @@ +[ + "files:trees.png", + "datasets:Spruce Goose", + "dataverses:Trees", + "dataverses:Spruce", + "dataverses:Chestnut Trees" +] diff --git a/postgresql/testdata/scripts/search/tests/expected/spruce2 b/postgresql/testdata/scripts/search/tests/expected/spruce2 new file mode 100644 index 0000000..89ecebe --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/expected/spruce2 @@ -0,0 +1,8 @@ +[ + "files:trees.png", + "datasets:Spruce Goose", + "dataverses:Birds", + "dataverses:Trees", + "dataverses:Spruce", + "dataverses:Chestnut Trees" +] diff --git a/postgresql/testdata/scripts/search/tests/expected/zero b/postgresql/testdata/scripts/search/tests/expected/zero new file mode 100644 index 0000000..573541a --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/expected/zero @@ -0,0 +1 @@ +0 diff --git a/postgresql/testdata/scripts/search/tests/explicit-group-add b/postgresql/testdata/scripts/search/tests/explicit-group-add new file mode 100755 index 0000000..d872d55 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/explicit-group-add @@ -0,0 +1,2 @@ +#!/bin/sh +curl -X POST http://localhost:8080/api/dataverses/root/groups?key=$ADMINKEY -H "Content-type: application/json" --upload-file scripts/search/data/group-explicit-trees.json diff --git a/postgresql/testdata/scripts/search/tests/files b/postgresql/testdata/scripts/search/tests/files new file mode 100755 index 0000000..8874c83 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/files @@ -0,0 +1,3 @@ +#!/bin/sh +OUT=`curl -s "http://localhost:8080/api/admin/index/filesearch?persistentId=$1&q=$2"` +echo $OUT | jq . 
diff --git a/postgresql/testdata/scripts/search/tests/grant-authusers-add-on-root b/postgresql/testdata/scripts/search/tests/grant-authusers-add-on-root new file mode 100755 index 0000000..08b245f --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/grant-authusers-add-on-root @@ -0,0 +1,5 @@ +#!/bin/sh +. scripts/search/export-keys +OUTPUT=`curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \":authenticated-users\",\"role\": \"fullContributor\"}" "http://localhost:8080/api/dataverses/root/assignments?key=$ADMINKEY"` +echo $OUTPUT +echo $OUTPUT | jq ' .data | {assignee,_roleAlias}' diff --git a/postgresql/testdata/scripts/search/tests/grant-finch-admin-on-spruce b/postgresql/testdata/scripts/search/tests/grant-finch-admin-on-spruce new file mode 100755 index 0000000..f564033 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/grant-finch-admin-on-spruce @@ -0,0 +1,3 @@ +#!/bin/bash +. scripts/search/assumptions +curl -s -X POST -H 'Content-Type: application/x-www-form-urlencoded' "http://localhost:8080/api/roles/assignments?username=$FINCH_USERNAME&roleId=$ADMIN_ROLE&definitionPointId=$SPRUCE_DATAVERSE&key=$SPRUCEKEY" | jq ' .data | {assignee,_roleAlias}' diff --git a/postgresql/testdata/scripts/search/tests/grant-ipgroup3-add-on-root b/postgresql/testdata/scripts/search/tests/grant-ipgroup3-add-on-root new file mode 100755 index 0000000..cd58cea --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/grant-ipgroup3-add-on-root @@ -0,0 +1,5 @@ +#!/bin/sh +. 
scripts/search/export-keys +OUTPUT=`curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \"&ip/ipGroup3\",\"role\": \"dvContributor\"}" "http://localhost:8080/api/dataverses/root/assignments?key=$ADMINKEY"` +echo $OUTPUT +echo $OUTPUT | jq ' .data | {assignee,_roleAlias}' diff --git a/postgresql/testdata/scripts/search/tests/grant-shibgroup1-add-on-root b/postgresql/testdata/scripts/search/tests/grant-shibgroup1-add-on-root new file mode 100755 index 0000000..f016c8a --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/grant-shibgroup1-add-on-root @@ -0,0 +1,6 @@ +#!/bin/sh +. scripts/search/export-keys +OUTPUT=`curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \"&shib/1\",\"role\": \"dvContributor\"}" "http://localhost:8080/api/dataverses/root/assignments?key=$ADMINKEY"` +echo $OUTPUT +echo $OUTPUT | jq . +#echo $OUTPUT | jq ' .data | {assignee,_roleAlias}' diff --git a/postgresql/testdata/scripts/search/tests/grant-spruce-admin-on-birds b/postgresql/testdata/scripts/search/tests/grant-spruce-admin-on-birds new file mode 100755 index 0000000..70515ad --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/grant-spruce-admin-on-birds @@ -0,0 +1,6 @@ +#!/bin/sh +. scripts/search/assumptions +OUTPUT=`curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \"@spruce\",\"role\": \"admin\"}" "http://localhost:8080/api/dataverses/birds/assignments?key=$ADMINKEY"` +echo $OUTPUT +echo +echo $OUTPUT | jq ' .data | {assignee,_roleAlias}' diff --git a/postgresql/testdata/scripts/search/tests/ipgroup-add b/postgresql/testdata/scripts/search/tests/ipgroup-add new file mode 100755 index 0000000..d41679f --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/ipgroup-add @@ -0,0 +1,5 @@ +#!/bin/sh +. scripts/search/export-keys +OUTPUT=`curl -s -X POST -d @scripts/api/data/ipGroup-all.json http://localhost:8080/api/admin/groups/ip -H "Content-type:application/json"` +echo $OUTPUT +echo $OUTPUT | jq . 
diff --git a/postgresql/testdata/scripts/search/tests/permissions1 b/postgresql/testdata/scripts/search/tests/permissions1 new file mode 100755 index 0000000..dfb9648 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/permissions1 @@ -0,0 +1,18 @@ +#!/bin/bash +# After dropping your database and getting set up again per the dev guide, +# You should see no output from this script. Silence is golden. +# If you start creating dataverses and datasets, you should expect to see output. +# we plan to support API keys/tokens in https://github.com/IQSS/dataverse/issues/1299 +diff <(curl -s 'http://localhost:8080/api/search?q=*&key=nosuchuser' | jq .message) scripts/search/tests/expected/nosuchuser + +diff <(curl -s 'http://localhost:8080/api/search?q=*' | jq .message) scripts/search/tests/expected/anon + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$FINCHKEY" | jq .data) scripts/search/tests/expected/finch1 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$SPRUCEKEY" | jq .data) scripts/search/tests/expected/spruce1 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$SPARROWKEY" | jq .data) scripts/search/tests/expected/anon-empty + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$WRENKEY" | jq .data) scripts/search/tests/expected/anon-empty + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$CHESTNUTKEY" | jq .data) scripts/search/tests/expected/anon-empty diff --git a/postgresql/testdata/scripts/search/tests/permissions2 b/postgresql/testdata/scripts/search/tests/permissions2 new file mode 100755 index 0000000..2f650f0 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/permissions2 @@ -0,0 +1,10 @@ +#!/bin/bash +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$FINCHKEY" | jq .data) scripts/search/tests/expected/finch1 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$SPRUCEKEY" | jq .data) 
scripts/search/tests/expected/spruce2 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$SPARROWKEY" | jq .data) scripts/search/tests/expected/anon-empty + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$WRENKEY" | jq .data) scripts/search/tests/expected/anon-empty + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$CHESTNUTKEY" | jq .data) scripts/search/tests/expected/anon-empty diff --git a/postgresql/testdata/scripts/search/tests/permissions3 b/postgresql/testdata/scripts/search/tests/permissions3 new file mode 100755 index 0000000..8c105e7 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/permissions3 @@ -0,0 +1,12 @@ +#!/bin/bash +diff <(curl -s "http://localhost:8080/api/search?q=*" | jq .data.count_in_response) scripts/search/tests/expected/anon3 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$FINCHKEY" | jq .data) scripts/search/tests/expected/finch3 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$SPRUCEKEY" | jq .data) scripts/search/tests/expected/spruce2 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$SPARROWKEY" | jq .data) scripts/search/tests/expected/anontest3 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$WRENKEY" | jq .data) scripts/search/tests/expected/anontest3 + +diff <(curl -s "http://localhost:8080/api/admin/index/test?q=*&key=$CHESTNUTKEY" | jq .data) scripts/search/tests/expected/anontest3 diff --git a/postgresql/testdata/scripts/search/tests/permissions3-full-anon b/postgresql/testdata/scripts/search/tests/permissions3-full-anon new file mode 100755 index 0000000..4dc24b7 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/permissions3-full-anon @@ -0,0 +1,3 @@ +#!/bin/bash +#curl -s "http://localhost:8080/api/search?q=*" | python -m json.tool > scripts/search/tests/expected/anon3-full +diff <(curl -s "http://localhost:8080/api/search?q=*" | python -m json.tool) 
scripts/search/tests/expected/anon3-full diff --git a/postgresql/testdata/scripts/search/tests/publish-dataset-spruce1 b/postgresql/testdata/scripts/search/tests/publish-dataset-spruce1 new file mode 100755 index 0000000..14b0f80 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/publish-dataset-spruce1 @@ -0,0 +1,7 @@ +#!/bin/sh +. scripts/search/export-keys +. scripts/search/assumptions +OUTPUT=`cat /dev/null | curl -s --insecure -u $SPRUCEKEY: -X POST -H 'In-Progress: false' --data-binary @- "https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/study/$FIRST_SPRUCE_DOI"` +echo $OUTPUT +echo +echo $OUTPUT | xmllint -format - diff --git a/postgresql/testdata/scripts/search/tests/publish-dataverse-birds b/postgresql/testdata/scripts/search/tests/publish-dataverse-birds new file mode 100755 index 0000000..5e8af36 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/publish-dataverse-birds @@ -0,0 +1,5 @@ +#!/bin/sh +OUTPUT=`cat /dev/null | curl -s --insecure -X POST -H 'In-Progress: false' --data-binary @- https://admin:admin@localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/dataverse/birds` +echo $OUTPUT +echo +echo $OUTPUT | xmllint -format - diff --git a/postgresql/testdata/scripts/search/tests/publish-dataverse-finches b/postgresql/testdata/scripts/search/tests/publish-dataverse-finches new file mode 100755 index 0000000..0632bc5 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/publish-dataverse-finches @@ -0,0 +1,5 @@ +#!/bin/sh +OUTPUT=`cat /dev/null | curl -s --insecure -X POST -H 'In-Progress: false' --data-binary @- https://finch:finch@localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/dataverse/finches` +echo $OUTPUT +echo +echo $OUTPUT | xmllint -format - diff --git a/postgresql/testdata/scripts/search/tests/publish-dataverse-root b/postgresql/testdata/scripts/search/tests/publish-dataverse-root new file mode 100755 index 0000000..a4c1585 --- /dev/null +++ 
b/postgresql/testdata/scripts/search/tests/publish-dataverse-root @@ -0,0 +1,6 @@ +#!/bin/sh +. scripts/search/export-keys +OUTPUT=`cat /dev/null | curl -s --insecure -u $ADMINKEY: -X POST -H 'In-Progress: false' --data-binary @- https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/dataverse/root` +echo $OUTPUT +echo +echo $OUTPUT | xmllint -format - diff --git a/postgresql/testdata/scripts/search/tests/publish-dataverse-spruce b/postgresql/testdata/scripts/search/tests/publish-dataverse-spruce new file mode 100755 index 0000000..bf2746b --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/publish-dataverse-spruce @@ -0,0 +1,6 @@ +#!/bin/sh +. scripts/search/export-keys +OUTPUT=`cat /dev/null | curl -s --insecure -u $SPRUCEKEY: -X POST -H 'In-Progress: false' --data-binary @- https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/dataverse/spruce` +echo $OUTPUT +echo +echo $OUTPUT | xmllint -format - diff --git a/postgresql/testdata/scripts/search/tests/publish-dataverse-trees b/postgresql/testdata/scripts/search/tests/publish-dataverse-trees new file mode 100755 index 0000000..65c58de --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/publish-dataverse-trees @@ -0,0 +1,6 @@ +#!/bin/sh +. 
scripts/search/export-keys +OUTPUT=`cat /dev/null | curl -s --insecure -u $SPRUCEKEY: -X POST -H 'In-Progress: false' --data-binary @- https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit/dataverse/trees` +echo $OUTPUT +echo +echo $OUTPUT | xmllint -format - diff --git a/postgresql/testdata/scripts/search/tests/publish-spruce1-and-test b/postgresql/testdata/scripts/search/tests/publish-spruce1-and-test new file mode 100755 index 0000000..cc363fe --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/publish-spruce1-and-test @@ -0,0 +1,7 @@ +#!/bin/sh +scripts/search/tests/publish-dataverse-root +scripts/search/tests/publish-dataverse-trees +scripts/search/tests/publish-dataverse-spruce +scripts/search/tests/publish-dataset-spruce1 +#scripts/search/tests/permissions3 +#scripts/search/tests/permissions3-full-anon diff --git a/postgresql/testdata/scripts/search/tests/revoke-finch-admin-on-spruce b/postgresql/testdata/scripts/search/tests/revoke-finch-admin-on-spruce new file mode 100755 index 0000000..dfe6c7e --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/revoke-finch-admin-on-spruce @@ -0,0 +1,3 @@ +#!/bin/sh +. scripts/search/assumptions +curl -s -X DELETE "http://localhost:8080/api/dataverses/$SPRUCE_DATAVERSE/assignments/$FINCH_ADMIN_ON_SPRUCE?key=$SPRUCEKEY" | jq .data.message diff --git a/postgresql/testdata/scripts/search/tests/revoke-spruce-admin-on-birds b/postgresql/testdata/scripts/search/tests/revoke-spruce-admin-on-birds new file mode 100755 index 0000000..b1bfff3 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/revoke-spruce-admin-on-birds @@ -0,0 +1,6 @@ +#!/bin/sh +. 
scripts/search/assumptions +OUTPUT=`curl -s -X DELETE "http://localhost:8080/api/dataverses/$BIRDS_DATAVERSE/assignments/$SPRUCE_ADMIN_ON_BIRDS?key=$FINCHKEY"` +echo $OUTPUT +echo +echo $OUTPUT | jq .data.message diff --git a/postgresql/testdata/scripts/search/tests/solr-down b/postgresql/testdata/scripts/search/tests/solr-down new file mode 100755 index 0000000..534380a --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/solr-down @@ -0,0 +1,2 @@ +#!/bin/bash +diff <(curl -s 'http://localhost:8080/api/search?q=*' | jq .) scripts/search/tests/expected/solr-down diff --git a/postgresql/testdata/scripts/search/tests/special-characters b/postgresql/testdata/scripts/search/tests/special-characters new file mode 100755 index 0000000..812c638 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/special-characters @@ -0,0 +1,9 @@ +#!/bin/bash +# curl -H "Content-type:application/json" -X POST -d @scripts/search/tests/data/dv-colon.json "http://localhost:8080/api/dataverses/peteTop?key=pete" +# curl 'http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q="description:\:"' +diff <(curl -s 'http://localhost:8080/api/search?q=:') scripts/search/tests/expected/colon + +# http://stackoverflow.com/questions/18277609/search-in-solr-with-special-characters +# curl -H "Content-type:application/json" -X POST -d @scripts/search/tests/data/dv-dash.json "http://localhost:8080/api/dataverses/peteTop?key=pete" +# curl 'http://localhost:8983/solr/collection1/select?rows=100&wt=json&indent=true&q=name:\-' +# diff <(curl -s 'http://localhost:8080/api/search?q=name:"Titanic - 1999"') scripts/search/tests/expected/dash diff --git a/postgresql/testdata/scripts/search/tests/upload-1000-files b/postgresql/testdata/scripts/search/tests/upload-1000-files new file mode 100755 index 0000000..a4c1d46 --- /dev/null +++ b/postgresql/testdata/scripts/search/tests/upload-1000-files @@ -0,0 +1,5 @@ +#!/bin/sh +. scripts/search/export-keys +. 
scripts/search/assumptions +echo "Uploading 1000 files" +curl -s --insecure --data-binary @scripts/search/data/binary/1000files.zip -H 'Content-Disposition: filename=1000files.zip' -H 'Content-Type: application/zip' -H 'Packaging: http://purl.org/net/sword/package/SimpleZip' -u spruce:spruce https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit-media/study/$FIRST_SPRUCE_DOI diff --git a/postgresql/testdata/scripts/search/users.tsv b/postgresql/testdata/scripts/search/users.tsv new file mode 100755 index 0000000..4422bea --- /dev/null +++ b/postgresql/testdata/scripts/search/users.tsv @@ -0,0 +1,7 @@ +userName firstName lastName email +finch Fiona Finch finch@mailinator.com +sparrow Sammy Sparrow sparrow@mailinator.com +wren Wilbur Wren wren@mailinator.com +spruce Sabrina Spruce spruce@mailinator.com +chestnut Caleb Chestnut chestnut@mailinator.com +psiadmin PSI Admin psi@mailinator.com diff --git a/postgresql/testdata/scripts/setup/asadmin-setup.sh b/postgresql/testdata/scripts/setup/asadmin-setup.sh new file mode 100755 index 0000000..8b50d2f --- /dev/null +++ b/postgresql/testdata/scripts/setup/asadmin-setup.sh @@ -0,0 +1,212 @@ +#!/bin/bash +# STOP! +# DO NOT ADD MORE ASADMIN COMMANDS TO THIS SCRIPT! +# IF YOU NEED TO ADD MORE GLASSFISH CONFIG SETTINGS, ADD THEM +# TO THE ../installer/glassfish-setup.sh SCRIPT. +# I'M ASSUMING THAT WE'LL WANT TO CONTINUE MAINTAINING THIS SCRIPT, +# (FOR VAGRANT SETUPS, etc.?); IT SHOULD STILL BE WORKING, BY +# CALLING THE NEW SCRIPT ABOVE - SO NO NEED TO DUPLICATE THE ASADMIN +# COMMANDS HERE. +# FROM NOW ON, ONLY NON-ASADMIN CONFIGURATION SHOULD GO INTO THIS +# SCRIPT. (which makes the name especially misleading - but I didn't +# want to change it, in case other scripts are calling it by name!) +# -Leonid 4.0 beta + +# This is a setup script for setting up Glassfish 4 to run Dataverse +# The script was tested on Mac OS X.9 +# ASSUMPTIONS +# * Script has to run locally (i.e. 
on the machine that hosts the server) +# * Internet connectivity is assumed, in order to get the postgresql driver. + +## +# Default values - Change to suit your machine. +DEFAULT_GLASSFISH_ROOT=/Applications/NetBeans/glassfish-4.0 +DEFAULT_DOMAIN=domain1 +DEFAULT_ASADMIN_OPTS=" " + +### +# Database values. Update as needed. +# Note: DB_USER "dvnApp" is case-sensitive and later used in "scripts/database/reference_data.sql" +# +DB_PORT=5432; export DB_PORT +DB_HOST=localhost; export DB_HOST +DB_NAME=dvndb; export DB_NAME +DB_USER=dvnApp; export DB_USER +DB_PASS=dvnAppPass; export DB_PASS + +### +# Rserve configuration: +RSERVE_HOST=localhost; export RSERVE_HOST +RSERVE_PORT=6311; export RSERVE_PORT +RSERVE_USER=rserve; export RSERVE_USER +RSERVE_PASS=rserve; export RSERVE_PASS + +### +# Other configuration values: +MEM_HEAP_SIZE=1024; export MEM_HEAP_SIZE +HOST_ADDRESS=localhost; export HOST_ADDRESS +SMTP_SERVER=mail.hmdc.harvard.edu; export SMTP_SERVER +FILES_DIR=${HOME}/dataverse/files; export FILES_DIR + +### End of default configuration values. + +# "${VAR+xxx}" for unset vs. empty per http://stackoverflow.com/questions/228544/how-to-tell-if-a-string-is-not-defined-in-a-bash-shell-script/230593#230593 + +if [ "${DB_NAME_CUSTOM+xxx}" ] + then + echo "Default DB_NAME ($DB_NAME) overridden: $DB_NAME_CUSTOM" + DB_NAME=$DB_NAME_CUSTOM +fi + +if [ "${DB_USER_CUSTOM+xxx}" ] + then + echo "Default DB_USER ($DB_USER) overridden: $DB_USER_CUSTOM" + DB_USER=$DB_USER_CUSTOM +fi + +if [ "${DB_PASS_CUSTOM+xxx}" ] + then + echo "Default DB_PASS ($DB_PASS) overridden: $DB_PASS_CUSTOM" + DB_PASS=$DB_PASS_CUSTOM +fi + +#echo "end" +#exit + +## +# External dependencies +PGSQL_DRIVER_URL=http://jdbc.postgresql.org/download/postgresql-9.3-1100.jdbc41.jar + +if [ "$SUDO_USER" = "vagrant" ] + then + echo "We are running in a Vagrant environment." 
+ cat /etc/redhat-release + # Choosing all lower case identifiers for DB_NAME and DB_USER for this reason: + # + # Quoting an identifier also makes it case-sensitive, whereas unquoted names + # are always folded to lower case. For example, the identifiers FOO, foo, and + # "foo" are considered the same by PostgreSQL, but "Foo" and "FOO" are + # different from these three and each other. (The folding of unquoted names + # to lower case in PostgreSQL is incompatible with the SQL standard, which + # says that unquoted names should be folded to upper case. Thus, foo should + # be equivalent to "FOO" not "foo" according to the standard. If you want to + # write portable applications you are advised to always quote a particular + # name or never quote it.) -- + # http://www.postgresql.org/docs/9.3/static/sql-syntax-lexical.html + DB_NAME=dataverse_db + DB_USER=dataverse_app + DB_PASS=secret + echo "Configuring EPEL Maven repo " + cd /etc/yum.repos.d + wget http://repos.fedorapeople.org/repos/dchen/apache-maven/epel-apache-maven.repo + cd + echo "Installing dependencies via yum" + yum install -y -q java-1.7.0-openjdk-devel postgresql-server apache-maven httpd mod_ssl + rpm -q postgresql-server + echo "Starting PostgreSQL" + chkconfig postgresql on + /sbin/service postgresql initdb + cp -a /var/lib/pgsql/data/pg_hba.conf /var/lib/pgsql/data/pg_hba.conf.orig + sed -i -e 's/ident$/trust/' /var/lib/pgsql/data/pg_hba.conf + /sbin/service postgresql start + POSTGRES_USER=postgres + echo "Creating database user $DB_USER" + su $POSTGRES_USER -s /bin/sh -c "psql -c \"CREATE ROLE \"$DB_USER\" UNENCRYPTED PASSWORD '$DB_PASS' NOSUPERUSER CREATEDB CREATEROLE NOINHERIT LOGIN\"" + #su $POSTGRES_USER -s /bin/sh -c "psql -c '\du'" + echo "Creating database $DB_NAME" + su $POSTGRES_USER -s /bin/sh -c "psql -c 'CREATE DATABASE \"$DB_NAME\" WITH OWNER = \"$DB_USER\"'" + GLASSFISH_USER=glassfish + echo "Ensuring Unix user '$GLASSFISH_USER' exists" + useradd $GLASSFISH_USER || : + 
GLASSFISH_ZIP=`ls /downloads/glassfish*zip` + GLASSFISH_USER_HOME=~glassfish + echo "Copying $GLASSFISH_ZIP to $GLASSFISH_USER_HOME and unzipping" + su $GLASSFISH_USER -s /bin/sh -c "cp $GLASSFISH_ZIP $GLASSFISH_USER_HOME" + su $GLASSFISH_USER -s /bin/sh -c "cd $GLASSFISH_USER_HOME && unzip -q $GLASSFISH_ZIP" + DEFAULT_GLASSFISH_ROOT=$GLASSFISH_USER_HOME/glassfish4 + su $GLASSFISH_USER -s /bin/sh -c "/scripts/installer/glassfish-setup.sh" +fi + + +# Set the scripts parameters (if needed) +if [ -z "${GLASSFISH_ROOT+xxx}" ] + then + echo setting GLASSFISH_ROOT to $DEFAULT_GLASSFISH_ROOT + GLASSFISH_ROOT=$DEFAULT_GLASSFISH_ROOT; export GLASSFISH_ROOT +fi +if [ ! -d "$GLASSFISH_ROOT" ] + then + echo Glassfish root '$GLASSFISH_ROOT' does not exist + exit 1 +fi +GLASSFISH_BIN_DIR=$GLASSFISH_ROOT/bin + +if [ -z "${DOMAIN+xxx}" ] + then + echo setting DOMAIN to $DEFAULT_DOMAIN + DOMAIN=$DEFAULT_DOMAIN + # setting the environmental variable GLASSFISH_DOMAIN, + # for the ../installer/glassfish-setup.sh script, that runs + # all the required asadmin comands + GLASSFISH_DOMAIN=$DOMAIN; export GLASSFISH_DOMAIN +fi +DOMAIN_DIR=$GLASSFISH_ROOT/glassfish/domains/$DOMAIN +if [ ! -d "$DOMAIN_DIR" ] + then + echo Domain directory '$DOMAIN_DIR' does not exist + exit 2 +fi +if [ -z "$ASADMIN_OPTS" ] + then + ASADMIN_OPTS=$DEFAULT_ASADMIN_OPTS; export ASADMIN_OPTS +fi + +echo "Setting up your glassfish4 to support Dataverse" +echo "Glassfish directory: "$GLASSFISH_ROOT +echo "Domain directory: "$DOMAIN_DIR + +### +# getting the postgres driver +DOMAIN_LIB=$DOMAIN_DIR/lib +if ! grep -qs postgres $DOMAIN_LIB/* + then + DRIVER_NAME=$(echo $PGSQL_DRIVER_URL | tr / \\n | tail -n1) + echo Downloading postgresql driver '$DRIVER_NAME' + wget $PGSQL_DRIVER_URL -O $DOMAIN_LIB/$DRIVER_NAME + else + echo postgresql driver already installed. 
+fi + +if [ "$SUDO_USER" = "vagrant" ] + then + /scripts/installer/glassfish-setup.sh + echo "Done configuring Vagrant environment" + exit 0 +fi + +### +# Move to the glassfish dir +pushd $GLASSFISH_BIN_DIR + +### +# take the domain up, if needed. +DOMAIN_DOWN=$(./asadmin list-domains | grep "$DOMAIN " | grep "not running") +if [ $(echo $DOMAIN_DOWN|wc -c) -ne 1 ]; + then + echo Trying to start domain $DOMAIN up... + ./asadmin $ASADMIN_OPTS start-domain $DOMAIN + else + echo domain running +fi + +# ONCE AGAIN, ASADMIN COMMANDS BELOW HAVE ALL BEEN MOVED INTO scripts/installer/glassfish-setup.sh + +# TODO: diagnostics + +### +# Clean up +popd + +echo "Glassfish setup complete" +date + diff --git a/postgresql/testdata/scripts/trello/trello b/postgresql/testdata/scripts/trello/trello new file mode 100755 index 0000000..6b1b7cb --- /dev/null +++ b/postgresql/testdata/scripts/trello/trello @@ -0,0 +1,4 @@ +curl -s https://api.trello.com/1/boards/527d1605c7b30060420027b0 | python -m json.tool +#curl -s https://api.trello.com/1/lists/527d1605c7b30060420027b0?fields=name&cards=open&card_fields=name +# https://api.trello.com/1/lists/4eea4ffc91e31d174600004a?fields=name&cards=open&card_fields=name&key=[application_key]&token=[optional_auth_token] + diff --git a/postgresql/testdata/scripts/vagrant/install-dataverse.sh b/postgresql/testdata/scripts/vagrant/install-dataverse.sh new file mode 100644 index 0000000..ac48217 --- /dev/null +++ b/postgresql/testdata/scripts/vagrant/install-dataverse.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +if [ ! -z "$1" ]; then + MAILSERVER=$1 + MAILSERVER_ARG="--mailserver $MAILSERVER" +fi +WAR=/dataverse/target/dataverse*.war +if [ ! -f $WAR ]; then + echo "no war file found... 
building" + echo "Installing nss on CentOS 6 to avoid java.security.KeyException while building war file: https://github.com/IQSS/dataverse/issues/2744" + yum install -y nss + su $SUDO_USER -s /bin/sh -c "cd /dataverse && mvn package" +fi +cd /dataverse/scripts/installer + +# move any pre-existing `default.config` file out of the way to avoid overwriting +pid=$$ +if [ -e default.config ]; then + mv default.config tmp-${pid}-default.config +fi + +echo "HOST_DNS_ADDRESS localhost" > default.config +echo "GLASSFISH_DIRECTORY /home/glassfish/glassfish4" >> default.config + +if [ ! -z "$MAILSERVER" ]; then + echo "MAIL_SERVER $MAILSERVER" >> default.config +fi + +./install -y -f + +if [ -e tmp-${pid}-default.config ]; then # if we moved it out, move it back + mv -f tmp-${pid}-default.config default.config +fi + +echo "If "vagrant up" was successful (check output above) Dataverse is running on port 8080 of the Linux machine running within Vagrant, but this port has been forwarded to port 8888 of the computer you ran "vagrant up" on. For this reason you should go to http://localhost:8888 to see the Dataverse app running." diff --git a/postgresql/testdata/scripts/vagrant/install-tworavens.sh b/postgresql/testdata/scripts/vagrant/install-tworavens.sh new file mode 100755 index 0000000..3e1fb1b --- /dev/null +++ b/postgresql/testdata/scripts/vagrant/install-tworavens.sh @@ -0,0 +1,35 @@ +#!/bin/bash +echo "This script is highly experimental and makes many assumptions about how Dataverse is running in Vagrant. Please consult the TwoRavens section of the Dataverse Installation Guide instead." 
+exit 1 +cd /root +yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm +yum install -y R R-devel +# FIXME: /dataverse is mounted in Vagrant but not other places +yum install -y /dataverse/doc/sphinx-guides/source/_static/installation/files/home/rpmbuild/rpmbuild/RPMS/x86_64/rapache-1.2.6-rpm0.x86_64.rpm +yum install -y gcc-gfortran # to build R packages +COMMIT=a6869eb28693d6df529e7cb3888c40de5f302b66 +UNZIPPED=TwoRavens-$COMMIT +if [ ! -f $COMMIT ]; then + wget https://github.com/IQSS/TwoRavens/archive/$COMMIT.zip + unzip $COMMIT + cd $UNZIPPED/r-setup + ./r-setup.sh # This is expected to take a while. Look for lines like "Package Zelig successfully installed" and "Successfully installed Dataverse R framework". +fi +# FIXME: copy preprocess.R into Glassfish while running and overwrite it +curl -X PUT -d true http://localhost:8080/api/admin/settings/:TwoRavensTabularView +# Port 8888 because we're running in Vagrant. On the dev1 server we use https://dev1.dataverse.org/dataexplore/gui.html +curl -X PUT -d http://localhost:8888/dataexplore/gui.html http://localhost:8080/api/admin/settings/:TwoRavensUrl +cd /root +DIR=/var/www/html/dataexplore +if [ ! -d $DIR ]; then + cp -r $UNZIPPED $DIR +fi +cd $DIR +# The plan is to remove this hack of dropping preprocess.R into a deployed war file directory. See https://github.com/IQSS/dataverse/issues/3372 +# FIXME: don't assume version 4.6.1 +#diff /var/www/html/dataexplore/rook/preprocess/preprocess.R /usr/local/glassfish4/glassfish/domains/domain1/applications/dataverse-4.6.1/WEB-INF/classes/edu/harvard/iq/dataverse/rserve/scripts/preprocess.R +# FIXME: If `diff` shows a difference, which is likely, copy the version from TwoRavens to the Glassfish directory. 
+#cp /var/www/html/dataexplore/rook/preprocess/preprocess.R /usr/local/glassfish4/glassfish/domains/domain1/applications/dataverse-4.6.1/WEB-INF/classes/edu/harvard/iq/dataverse/rserve/scripts/preprocess.R +# FIXME: restart Glassfish if you had to update preprocess.R above. +# FIXME: Vagrant with it's weird 8888 port forwarding isn't working. On the dev1 server, TwoRavens works fine if you supply "https://dev1.dataverse.org" for both URLs. +echo "Next, run ./install.pl after you cd to $DIR" diff --git a/postgresql/testdata/scripts/vagrant/rpmbuild.sh b/postgresql/testdata/scripts/vagrant/rpmbuild.sh new file mode 100755 index 0000000..f10830a --- /dev/null +++ b/postgresql/testdata/scripts/vagrant/rpmbuild.sh @@ -0,0 +1,3 @@ +#!/bin/sh +rpm -Uvh http://dl.fedoraproject.org/pub/epel/7/x86_64/e/epel-release-7-7.noarch.rpm +yum install -y rpm-build httpd-devel libapreq2-devel R-devel diff --git a/postgresql/testdata/scripts/vagrant/setup-solr.sh b/postgresql/testdata/scripts/vagrant/setup-solr.sh new file mode 100755 index 0000000..f4a5bd2 --- /dev/null +++ b/postgresql/testdata/scripts/vagrant/setup-solr.sh @@ -0,0 +1,10 @@ +#!/bin/bash +echo "Setting up Solr" +GLASSFISH_USER=glassfish +GLASSFISH_USER_HOME=~glassfish +SOLR_HOME=$GLASSFISH_USER_HOME/solr +su $GLASSFISH_USER -s /bin/sh -c "mkdir $SOLR_HOME" +su $GLASSFISH_USER -s /bin/sh -c "cp /downloads/solr-4.6.0.tgz $SOLR_HOME" +su $GLASSFISH_USER -s /bin/sh -c "cd $SOLR_HOME && tar xfz solr-4.6.0.tgz" +su $GLASSFISH_USER -s /bin/sh -c "cp /conf/solr/4.6.0/schema.xml $SOLR_HOME/solr-4.6.0/example/solr/collection1/conf/schema.xml" +su $GLASSFISH_USER -s /bin/sh -c "cd $SOLR_HOME/solr-4.6.0/example && java -jar start.jar &" diff --git a/postgresql/testdata/scripts/vagrant/setup.sh b/postgresql/testdata/scripts/vagrant/setup.sh new file mode 100644 index 0000000..0ab2daf --- /dev/null +++ b/postgresql/testdata/scripts/vagrant/setup.sh @@ -0,0 +1,72 @@ +#!/bin/bash +echo "Installing dependencies for Dataverse" + +# Add 
JQ +echo "Installing jq for the setup scripts" +wget http://stedolan.github.io/jq/download/linux64/jq +chmod +x jq +# this is where EPEL puts it +sudo mv jq /usr/bin/jq + +echo "Adding Shibboleth yum repo" +cp /dataverse/conf/vagrant/etc/yum.repos.d/shibboleth.repo /etc/yum.repos.d +cp /dataverse/conf/vagrant/etc/yum.repos.d/epel-apache-maven.repo /etc/yum.repos.d +# Uncomment this (and other shib stuff below) if you want +# to use Vagrant (and maybe PageKite) to test Shibboleth. +#yum install -y shibboleth shibboleth-embedded-ds +yum install -y java-1.8.0-openjdk-devel postgresql-server apache-maven httpd mod_ssl unzip +alternatives --set java /usr/lib/jvm/jre-1.8.0-openjdk.x86_64/bin/java +alternatives --set javac /usr/lib/jvm/java-1.8.0-openjdk.x86_64/bin/javac +java -version +javac -version +service postgresql initdb +service postgresql stop +cp /dataverse/conf/vagrant/var/lib/pgsql/data/pg_hba.conf /var/lib/pgsql/data/pg_hba.conf +service postgresql start +chkconfig postgresql on +GLASSFISH_USER=glassfish +echo "Ensuring Unix user '$GLASSFISH_USER' exists" +useradd $GLASSFISH_USER || : +DOWNLOAD_DIR='/dataverse/downloads' +GLASSFISH_ZIP="$DOWNLOAD_DIR/glassfish-4.1.zip" +SOLR_TGZ="$DOWNLOAD_DIR/solr-4.6.0.tgz" +WELD_PATCH="$DOWNLOAD_DIR/weld-osgi-bundle-2.2.10.Final-glassfish4.jar" +if [ ! -f $GLASSFISH_ZIP ] || [ ! -f $SOLR_TGZ ]; then + echo "Couldn't find $GLASSFISH_ZIP or $SOLR_TGZ! Running download script...." + cd $DOWNLOAD_DIR && ./download.sh && cd + echo "Done running download script." +fi +GLASSFISH_USER_HOME=~glassfish +GLASSFISH_ROOT=$GLASSFISH_USER_HOME/glassfish4 +if [ ! 
-d $GLASSFISH_ROOT ]; then + echo "Copying $GLASSFISH_ZIP to $GLASSFISH_USER_HOME and unzipping" + su $GLASSFISH_USER -s /bin/sh -c "cp $GLASSFISH_ZIP $GLASSFISH_USER_HOME" + su $GLASSFISH_USER -s /bin/sh -c "cd $GLASSFISH_USER_HOME && unzip -q $GLASSFISH_ZIP" + su $GLASSFISH_USER -s /bin/sh -c "mv $GLASSFISH_ROOT/glassfish/modules/weld-osgi-bundle.jar /tmp" + su $GLASSFISH_USER -s /bin/sh -c "cp $WELD_PATCH $GLASSFISH_ROOT/glassfish/modules" +else + echo "$GLASSFISH_ROOT already exists" +fi +#service shibd start +service httpd stop +cp /dataverse/conf/httpd/conf.d/dataverse.conf /etc/httpd/conf.d/dataverse.conf +mkdir -p /var/www/dataverse/error-documents +cp /dataverse/conf/vagrant/var/www/dataverse/error-documents/503.html /var/www/dataverse/error-documents +service httpd start +#curl -k --sslv3 https://pdurbin.pagekite.me/Shibboleth.sso/Metadata > /tmp/pdurbin.pagekite.me +#cp -a /etc/shibboleth/shibboleth2.xml /etc/shibboleth/shibboleth2.xml.orig +#cp -a /etc/shibboleth/attribute-map.xml /etc/shibboleth/attribute-map.xml.orig +# need more attributes, such as sn, givenName, mail +#cp /dataverse/conf/vagrant/etc/shibboleth/attribute-map.xml /etc/shibboleth/attribute-map.xml +# FIXME: automate this? +#curl 'https://www.testshib.org/cgi-bin/sp2config.cgi?dist=Others&hostname=pdurbin.pagekite.me' > /etc/shibboleth/shibboleth2.xml +#cp /dataverse/conf/vagrant/etc/shibboleth/shibboleth2.xml /etc/shibboleth/shibboleth2.xml +#service shibd restart +#curl -k --sslv3 https://pdurbin.pagekite.me/Shibboleth.sso/Metadata > /downloads/pdurbin.pagekite.me +#service httpd restart +echo "#########################################################################################" +echo "# This is a Vagrant test box, so we're disabling firewalld. 
# +echo "# Re-enable it with $ sudo systemctl enable firewalld && sudo systemctl start firewalld #" +echo "#########################################################################################" +systemctl disable firewalld +systemctl stop firewalld diff --git a/postgresql/testdata/scripts/vagrant/test.sh b/postgresql/testdata/scripts/vagrant/test.sh new file mode 100755 index 0000000..3c5b835 --- /dev/null +++ b/postgresql/testdata/scripts/vagrant/test.sh @@ -0,0 +1,6 @@ +#!/bin/sh +echo "running tests..." +echo "running search tests..." +cd / +scripts/search/tests/permissions +echo "done running tests. no output is good. silence is golden" diff --git a/postgresql/testscripts/db.sh b/postgresql/testscripts/db.sh new file mode 100755 index 0000000..aeb09f0 --- /dev/null +++ b/postgresql/testscripts/db.sh @@ -0,0 +1,3 @@ +#!/bin/sh +psql -U postgres -c "CREATE ROLE dvnapp UNENCRYPTED PASSWORD 'secret' SUPERUSER CREATEDB CREATEROLE INHERIT LOGIN" template1 +psql -U dvnapp -c 'CREATE DATABASE "dvndb" WITH OWNER = "dvnapp"' template1 diff --git a/postgresql/testscripts/install b/postgresql/testscripts/install new file mode 100755 index 0000000..32f3a39 --- /dev/null +++ b/postgresql/testscripts/install @@ -0,0 +1,21 @@ +#!/bin/sh +export HOST_ADDRESS=localhost +export GLASSFISH_ROOT=/usr/local/glassfish4 +export FILES_DIR=/usr/local/glassfish4/glassfish/domains/domain1/files +export DB_NAME=dvndb +export DB_PORT=5432 +export DB_HOST=localhost +export DB_USER=dvnapp +export DB_PASS=secret +export RSERVE_HOST=localhost +export RSERVE_PORT=6311 +export RSERVE_USER=rserve +export RSERVE_PASS=rserve +export SMTP_SERVER=localhost +export MEM_HEAP_SIZE=2048 +export GLASSFISH_DOMAIN=domain1 +cd scripts/installer +cp pgdriver/postgresql-8.4-703.jdbc4.jar $GLASSFISH_ROOT/glassfish/lib +#cp ../../conf/jhove/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf +cp /opt/dv/testdata/jhove.conf 
$GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf +./glassfish-setup.sh diff --git a/postgresql/testscripts/post b/postgresql/testscripts/post new file mode 100755 index 0000000..03eaf59 --- /dev/null +++ b/postgresql/testscripts/post @@ -0,0 +1,15 @@ +#/bin/sh +cd scripts/api +./setup-all.sh --insecure | tee /tmp/setup-all.sh.out +cd ../.. +psql -U dvnapp dvndb -f scripts/database/reference_data.sql +psql -U dvnapp dvndb -f doc/sphinx-guides/source/_static/util/pg8-createsequence-prep.sql +psql -U dvnapp dvndb -f doc/sphinx-guides/source/_static/util/createsequence.sql +scripts/search/tests/publish-dataverse-root +#git checkout scripts/api/data/dv-root.json +scripts/search/tests/grant-authusers-add-on-root +scripts/search/populate-users +scripts/search/create-users +scripts/search/tests/create-all-and-test +scripts/search/tests/publish-spruce1-and-test +#java -jar downloads/schemaSpy_5.0.0.jar -t pgsql -host localhost -db dvndb -u postgres -p secret -s public -dp scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar -o /var/www/html/schemaspy/latest diff --git a/solr/4.6.0/readme.me b/solr/4.6.0/readme.me new file mode 100644 index 0000000..38c2594 --- /dev/null +++ b/solr/4.6.0/readme.me @@ -0,0 +1,3 @@ +Please see the dev guide for what to do with Solr config file(s). + +schema.xml.4.6.0.dist is the original schema.xml file that came from the 4.6.0 Solr distribution. It's only included so you can diff the files to see what has changed. 
diff --git a/solr/4.6.0/schema.xml b/solr/4.6.0/schema.xml new file mode 100644 index 0000000..323429b --- /dev/null +++ b/solr/4.6.0/schema.xml @@ -0,0 +1,1692 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solr/4.6.0/schema.xml.4.6.0.dist b/solr/4.6.0/schema.xml.4.6.0.dist new file mode 100644 index 0000000..4501f64 --- /dev/null +++ b/solr/4.6.0/schema.xml.4.6.0.dist @@ -0,0 +1,1134 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/solr/4.6.0/stopwords_en.txt b/solr/4.6.0/stopwords_en.txt new file mode 100644 index 0000000..4437ee2 --- /dev/null +++ b/solr/4.6.0/stopwords_en.txt @@ -0,0 +1,53 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# a couple of test stopwords to test that the words are really being +# configured from this file: +stopworda +stopwordb + +# Standard english stop words taken from Lucene's StopAnalyzer +a +an +and +are +as +at +be +but +by +for +if +in +into +is +it +not +of +on +or +such +that +the +their +then +there +these +they +this +to +was +will +with diff --git a/solr/4.6.0/stopwords_en.txt.dist b/solr/4.6.0/stopwords_en.txt.dist new file mode 100644 index 0000000..2c164c0 --- /dev/null +++ b/solr/4.6.0/stopwords_en.txt.dist @@ -0,0 +1,54 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# a couple of test stopwords to test that the words are really being +# configured from this file: +stopworda +stopwordb + +# Standard english stop words taken from Lucene's StopAnalyzer +a +an +and +are +as +at +be +but +by +for +if +in +into +is +it +no +not +of +on +or +such +that +the +their +then +there +these +they +this +to +was +will +with diff --git a/solr/Dockerfile b/solr/Dockerfile new file mode 100644 index 0000000..92d8a5d --- /dev/null +++ b/solr/Dockerfile @@ -0,0 +1,3 @@ +#FROM ndslabs/dataverse-solr:latest +FROM vtti/dataverse-solr +COPY schema.xml /usr/local/solr-4.6.0/example/solr/collection1/conf/schema.xml diff --git a/solr/schema.xml b/solr/schema.xml new file mode 100644 index 0000000..323429b --- /dev/null +++ b/solr/schema.xml @@ -0,0 +1,1692 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +