diff --git a/docker-compose.yml b/docker-compose.yml index 42e4c2106..2005b10b0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -16,15 +16,15 @@ services: command: mongod --smallfiles --logpath=/dev/null --quiet # ODS service that runs the Maven container - odsbackend: - # image: onedatashare_odsbackend:latest #replace this with "build: ." if u want to build what you have locally. Make sure to update your env appropriately - build: . - env_file: - - .ods.env - restart: always - ports: - - 8080:8080 - depends_on : - - mongodb # ensuring that the Maven container is started only after the mongo-service is up +# odsbackend: +# # image: onedatashare_odsbackend:latest #replace this with "build: ." if u want to build what you have locally. Make sure to update your env appropriately +# build: . +# env_file: +# - .ods.env +# restart: always +# ports: +# - 8080:8080 +# depends_on : +# - mongodb # ensuring that the Maven container is started only after the mongo-service is up volumes: mongodb: diff --git a/pom.xml b/pom.xml index 91199d6f4..fc6f2908a 100644 --- a/pom.xml +++ b/pom.xml @@ -302,6 +302,11 @@ 3.1.0 provided + + io.netty + netty-resolver-dns-native-macos + osx-aarch_64 + diff --git a/src/main/java/org/onedatashare/server/config/EurekaConfig.java b/src/main/java/org/onedatashare/server/config/EurekaConfig.java index a09095cd4..12bf00ed2 100644 --- a/src/main/java/org/onedatashare/server/config/EurekaConfig.java +++ b/src/main/java/org/onedatashare/server/config/EurekaConfig.java @@ -16,7 +16,7 @@ public class EurekaConfig { @Bean @LoadBalanced - public WebClient.Builder webClientBuilder(){ + public WebClient.Builder webClientBuilder() { return WebClient.builder(); } } diff --git a/src/main/java/org/onedatashare/server/controller/TransferJobController.java b/src/main/java/org/onedatashare/server/controller/TransferJobController.java deleted file mode 100644 index 4f05eb4c1..000000000 --- 
a/src/main/java/org/onedatashare/server/controller/TransferJobController.java +++ /dev/null @@ -1,49 +0,0 @@ -package org.onedatashare.server.controller; - -import org.onedatashare.server.model.request.StopRequest; -import org.onedatashare.server.model.request.TransferJobRequest; -import org.onedatashare.server.model.response.TransferJobSubmittedResponse; -import org.onedatashare.server.service.TransferJobService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RestController; -import org.springframework.web.server.ResponseStatusException; -import reactor.core.publisher.Mono; - -import java.security.Principal; - -/** - * This class is meant to take a valid TransferJobRequest and then forward that message to the transfer-scheduler which will aggregate credentials - * for the request and expand the file system of all the user requested resources. 
- * - */ -@RestController -@RequestMapping("/api/transferjob") -public class TransferJobController { - @Autowired - private TransferJobService transferJobService; - public TransferJobController(TransferJobService transferJobService) { - this.transferJobService = transferJobService; - } - - - Logger logger = LoggerFactory.getLogger(TransferJobController.class); - - @PostMapping - public Mono submit(@RequestBody TransferJobRequest request, - Mono principalMono){ - logger.info("Recieved request: " + request.toString()); - return principalMono.flatMap(p -> transferJobService.submitTransferJobRequest(p.getName(), request)); - } - @PostMapping("/stop") - public Mono stopJob(@RequestBody StopRequest stopRequest){ - return transferJobService.stopTransferJob(stopRequest) - .onErrorResume(e -> Mono.error(new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, - "Failed to stop job execution"))); - } -} diff --git a/src/main/java/org/onedatashare/server/controller/TransferSchedulerController.java b/src/main/java/org/onedatashare/server/controller/TransferSchedulerController.java new file mode 100644 index 000000000..9ed168569 --- /dev/null +++ b/src/main/java/org/onedatashare/server/controller/TransferSchedulerController.java @@ -0,0 +1,64 @@ +package org.onedatashare.server.controller; + +import org.onedatashare.server.model.ScheduledTransferJobRequest; +import org.onedatashare.server.model.TransferJobRequestDTO; +import org.onedatashare.server.model.request.StopRequest; +import org.onedatashare.server.service.TransferSchedulerService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.format.annotation.DateTimeFormat; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.*; +import org.springframework.web.server.ResponseStatusException; +import reactor.core.publisher.Mono; + +import java.security.Principal; +import java.time.LocalDateTime; 
+import java.util.List; +import java.util.UUID; + +@RestController +@RequestMapping("/api/job") +public class TransferSchedulerController { + + private final TransferSchedulerService transferSchedulerService; + + public TransferSchedulerController(TransferSchedulerService transferSchedulerService) { + this.transferSchedulerService = transferSchedulerService; + } + + + Logger logger = LoggerFactory.getLogger(TransferSchedulerController.class); + + @PostMapping("/schedule") + public ResponseEntity> runJob(@RequestBody TransferJobRequestDTO request, + Principal principal) { + logger.debug("Received request: " + request.toString()); + request.setOwnerId(principal.getName()); + return ResponseEntity.ok(transferSchedulerService.scheduleJob(request)); + } + + @PostMapping("/stop") + public Mono stopJob(@RequestBody StopRequest stopRequest) { + return transferSchedulerService.stopTransferJob(stopRequest) + .onErrorResume(e -> Mono.error(new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, + "Failed to stop job execution"))); + } + @GetMapping("/list") + public ResponseEntity>> listScheduledJobs(@RequestParam String userEmail) { + return ResponseEntity.ok(this.transferSchedulerService.listScheduledJobs(userEmail)); + } + + @GetMapping("/details") + public ResponseEntity> getScheduledJob(@RequestParam UUID jobUuid) { + return ResponseEntity.ok(this.transferSchedulerService.getJobDetails(jobUuid)); + } + + @DeleteMapping("/delete") + public ResponseEntity deleteScheduledJob(@RequestParam UUID jobUuid) { + this.transferSchedulerService.deleteScheduledJob(jobUuid); + return ResponseEntity.accepted().build(); + } + +} diff --git a/src/main/java/org/onedatashare/server/model/EntityInfo.java b/src/main/java/org/onedatashare/server/model/EntityInfo.java new file mode 100644 index 000000000..06c5172e1 --- /dev/null +++ b/src/main/java/org/onedatashare/server/model/EntityInfo.java @@ -0,0 +1,14 @@ +package org.onedatashare.server.model; + +import lombok.AllArgsConstructor; 
+import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@AllArgsConstructor +@NoArgsConstructor +public class EntityInfo { + protected String id; + protected String path; + protected long size; +} diff --git a/src/main/java/org/onedatashare/server/model/FileDestination.java b/src/main/java/org/onedatashare/server/model/FileDestination.java new file mode 100644 index 000000000..1168f104f --- /dev/null +++ b/src/main/java/org/onedatashare/server/model/FileDestination.java @@ -0,0 +1,24 @@ +package org.onedatashare.server.model; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; +import lombok.NonNull; +import org.onedatashare.server.model.core.EndpointType; + +import java.io.Serializable; + +@Data +@AllArgsConstructor +@NoArgsConstructor +public class FileDestination implements Serializable { + + @NonNull + private String credId; + + @NonNull + private EndpointType type; + + + String fileDestinationPath = ""; +} \ No newline at end of file diff --git a/src/main/java/org/onedatashare/server/model/FileSource.java b/src/main/java/org/onedatashare/server/model/FileSource.java new file mode 100644 index 000000000..054094524 --- /dev/null +++ b/src/main/java/org/onedatashare/server/model/FileSource.java @@ -0,0 +1,26 @@ +package org.onedatashare.server.model; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; +import lombok.NonNull; +import org.onedatashare.server.model.core.EndpointType; + +import java.io.Serializable; +import java.util.ArrayList; + +@Data +@AllArgsConstructor +@NoArgsConstructor +public class FileSource implements Serializable { + @NonNull + private String credId; + @NonNull + public EndpointType type; + + public String fileSourcePath = ""; //can also be the parent Id to the directory to find all data in the infoList + + @NonNull + public ArrayList resourceList; //a list of files and folders. 
This will end up being a list of only files with paths fully expanded + +} \ No newline at end of file diff --git a/src/main/java/org/onedatashare/server/model/ScheduledTransferJobRequest.java b/src/main/java/org/onedatashare/server/model/ScheduledTransferJobRequest.java new file mode 100644 index 000000000..40b7a82ea --- /dev/null +++ b/src/main/java/org/onedatashare/server/model/ScheduledTransferJobRequest.java @@ -0,0 +1,20 @@ +package org.onedatashare.server.model; + +import lombok.Data; +import org.onedatashare.server.model.request.UserTransferOptions; + +import java.io.Serializable; +import java.time.LocalDateTime; +import java.util.UUID; + +@Data +public class ScheduledTransferJobRequest implements Serializable { + + LocalDateTime jobStartTime; + UUID jobUuid; + String ownerId; + FileSource source; + FileDestination destination; + UserTransferOptions options; + String transferNodeName; +} diff --git a/src/main/java/org/onedatashare/server/model/TransferJobRequestDTO.java b/src/main/java/org/onedatashare/server/model/TransferJobRequestDTO.java new file mode 100644 index 000000000..9025c5592 --- /dev/null +++ b/src/main/java/org/onedatashare/server/model/TransferJobRequestDTO.java @@ -0,0 +1,18 @@ +package org.onedatashare.server.model; + +import com.fasterxml.jackson.annotation.JsonInclude; +import lombok.Data; +import org.onedatashare.server.model.request.UserTransferOptions; + +import java.io.Serializable; + +@Data +@JsonInclude(JsonInclude.Include.NON_NULL) +public class TransferJobRequestDTO implements Serializable { + private String ownerId; + private FileSource source; + private FileDestination destination; + private UserTransferOptions options; + private String transferNodeName; + +} diff --git a/src/main/java/org/onedatashare/server/model/core/Stat.java b/src/main/java/org/onedatashare/server/model/core/Stat.java index 41ee62d65..91adcf80c 100644 --- a/src/main/java/org/onedatashare/server/model/core/Stat.java +++ 
b/src/main/java/org/onedatashare/server/model/core/Stat.java @@ -157,7 +157,7 @@ public String path() { * Set the files underneath this tree and reset cached values. */ public Stat setFiles(Collection fs) { - return setFiles(fs.toArray(new Stat[fs.size()])); + return setFiles(fs.toArray(new Stat[0])); } public void setFilesList(List fs){ diff --git a/src/main/java/org/onedatashare/server/model/request/UserTransferOptions.java b/src/main/java/org/onedatashare/server/model/request/UserTransferOptions.java index 6b43f8730..fb99b9da7 100644 --- a/src/main/java/org/onedatashare/server/model/request/UserTransferOptions.java +++ b/src/main/java/org/onedatashare/server/model/request/UserTransferOptions.java @@ -1,29 +1,32 @@ /** - ##************************************************************** - ## - ## Copyright (C) 2018-2020, OneDataShare Team, - ## Department of Computer Science and Engineering, - ## University at Buffalo, Buffalo, NY, 14260. - ## - ## Licensed under the Apache License, Version 2.0 (the "License"); you - ## may not use this file except in compliance with the License. You may - ## obtain a copy of the License at - ## - ## http://www.apache.org/licenses/LICENSE-2.0 - ## - ## Unless required by applicable law or agreed to in writing, software - ## distributed under the License is distributed on an "AS IS" BASIS, - ## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - ## See the License for the specific language governing permissions and - ## limitations under the License. - ## - ##************************************************************** + * ##************************************************************** + * ## + * ## Copyright (C) 2018-2020, OneDataShare Team, + * ## Department of Computer Science and Engineering, + * ## University at Buffalo, Buffalo, NY, 14260. + * ## + * ## Licensed under the Apache License, Version 2.0 (the "License"); you + * ## may not use this file except in compliance with the License. 
You may + * ## obtain a copy of the License at + * ## + * ## http://www.apache.org/licenses/LICENSE-2.0 + * ## + * ## Unless required by applicable law or agreed to in writing, software + * ## distributed under the License is distributed on an "AS IS" BASIS, + * ## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * ## See the License for the specific language governing permissions and + * ## limitations under the License. + * ## + * ##************************************************************** */ package org.onedatashare.server.model.request; import lombok.Data; +import org.springframework.format.annotation.DateTimeFormat; + +import java.time.LocalDateTime; @Data public class UserTransferOptions { @@ -37,4 +40,20 @@ public class UserTransferOptions { private Integer parallelThreadCount; //supported private Integer pipeSize; //supported private Integer chunkSize; //supported + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) + private LocalDateTime scheduledTime; + + public UserTransferOptions() { + this.compress = false; + this.encrypt = false; + this.optimizer = ""; + this.overwrite = false; + this.retry = 0; + this.verify = false; + this.concurrencyThreadCount = 1; + this.pipeSize = 1; + this.parallelThreadCount = 1; + this.chunkSize = 10 * 1024 * 1024; + this.scheduledTime = LocalDateTime.now(); + } } diff --git a/src/main/java/org/onedatashare/server/model/requestdata/InfluxData.java b/src/main/java/org/onedatashare/server/model/requestdata/InfluxData.java index ae2430ac0..79ea6585c 100644 --- a/src/main/java/org/onedatashare/server/model/requestdata/InfluxData.java +++ b/src/main/java/org/onedatashare/server/model/requestdata/InfluxData.java @@ -11,106 +11,69 @@ @JsonIgnoreProperties(ignoreUnknown = true) public class InfluxData { - @JsonProperty(value = "interface") private String networkInterface; - @JsonProperty(value = "ods_user") private String odsUser; - @JsonProperty(value = "transfer_node_name") private String 
transferNodeName; - @JsonProperty(value = "active_core_count") - private Double coreCount; + private Long coreCount; - @JsonProperty(value = "cpu_frequency_max") private Double cpu_frequency_max; - @JsonProperty(value = "cpu_frequency_current") private Double cpu_frequency_current; - @JsonProperty(value = "cpu_frequency_min") private Double cpu_frequency_min; - @JsonProperty(value = "energy_consumed") - private Double energyConsumed; - - @JsonProperty(value = "cpu_arch") private String cpuArchitecture; - @JsonProperty(value = "packet_loss_rate") private Double packetLossRate; - @JsonProperty(value = "link_capacity") - private Double linkCapacity; - - /* Delta values*/ - private Long bytesSentDelta; - - private Long bytesReceivedDelta; - - private Long packetsSentDelta; - - private Long packetsReceivedDelta; - - //NIC values - - @JsonProperty(value = "bytes_sent") private Long bytesSent; - @JsonProperty(value = "bytes_recv") private Long bytesReceived; - @JsonProperty(value = "packets_sent") private Long packetSent; - @JsonProperty(value = "packets_recv") private Long packetReceived; - @JsonProperty(value = "dropin") - private Double dropin; + private Long dropin; - @JsonProperty(value = "dropout") - private Double dropout; + private Long dropout; - @JsonProperty(value = "nic_speed") - private Double nicSpeed; + private Long nicMtu; - @JsonProperty(value = "nic_mtu") - private Double nicMtu; + private Double latency; - //2022-06-01 10:41:15.123591 - @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss.SSSSSS") - @JsonProperty(value = "start_time") - private LocalDateTime startTime; + private Double rtt; - @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss.SSSSSS") - @JsonProperty(value = "end_time") - private LocalDateTime endTime; + private Double sourceRtt; - @JsonProperty(value = "latency") - private Double latency; + private Double sourceLatency; - @JsonProperty(value = "rtt") - private Double rtt; + private Double destinationRtt; - @JsonProperty(value = "errin") - private Double 
errin; + private Double destLatency; - @JsonProperty(value = "errout") - private Double errout; + private Long errin; - //Job Values + private Long errout; - private Long jobId; + private String jobId; - private Double throughput; + private Double readThroughput; + + private Double writeThroughput; + + private Long bytesWritten; - private Integer concurrency; + private Long bytesRead; - private Integer parallelism; + private Long concurrency; - private Integer pipelining; + private Long parallelism; + + private Long pipelining; private Long memory; @@ -118,18 +81,33 @@ public class InfluxData { private Long freeMemory; + private Long allocatedMemory; + private Long jobSize; private Long avgFileSize; - private Long dataBytesSent; + private String sourceType; - private Boolean compression; + private String sourceCredId; - private Long allocatedMemory; + private String destType; - private String sourceType; + private String destCredId; + + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss.SSSSSS") + @JsonProperty(value = "start_time") + private LocalDateTime startTime; + + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss.SSSSSS") + @JsonProperty(value = "end_time") + private LocalDateTime endTime; + + private Double throughput; + + private Long dataBytesSent; + + private Boolean compression; - private String destType; } diff --git a/src/main/java/org/onedatashare/server/model/response/FineUploaderResponse.java b/src/main/java/org/onedatashare/server/model/response/FineUploaderResponse.java deleted file mode 100644 index c8e3f6536..000000000 --- a/src/main/java/org/onedatashare/server/model/response/FineUploaderResponse.java +++ /dev/null @@ -1,24 +0,0 @@ -package org.onedatashare.server.model.response; - -import lombok.Data; - -@Data -public class FineUploaderResponse { - private boolean success; - private boolean error; - - public static FineUploaderResponse ok(){ - FineUploaderResponse fineUploaderResponse = new FineUploaderResponse(); - fineUploaderResponse.error = false; - 
fineUploaderResponse.success = true; - return fineUploaderResponse; - } - - public static FineUploaderResponse error(){ - FineUploaderResponse fineUploaderResponse = new FineUploaderResponse(); - fineUploaderResponse.error = true; - fineUploaderResponse.success = false; - return fineUploaderResponse; - } - -} diff --git a/src/main/java/org/onedatashare/server/module/BoxResource.java b/src/main/java/org/onedatashare/server/module/BoxResource.java index 83eca09e9..5681d127e 100644 --- a/src/main/java/org/onedatashare/server/module/BoxResource.java +++ b/src/main/java/org/onedatashare/server/module/BoxResource.java @@ -55,14 +55,14 @@ public Mono delete(DeleteOperation operation) { file.delete(); s.success(); } catch (BoxAPIException boxAPIException) { - logger.error("Failed to delete this id" + operation.getToDelete() + "as a file but failedEndpointAuthenticateComponent.js"); + logger.error("Failed to delete this id " + operation.getToDelete() + "as a file but failedEndpointAuthenticateComponent.js"); } try { BoxFolder folder = new BoxFolder(this.client, operation.getToDelete()); folder.delete(true); s.success(); } catch (BoxAPIException boxAPIResponseException) { - logger.error("Failed to delete this id " + operation.getToDelete() + "as a folder recursively but failed"); + logger.error("Failed to delete this id " + operation.getToDelete() + " as a folder recursively but failed"); } s.success(); }); diff --git a/src/main/java/org/onedatashare/server/service/CredentialService.java b/src/main/java/org/onedatashare/server/service/CredentialService.java index 1785e0ff7..a30750ad2 100644 --- a/src/main/java/org/onedatashare/server/service/CredentialService.java +++ b/src/main/java/org/onedatashare/server/service/CredentialService.java @@ -55,12 +55,15 @@ public class CredentialService { private static final int TIMEOUT_IN_MILLIS = 10000; - @Autowired private WebClient.Builder webClientBuilder; - @Autowired private EurekaClient discoveryClient; + public 
CredentialService(EurekaClient discoveryClient, WebClient.Builder webClientBuilder){ + this.webClientBuilder = webClientBuilder; + this.discoveryClient = discoveryClient; + } + @PostConstruct private void initialize(){ this.urlFormatted = this.credentialServiceUrl + "/%s/%s/%s"; diff --git a/src/main/java/org/onedatashare/server/service/MetaDataService.java b/src/main/java/org/onedatashare/server/service/MetaDataService.java index 22549a756..69f07471e 100644 --- a/src/main/java/org/onedatashare/server/service/MetaDataService.java +++ b/src/main/java/org/onedatashare/server/service/MetaDataService.java @@ -44,8 +44,6 @@ public class MetaDataService { @Autowired private WebClient.Builder webClientBuilder; - //CockroachDB data calls below - @SneakyThrows public Mono> getAllJobIds(String userId) { @@ -221,7 +219,8 @@ private Mono> influxDataCall(URI uri) { .get() .uri(uri) .retrieve() - .bodyToMono(new ParameterizedTypeReference>() {}); + .bodyToMono(new ParameterizedTypeReference>() { + }); } public Mono> getJobMeasurementsUniversal(String user, Long jobId, LocalDateTime start, String appId) { @@ -237,6 +236,7 @@ public Mono> getJobMeasurementsUniversal(String user, .get() .uri(uri) .retrieve() - .bodyToMono(new ParameterizedTypeReference>() {}); + .bodyToMono(new ParameterizedTypeReference>() { + }); } } diff --git a/src/main/java/org/onedatashare/server/service/ResourceServiceBase.java b/src/main/java/org/onedatashare/server/service/ResourceServiceBase.java index dec2922c7..b16eec92c 100644 --- a/src/main/java/org/onedatashare/server/service/ResourceServiceBase.java +++ b/src/main/java/org/onedatashare/server/service/ResourceServiceBase.java @@ -5,7 +5,6 @@ import org.onedatashare.server.model.filesystem.operations.DownloadOperation; import org.onedatashare.server.model.filesystem.operations.ListOperation; import org.onedatashare.server.model.filesystem.operations.MkdirOperation; -import org.onedatashare.server.model.request.TransferJobRequest; import 
org.onedatashare.server.module.Resource; import reactor.core.publisher.Mono; diff --git a/src/main/java/org/onedatashare/server/service/SSHConsoleService.java b/src/main/java/org/onedatashare/server/service/SSHConsoleService.java index 01f0b8c77..f250b9b9c 100644 --- a/src/main/java/org/onedatashare/server/service/SSHConsoleService.java +++ b/src/main/java/org/onedatashare/server/service/SSHConsoleService.java @@ -1,19 +1,14 @@ package org.onedatashare.server.service; import com.jcraft.jsch.Channel; -import com.jcraft.jsch.ChannelExec; import com.jcraft.jsch.JSch; import com.jcraft.jsch.Session; -import org.onedatashare.server.model.response.ShellCommandResponse; import org.onedatashare.server.model.useraction.UserAction; -import org.onedatashare.server.model.SSHUserInfo; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; -import java.io.InputStream; - @Service public class SSHConsoleService { @@ -22,7 +17,7 @@ public class SSHConsoleService { @Autowired private DecryptionService decryptionService; - public SSHConsoleService(){ + public SSHConsoleService() { jsch = new JSch(); } @@ -41,7 +36,7 @@ public Mono createSession(String uri, String userName, String password, return null; } - public Flux runCommand(UserAction ua, String commandWithPath){ + public Flux runCommand(UserAction ua, String commandWithPath) { // return createSession(ua.getUri(), ua.getCredential().getUsername(), ua.getCredential().getPassword(), Integer.parseInt(ua.getPortNumber())) // .flux() // .flatMap(session -> { @@ -60,7 +55,7 @@ public Flux runCommand(UserAction ua, String commandWithPath){ return null; } - public Flux connectAndReadOutput(Channel channel){ + public Flux connectAndReadOutput(Channel channel) { // try { // InputStream in = channel.getInputStream(); // InputStream err = channel.getExtInputStream(); @@ -99,8 +94,8 @@ public Flux 
connectAndReadOutput(Channel channel){ // response.setError(errorBuffer.toString()); // } // return Flux.just(response); - return null; - } + return null; + } } diff --git a/src/main/java/org/onedatashare/server/service/SupportTicketService.java b/src/main/java/org/onedatashare/server/service/SupportTicketService.java index 84bc98a32..6a24137e4 100644 --- a/src/main/java/org/onedatashare/server/service/SupportTicketService.java +++ b/src/main/java/org/onedatashare/server/service/SupportTicketService.java @@ -65,13 +65,14 @@ public class SupportTicketService { private GHRepository repository; - @PostConstruct - public void postConstruct() throws IOException { - GitHub github = new GitHubBuilder() - .withOAuthToken(githubToken, organizationId) - .build(); - this.repository = github.getRepository(repositoryString); - } +// @PostConstruct +// public void postConstruct() throws IOException { +// GitHub github = new GitHubBuilder() +// +// .withOAuthToken(githubToken, organizationId) +// .build(); +// this.repository = github.getRepository(repositoryString); +// } public Mono createGitHubSuppTicket(SupportTicketRequest supportTicketRequest) { return captchaService.verifyValue(supportTicketRequest.getCaptchaVerificationValue()) diff --git a/src/main/java/org/onedatashare/server/service/TransferJobService.java b/src/main/java/org/onedatashare/server/service/TransferJobService.java deleted file mode 100644 index be4ef9179..000000000 --- a/src/main/java/org/onedatashare/server/service/TransferJobService.java +++ /dev/null @@ -1,99 +0,0 @@ -/** - ##************************************************************** - ## - ## Copyright (C) 2018-2020, OneDataShare Team, - ## Department of Computer Science and Engineering, - ## University at Buffalo, Buffalo, NY, 14260. - ## - ## Licensed under the Apache License, Version 2.0 (the "License"); you - ## may not use this file except in compliance with the License. 
You may - ## obtain a copy of the License at - ## - ## http://www.apache.org/licenses/LICENSE-2.0 - ## - ## Unless required by applicable law or agreed to in writing, software - ## distributed under the License is distributed on an "AS IS" BASIS, - ## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - ## See the License for the specific language governing permissions and - ## limitations under the License. - ## - ##************************************************************** - */ - - -package org.onedatashare.server.service; - -import org.apache.http.entity.ContentType; -import org.onedatashare.server.controller.TransferJobController; -import org.onedatashare.server.model.error.CredentialNotFoundException; -import org.onedatashare.server.model.request.StopRequest; -import org.onedatashare.server.model.request.TransferJobRequest; -import org.onedatashare.server.model.request.TransferJobRequestWithMetaData; -import org.onedatashare.server.model.response.TransferJobSubmittedResponse; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.http.HttpHeaders; -import org.springframework.http.HttpStatus; -import org.springframework.http.MediaType; -import org.springframework.stereotype.Service; -import org.springframework.web.reactive.function.client.WebClient; -import reactor.core.publisher.Mono; - -import javax.annotation.PostConstruct; -import java.net.URI; -import java.time.Duration; -import java.util.HashSet; -import java.util.List; - -@Service -public class TransferJobService { - @Value("${transfer.job.service.uri}") - private String transferQueueingServiceUri; - private static Logger logger = LoggerFactory.getLogger(TransferJobService.class); - - @Autowired - private WebClient.Builder webClientBuilder; - - private static final Duration timeoutDuration = Duration.ofSeconds(10); - -// 
@PostConstruct -// private void initialize(){ -// this.client = WebClient.builder() -// .defaultHeader(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString()) -// .baseUrl(transferQueueingServiceUri) -// .build(); -// } - - public Mono submitTransferJobRequest(String ownerId, TransferJobRequest jobRequest){ - logger.info(transferQueueingServiceUri + "/receiveRequest"); - return Mono.just(TransferJobRequestWithMetaData.getTransferRequestWithMetaData(ownerId, jobRequest)) - .flatMap(requestWithMetaData -> webClientBuilder.build().post() - .uri(transferQueueingServiceUri + "/receiveRequest") - .contentType(MediaType.APPLICATION_JSON) - .syncBody(requestWithMetaData) - .retrieve() - .onStatus(HttpStatus::isError, - clientResponse -> Mono.error(new Exception(clientResponse.toString()))) - .onStatus(HttpStatus::is4xxClientError, - response -> Mono.error(new CredentialNotFoundException())) - .onStatus(HttpStatus::is5xxServerError, - response -> Mono.error(new Exception("Internal server error"))) - .bodyToMono(TransferJobSubmittedResponse.class)); - } - public Mono stopTransferJob(StopRequest stopRequest) { - return webClientBuilder.build().post() - .uri(transferQueueingServiceUri + "/stopJob") - .contentType(MediaType.APPLICATION_JSON) - .body(stopRequest, StopRequest.class) - .retrieve() - .onStatus(HttpStatus::isError, - clientResponse -> Mono.error(new Exception(clientResponse.toString()))) - .onStatus(HttpStatus::is4xxClientError, - response -> Mono.error(new CredentialNotFoundException())) - .onStatus(HttpStatus::is5xxServerError, - response -> Mono.error(new Exception("Internal server error"))) - .bodyToMono(Void.class); - } -} diff --git a/src/main/java/org/onedatashare/server/service/TransferSchedulerService.java b/src/main/java/org/onedatashare/server/service/TransferSchedulerService.java new file mode 100644 index 000000000..69c6a3a9c --- /dev/null +++ b/src/main/java/org/onedatashare/server/service/TransferSchedulerService.java @@ -0,0 +1,115 @@ +/** 
+ * ##************************************************************** + * ## + * ## Copyright (C) 2018-2020, OneDataShare Team, + * ## Department of Computer Science and Engineering, + * ## University at Buffalo, Buffalo, NY, 14260. + * ## + * ## Licensed under the Apache License, Version 2.0 (the "License"); you + * ## may not use this file except in compliance with the License. You may + * ## obtain a copy of the License at + * ## + * ## http://www.apache.org/licenses/LICENSE-2.0 + * ## + * ## Unless required by applicable law or agreed to in writing, software + * ## distributed under the License is distributed on an "AS IS" BASIS, + * ## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * ## See the License for the specific language governing permissions and + * ## limitations under the License. + * ## + * ##************************************************************** + */ + + +package org.onedatashare.server.service; + +import org.onedatashare.server.model.ScheduledTransferJobRequest; +import org.onedatashare.server.model.TransferJobRequestDTO; +import org.onedatashare.server.model.error.CredentialNotFoundException; +import org.onedatashare.server.model.request.StopRequest; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.core.ParameterizedTypeReference; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.stereotype.Service; +import org.springframework.web.reactive.function.client.WebClient; +import reactor.core.publisher.Mono; + +import javax.annotation.PostConstruct; +import java.util.List; +import java.util.UUID; + +@Service +public class TransferSchedulerService { + + @Value("${transfer.scheduler.service.uri}") + private String transferQueueingServiceUri; + private static Logger logger = LoggerFactory.getLogger(TransferSchedulerService.class); + + private 
WebClient.Builder webClientBuilder; + + + public TransferSchedulerService(WebClient.Builder webClientBuilder) { + this.webClientBuilder = webClientBuilder; + } + + @PostConstruct + private void initialize() { + this.webClientBuilder = WebClient.builder(); + } + + public Mono stopTransferJob(StopRequest stopRequest) { + return webClientBuilder.build().post() + .uri(transferQueueingServiceUri + "/stopJob") + .contentType(MediaType.APPLICATION_JSON) + .body(stopRequest, StopRequest.class) + .retrieve() + .onStatus(HttpStatus::isError, + clientResponse -> Mono.error(new Exception(clientResponse.toString()))) + .onStatus(HttpStatus::is4xxClientError, + response -> Mono.error(new CredentialNotFoundException())) + .onStatus(HttpStatus::is5xxServerError, + response -> Mono.error(new Exception("Internal server error"))) + .bodyToMono(Void.class); + } + + public Mono scheduleJob(TransferJobRequestDTO transferRequest) { + logger.info(transferRequest.toString()); + return webClientBuilder.build() + .post() + .uri(this.transferQueueingServiceUri, uriBuilder -> uriBuilder.path("/job/schedule").queryParam("jobStartTime", transferRequest.getOptions().getScheduledTime()).build()) + .contentType(MediaType.APPLICATION_JSON) + .body(Mono.just(transferRequest), TransferJobRequestDTO.class) + .retrieve() + .bodyToMono(UUID.class); + } + + public Mono> listScheduledJobs(String userEmail) { + return webClientBuilder.build() + .get() + .uri(this.transferQueueingServiceUri, uriBuilder -> uriBuilder.path("/jobs").queryParam("userEmail", userEmail).build()) + .accept(MediaType.APPLICATION_JSON) + .retrieve() + .bodyToMono(new ParameterizedTypeReference>() { + }); + } + + public Mono getJobDetails(UUID jobUuid) { + return this.webClientBuilder.build() + .get() + .uri(this.transferQueueingServiceUri, uriBuilder -> uriBuilder.path("/job/details").queryParam("jobUuid", jobUuid).build()) + .accept(MediaType.APPLICATION_JSON) + .retrieve() + .bodyToMono(TransferJobRequestDTO.class); + } + + public 
void deleteScheduledJob(UUID jobUuid) { + this.webClientBuilder.build() + .delete() + .uri(transferQueueingServiceUri, uriBuilder -> uriBuilder.path("/job/delete").queryParam("jobUuid", jobUuid).build()) + .retrieve(); + + } +} diff --git a/src/main/react-front-end/package.json b/src/main/react-front-end/package.json index fbe04f01c..30e700ba4 100644 --- a/src/main/react-front-end/package.json +++ b/src/main/react-front-end/package.json @@ -3,11 +3,18 @@ "version": "0.1.0", "private": true, "dependencies": { + "@emotion/react": "^11.11.1", + "@emotion/styled": "^11.11.0", "@material-ui/core": "^4.12.4", "@material-ui/icons": "^4.11.3", + "@mui/lab": "^5.0.0-alpha.144", + "@mui/material": "^5.14.9", + "@mui/styled-engine-sc": "^5.14.9", + "@mui/x-date-pickers": "^6.14.0", "axios": "^1.3.4", "bootstrap": "^3.4.1", "clsx": "^1.2.1", + "dayjs": "^1.11.9", "eventemitter3": "^4.0.7", "fg-loadcss": "^3.1.0", "moment": "^2.29.4", @@ -29,7 +36,7 @@ "react-scripts": "^5.0.1", "redux": "^4.0.5", "sass": "^1.49.9", - "styled-components": "^4.4.1", + "styled-components": "^6.0.8", "universal-cookie": "^4.0.4" }, "scripts": { diff --git a/src/main/react-front-end/src/APICalls/APICalls.js b/src/main/react-front-end/src/APICalls/APICalls.js index 5442ff0a1..ab8dc55f6 100755 --- a/src/main/react-front-end/src/APICalls/APICalls.js +++ b/src/main/react-front-end/src/APICalls/APICalls.js @@ -435,6 +435,7 @@ export async function getJobsForUser(pageNo, pageSize, sortBy, order, accept, fa if(!(response.status === 200)) callback = fail; statusHandle(response, callback); + console.log(response) }) .catch((error) => { handleRequestFailure(error, fail); @@ -486,29 +487,27 @@ export async function getSearchJobs(username, startJobId, endJobId, progress, pa }); } -export async function getJobUpdatesForUser(jobIds, accept, fail){ +export async function getJobUpdatesForUser(jobId, accept, fail){ let callback = accept; var influx_data = []; var flag = 0; - console.log("job ids",jobIds); - for(let 
jobId in jobIds) - { axios.get("/api/metadata/measurements/job",{ params : { - jobId:jobIds[jobId] + jobId: jobId } }) .then((response) => { + console.log(response) if(!(response.status === 200)) callback = fail; flag=1; influx_data.push(response); }) .catch((error) => { + console.log("Failed") handleRequestFailure(error, fail); }); - } if (flag==1) { console.log("Influx data",influx_data); @@ -545,10 +544,12 @@ export async function submitIssue(reqBody, success, fail) { } export async function submitTransferRequest(source,dest,options,accept,fail){ let callback = accept; + source.resourceList = [source.resourceList] axios.post(transferJobUrl, { source: source, destination: dest, - options: options + options: options, + transferNodeName: "", }).then((response) => { if (!(response.status === 200)) callback = fail; diff --git a/src/main/react-front-end/src/APICalls/EndpointAPICalls.js b/src/main/react-front-end/src/APICalls/EndpointAPICalls.js index 34ad6bb16..011778dc0 100644 --- a/src/main/react-front-end/src/APICalls/EndpointAPICalls.js +++ b/src/main/react-front-end/src/APICalls/EndpointAPICalls.js @@ -25,7 +25,7 @@ import { ENDPOINT_OP_URL, LIST_OP_URL, MKDIR_OP_URL, SFTP_DOWNLOAD_URL, DEL_OP_U import { axios, statusHandle, handleRequestFailure } from "./APICalls"; import { getIdsFromEndpoint } from '../views/Transfer/initialize_dnd.js'; import { cookies } from "../model/reducers"; -import { GOOGLEDRIVE_TYPE, BOX_TYPE, DROPBOX_TYPE, GRIDFTP_TYPE, apiBaseUrl, getType, showType, isOAuth} from "../constants.js"; +import { GOOGLEDRIVE_TYPE, BOX_TYPE, DROPBOX_TYPE, apiBaseUrl, getType, showType, isOAuth} from "../constants.js"; function getUriType(uri) { return uri.split(":")[0].toLowerCase(); @@ -175,10 +175,6 @@ export async function openGoogleDriveOAuth() { openOAuth(apiBaseUrl + "oauth?type=gdrive"); } -export async function openGridFtpOAuth() { - openOAuth(apiBaseUrl + "oauth?type=gftp"); -} - export async function openBoxOAuth(){ openOAuth(apiBaseUrl + 
"oauth?type=box"); } @@ -190,7 +186,6 @@ export async function openOAuth(url){ export const OAuthFunctions = { [DROPBOX_TYPE]: openDropboxOAuth, [GOOGLEDRIVE_TYPE]: openGoogleDriveOAuth, - [GRIDFTP_TYPE]: openGridFtpOAuth, [BOX_TYPE]: openBoxOAuth, other: openOAuth }; diff --git a/src/main/react-front-end/src/constants.js b/src/main/react-front-end/src/constants.js index 995b749e3..b73e5fa6e 100755 --- a/src/main/react-front-end/src/constants.js +++ b/src/main/react-front-end/src/constants.js @@ -44,7 +44,7 @@ export const LOGOUT_ENDPOINT = "/deauthenticate"; // export const apiBaseUrl = "/api/" + version + "/"; // export const apiCredUrl = apiBaseUrl + "cred/"; export const url = "/api/stork/"; -export const transferJobUrl = "/api/transferjob" +export const transferJobUrl = "/api/job/schedule" export const apiBaseUrl = "/api/"; export const apiCredUrl = apiBaseUrl + "cred/"; @@ -149,7 +149,6 @@ export const GOOGLEDRIVE_TYPE = "gdrive:/"; export const BOX_TYPE = "box:///"; export const FTP_TYPE = "ftp://"; export const SFTP_TYPE = "sftp://"; -export const GRIDFTP_TYPE = "gsiftp://"; export const HTTP_TYPE = "http://"; export const HTTPS_TYPE = "https://"; export const S3_TYPE = "s3:"; @@ -161,7 +160,6 @@ export const BOX_NAME = "Box"; export const FTP_NAME = "FTP"; export const SFTP_NAME = "SFTP"; export const HTTP_NAME = "HTTP"; -export const GRIDFTP_NAME = "GridFTP"; export const S3_NAME = "S3"; export const VFS_NAME = "VFS"; @@ -172,7 +170,6 @@ export const FTP = "ftp"; export const SFTP = "sftp"; export const HTTP = "http"; export const HTTPS = "https"; -export const GRIDFTP = "gsiftp"; export const VFS = "vfs"; export const S3 = "s3"; @@ -216,7 +213,6 @@ export const showText = { ftp: FTP_NAME, sftp: SFTP_NAME, http: HTTP_NAME, - gsiftp: GRIDFTP_NAME, https: HTTP_NAME, s3: S3_NAME, vfs: VFS_NAME @@ -229,7 +225,6 @@ export const showType = { ftp: FTP_TYPE, sftp: SFTP_TYPE, http: HTTP_TYPE, - gsiftp: GRIDFTP_TYPE, https: HTTP_TYPE, s3: S3_TYPE, vfs: VFS_TYPE, @@ 
-242,7 +237,6 @@ export const isOAuth = { [FTP_TYPE]: false, [SFTP_TYPE]: false, [HTTP_TYPE]: false, - [GRIDFTP_TYPE]: true, [HTTPS_TYPE]: false, [S3_TYPE]: false, [VFS_TYPE]: false, @@ -264,7 +258,6 @@ export const SERVICES = { [DROPBOX_TYPE, DROPBOX_NAME, DROPBOX], [GOOGLEDRIVE_TYPE, GOOGLEDRIVE_NAME, GOOGLEDRIVE], [BOX_TYPE, BOX_NAME, BOX], - [GRIDFTP_TYPE, GRIDFTP_NAME, GRIDFTP], ], Login: [ [FTP_TYPE, FTP_NAME, FTP], diff --git a/src/main/react-front-end/src/utils.js b/src/main/react-front-end/src/utils.js index c9c16ac07..941960425 100644 --- a/src/main/react-front-end/src/utils.js +++ b/src/main/react-front-end/src/utils.js @@ -23,7 +23,7 @@ export function humanReadableSpeed(size) { if (size < 1024) - return parseFloat(size.toFixed(4)) + ' B/s'; + return parseFloat(size.toFixed(4)) + ' Mb/s'; let i = Math.floor(Math.log(size) / Math.log(1024)); let num = (size / Math.pow(1024, i)); let round = Math.round(num); diff --git a/src/main/react-front-end/src/views/OauthProcessComponent.js b/src/main/react-front-end/src/views/OauthProcessComponent.js index 4150c8a5c..dc5d8d8a3 100755 --- a/src/main/react-front-end/src/views/OauthProcessComponent.js +++ b/src/main/react-front-end/src/views/OauthProcessComponent.js @@ -29,7 +29,6 @@ import { sideLeft, DROPBOX_NAME, GOOGLEDRIVE_NAME, - GRIDFTP_NAME, BOX_NAME, } from "../constants"; import { eventEmitter } from "../App"; @@ -95,9 +94,6 @@ export default class OauthProcessComponent extends Component { } else if (tag === "googledrive") { console.log("Google drive oAuth identifier received"); this.updateLocalCredStore(GOOGLEDRIVE_NAME, qsObj); - } else if (tag === "gridftp") { - console.log("GridFTP oAuth identifier received"); - this.updateLocalCredStore(GRIDFTP_NAME, qsObj); } else if (tag === "box") { console.log("Box oAuth identifier received"); this.updateLocalCredStore(BOX_NAME, qsObj); diff --git a/src/main/react-front-end/src/views/Queue/QueueComponent.js 
b/src/main/react-front-end/src/views/Queue/QueueComponent.js index 8b00903db..6f1615d30 100755 --- a/src/main/react-front-end/src/views/Queue/QueueComponent.js +++ b/src/main/react-front-end/src/views/Queue/QueueComponent.js @@ -57,7 +57,6 @@ class QueueComponent extends Component { this.deleteButtonOnClick = this.deleteButtonOnClick.bind(this) this.handleChangeRowsPerPage = this.handleChangeRowsPerPage.bind(this) this.handleChangePage = this.handleChangePage.bind(this) - this.interval = setInterval(this.update, 2000) // making a queue request every 2 seconds this.queueFuncSuccess = this.queueFuncSuccess.bind(this) this.queueFuncFail = this.queueFuncFail.bind(this) updateGAPageView() @@ -65,6 +64,7 @@ class QueueComponent extends Component { componentDidMount() { document.title = "OneDataShare - Queue" + this.interval = setInterval(() => this.update(), 5000); this.queueFunc() } @@ -90,10 +90,11 @@ class QueueComponent extends Component { } update() { + const statusSet = new Set(["STARTED", "STARTING", "STOPPED", "STOPPING", "UNKNOWN"]) const {responsesToDisplay} = this.state let jobIds = [] responsesToDisplay.forEach(job => { - if (job.status === jobStatus.TRANSFERRING || job.status === jobStatus.SCHEDULED) { + if (statusSet.has(job.status)) { jobIds.push(job.id) } }) @@ -101,18 +102,15 @@ class QueueComponent extends Component { getJobUpdatesForUser(jobIds, resp => { let jobs = resp //TODO: use hash keys and values instead of finding on each update - let existingData = [...responsesToDisplay] + // let existingData = [...responsesToDisplay] jobs.forEach(job => { - let existingJob = existingData.find(item => item.id === job.id) + let existingJob = responsesToDisplay.find(item => item.id === job.id) existingJob.status = job.status existingJob.bytes.total = job.bytes.total existingJob.bytes.done = job.bytes.done existingJob.bytes.avg = job.bytes.avg }) - this.setState({responsesToDisplay: existingData}) - }, error => { - console.log('Failed to get job updates') - }); 
+ }) } } @@ -126,7 +124,7 @@ class QueueComponent extends Component { //success //let responsesToDisplay = this.paginateResults(resp.jobs, page, rowsPerPage); //commented to fix second page render issue as it slices all jobs and returns null object - + console.log(resp) this.setState({ response: resp.content, responsesToDisplay: resp.content, @@ -247,11 +245,11 @@ class QueueComponent extends Component { render() { const rowsPerPageOptions = [10, 20, 50, 100]; const sortableColumns = { - jobId: 'job_id', - status: 'status', - avgSpeed : "bytes.avg", - source : "src.uri", - destination: "dest.uri" + // jobId: 'job_id', + // status: 'status', + // avgSpeed : "bytes.avg", + // source : "src.uri", + // destination: "dest.uri" }; return( ); + console.log(resp.jobParameters.jobSize) let actions = (this.renderActions(resp.owner, resp.job_id, resp.status, resp.deleted)); - let difference = Date.parse(resp.endTime)/1000 - Date.parse(resp.startTime)/1000; + let difference = (Date.parse(resp.endTime) - Date.parse(resp.startTime))/1000; let speed = parseFloat((resp.jobParameters.jobSize/1000000)*8)/(difference); if (isNaN(speed)) { @@ -85,10 +86,10 @@ export default class RowElement extends React.Component {

{humanReadableSpeed(speed)}

-

{resp.jobParameters.sourceBasePath}

+

{resp.jobParameters.sourceCredential}

-

{resp.jobParameters.destBasePath}

+

{resp.jobParameters.destCredential}

{ this.props.adminPg && diff --git a/src/main/react-front-end/src/views/Transfer/BrowseModuleComponent.js b/src/main/react-front-end/src/views/Transfer/BrowseModuleComponent.js index 731d3e5b9..b1626ca31 100755 --- a/src/main/react-front-end/src/views/Transfer/BrowseModuleComponent.js +++ b/src/main/react-front-end/src/views/Transfer/BrowseModuleComponent.js @@ -39,7 +39,7 @@ import {styled} from "@material-ui/core/styles"; import EndpointBrowseComponent from "./EndpointBrowseComponent"; import EndpointAuthenticateComponent from "./EndpointAuthenticateComponent"; -import { GRIDFTP, VFS, getType} from "../../constants"; +import { VFS, getType} from "../../constants"; import {showText, showType, showDisplay} from "../../constants"; import {OAuthFunctions} from "../../APICalls/EndpointAPICalls"; @@ -65,12 +65,6 @@ export default class BrowseModuleComponent extends Component { const checkIfOneSideIsLoggedInAsGrid = (currentState) => { return (getType(currentState.endpoint1) === showType.gsiftp || getType(currentState.endpoint2) === showType.gsiftp) && (currentState.endpoint1.login || currentState.endpoint1.login); } - const checkIfGridftpIsOpen = (currentState) => { - return (getType(currentState.endpoint1) === showType.gsiftp - || getType(currentState.endpoint2) === showType.gsiftp) - || !(currentState.endpoint1.login || currentState.endpoint1.login); - } - let constructState = store.getState(); @@ -80,21 +74,9 @@ export default class BrowseModuleComponent extends Component { endpoint: props.endpoint, mode: props.mode, loading: false, - oneSideIsLoggedInAsGridftp: checkIfOneSideIsLoggedInAsGrid(constructState), - gridftpIsOpen: checkIfGridftpIsOpen(constructState) }; - this.unsubcribe = store.subscribe(() => { - let currentState = store.getState(); - // Check if either side is logged in as GRID_FTP - let oneSideIsLoggedInAsGrid = checkIfOneSideIsLoggedInAsGrid(currentState); - let gridftpIsOpen = checkIfGridftpIsOpen(currentState); - if(oneSideIsLoggedInAsGrid !== 
this.state.oneSideIsLoggedInAsGridftp || gridftpIsOpen !== this.state.gridftpIsOpen){ - this.setState({oneSideIsLoggedInAsGridftp: oneSideIsLoggedInAsGrid, gridftpIsOpen: gridftpIsOpen}); - } - }); - this.setLoading = this.setLoading.bind(this); this.getLoading = this.getLoading.bind(this); @@ -182,7 +164,7 @@ export default class BrowseModuleComponent extends Component { } render() { - const {endpoint, mode, history, type, loading, creds, oneSideIsLoggedInAsGridftp, gridftpIsOpen} = this.state; + const {endpoint, mode, history, type, loading, creds} = this.state; const {update} = this.props; @@ -196,7 +178,7 @@ export default class BrowseModuleComponent extends Component { {(!endpoint.login && mode === pickModule) &&
{displays.map( (service) => { - const disable = (service[0] === GRIDFTP ? !gridftpIsOpen : oneSideIsLoggedInAsGridftp) || (service[0] === VFS && this.checkIfOneSideIsLoggedInAsVFS()); + const disable = (service[0] === VFS && this.checkIfOneSideIsLoggedInAsVFS()); return( {this.login(service)}}> diff --git a/src/main/react-front-end/src/views/Transfer/EndpointAuthenticateComponent.js b/src/main/react-front-end/src/views/Transfer/EndpointAuthenticateComponent.js index cb542cc2a..725ba2bc9 100755 --- a/src/main/react-front-end/src/views/Transfer/EndpointAuthenticateComponent.js +++ b/src/main/react-front-end/src/views/Transfer/EndpointAuthenticateComponent.js @@ -21,7 +21,7 @@ */ -import React, { Component } from 'react'; +import React, { Component, Fragment } from 'react'; import PropTypes from "prop-types"; import {/*openDropboxOAuth, openGoogleDriveOAuth, openBoxOAuth,*/ listFiles} from "../../APICalls/EndpointAPICalls"; @@ -98,6 +98,7 @@ export default class EndpointAuthenticateComponent extends Component { needPassword: false, username: "", password: "", + credentialID: "", endpointSelected: {}, selectingEndpoint: false, portNum: -1, @@ -114,9 +115,7 @@ export default class EndpointAuthenticateComponent extends Component { let loginType = getType(props.endpoint) let endpointName = getName(props.endpoint) - if(loginType === showType.gsiftp /*loginType === GRIDFTP_TYPE*/){ - this.endpointIdsListUpdateFromBackend(); - }else if(!isOAuth[loginType]/*loginType === FTP_TYPE || loginType === SFTP_TYPE || loginType === HTTP_TYPE*/){ + if(!isOAuth[loginType]/*loginType === FTP_TYPE || loginType === SFTP_TYPE || loginType === HTTP_TYPE*/){ this.historyListUpdateFromBackend(endpointName); } this.handleChange = this.handleChange.bind(this); @@ -523,7 +522,7 @@ export default class EndpointAuthenticateComponent extends Component { } // Encrypting user password - const credId = username+"@"+ url.toString().split("://")[1]; + const credId = this.state.credentialID === ""? 
username+"@"+ url.toString().split("://")[1]: this.state.credentialID; this.endpointCheckin(url, @@ -704,8 +703,6 @@ export default class EndpointAuthenticateComponent extends Component { {/* Google Drive, Dropbox, Box login handler */} {(isOAuth[loginType] && loginType !== showType.gsiftp) && this.getCredentialListComponentFromList(credList, type, loginType)} - {/* GridFTP OAuth handler */} - {loginType === showType.gsiftp && this.getEndpointListComponentFromList(endpointIdsList)} {/* Other login handlers*/} {!isOAuth[loginType] && historyList && this.getHistoryListComponentFromList(historyList)} @@ -780,6 +777,24 @@ export default class EndpointAuthenticateComponent extends Component { } }} /> + {loginType === showType.s3? + ():( + { + if (e.key === 'Enter') { + this.handleClick() + } + }} + /> + )}
diff --git a/src/main/react-front-end/src/views/Transfer/TransferComponent.js b/src/main/react-front-end/src/views/Transfer/TransferComponent.js index 686d38c89..6b0897d06 100755 --- a/src/main/react-front-end/src/views/Transfer/TransferComponent.js +++ b/src/main/react-front-end/src/views/Transfer/TransferComponent.js @@ -41,6 +41,11 @@ import AccordionDetails from "@material-ui/core/AccordionDetails"; import Divider from "@material-ui/core/Divider"; + import { DateTimePicker } from '@mui/x-date-pickers'; + import { LocalizationProvider } from '@mui/x-date-pickers'; + import { AdapterDayjs } from '@mui/x-date-pickers/AdapterDayjs' + import dayjs from 'dayjs'; + import {KeyboardArrowRightRounded, KeyboardArrowLeftRounded, KeyboardArrowDownRounded, KeyboardArrowUpRounded, ExpandMore} from "@material-ui/icons"; import { submitTransferRequest } from "../../APICalls/APICalls"; @@ -81,7 +86,7 @@ pipeSize:localStorage.hasOwnProperty("pipeSize")?Number(localStorage.getItem("pipeSize")):1, chunkSize:localStorage.hasOwnProperty("chunkSize")?Number(localStorage.getItem("chunkSize")):10400000, parallelThreadCount:localStorage.hasOwnProperty("parallelThreadCount")?Number(localStorage.getItem("parallelThreadCount")):1, - + scheduledTime: new Date().toISOString(), }, compact: store.getState().compactViewEnabled, notif: false, @@ -104,6 +109,7 @@ this.sendFile = this.sendFile.bind(this); this.onSendToRight = this.onSendToRight.bind(this); this.onSendToLeft = this.onSendToLeft.bind(this); + this.setDate = this.setDate.bind(this); this.printError(); @@ -145,6 +151,12 @@ // this.setState({ width: window.innerWidth, height: window.innerHeight }); this.setState({ compact: store.getState().compactViewEnabled }); } + + setDate = (new_date) => { + const date = new Date(new_date); + const iso8601_conversion = date.toISOString(); + this.setState({ settings: { ...this.state.settings, scheduledTime: iso8601_conversion } }) + } sendFile = (processed) => { if (processed.selectedTasks.length 
=== 0) { @@ -160,7 +172,7 @@ let sourceParent = "" let destParent = "" - let infoList=[] + let infoList={} let sourceCredId ="" let destCredId = "" if(isOAuth[showType[sType]]){ @@ -174,14 +186,14 @@ sourceCredId = endpointSrc?.credential?.credId sourceParent = Array.isArray(processed.fromTo[0].path) ? "" : processed.fromTo[0].path processed.selectedTasks.forEach(x=>{ - infoList.push({path:x.value,id:x.value, size: x.size}) + infoList = {path:x.value,id:x.value, size: x.size} }) } else{ sourceParent = longestCommonPrefix(processed.fromTo[0].selectedTasks.map(x=>x.id)) sourceParent = sourceParent.includes(".") ? sourceParent.substr(0,sourceParent.lastIndexOf("/"))+(sourceParent!=="")?"":"/" : sourceParent sourceCredId = endpointSrc.credential.credId - processed.selectedTasks.forEach(x=>infoList.push({path:x.id, id:x.name ,size:x.size})) + processed.selectedTasks.forEach(x=>infoList = {path:x.id, id:x.name ,size:x.size}) } if(isOAuth[showType[dType]]){ let ids = processed.fromTo[1].ids @@ -201,26 +213,23 @@ destParent = destParent.includes(".") ? destParent.substr(0,destParent.lastIndexOf("/"))+"/":destParent destCredId = endpointDest.credential.credId } - + let source = { credId:sourceCredId, type:sType, - parentInfo:{ - id:sourceParent, - size:"", - path:sourceParent + fileSourcePath: sourceParent, + resourceList:{ + id: infoList.id, + size: infoList.size, + path: infoList.path }, - infoList:infoList } let destination={ credId:destCredId, type:dType, - parentInfo:{ - id:destParent, - size:"", - path:destParent - } + fileDesinationPath: destParent, } + var optionParsed = {} Object.keys(options).forEach((v)=>{ var value = options[v]; @@ -229,6 +238,7 @@ } optionParsed[v] = value }) + console.log({source, destination, optionParsed}) submitTransferRequest(source,destination, optionParsed, (response) => { eventEmitter.emit("messageOccured", "Transfer initiated! 
Please visit the queue page to monitor the transfer"); setBeforeTransferReorder(processed); @@ -501,6 +511,15 @@ {this.state.settings.retry} Times + + + + Date + + Date} defaultValue={dayjs()} onChange={(e) => this.setDate(e)} minDate={dayjs()}/> + + + diff --git a/src/main/resources/application-dev.properties b/src/main/resources/application-dev.properties new file mode 100644 index 000000000..71dae0ce3 --- /dev/null +++ b/src/main/resources/application-dev.properties @@ -0,0 +1,3 @@ +cred.service.uri=${ODS_CRED_URI:http://localhost:8081/v1/endpoint-cred} +transfer.scheduler.service.uri="http://localhost:8061" +meta.service.uri=${ODS_META_URI:http://localhost:8084} diff --git a/src/main/resources/application-prod.properties b/src/main/resources/application-prod.properties new file mode 100644 index 000000000..85c4feb1c --- /dev/null +++ b/src/main/resources/application-prod.properties @@ -0,0 +1,3 @@ +spring.data.mongodb.username=${DOCUMENTDB_USER} +spring.data.mongodb.password=${DOCUMENTDB_PASS} +spring.data.mongodb.uri=mongodb://${DOCUMENTDB_USER}:${DOCUMENTDB_PASS}@${DOCUMENTDB_HOST}/${spring.data.mongodb.database}?retryWrites=false&ssl=true&replicaSet=rs0&readpreference=secondaryPreferred diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index cce9105b2..a178b9866 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -2,7 +2,7 @@ # Define a custom port instead of the default 8080 server.port=${ODS_APP_PORT:8080} -spring.application.name=OneDataShareMonolith +spring.application.name=ODSCore redirect.uri=${ODS_REDIRECT_URI:http://localhost:${server.port}} redirect.uri.string=http://localhost:8080 @@ -13,8 +13,7 @@ cred.service.uri=${ODS_CRED_URI:http://EndpointCredentialService/v1/endpoint-cre meta.service.uri=${ODS_META_URI:http://ODSMETADATA} #Transfer Scheduler Url will be using Eureka the env variable isfor local testing and should be added to the boot.sh 
otherwise you probably want to use eureka -transfer.job.service.uri=${ODS_SCHEDULER_URI:http://ODSTRANSFERSCHEDULERSERVICE} - +transfer.scheduler.service.uri=${ODS_SCHEDULER_URI:http://ODSTRANSFERSCHEDULERSERVICE} #Private key for ods ods.rsa.private.key=${ODS_RSA_PRIVATE_KEY} @@ -48,9 +47,6 @@ gftp.clientSecret=${ODS_GSIFTP_CLIENT_SECRET} # Database settings spring.data.mongodb.database=onedatashare -#spring.data.mongodb.username=${DOCUMENTDB_USER} -#spring.data.mongodb.password=${DOCUMENTDB_PASS} -spring.data.mongodb.uri=mongodb://${DOCUMENTDB_USER}:${DOCUMENTDB_PASS}@${DOCUMENTDB_HOST}/${spring.data.mongodb.database}?retryWrites=false&ssl=true&replicaSet=rs0&readpreference=secondaryPreferred #logging.file=${HOME}/logs/application.log logging.level.org.springframework.web=DEBUG @@ -73,7 +69,7 @@ springbootwebfluxjjwt.jjwt.secret=${ODS_JWT_SECRET} eureka.client.enabled=true eureka.client.serviceUrl.defaultZone= http://${EUREKA_USER:admin}:${EUREKA_PASS:admin}@${EUREKA_URI:localhost:8090}/eureka eureka.client.healthcheck.enabled=true -eureka.instance.metadata-map.startup= ${random.int} +eureka.instance.metadata-map.startup=${random.int} management.endpoint.health.enabled=true