diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index d9b47112fc7..f793032c737 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -19,6 +19,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Adjusted the names of custom model inference jobs and train model jobs to match the worker's naming. [#8524](https://github.com/scalableminds/webknossos/pull/8524) - Updated screenshot tests to use `vitest` framework instead of `ava`. [#8553](https://github.com/scalableminds/webknossos/pull/8553) - The mapping dropdown for segmentation is wider now so that mapping names are fully readable. [#8570](https://github.com/scalableminds/webknossos/pull/8570) +- When loading data from a data layer that has data stored beyond the bounding box specified in the datasource-properties.json, data outside of the bounding box is now zeroed (the layer is “clipped”). [#8551](https://github.com/scalableminds/webknossos/pull/8551) ### Fixed - Fixed a bug in the trees tab where the color change of a tree would affect the tree on which the context menu was previously opened. 
[#8562](https://github.com/scalableminds/webknossos/pull/8562) diff --git a/util/src/main/scala/com/scalableminds/util/geometry/BoundingBox.scala b/util/src/main/scala/com/scalableminds/util/geometry/BoundingBox.scala index b74c8d9b7f7..ef7aa90e70a 100644 --- a/util/src/main/scala/com/scalableminds/util/geometry/BoundingBox.scala +++ b/util/src/main/scala/com/scalableminds/util/geometry/BoundingBox.scala @@ -6,7 +6,7 @@ import play.api.libs.json.{JsObject, Json} case class BoundingBox(topLeft: Vec3Int, width: Int, height: Int, depth: Int) { - val bottomRight: Vec3Int = topLeft.move(width, height, depth) + lazy val bottomRight: Vec3Int = topLeft.move(width, height, depth) def intersects(other: BoundingBox): Boolean = math.max(topLeft.x, other.topLeft.x) < math.min(bottomRight.x, other.bottomRight.x) && @@ -45,6 +45,9 @@ case class BoundingBox(topLeft: Vec3Int, width: Int, height: Int, depth: Int) { BoundingBox(Vec3Int(x, y, z), w, h, d) } + def isFullyContainedIn(other: BoundingBox): Boolean = + this.intersection(other).contains(this) + def isEmpty: Boolean = width <= 0 || height <= 0 || depth <= 0 @@ -61,6 +64,9 @@ case class BoundingBox(topLeft: Vec3Int, width: Int, height: Int, depth: Int) { // Since floorDiv is used for topLeft, ceilDiv is used for the size to avoid voxels being lost at the border BoundingBox(topLeft / that, ceilDiv(width, that.x), ceilDiv(height, that.y), ceilDiv(depth, that.z)) + def move(delta: Vec3Int): BoundingBox = + this.copy(topLeft = this.topLeft + delta) + def toSql: List[Int] = List(topLeft.x, topLeft.y, topLeft.z, width, height, depth) diff --git a/util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala b/util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala index 758327662f6..196f3cea78a 100644 --- a/util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala +++ b/util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala @@ -23,6 +23,9 @@ case class Vec3Int(x: Int, y: Int, z: Int) { def /(that: 
Vec3Int): Vec3Int = Vec3Int(x / that.x, y / that.y, z / that.z) + def unary_- : Vec3Int = + Vec3Int(-x, -y, -z) + def scale(s: Float): Vec3Int = Vec3Int((x * s).toInt, (y * s).toInt, (z * s).toInt) @@ -53,8 +56,6 @@ case class Vec3Int(x: Int, y: Int, z: Int) { def move(other: Vec3Int): Vec3Int = move(other.x, other.y, other.z) - def negate: Vec3Int = Vec3Int(-x, -y, -z) - def to(bottomRight: Vec3Int): Seq[Vec3Int] = range(bottomRight, _ to _) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala index af576a3def1..4ca208457cd 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala @@ -62,7 +62,7 @@ class BinaryDataController @Inject()( (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetDirectoryName, dataLayerName) ~> NOT_FOUND - (data, indices) <- requestData(dataSource, dataLayer, request.body) + (data, indices) <- requestData(dataSource.id, dataLayer, request.body) duration = Instant.since(t) _ = if (duration > (10 seconds)) logger.info( @@ -110,7 +110,7 @@ class BinaryDataController @Inject()( depth, DataServiceRequestSettings(halfByte = halfByte, appliedAgglomerate = mappingName) ) - (data, indices) <- requestData(dataSource, dataLayer, dataRequest) + (data, indices) <- requestData(dataSource.id, dataLayer, dataRequest) } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) } } @@ -126,7 +126,7 @@ class BinaryDataController @Inject()( (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetDirectoryName, dataLayerName) ~> NOT_FOUND - (data, indices) <- requestData(dataSource, dataLayer, request.body) + (data, indices) <- 
requestData(dataSource.id, dataLayer, request.body) } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) } } @@ -154,7 +154,7 @@ class BinaryDataController @Inject()( cubeSize, cubeSize ) - (data, indices) <- requestData(dataSource, dataLayer, dataRequest) + (data, indices) <- requestData(dataSource.id, dataLayer, dataRequest) } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) } } @@ -188,7 +188,7 @@ class BinaryDataController @Inject()( depth = 1, DataServiceRequestSettings(appliedAgglomerate = mappingName) ) - (data, _) <- requestData(dataSource, dataLayer, dataRequest) + (data, _) <- requestData(dataSource.id, dataLayer, dataRequest) intensityRange: Option[(Double, Double)] = intensityMin.flatMap(min => intensityMax.map(max => (min, max))) layerColor = color.flatMap(Color.fromHTML) params = ImageCreatorParameters( @@ -227,7 +227,7 @@ class BinaryDataController @Inject()( datasetDirectoryName, dataLayerName) ~> NOT_FOUND segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> Messages("dataLayer.notFound") - mappingRequest = DataServiceMappingRequest(dataSource, segmentationLayer, mappingName) + mappingRequest = DataServiceMappingRequest(Some(dataSource.id), segmentationLayer, mappingName) result <- mappingService.handleMappingRequest(mappingRequest) } yield Ok(result) } @@ -248,7 +248,7 @@ class BinaryDataController @Inject()( dataLayerName) ~> NOT_FOUND segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> "dataLayer.mustBeSegmentation" adHocMeshRequest = AdHocMeshRequest( - Some(dataSource), + Some(dataSource.id), segmentationLayer, request.body.cuboid(dataLayer), request.body.segmentId, @@ -286,7 +286,7 @@ class BinaryDataController @Inject()( (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetDirectoryName, dataLayerName) ~> NOT_FOUND - positionAndMagOpt <- findDataService.findPositionWithData(dataSource, dataLayer) + 
positionAndMagOpt <- findDataService.findPositionWithData(dataSource.id, dataLayer) } yield Ok(Json.obj("position" -> positionAndMagOpt.map(_._1), "mag" -> positionAndMagOpt.map(_._2))) } } @@ -300,19 +300,19 @@ class BinaryDataController @Inject()( datasetDirectoryName, dataLayerName) ?~> Messages( "dataSource.notFound") ~> NOT_FOUND ?~> Messages("histogram.layerMissing", dataLayerName) - listOfHistograms <- findDataService.createHistogram(dataSource, dataLayer) ?~> Messages("histogram.failed", - dataLayerName) + listOfHistograms <- findDataService.createHistogram(dataSource.id, dataLayer) ?~> Messages("histogram.failed", + dataLayerName) } yield Ok(Json.toJson(listOfHistograms)) } } private def requestData( - dataSource: DataSource, + dataSourceId: DataSourceId, dataLayer: DataLayer, dataRequests: DataRequestCollection )(implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = { val requests = - dataRequests.map(r => DataServiceDataRequest(dataSource, dataLayer, r.cuboid(dataLayer), r.settings)) + dataRequests.map(r => DataServiceDataRequest(Some(dataSourceId), dataLayer, r.cuboid(dataLayer), r.settings)) binaryDataService.handleDataRequests(requests) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala index abffb67f073..6d26f9672c5 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala @@ -276,7 +276,7 @@ class ZarrStreamingController @Inject()( _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) ~> NOT_FOUND cubeSize = DataLayer.bucketLength request = DataServiceDataRequest( - dataSource, + Some(dataSource.id), dataLayer, Cuboid( topLeft = VoxelPosition(x * 
cubeSize * magParsed.x, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayBucketProvider.scala index 7490ea7c3c1..0c78fa32c7c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayBucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayBucketProvider.scala @@ -73,7 +73,7 @@ class DatasetArrayBucketProvider(dataLayer: DataLayer, case Some(remoteSourceDescriptorService: RemoteSourceDescriptorService) => for { magPath: VaultPath <- remoteSourceDescriptorService.vaultPathFor(readInstruction.baseDir, - readInstruction.dataSource.id, + readInstruction.dataSourceId, readInstruction.dataLayer.name, magLocator) chunkContentsCache <- sharedChunkContentsCacheOpt.toFox diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/MappingProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/MappingProvider.scala index 52e86be17ad..86ed7cc242f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/MappingProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/MappingProvider.scala @@ -12,8 +12,8 @@ class MappingProvider(layer: SegmentationLayer) { def load(readInstruction: MappingReadInstruction): Box[Array[Byte]] = { val mappingFile = readInstruction.baseDir - .resolve(readInstruction.dataSource.id.organizationId) - .resolve(readInstruction.dataSource.id.directoryName) + .resolve(readInstruction.dataSourceId.organizationId) + .resolve(readInstruction.dataSourceId.directoryName) .resolve(layer.name) .resolve(MappingProvider.mappingsDir) .resolve(s"${readInstruction.mapping}.${MappingProvider.mappingFileExtension}") diff --git 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala index 30ce869ea51..ec2f87eb350 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala @@ -8,7 +8,7 @@ import play.api.libs.json._ package object datasource { case class DataSourceId(directoryName: String, organizationId: String) { - override def toString: String = s"DataSourceId($organizationId/$directoryName)" + override def toString: String = s"$organizationId/$directoryName" } object DataSourceId { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/Cuboid.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/Cuboid.scala index 11bceadc1d9..9196f4652a5 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/Cuboid.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/Cuboid.scala @@ -1,6 +1,6 @@ package com.scalableminds.webknossos.datastore.models.requests -import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.webknossos.datastore.models.{BucketPosition, VoxelPosition} /** @@ -51,4 +51,10 @@ case class Cuboid(topLeft: VoxelPosition, width: Int, height: Int, depth: Int) { height * mag.y, depth * mag.z ) + + def toBoundingBoxInMag: BoundingBox = + BoundingBox(Vec3Int(topLeft.voxelXInMag, topLeft.voxelYInMag, topLeft.voxelZInMag), width, height, depth) + + def toMag1BoundingBox: BoundingBox = + toMag1.toBoundingBoxInMag } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/DataServiceRequests.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/DataServiceRequests.scala index 96cbbb82132..f11b992a2cd 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/DataServiceRequests.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/DataServiceRequests.scala @@ -1,7 +1,8 @@ package com.scalableminds.webknossos.datastore.models.requests +import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, BucketPosition} -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, SegmentationLayer} +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, SegmentationLayer} import java.nio.file.Path @@ -15,32 +16,41 @@ object DataServiceRequestSettings { } case class DataServiceDataRequest( - dataSource: DataSource, // null in VolumeTracings + dataSourceId: Option[DataSourceId], // None in case of volume tracings dataLayer: DataLayer, cuboid: Cuboid, settings: DataServiceRequestSettings ) { def isSingleBucket: Boolean = cuboid.isSingleBucket(DataLayer.bucketLength) + def mag: Vec3Int = cuboid.mag + + // dataSource is None and unused for volume tracings. 
Insert dummy DataSourceId + // (also unused in that case, except for logging and bucket provider cache key) + def dataSourceIdOrVolumeDummy: DataSourceId = dataSourceId.getOrElse(DataSourceId("VolumeTracing", dataLayer.name)) } case class DataReadInstruction( baseDir: Path, - dataSource: DataSource, + dataSourceId: DataSourceId, // Dummy value in case of volume tracings dataLayer: DataLayer, bucket: BucketPosition, version: Option[Long] = None ) { - def layerSummary: String = f"${dataSource.id.organizationId}/${dataSource.id.directoryName}/${dataLayer.name}" + def layerSummary: String = f"$dataSourceId/${dataLayer.name}" } case class DataServiceMappingRequest( - dataSource: DataSource, + dataSourceId: Option[DataSourceId], // None in case of volume tracings dataLayer: SegmentationLayer, mapping: String -) +) { + // dataSource is None and unused for volume tracings. Insert dummy DataSourceId + // (also unused in that case, except for logging and bucket provider cache key) + def dataSourceIdOrVolumeDummy: DataSourceId = dataSourceId.getOrElse(DataSourceId("VolumeTracing", dataLayer.name)) +} case class MappingReadInstruction( baseDir: Path, - dataSource: DataSource, + dataSourceId: DataSourceId, // Dummy value in case of volume tracings mapping: String ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshService.scala index 5e557e656bd..977f3ed9e00 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshService.scala @@ -10,7 +10,7 @@ import org.apache.pekko.util.Timeout import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate -import 
com.scalableminds.webknossos.datastore.models.datasource.{DataSource, ElementClass, SegmentationLayer} +import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, ElementClass, SegmentationLayer} import com.scalableminds.webknossos.datastore.models.requests.{ Cuboid, DataServiceDataRequest, @@ -26,7 +26,7 @@ import scala.concurrent.duration._ import scala.concurrent.{Await, ExecutionContext} import scala.reflect.ClassTag -case class AdHocMeshRequest(dataSource: Option[DataSource], +case class AdHocMeshRequest(dataSourceId: Option[DataSourceId], dataLayer: SegmentationLayer, cuboid: Cuboid, segmentId: Long, @@ -102,7 +102,7 @@ class AdHocMeshService(binaryDataService: BinaryDataService, request.mappingType match { case Some("JSON") => mappingService.applyMapping( - DataServiceMappingRequest(request.dataSource.orNull, request.dataLayer, mappingName), + DataServiceMappingRequest(request.dataSourceId, request.dataLayer, mappingName), data, dataTypeFunctors.fromLong) case _ => Fox.successful(data) @@ -118,7 +118,7 @@ class AdHocMeshService(binaryDataService: BinaryDataService, case Some("HDF5") => binaryDataService.agglomerateServiceOpt.map { agglomerateService => val dataRequest = DataServiceDataRequest( - request.dataSource.orNull, + request.dataSourceId, request.dataLayer, request.cuboid, DataServiceRequestSettings(halfByte = false, request.mapping, None) @@ -177,7 +177,7 @@ class AdHocMeshService(binaryDataService: BinaryDataService, val cuboid = request.cuboid val dataRequest = DataServiceDataRequest( - request.dataSource.orNull, + request.dataSourceId, request.dataLayer, cuboid, DataServiceRequestSettings.default.copy(additionalCoordinates = request.additionalCoordinates) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala index d17b500f621..7586367726b 100644 --- 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala @@ -61,10 +61,8 @@ class BinaryDataService(val dataBaseDir: Path, for { _ <- Fox.fromBool(requests.forall(_.isSingleBucket)) ?~> "data requests handed to handleMultipleBucketRequests don’t contain bucket requests" dataLayer <- SequenceUtils.findUniqueElement(requests.map(_.dataLayer)).toFox - dataSource <- SequenceUtils.findUniqueElement(requests.map(_.dataSource)).toFox - // dataSource is null and unused for volume tracings. Insert dummy DataSourceId (also unused in that case, except for logging) - dataSourceId = if (dataSource != null) dataSource.id - else DataSourceId("Volume Annotation Layer", dataLayer.name) + // dataSource is None and unused for volume tracings. Insert dummy DataSourceId (also unused in that case, except for logging) + dataSourceId <- SequenceUtils.findUniqueElement(requests.map(_.dataSourceIdOrVolumeDummy)).toFox firstRequest <- requests.headOption.toFox // Requests outside of the layer range can be skipped. They will be answered with Empty below. 
indicesWhereOutsideRange: Set[Int] = requests.zipWithIndex.collect { @@ -77,7 +75,7 @@ class BinaryDataService(val dataBaseDir: Path, case (request, idx) if !indicesWhereOutsideRange.contains(idx) => request } readInstructions = requestsSelected.map(r => - DataReadInstruction(dataBaseDir, dataSource, dataLayer, r.cuboid.topLeft.toBucket, r.settings.version)) + DataReadInstruction(dataBaseDir, dataSourceId, dataLayer, r.cuboid.topLeft.toBucket, r.settings.version)) bucketProvider = bucketProviderCache.getOrLoadAndPut((dataSourceId, dataLayer.bucketProviderCacheKey))(_ => dataLayer.bucketProvider(remoteSourceDescriptorServiceOpt, dataSourceId, sharedChunkContentsCache)) bucketBoxes <- datasetErrorLoggingService.withErrorLoggingMultiple( @@ -102,22 +100,58 @@ class BinaryDataService(val dataBaseDir: Path, inputArray: Array[Byte], conversionFunc: Array[Byte] => Fox[Array[Byte]], request: DataServiceDataRequest): Fox[Array[Byte]] = - if (isNecessary) - datasetErrorLoggingService.withErrorLogging(request.dataSource.id, + if (isNecessary) { + datasetErrorLoggingService.withErrorLogging(request.dataSourceIdOrVolumeDummy, "converting bucket data", conversionFunc(inputArray)) - else Fox.successful(inputArray) + } else Fox.successful(inputArray) + + /* + * Everything outside of the layer bounding box is set to black (zero) so data outside of the specified + * bounding box is not exposed to the user + */ + private def clipToLayerBoundingBox(request: DataServiceDataRequest)(inputArray: Array[Byte]): Box[Array[Byte]] = { + val bytesPerElement = request.dataLayer.bytesPerElement + val requestBboxInMag = request.cuboid.toBoundingBoxInMag + val layerBboxInMag = request.dataLayer.boundingBox / request.mag // Note that this div is implemented to round to the bigger bbox so we don’t lose voxels inside. 
+ val intersectionOpt = requestBboxInMag.intersection(layerBboxInMag).map(_.move(-requestBboxInMag.topLeft)) + val outputArray = Array.fill[Byte](inputArray.length)(0) + intersectionOpt.foreach { intersection => + for { + z <- intersection.topLeft.z until intersection.bottomRight.z + y <- intersection.topLeft.y until intersection.bottomRight.y + // We can bulk copy a row of voxels and do not need to iterate in the x dimension + } { + val offset = + (intersection.topLeft.x + + y * requestBboxInMag.width + + z * requestBboxInMag.width * requestBboxInMag.height) * bytesPerElement + System.arraycopy(inputArray, + offset, + outputArray, + offset, + (intersection.bottomRight.x - intersection.topLeft.x) * bytesPerElement) + } + } + Full(outputArray) + } private def convertAccordingToRequest(request: DataServiceDataRequest, inputArray: Array[Byte]): Fox[Array[Byte]] = for { + clippedData <- convertIfNecessary( + !request.cuboid.toMag1BoundingBox.isFullyContainedIn(request.dataLayer.boundingBox), + inputArray, + data => clipToLayerBoundingBox(request)(data).toFox, + request + ) mappedDataFox <- agglomerateServiceOpt.map { agglomerateService => convertIfNecessary( request.settings.appliedAgglomerate.isDefined && request.dataLayer.category == Category.segmentation && request.cuboid.mag.maxDim <= MaxMagForAgglomerateMapping, - inputArray, + clippedData, data => agglomerateService.applyAgglomerate(request)(data).toFox, request ) - }.toFox.fillEmpty(Fox.successful(inputArray)) ?~> "Failed to apply agglomerate mapping" + }.toFox.fillEmpty(Fox.successful(clippedData)) ?~> "Failed to apply agglomerate mapping" mappedData <- mappedDataFox resultData <- convertIfNecessary(request.settings.halfByte, mappedData, convertToHalfByte, request) } yield resultData @@ -147,11 +181,12 @@ class BinaryDataService(val dataBaseDir: Path, implicit tc: TokenContext): Fox[Array[Byte]] = if (request.dataLayer.doesContainBucket(bucket) && request.dataLayer.containsMag(bucket.mag)) { val readInstruction 
= - DataReadInstruction(dataBaseDir, request.dataSource, request.dataLayer, bucket, request.settings.version) - // dataSource is null and unused for volume tracings. Insert dummy DataSourceId (also unused in that case, except for logging) - val dataSourceId = - if (request.dataSource != null) request.dataSource.id - else DataSourceId("Volume Annotation Layer", request.dataLayer.name) + DataReadInstruction(dataBaseDir, + request.dataSourceIdOrVolumeDummy, + request.dataLayer, + bucket, + request.settings.version) + val dataSourceId = request.dataSourceIdOrVolumeDummy val bucketProvider = bucketProviderCache.getOrLoadAndPut((dataSourceId, request.dataLayer.bucketProviderCacheKey))(_ => request.dataLayer.bucketProvider(remoteSourceDescriptorServiceOpt, dataSourceId, sharedChunkContentsCache)) @@ -168,7 +203,6 @@ class BinaryDataService(val dataBaseDir: Path, private def cutOutCuboid(request: DataServiceDataRequest, rs: List[(BucketPosition, Array[Byte])]): Array[Byte] = { val bytesPerElement = request.dataLayer.bytesPerElement val cuboid = request.cuboid - val subsamplingStrides = Vec3Int.ones val resultShape = Vec3Int(cuboid.width, cuboid.height, cuboid.depth) val result = new Array[Byte](cuboid.volume * bytesPerElement) @@ -194,9 +228,9 @@ class BinaryDataService(val dataBaseDir: Path, y % bucketLength * bucketLength + z % bucketLength * bucketLength * bucketLength) * bytesPerElement - val rx = (xMin - cuboid.topLeft.voxelXInMag) / subsamplingStrides.x - val ry = (y - cuboid.topLeft.voxelYInMag) / subsamplingStrides.y - val rz = (z - cuboid.topLeft.voxelZInMag) / subsamplingStrides.z + val rx = xMin - cuboid.topLeft.voxelXInMag + val ry = y - cuboid.topLeft.voxelYInMag + val rz = z - cuboid.topLeft.voxelZInMag val resultOffset = (rx + ry * resultShape.x + rz * resultShape.x * resultShape.y) * bytesPerElement System.arraycopy(data, dataOffset, result, resultOffset, (xMax - xMin) * bytesPerElement) diff --git 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala index 9b525292f44..6931f6fef88 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala @@ -96,7 +96,7 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, implicit ec: ExecutionContext, tc: TokenContext): Fox[List[Array[Float]]] = { val adHocMeshRequest = AdHocMeshRequest( - Some(dataSource), + Some(dataSource.id), segmentationLayer, Cuboid(topLeft, chunkSize.x + 1, chunkSize.y + 1, chunkSize.z + 1), fullMeshRequest.segmentId, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/FindDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/FindDataService.scala index 3b045d72002..391af995c37 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/FindDataService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/FindDataService.scala @@ -4,7 +4,7 @@ import com.google.inject.Inject import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, ElementClass} +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, ElementClass} import com.scalableminds.webknossos.datastore.models.requests.DataServiceDataRequest import com.scalableminds.webknossos.datastore.models.{DataRequest, VoxelPosition} import net.liftweb.common.Full @@ -23,7 +23,7 @@ class FindDataService @Inject()(dataServicesHolder: BinaryDataServiceHolder)(imp with FoxImplicits { val 
binaryDataService: BinaryDataService = dataServicesHolder.binaryDataService - private def getDataFor(dataSource: DataSource, dataLayer: DataLayer, position: Vec3Int, mag: Vec3Int)( + private def getDataFor(dataSourceId: DataSourceId, dataLayer: DataLayer, position: Vec3Int, mag: Vec3Int)( implicit tc: TokenContext): Fox[Array[Byte]] = { val request = DataRequest( VoxelPosition(position.x, position.y, position.z, mag), @@ -32,7 +32,7 @@ class FindDataService @Inject()(dataServicesHolder: BinaryDataServiceHolder)(imp DataLayer.bucketLength ) binaryDataService.handleDataRequest( - DataServiceDataRequest(dataSource, dataLayer, request.cuboid(dataLayer), request.settings)) + DataServiceDataRequest(Some(dataSourceId), dataLayer, request.cuboid(dataLayer), request.settings)) } private def concatenateBuckets(buckets: Seq[Array[Byte]]): Array[Byte] = @@ -42,13 +42,13 @@ class FindDataService @Inject()(dataServicesHolder: BinaryDataServiceHolder)(imp } } - private def getConcatenatedDataFor(dataSource: DataSource, + private def getConcatenatedDataFor(dataSourceId: DataSourceId, dataLayer: DataLayer, positions: List[Vec3Int], mag: Vec3Int)(implicit tc: TokenContext) = for { dataBucketWise: Seq[Array[Byte]] <- Fox.fromFuture( - Fox.sequenceOfFulls(positions.map(getDataFor(dataSource, dataLayer, _, mag)))) + Fox.sequenceOfFulls(positions.map(getDataFor(dataSourceId, dataLayer, _, mag)))) _ <- Fox.fromBool(dataBucketWise.nonEmpty) ?~> "dataset.noData" dataConcatenated = concatenateBuckets(dataBucketWise) } yield dataConcatenated @@ -95,7 +95,7 @@ class FindDataService @Inject()(dataServicesHolder: BinaryDataServiceHolder)(imp positions.map(_.alignWithGridFloor(Vec3Int.full(DataLayer.bucketLength))).distinct } - private def checkAllPositionsForData(dataSource: DataSource, dataLayer: DataLayer)( + private def checkAllPositionsForData(dataSourceId: DataSourceId, dataLayer: DataLayer)( implicit tc: TokenContext): Fox[Option[(Vec3Int, Vec3Int)]] = { def searchPositionIter(positions: 
List[Vec3Int], mag: Vec3Int): Fox[Option[Vec3Int]] = @@ -110,7 +110,7 @@ class FindDataService @Inject()(dataServicesHolder: BinaryDataServiceHolder)(imp def checkIfPositionHasData(position: Vec3Int, mag: Vec3Int) = for { - data <- getDataFor(dataSource, dataLayer, position, mag) + data <- getDataFor(dataSourceId, dataLayer, position, mag) position <- getPositionOfNonZeroData(data, position, dataLayer.bytesPerElement).toFox } yield position @@ -130,13 +130,14 @@ class FindDataService @Inject()(dataServicesHolder: BinaryDataServiceHolder)(imp magIter(createPositions(dataLayer).distinct, dataLayer.resolutions.sortBy(_.maxDim)) } - def findPositionWithData(dataSource: DataSource, dataLayer: DataLayer)( + def findPositionWithData(dataSourceId: DataSourceId, dataLayer: DataLayer)( implicit tc: TokenContext): Fox[Option[(Vec3Int, Vec3Int)]] = for { - positionAndMagOpt <- checkAllPositionsForData(dataSource, dataLayer) + positionAndMagOpt <- checkAllPositionsForData(dataSourceId, dataLayer) } yield positionAndMagOpt - def createHistogram(dataSource: DataSource, dataLayer: DataLayer)(implicit tc: TokenContext): Fox[List[Histogram]] = { + def createHistogram(dataSourceId: DataSourceId, dataLayer: DataLayer)( + implicit tc: TokenContext): Fox[List[Histogram]] = { def calculateHistogramValues( data: Array[_ >: UByte with Byte with UShort with Short with UInt with Int with ULong with Long with Float], @@ -198,7 +199,7 @@ class FindDataService @Inject()(dataServicesHolder: BinaryDataServiceHolder)(imp def histogramForPositions(positions: List[Vec3Int], mag: Vec3Int) = for { - dataConcatenated <- getConcatenatedDataFor(dataSource, dataLayer, positions, mag) ?~> "dataset.noData" + dataConcatenated <- getConcatenatedDataFor(dataSourceId, dataLayer, positions, mag) ?~> "dataset.noData" isUint24 = dataLayer.elementClass == ElementClass.uint24 convertedData = toUnsignedIfNeeded( filterZeroes(convertData(dataConcatenated, dataLayer.elementClass), skip = isUint24), diff --git 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MappingService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MappingService.scala index 387af61b222..48fa09a6ed1 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MappingService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MappingService.scala @@ -20,7 +20,9 @@ class MappingService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionCo def handleMappingRequest(request: DataServiceMappingRequest): Fox[Array[Byte]] = { val readInstruction = - MappingReadInstruction(Paths.get(config.Datastore.baseDirectory), request.dataSource, request.mapping) + MappingReadInstruction(Paths.get(config.Datastore.baseDirectory), + request.dataSourceIdOrVolumeDummy, + request.mapping) request.dataLayer.mappingProvider.load(readInstruction).toFox } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/SegmentIndexFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/SegmentIndexFileService.scala index 8ff94c3d5ff..ba8389bdea1 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/SegmentIndexFileService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/SegmentIndexFileService.scala @@ -231,7 +231,7 @@ class SegmentIndexFileService @Inject()(config: DataStoreConfig, mappingName: Option[String])(implicit tc: TokenContext): Fox[Array[Byte]] = { val dataRequests = mag1BucketPositions.map { position => DataServiceDataRequest( - dataSource = dataSource, + dataSourceId = Some(dataSource.id), dataLayer = dataLayer, cuboid = Cuboid( VoxelPosition(position.x * DataLayer.bucketLength, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/AgglomerateFileCache.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/AgglomerateFileCache.scala index 66d4059d30d..018bd27e9f9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/AgglomerateFileCache.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/AgglomerateFileCache.scala @@ -37,8 +37,8 @@ case class AgglomerateFileKey( object AgglomerateFileKey { def fromDataRequest(dataRequest: DataServiceDataRequest): AgglomerateFileKey = AgglomerateFileKey( - dataRequest.dataSource.id.organizationId, - dataRequest.dataSource.id.directoryName, + dataRequest.dataSourceIdOrVolumeDummy.organizationId, + dataRequest.dataSourceIdOrVolumeDummy.directoryName, dataRequest.dataLayer.name, dataRequest.settings.appliedAgglomerate.get ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/ParsedMappingCache.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/ParsedMappingCache.scala index 836428bd8a8..27f93f0f5ff 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/ParsedMappingCache.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/ParsedMappingCache.scala @@ -17,10 +17,12 @@ case class CachedMapping( object CachedMapping { def fromMappingRequest(mappingRequest: DataServiceMappingRequest): CachedMapping = - storage.CachedMapping(mappingRequest.dataSource.id.organizationId, - mappingRequest.dataSource.id.directoryName, - mappingRequest.dataLayer.name, - mappingRequest.mapping) + storage.CachedMapping( + mappingRequest.dataSourceIdOrVolumeDummy.organizationId, + mappingRequest.dataSourceIdOrVolumeDummy.directoryName, + mappingRequest.dataLayer.name, + mappingRequest.mapping + ) } class ParsedMappingCache(val maxEntries: Int) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 92622e6e75d..ad7deee62d1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -210,7 +210,7 @@ class EditableMappingService @Inject()( implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = { val requests = dataRequests.map( r => - DataServiceDataRequest(null, + DataServiceDataRequest(None, editableMappingLayer, r.cuboid(editableMappingLayer), r.settings.copy(appliedAgglomerate = None))) @@ -221,7 +221,7 @@ class EditableMappingService @Inject()( implicit tc: TokenContext): Fox[Seq[Box[Array[Byte]]]] = { val requests = dataRequests.map( r => - DataServiceDataRequest(null, + DataServiceDataRequest(None, editableMappingLayer, r.cuboid(editableMappingLayer), r.settings.copy(appliedAgglomerate = None))) @@ -400,7 +400,7 @@ class EditableMappingService @Inject()( def createAdHocMesh(editableMappingLayer: EditableMappingLayer, request: WebknossosAdHocMeshRequest)( implicit tc: TokenContext): Fox[(Array[Float], List[Int])] = { val adHocMeshRequest = AdHocMeshRequest( - dataSource = None, + dataSourceId = None, dataLayer = editableMappingLayer, cuboid = request.cuboid(editableMappingLayer), segmentId = request.segmentId, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 57e1df9d7bc..75c1aab2113 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -502,7 +502,7 @@ class VolumeTracingService @Inject()( isTemporaryTracing, includeFallbackDataIfAvailable) requests = dataRequests.map(r => - DataServiceDataRequest(null, volumeLayer, r.cuboid(volumeLayer), r.settings.copy(appliedAgglomerate = None))) + DataServiceDataRequest(None, volumeLayer, r.cuboid(volumeLayer), r.settings.copy(appliedAgglomerate = None))) data <- binaryDataService.handleDataRequests(requests) } yield data @@ -520,7 +520,7 @@ class VolumeTracingService @Inject()( isTemporaryTracing, includeFallbackDataIfAvailable) requests = dataRequests.map(r => - DataServiceDataRequest(null, volumeLayer, r.cuboid(volumeLayer), r.settings.copy(appliedAgglomerate = None))) + DataServiceDataRequest(None, volumeLayer, r.cuboid(volumeLayer), r.settings.copy(appliedAgglomerate = None))) data <- binaryDataService.handleMultipleBucketRequests(requests) } yield data