Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- New config option `datastore.dataVaults.credentials` allows admins to set up global credentials for remote dataset loading. [#8509](https://github.com/scalableminds/webknossos/pull/8509)

### Changed
- Added a parameter to the reserve manual upload route that makes the request fail if the dataset name is already taken. Moreover, the new dataset's id and directory name are returned in the response. [#8476](https://github.com/scalableminds/webknossos/pull/8476)
- The skeleton tool can no longer be activated if the skeleton layer is invisible. [#8501](https://github.com/scalableminds/webknossos/pull/8501)
- Improved speed of mesh rendering and mouse interaction in 3D viewport. [#8106](https://github.com/scalableminds/webknossos/pull/8106)

Expand Down
6 changes: 5 additions & 1 deletion app/controllers/WKRemoteDataStoreController.scala
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,11 @@ class WKRemoteDataStoreController @Inject()(
_ <- folderDAO.assertUpdateAccess(folderId)(AuthorizedAccessContext(user)) ?~> "folder.noWriteAccess"
layersToLinkWithDatasetId <- Fox.serialCombined(uploadInfo.layersToLink.getOrElse(List.empty))(l =>
validateLayerToLink(l, user)) ?~> "dataset.upload.invalidLinkedLayers"
dataset <- datasetService.createPreliminaryDataset(uploadInfo.name, uploadInfo.organization, dataStore) ?~> "dataset.name.alreadyTaken"
dataset <- datasetService.createPreliminaryDataset(
uploadInfo.name,
uploadInfo.organization,
dataStore,
uploadInfo.requireUniqueName.getOrElse(false)) ?~> "dataset.upload.creation.failed"
_ <- datasetDAO.updateFolder(dataset._id, folderId)(GlobalAccessContext)
_ <- datasetService.addInitialTeams(dataset, uploadInfo.initialTeams, user)(AuthorizedAccessContext(user))
_ <- datasetService.addUploader(dataset, user._id)(AuthorizedAccessContext(user))
Expand Down
12 changes: 8 additions & 4 deletions app/models/dataset/DatasetService.scala
Original file line number Diff line number Diff line change
Expand Up @@ -65,12 +65,16 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO,
_ <- bool2Fox(!name.startsWith(".")) ?~> "dataset.layer.name.invalid.startsWithDot"
} yield ()

def createPreliminaryDataset(datasetName: String, organizationId: String, dataStore: DataStore): Fox[Dataset] = {
def createPreliminaryDataset(datasetName: String,
organizationId: String,
dataStore: DataStore,
requireUniqueName: Boolean): Fox[Dataset] = {
val newDatasetId = ObjectId.generate
for {
datasetDirectoryName <- datasetDAO
.doesDatasetDirectoryExistInOrganization(datasetName, organizationId)(GlobalAccessContext)
.map(if (_) s"$datasetName-${newDatasetId.toString}" else datasetName)
isDatasetNameAlreadyTaken <- datasetDAO.doesDatasetDirectoryExistInOrganization(datasetName, organizationId)(
GlobalAccessContext)
_ <- bool2Fox(!(isDatasetNameAlreadyTaken && requireUniqueName)) ?~> "dataset.name.alreadyTaken"
datasetDirectoryName = if (isDatasetNameAlreadyTaken) s"$datasetName-${newDatasetId.toString}" else datasetName
unreportedDatasource = UnusableDataSource(DataSourceId(datasetDirectoryName, organizationId),
notYetUploadedStatus)
newDataset <- createDataset(dataStore, newDatasetId, datasetName, unreportedDatasource)
Expand Down
1 change: 1 addition & 0 deletions conf/messages
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,7 @@ dataset.upload.storageExceeded=Cannot upload dataset because the storage quota o
dataset.upload.finishFailed=Failed to finalize dataset upload.
dataset.upload.moveToTarget.failed=Failed to move uploaded dataset to target directory.
dataset.upload.moreBytesThanReserved=The uploaded dataset contains more bytes than originally reserved. Please make sure to reserve the correct amount of bytes.
dataset.upload.creation.failed=Failed to create dataset.
dataset.explore.failed.readFile=Failed to read remote file
dataset.explore.magDtypeMismatch=Element class must be the same for all mags of a layer. Got {0}
dataset.explore.autoAdd.failed=Failed to automatically import the explored dataset.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ class DataSourceController @Inject()(
accessTokenService.validateAccessFromTokenContext(
UserAccessRequest.administrateDataSources(request.body.organization)) {
for {
_ <- dsRemoteWebknossosClient.reserveDataSourceUpload(
reservedDatasetInfo <- dsRemoteWebknossosClient.reserveDataSourceUpload(
ReserveUploadInformation(
"aManualUpload",
request.body.datasetName,
Expand All @@ -123,10 +123,14 @@ class DataSourceController @Inject()(
None,
None,
request.body.initialTeamIds,
request.body.folderId
request.body.folderId,
Some(request.body.requireUniqueName)
)
) ?~> "dataset.upload.validation.failed"
} yield Ok
} yield
Ok(
Json.obj("newDatasetId" -> reservedDatasetInfo.newDatasetId,
"directoryName" -> reservedDatasetInfo.directoryName))
}
}

Expand Down Expand Up @@ -419,6 +423,7 @@ class DataSourceController @Inject()(
layersToLink = None,
initialTeams = List.empty,
folderId = folderId,
requireUniqueName = Some(false),
)
) ?~> "dataset.upload.validation.failed"
datasourceId = DataSourceId(reservedAdditionalInfo.directoryName, organizationId)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,8 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository,
None,
None,
List(),
Some(composeRequest.targetFolderId)
Some(composeRequest.targetFolderId),
requireUniqueName = Some(false)
)
reservedAdditionalInfo <- remoteWebknossosClient.reserveDataSourceUpload(reserveUploadInfo) ?~> "Failed to reserve upload."
directory = uploadDirectory(composeRequest.organizationId, reservedAdditionalInfo.directoryName)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,15 +38,18 @@ case class ReserveUploadInformation(
totalFileSizeInBytes: Option[Long],
layersToLink: Option[List[LinkedLayerIdentifier]],
initialTeams: List[String], // team ids
folderId: Option[String])
folderId: Option[String],
requireUniqueName: Option[Boolean])
// Companion object providing the Play-JSON codec for ReserveUploadInformation,
// used to (de)serialize upload-reservation requests between datastore and webknossos.
object ReserveUploadInformation {
// Macro-derived OFormat: field names of the case class map 1:1 to JSON keys,
// so Option fields (e.g. requireUniqueName) may be absent from the payload.
implicit val reserveUploadInformation: OFormat[ReserveUploadInformation] = Json.format[ReserveUploadInformation]
}
case class ReserveManualUploadInformation(datasetName: String,
datasetDirectoryName: String,
organization: String,
initialTeamIds: List[String],
folderId: Option[String])
// Request payload for reserving a manually uploaded dataset (as opposed to a
// datastore-driven upload). Serialized via the companion's Json.format codec.
case class ReserveManualUploadInformation(
// Display name of the new dataset.
datasetName: String,
// Id of the organization the dataset belongs to.
organization: String,
// Team ids that initially get access to the dataset.
initialTeamIds: List[String],
// Target folder id; None presumably means the organization's default folder — TODO confirm against caller.
folderId: Option[String],
// When true, the reservation fails if the dataset name is already taken;
// defaults to false for backward compatibility with older clients.
requireUniqueName: Boolean = false,
)
object ReserveManualUploadInformation {
implicit val reserveUploadInformation: OFormat[ReserveManualUploadInformation] =
Json.format[ReserveManualUploadInformation]
Expand Down