105 changes: 42 additions & 63 deletions app/controllers/DatasetController.scala
@@ -176,91 +176,70 @@ class DatasetController @Inject()(userService: UserService,
// Change output format to return only a compact list with essential information on the datasets
compact: Option[Boolean]
): Action[AnyContent] = sil.UserAwareAction.async { implicit request =>
log() {
for {
folderIdValidated <- Fox.runOptional(folderId)(ObjectId.fromString)
uploaderIdValidated <- Fox.runOptional(uploaderId)(ObjectId.fromString)
organizationIdOpt = if (onlyMyOrganization.getOrElse(false))
request.identity.map(_._organization)
else
organizationId
js <- if (compact.getOrElse(false)) {
for {
datasetInfos <- datasetDAO.findAllCompactWithSearch(
isActive,
isUnreported,
organizationIdOpt,
folderIdValidated,
uploaderIdValidated,
searchQuery,
request.identity.map(_._id),
recursive.getOrElse(false),
limitOpt = limit
)
} yield Json.toJson(datasetInfos)
} else {
for {
_ <- Fox.successful(())
_ = logger.info(
s"Requesting listing datasets with isActive '$isActive', isUnreported '$isUnreported', organizationId '$organizationIdOpt', folderId '$folderIdValidated', uploaderId '$uploaderIdValidated', searchQuery '$searchQuery', recursive '$recursive', limit '$limit'")
datasets <- datasetDAO.findAllWithSearch(isActive,
isUnreported,
organizationIdOpt,
folderIdValidated,
uploaderIdValidated,
searchQuery,
recursive.getOrElse(false),
limit) ?~> "dataset.list.failed" ?~> "Dataset listing failed"
_ = logger.info(s"Found ${datasets.size} datasets successfully")
js <- listGrouped(datasets, request.identity) ?~> "dataset.list.failed" ?~> "Grouping datasets failed"
} yield Json.toJson(js)
}
_ = Fox.runOptional(request.identity)(user => userDAO.updateLastActivity(user._id))
} yield addRemoteOriginHeaders(Ok(js))
}
for {
folderIdValidated <- Fox.runOptional(folderId)(ObjectId.fromString)
uploaderIdValidated <- Fox.runOptional(uploaderId)(ObjectId.fromString)
organizationIdOpt = if (onlyMyOrganization.getOrElse(false))
request.identity.map(_._organization)
else
organizationId
js <- if (compact.getOrElse(false)) {
for {
datasetInfos <- datasetDAO.findAllCompactWithSearch(
isActive,
isUnreported,
organizationIdOpt,
folderIdValidated,
uploaderIdValidated,
searchQuery,
request.identity.map(_._id),
recursive.getOrElse(false),
limitOpt = limit
)
} yield Json.toJson(datasetInfos)
} else {
for {
datasets <- datasetDAO.findAllWithSearch(isActive,
isUnreported,
organizationIdOpt,
folderIdValidated,
uploaderIdValidated,
searchQuery,
recursive.getOrElse(false),
limit) ?~> "dataset.list.failed"
js <- listGrouped(datasets, request.identity) ?~> "dataset.list.grouping.failed"
} yield Json.toJson(js)
}
_ = Fox.runOptional(request.identity)(user => userDAO.updateLastActivity(user._id))
} yield addRemoteOriginHeaders(Ok(js))
}

private def listGrouped(datasets: List[Dataset], requestingUser: Option[User])(
implicit ctx: DBAccessContext,
m: MessagesProvider): Fox[List[JsObject]] =
for {
_ <- Fox.successful(())
_ = logger.info(s"datasets: $datasets, requestingUser: ${requestingUser.map(_._id)}")
requestingUserTeamManagerMemberships <- Fox.runOptional(requestingUser)(user =>
userService
.teamManagerMembershipsFor(user._id)) ?~> s"Could not find team manager memberships for user ${requestingUser
.map(_._id)}"
_ = logger.info(
s"requestingUserTeamManagerMemberships: ${requestingUserTeamManagerMemberships.map(_.map(_.toString))}")
userService.teamManagerMembershipsFor(user._id))
groupedByOrga = datasets.groupBy(_._organization).toList
js <- Fox.serialCombined(groupedByOrga) { byOrgaTuple: (String, List[Dataset]) =>
for {
_ <- Fox.successful(())
_ = logger.info(s"byOrgaTuple orga: ${byOrgaTuple._1}, datasets: ${byOrgaTuple._2}")
organization <- organizationDAO.findOne(byOrgaTuple._1)(GlobalAccessContext) ?~> s"Could not find organization ${byOrgaTuple._1}"
organization <- organizationDAO.findOne(byOrgaTuple._1)(GlobalAccessContext) ?~> "organization.notFound"
groupedByDataStore = byOrgaTuple._2.groupBy(_._dataStore).toList
_ <- Fox.serialCombined(groupedByDataStore) { byDataStoreTuple: (String, List[Dataset]) =>
{
logger.info(s"datastore: ${byDataStoreTuple._1}, datasets: ${byDataStoreTuple._2}")
Fox.successful(())
}
}
result <- Fox.serialCombined(groupedByDataStore) { byDataStoreTuple: (String, List[Dataset]) =>
for {
dataStore <- dataStoreDAO.findOneByName(byDataStoreTuple._1.trim)(GlobalAccessContext) ?~>
s"Could not find data store ${byDataStoreTuple._1}"
dataStore <- dataStoreDAO.findOneByName(byDataStoreTuple._1.trim)(GlobalAccessContext)
resultByDataStore: Seq[JsObject] <- Fox.serialCombined(byDataStoreTuple._2) { d =>
datasetService.publicWrites(
d,
requestingUser,
Some(organization),
Some(dataStore),
requestingUserTeamManagerMemberships) ?~> Messages("dataset.list.writesFailed", d.name)
} ?~> "Could not find public writes for datasets"
}
} yield resultByDataStore
} ?~> s"Could not group by datastore for datasets ${byOrgaTuple._2.map(_._id)}"
}
} yield result.flatten
} ?~> s"Could not group by organization for datasets ${datasets.map(_._id)}"
}
} yield js.flatten

def accessList(datasetId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request =>
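For context, a minimal standalone sketch of the grouping shape that listGrouped keeps after this change: datasets are grouped by organization and then by data store, so each organization and data store record is looked up once per group rather than once per dataset. The Dataset case class and the printed output here are simplified stand-ins, not the real webKnossos types or the Fox-based serialization.

// Simplified stand-in types; the real listGrouped works on Fox and JsObject.
final case class Dataset(id: String, organization: String, dataStore: String)

object GroupingSketch extends App {
  val datasets = List(
    Dataset("ds1", "orgA", "store1"),
    Dataset("ds2", "orgA", "store2"),
    Dataset("ds3", "orgB", "store1")
  )

  // Group by organization first, then by data store within each organization.
  val grouped: List[(String, List[(String, List[Dataset])])] =
    datasets
      .groupBy(_.organization).toList
      .map { case (org, orgDatasets) => org -> orgDatasets.groupBy(_.dataStore).toList }

  // Each (organization, dataStore) pair appears once; the controller would fetch the
  // organization and data store records at this point and then serialize every dataset
  // in the group.
  grouped.foreach { case (org, byStore) =>
    byStore.foreach { case (store, ds) =>
      println(s"$org / $store -> ${ds.map(_.id).mkString(", ")}")
    }
  }
}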
14 changes: 5 additions & 9 deletions app/models/dataset/Dataset.scala
@@ -115,14 +115,12 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA

protected def parse(r: DatasetsRow): Fox[Dataset] =
for {
voxelSize <- parseVoxelSizeOpt(r.voxelsizefactor, r.voxelsizeunit) ?~> "could not parse dataset voxel size"
voxelSize <- parseVoxelSizeOpt(r.voxelsizefactor, r.voxelsizeunit)
defaultViewConfigurationOpt <- Fox.runOptional(r.defaultviewconfiguration)(
JsonHelper
.parseAndValidateJson[DatasetViewConfiguration](_)) ?~> "could not parse dataset default view configuration"
JsonHelper.parseAndValidateJson[DatasetViewConfiguration](_))
adminViewConfigurationOpt <- Fox.runOptional(r.adminviewconfiguration)(
JsonHelper
.parseAndValidateJson[DatasetViewConfiguration](_)) ?~> "could not parse dataset admin view configuration"
metadata <- JsonHelper.parseAndValidateJson[JsArray](r.metadata) ?~> "could not parse dataset metadata"
JsonHelper.parseAndValidateJson[DatasetViewConfiguration](_))
metadata <- JsonHelper.parseAndValidateJson[JsArray](r.metadata)
} yield {
Dataset(
ObjectId(r._Id),
@@ -220,11 +218,9 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA
includeSubfolders,
None,
None)
_ = logger.info(s"Requesting datasets with selection predicates '$selectionPredicates'")
limitQuery = limitOpt.map(l => q"LIMIT $l").getOrElse(q"")
_ = logger.info("Requesting datasets with query")
r <- run(q"SELECT $columns FROM $existingCollectionName WHERE $selectionPredicates $limitQuery".as[DatasetsRow])
parsed <- parseAll(r) ?~> "Parsing datasets failed"
parsed <- parseAll(r)
} yield parsed

def findAllCompactWithSearch(isActiveOpt: Option[Boolean] = None,
22 changes: 11 additions & 11 deletions app/models/dataset/DatasetService.scala
@@ -349,19 +349,19 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO,
organizationDAO.findOne(dataset._organization) ?~> "organization.notFound"
}
dataStore <- Fox.fillOption(dataStore) {
dataStoreFor(dataset) ?~> s"fetching data store failed for dataset ${dataset._id}"
dataStoreFor(dataset) ?~> "dataStore.notFound"
}
teams <- teamService.allowedTeamsForDataset(dataset, cumulative = false, requestingUserOpt) ?~> "dataset.list.fetchAllowedTeamsFailed" ?~> s"for dataset ${dataset._id}"
teamsJs <- Fox.serialCombined(teams)(t => teamService.publicWrites(t, Some(organization))) ?~> "dataset.list.teamWritesFailed" ?~> s"for dataset ${dataset._id}"
teamsCumulative <- teamService.allowedTeamsForDataset(dataset, cumulative = true, requestingUserOpt) ?~> "dataset.list.fetchAllowedTeamsFailed" ?~> s"for dataset ${dataset._id}"
teamsCumulativeJs <- Fox.serialCombined(teamsCumulative)(t => teamService.publicWrites(t, Some(organization))) ?~> "dataset.list.teamWritesFailed" ?~> s"for dataset ${dataset._id}"
logoUrl <- logoUrlFor(dataset, Some(organization)) ?~> "dataset.list.fetchLogoUrlFailed" ?~> s"for dataset ${dataset._id}"
isEditable <- isEditableBy(dataset, requestingUserOpt, requestingUserTeamManagerMemberships) ?~> "dataset.list.isEditableCheckFailed" ?~> s"for dataset ${dataset._id}"
lastUsedByUser <- lastUsedTimeFor(dataset._id, requestingUserOpt) ?~> "dataset.list.fetchLastUsedTimeFailed" ?~> s"for dataset ${dataset._id}"
dataStoreJs <- dataStoreService.publicWrites(dataStore) ?~> "dataset.list.dataStoreWritesFailed" ?~> s"for dataset ${dataset._id}"
dataSource <- dataSourceFor(dataset, Some(organization)) ?~> "dataset.list.fetchDataSourceFailed" ?~> s"for dataset ${dataset._id}"
teams <- teamService.allowedTeamsForDataset(dataset, cumulative = false, requestingUserOpt) ?~> "dataset.list.fetchAllowedTeamsFailed"
teamsJs <- Fox.serialCombined(teams)(t => teamService.publicWrites(t, Some(organization))) ?~> "dataset.list.teamWritesFailed"
teamsCumulative <- teamService.allowedTeamsForDataset(dataset, cumulative = true, requestingUserOpt) ?~> "dataset.list.fetchAllowedTeamsFailed"
teamsCumulativeJs <- Fox.serialCombined(teamsCumulative)(t => teamService.publicWrites(t, Some(organization))) ?~> "dataset.list.teamWritesFailed"
logoUrl <- logoUrlFor(dataset, Some(organization)) ?~> "dataset.list.fetchLogoUrlFailed"
isEditable <- isEditableBy(dataset, requestingUserOpt, requestingUserTeamManagerMemberships) ?~> "dataset.list.isEditableCheckFailed"
lastUsedByUser <- lastUsedTimeFor(dataset._id, requestingUserOpt) ?~> "dataset.list.fetchLastUsedTimeFailed"
dataStoreJs <- dataStoreService.publicWrites(dataStore) ?~> "dataset.list.dataStoreWritesFailed"
dataSource <- dataSourceFor(dataset, Some(organization)) ?~> "dataset.list.fetchDataSourceFailed"
usedStorageBytes <- Fox.runIf(requestingUserOpt.exists(u => u._organization == dataset._organization))(
organizationDAO.getUsedStorageForDataset(dataset._id)) ?~> s"fetching used storage failed for ${dataset._id}"
organizationDAO.getUsedStorageForDataset(dataset._id))
} yield {
Json.obj(
"id" -> dataset._id,
4 changes: 2 additions & 2 deletions app/models/user/UserService.scala
@@ -279,11 +279,11 @@ class UserService @Inject()(conf: WkConf,
userExperiencesDAO.findAllExperiencesForUser(_user)

def teamMembershipsFor(_user: ObjectId): Fox[List[TeamMembership]] =
userDAO.findTeamMembershipsForUser(_user)
userDAO.findTeamMembershipsForUser(_user) ?~> "user.team.memberships.failed"

def teamManagerMembershipsFor(_user: ObjectId): Fox[List[TeamMembership]] =
for {
teamMemberships <- teamMembershipsFor(_user)
teamMemberships <- teamMembershipsFor(_user) ?~> "user.team.memberships.failed"
} yield teamMemberships.filter(_.isTeamManager)

def teamManagerTeamIdsFor(_user: ObjectId): Fox[List[ObjectId]] =
4 changes: 3 additions & 1 deletion conf/messages
@@ -68,6 +68,7 @@ user.notAuthorised=You are not authorized to view this resource. Please log in.
user.id.notFound=We could not find a user id in the request.
user.id.invalid=The provided user id is invalid.
user.creation.failed=Failed to create user
user.team.memberships.failed=Failed to retrieve team memberships for user

oidc.disabled=OIDC is disabled
oidc.configuration.invalid=OIDC configuration is invalid
@@ -88,7 +89,8 @@ dataset.name.alreadyTaken=This name is already being used by a different dataset
dataset.source.usableButNoScale=Dataset {0} is marked as active but has no scale.
dataset.import.fileAccessDenied=Cannot create organization folder. Please make sure WEBKNOSSOS has write permissions in the “binaryData” directory
dataset.type.invalid=External data set of type “{0}” is not supported
dataset.list.failed=Failed to retrieve list of data sets.
dataset.list.failed=Failed to retrieve list of datasets.
dataset.list.grouping.failed=Failed to group the retrieved datasets.
dataset.list.writesFailed=Failed to write json for dataset {0}
dataset.noMags=Data layer does not contain mags
dataset.sampledOnlyBlack=Sampled data positions contained only black data
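The pattern these changes move towards: instead of logging ad-hoc strings, a failure gets a message key attached via the ?~> operator, and that key is resolved from conf/messages (Play i18n) when the API response is rendered. Below is a minimal standalone sketch of the idea; the Result type and its ?~> method are simplified assumptions standing in for the actual Fox implementation, not its real API.

// Minimal standalone sketch; `Result` is a simplified stand-in for the Fox type and
// `?~>` here only mimics the idea of attaching a message key to a failure.
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

final case class Result[A](future: Future[Either[String, A]]) {
  // Analogue of `?~>`: if this result is a failure, replace its error with the given key.
  def ?~>(messageKey: String)(implicit ec: ExecutionContext): Result[A] =
    Result(future.map(_.left.map(_ => messageKey)))
}

object Result {
  def fromOption[A](opt: Option[A]): Result[A] =
    Result(Future.successful(opt.toRight("error.unknown")))
}

object MessageKeySketch extends App {
  // conf/messages maps keys like this one to human-readable, localizable text.
  val memberships =
    Result.fromOption(Option.empty[List[String]]) ?~> "user.team.memberships.failed"
  println(Await.result(memberships.future, 1.second)) // Left(user.team.memberships.failed)
}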