From 1fa450e565ebc8ea130c34c07bbb58ed144e89c6 Mon Sep 17 00:00:00 2001 From: MichaelBuessemeyer <39529669+MichaelBuessemeyer@users.noreply.github.com> Date: Mon, 2 Dec 2024 16:15:11 +0100 Subject: [PATCH 1/3] Revert "Re-Add debug logging for ds listing (#8251)" This reverts commit 0a2afa4b9ad08e03f96ea164fe71e415521215f1. --- app/controllers/DatasetController.scala | 105 ++++++++++-------------- app/models/dataset/Dataset.scala | 14 ++-- app/models/dataset/DatasetService.scala | 22 ++--- 3 files changed, 58 insertions(+), 83 deletions(-) diff --git a/app/controllers/DatasetController.scala b/app/controllers/DatasetController.scala index cf90bdfa10f..142cda633f5 100755 --- a/app/controllers/DatasetController.scala +++ b/app/controllers/DatasetController.scala @@ -176,79 +176,58 @@ class DatasetController @Inject()(userService: UserService, // Change output format to return only a compact list with essential information on the datasets compact: Option[Boolean] ): Action[AnyContent] = sil.UserAwareAction.async { implicit request => - log() { - for { - folderIdValidated <- Fox.runOptional(folderId)(ObjectId.fromString) - uploaderIdValidated <- Fox.runOptional(uploaderId)(ObjectId.fromString) - organizationIdOpt = if (onlyMyOrganization.getOrElse(false)) - request.identity.map(_._organization) - else - organizationId - js <- if (compact.getOrElse(false)) { - for { - datasetInfos <- datasetDAO.findAllCompactWithSearch( - isActive, - isUnreported, - organizationIdOpt, - folderIdValidated, - uploaderIdValidated, - searchQuery, - request.identity.map(_._id), - recursive.getOrElse(false), - limitOpt = limit - ) - } yield Json.toJson(datasetInfos) - } else { - for { - _ <- Fox.successful(()) - _ = logger.info( - s"Requesting listing datasets with isActive '$isActive', isUnreported '$isUnreported', organizationId '$organizationIdOpt', folderId '$folderIdValidated', uploaderId '$uploaderIdValidated', searchQuery '$searchQuery', recursive '$recursive', limit '$limit'") - 
datasets <- datasetDAO.findAllWithSearch(isActive, - isUnreported, - organizationIdOpt, - folderIdValidated, - uploaderIdValidated, - searchQuery, - recursive.getOrElse(false), - limit) ?~> "dataset.list.failed" ?~> "Dataset listing failed" - _ = logger.info(s"Found ${datasets.size} datasets successfully") - js <- listGrouped(datasets, request.identity) ?~> "dataset.list.failed" ?~> "Grouping datasets failed" - } yield Json.toJson(js) - } - _ = Fox.runOptional(request.identity)(user => userDAO.updateLastActivity(user._id)) - } yield addRemoteOriginHeaders(Ok(js)) - } + for { + folderIdValidated <- Fox.runOptional(folderId)(ObjectId.fromString) + uploaderIdValidated <- Fox.runOptional(uploaderId)(ObjectId.fromString) + organizationIdOpt = if (onlyMyOrganization.getOrElse(false)) + request.identity.map(_._organization) + else + organizationId + js <- if (compact.getOrElse(false)) { + for { + datasetInfos <- datasetDAO.findAllCompactWithSearch( + isActive, + isUnreported, + organizationIdOpt, + folderIdValidated, + uploaderIdValidated, + searchQuery, + request.identity.map(_._id), + recursive.getOrElse(false), + limitOpt = limit + ) + } yield Json.toJson(datasetInfos) + } else { + for { + datasets <- datasetDAO.findAllWithSearch(isActive, + isUnreported, + organizationIdOpt, + folderIdValidated, + uploaderIdValidated, + searchQuery, + recursive.getOrElse(false), + limit) ?~> "dataset.list.failed" + js <- listGrouped(datasets, request.identity) ?~> "dataset.list.failed" + } yield Json.toJson(js) + } + _ = Fox.runOptional(request.identity)(user => userDAO.updateLastActivity(user._id)) + } yield addRemoteOriginHeaders(Ok(js)) } private def listGrouped(datasets: List[Dataset], requestingUser: Option[User])( implicit ctx: DBAccessContext, m: MessagesProvider): Fox[List[JsObject]] = for { - _ <- Fox.successful(()) - _ = logger.info(s"datasets: $datasets, requestingUser: ${requestingUser.map(_._id)}") requestingUserTeamManagerMemberships <- 
Fox.runOptional(requestingUser)(user => - userService - .teamManagerMembershipsFor(user._id)) ?~> s"Could not find team manager memberships for user ${requestingUser - .map(_._id)}" - _ = logger.info( - s"requestingUserTeamManagerMemberships: ${requestingUserTeamManagerMemberships.map(_.map(_.toString))}") + userService.teamManagerMembershipsFor(user._id)) groupedByOrga = datasets.groupBy(_._organization).toList js <- Fox.serialCombined(groupedByOrga) { byOrgaTuple: (String, List[Dataset]) => for { - _ <- Fox.successful(()) - _ = logger.info(s"byOrgaTuple orga: ${byOrgaTuple._1}, datasets: ${byOrgaTuple._2}") - organization <- organizationDAO.findOne(byOrgaTuple._1)(GlobalAccessContext) ?~> s"Could not find organization ${byOrgaTuple._1}" + organization <- organizationDAO.findOne(byOrgaTuple._1)(GlobalAccessContext) groupedByDataStore = byOrgaTuple._2.groupBy(_._dataStore).toList - _ <- Fox.serialCombined(groupedByDataStore) { byDataStoreTuple: (String, List[Dataset]) => - { - logger.info(s"datastore: ${byDataStoreTuple._1}, datasets: ${byDataStoreTuple._2}") - Fox.successful(()) - } - } result <- Fox.serialCombined(groupedByDataStore) { byDataStoreTuple: (String, List[Dataset]) => for { - dataStore <- dataStoreDAO.findOneByName(byDataStoreTuple._1.trim)(GlobalAccessContext) ?~> - s"Could not find data store ${byDataStoreTuple._1}" + dataStore <- dataStoreDAO.findOneByName(byDataStoreTuple._1.trim)(GlobalAccessContext) resultByDataStore: Seq[JsObject] <- Fox.serialCombined(byDataStoreTuple._2) { d => datasetService.publicWrites( d, @@ -256,11 +235,11 @@ class DatasetController @Inject()(userService: UserService, Some(organization), Some(dataStore), requestingUserTeamManagerMemberships) ?~> Messages("dataset.list.writesFailed", d.name) - } ?~> "Could not find public writes for datasets" + } } yield resultByDataStore - } ?~> s"Could not group by datastore for datasets ${byOrgaTuple._2.map(_._id)}" + } } yield result.flatten - } ?~> s"Could not group by organization 
for datasets ${datasets.map(_._id)}" + } } yield js.flatten def accessList(datasetId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => diff --git a/app/models/dataset/Dataset.scala b/app/models/dataset/Dataset.scala index bb6feade662..ebfa63387a1 100755 --- a/app/models/dataset/Dataset.scala +++ b/app/models/dataset/Dataset.scala @@ -115,14 +115,12 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA protected def parse(r: DatasetsRow): Fox[Dataset] = for { - voxelSize <- parseVoxelSizeOpt(r.voxelsizefactor, r.voxelsizeunit) ?~> "could not parse dataset voxel size" + voxelSize <- parseVoxelSizeOpt(r.voxelsizefactor, r.voxelsizeunit) defaultViewConfigurationOpt <- Fox.runOptional(r.defaultviewconfiguration)( - JsonHelper - .parseAndValidateJson[DatasetViewConfiguration](_)) ?~> "could not parse dataset default view configuration" + JsonHelper.parseAndValidateJson[DatasetViewConfiguration](_)) adminViewConfigurationOpt <- Fox.runOptional(r.adminviewconfiguration)( - JsonHelper - .parseAndValidateJson[DatasetViewConfiguration](_)) ?~> "could not parse dataset admin view configuration" - metadata <- JsonHelper.parseAndValidateJson[JsArray](r.metadata) ?~> "could not parse dataset metadata" + JsonHelper.parseAndValidateJson[DatasetViewConfiguration](_)) + metadata <- JsonHelper.parseAndValidateJson[JsArray](r.metadata) } yield { Dataset( ObjectId(r._Id), @@ -220,11 +218,9 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA includeSubfolders, None, None) - _ = logger.info(s"Requesting datasets with selection predicates '$selectionPredicates'") limitQuery = limitOpt.map(l => q"LIMIT $l").getOrElse(q"") - _ = logger.info("Requesting datasets with query") r <- run(q"SELECT $columns FROM $existingCollectionName WHERE $selectionPredicates $limitQuery".as[DatasetsRow]) - parsed <- parseAll(r) ?~> "Parsing datasets failed" + parsed <- parseAll(r) } yield parsed def 
findAllCompactWithSearch(isActiveOpt: Option[Boolean] = None, diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index f607fdeb92f..20cf025fba7 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -349,19 +349,19 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, organizationDAO.findOne(dataset._organization) ?~> "organization.notFound" } dataStore <- Fox.fillOption(dataStore) { - dataStoreFor(dataset) ?~> s"fetching data store failed for dataset ${dataset._id}" + dataStoreFor(dataset) } - teams <- teamService.allowedTeamsForDataset(dataset, cumulative = false, requestingUserOpt) ?~> "dataset.list.fetchAllowedTeamsFailed" ?~> s"for dataset ${dataset._id}" - teamsJs <- Fox.serialCombined(teams)(t => teamService.publicWrites(t, Some(organization))) ?~> "dataset.list.teamWritesFailed" ?~> s"for dataset ${dataset._id}" - teamsCumulative <- teamService.allowedTeamsForDataset(dataset, cumulative = true, requestingUserOpt) ?~> "dataset.list.fetchAllowedTeamsFailed" ?~> s"for dataset ${dataset._id}" - teamsCumulativeJs <- Fox.serialCombined(teamsCumulative)(t => teamService.publicWrites(t, Some(organization))) ?~> "dataset.list.teamWritesFailed" ?~> s"for dataset ${dataset._id}" - logoUrl <- logoUrlFor(dataset, Some(organization)) ?~> "dataset.list.fetchLogoUrlFailed" ?~> s"for dataset ${dataset._id}" - isEditable <- isEditableBy(dataset, requestingUserOpt, requestingUserTeamManagerMemberships) ?~> "dataset.list.isEditableCheckFailed" ?~> s"for dataset ${dataset._id}" - lastUsedByUser <- lastUsedTimeFor(dataset._id, requestingUserOpt) ?~> "dataset.list.fetchLastUsedTimeFailed" ?~> s"for dataset ${dataset._id}" - dataStoreJs <- dataStoreService.publicWrites(dataStore) ?~> "dataset.list.dataStoreWritesFailed" ?~> s"for dataset ${dataset._id}" - dataSource <- dataSourceFor(dataset, Some(organization)) ?~> "dataset.list.fetchDataSourceFailed" ?~> s"for dataset 
${dataset._id}" + teams <- teamService.allowedTeamsForDataset(dataset, cumulative = false, requestingUserOpt) ?~> "dataset.list.fetchAllowedTeamsFailed" + teamsJs <- Fox.serialCombined(teams)(t => teamService.publicWrites(t, Some(organization))) ?~> "dataset.list.teamWritesFailed" + teamsCumulative <- teamService.allowedTeamsForDataset(dataset, cumulative = true, requestingUserOpt) ?~> "dataset.list.fetchAllowedTeamsFailed" + teamsCumulativeJs <- Fox.serialCombined(teamsCumulative)(t => teamService.publicWrites(t, Some(organization))) ?~> "dataset.list.teamWritesFailed" + logoUrl <- logoUrlFor(dataset, Some(organization)) ?~> "dataset.list.fetchLogoUrlFailed" + isEditable <- isEditableBy(dataset, requestingUserOpt, requestingUserTeamManagerMemberships) ?~> "dataset.list.isEditableCheckFailed" + lastUsedByUser <- lastUsedTimeFor(dataset._id, requestingUserOpt) ?~> "dataset.list.fetchLastUsedTimeFailed" + dataStoreJs <- dataStoreService.publicWrites(dataStore) ?~> "dataset.list.dataStoreWritesFailed" + dataSource <- dataSourceFor(dataset, Some(organization)) ?~> "dataset.list.fetchDataSourceFailed" usedStorageBytes <- Fox.runIf(requestingUserOpt.exists(u => u._organization == dataset._organization))( - organizationDAO.getUsedStorageForDataset(dataset._id)) ?~> s"fetching used storage failed for ${dataset._id}" + organizationDAO.getUsedStorageForDataset(dataset._id)) } yield { Json.obj( "id" -> dataset._id, From ede5197923b02792e9b4b5ac06853703b51a7f41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 2 Dec 2024 17:48:53 +0100 Subject: [PATCH 2/3] add some error message --- app/controllers/DatasetController.scala | 4 ++-- app/models/dataset/DataStore.scala | 2 +- app/models/dataset/DatasetService.scala | 2 +- app/models/user/UserService.scala | 4 ++-- conf/messages | 4 +++- 5 files changed, 9 insertions(+), 7 deletions(-) diff --git a/app/controllers/DatasetController.scala b/app/controllers/DatasetController.scala index 
142cda633f5..59e0af498b0 100755 --- a/app/controllers/DatasetController.scala +++ b/app/controllers/DatasetController.scala @@ -207,7 +207,7 @@ class DatasetController @Inject()(userService: UserService, searchQuery, recursive.getOrElse(false), limit) ?~> "dataset.list.failed" - js <- listGrouped(datasets, request.identity) ?~> "dataset.list.failed" + js <- listGrouped(datasets, request.identity) ?~> "dataset.list.grouping.failed" } yield Json.toJson(js) } _ = Fox.runOptional(request.identity)(user => userDAO.updateLastActivity(user._id)) @@ -223,7 +223,7 @@ class DatasetController @Inject()(userService: UserService, groupedByOrga = datasets.groupBy(_._organization).toList js <- Fox.serialCombined(groupedByOrga) { byOrgaTuple: (String, List[Dataset]) => for { - organization <- organizationDAO.findOne(byOrgaTuple._1)(GlobalAccessContext) + organization <- organizationDAO.findOne(byOrgaTuple._1)(GlobalAccessContext) ?~> "organization.notFound" groupedByDataStore = byOrgaTuple._2.groupBy(_._dataStore).toList result <- Fox.serialCombined(groupedByDataStore) { byDataStoreTuple: (String, List[Dataset]) => for { diff --git a/app/models/dataset/DataStore.scala b/app/models/dataset/DataStore.scala index e4450c3cf15..1b4cb175306 100644 --- a/app/models/dataset/DataStore.scala +++ b/app/models/dataset/DataStore.scala @@ -114,7 +114,7 @@ class DataStoreDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionContext for { accessQuery <- readAccessQuery r <- run(q"SELECT $columns FROM $existingCollectionName WHERE name = $name AND $accessQuery".as[DatastoresRow]) - parsed <- parseFirst(r, name) + parsed <- parseFirst(r, name) ?~> "dataStore.notFound" } yield parsed def findOneByUrl(url: String)(implicit ctx: DBAccessContext): Fox[DataStore] = diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index 20cf025fba7..16498011695 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -349,7 
+349,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, organizationDAO.findOne(dataset._organization) ?~> "organization.notFound" } dataStore <- Fox.fillOption(dataStore) { - dataStoreFor(dataset) + dataStoreFor(dataset) ?~> "dataStore.notFound" } teams <- teamService.allowedTeamsForDataset(dataset, cumulative = false, requestingUserOpt) ?~> "dataset.list.fetchAllowedTeamsFailed" teamsJs <- Fox.serialCombined(teams)(t => teamService.publicWrites(t, Some(organization))) ?~> "dataset.list.teamWritesFailed" diff --git a/app/models/user/UserService.scala b/app/models/user/UserService.scala index be07b9d6115..e8a1872e7fa 100755 --- a/app/models/user/UserService.scala +++ b/app/models/user/UserService.scala @@ -279,11 +279,11 @@ class UserService @Inject()(conf: WkConf, userExperiencesDAO.findAllExperiencesForUser(_user) def teamMembershipsFor(_user: ObjectId): Fox[List[TeamMembership]] = - userDAO.findTeamMembershipsForUser(_user) + userDAO.findTeamMembershipsForUser(_user) ?~> "user.team.memberships.failed" def teamManagerMembershipsFor(_user: ObjectId): Fox[List[TeamMembership]] = for { - teamMemberships <- teamMembershipsFor(_user) + teamMemberships <- teamMembershipsFor(_user) ?~> "user.team.memberships.failed" } yield teamMemberships.filter(_.isTeamManager) def teamManagerTeamIdsFor(_user: ObjectId): Fox[List[ObjectId]] = diff --git a/conf/messages b/conf/messages index 34aa12fa2a7..eb03c7c06f5 100644 --- a/conf/messages +++ b/conf/messages @@ -68,6 +68,7 @@ user.notAuthorised=You are not authorized to view this resource. Please log in. user.id.notFound=We could not find a user id in the request. user.id.invalid=The provided user id is invalid. 
user.creation.failed=Failed to create user +user.team.memberships.failed=Failed to retrieve team memberships for user oidc.disabled=OIDC is disabled oidc.configuration.invalid=OIDC configuration is invalid @@ -88,7 +89,8 @@ dataset.name.alreadyTaken=This name is already being used by a different dataset dataset.source.usableButNoScale=Dataset {0} is marked as active but has no scale. dataset.import.fileAccessDenied=Cannot create organization folder. Please make sure WEBKNOSSOS has write permissions in the “binaryData” directory dataset.type.invalid=External data set of type “{0}” is not supported -dataset.list.failed=Failed to retrieve list of data sets. +dataset.list.failed=Failed to retrieve list of datasets. +dataset.list.grouping.failed=Failed to group retrieved datasets. dataset.list.writesFailed=Failed to write json for dataset {0} dataset.noMags=Data layer does not contain mags dataset.sampledOnlyBlack=Sampled data positions contained only black data From f55fd1b09fbeb9e489745ee04a33f16409db6486 Mon Sep 17 00:00:00 2001 From: MichaelBuessemeyer <39529669+MichaelBuessemeyer@users.noreply.github.com> Date: Thu, 5 Dec 2024 16:03:17 +0100 Subject: [PATCH 3/3] Update app/models/dataset/DataStore.scala Co-authored-by: Florian M --- app/models/dataset/DataStore.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/models/dataset/DataStore.scala b/app/models/dataset/DataStore.scala index 1b4cb175306..e4450c3cf15 100644 --- a/app/models/dataset/DataStore.scala +++ b/app/models/dataset/DataStore.scala @@ -114,7 +114,7 @@ class DataStoreDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionContext for { accessQuery <- readAccessQuery r <- run(q"SELECT $columns FROM $existingCollectionName WHERE name = $name AND $accessQuery".as[DatastoresRow]) - parsed <- parseFirst(r, name) ?~> "dataStore.notFound" + parsed <- parseFirst(r, name) } yield parsed def findOneByUrl(url: String)(implicit ctx: DBAccessContext): Fox[DataStore] =