Merged
40 changes: 39 additions & 1 deletion app/controllers/DatasetController.scala
@@ -7,7 +7,7 @@ import com.scalableminds.util.objectid.ObjectId
import com.scalableminds.util.time.Instant
import com.scalableminds.util.tools.{Fox, TristateOptionJsonHelper}
import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate
-import com.scalableminds.webknossos.datastore.models.datasource.ElementClass
+import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, ElementClass}
import mail.{MailchimpClient, MailchimpTag}
import models.analytics.{AnalyticsService, ChangeDatasetSettingsEvent, OpenDatasetEvent}
import models.dataset._
@@ -71,6 +71,12 @@ object SegmentAnythingMaskParameters {
implicit val jsonFormat: Format[SegmentAnythingMaskParameters] = Json.format[SegmentAnythingMaskParameters]
}

+case class DataSourceRegistrationInfo(dataSource: DataSource, folderId: Option[String], dataStoreName: String)
+
+object DataSourceRegistrationInfo {
+  implicit val jsonFormat: OFormat[DataSourceRegistrationInfo] = Json.format[DataSourceRegistrationInfo]
+}

class DatasetController @Inject()(userService: UserService,
userDAO: UserDAO,
datasetService: DatasetService,
@@ -153,6 +159,27 @@ class DatasetController @Inject()(userService: UserService,
} yield Ok
}

+  def addVirtualDataset(name: String): Action[DataSourceRegistrationInfo] =
+    sil.SecuredAction.async(validateJson[DataSourceRegistrationInfo]) { implicit request =>
+      for {
+        dataStore <- dataStoreDAO.findOneByName(request.body.dataStoreName) ?~> Messages(
+          "datastore.notFound",
+          request.body.dataStoreName) ~> NOT_FOUND
+        user = request.identity
+        isTeamManagerOrAdmin <- userService.isTeamManagerOrAdminOfOrg(user, user._organization)
+        _ <- Fox.fromBool(isTeamManagerOrAdmin || user.isDatasetManager) ~> FORBIDDEN
+        _ <- Fox.fromBool(request.body.dataSource.dataLayers.nonEmpty) ?~> "dataset.explore.zeroLayers"
+        _ <- datasetService.validatePaths(request.body.dataSource.allExplicitPaths, dataStore) ?~> "dataSource.add.pathsNotAllowed"
+        dataset <- datasetService.createVirtualDataset(
+          name,
+          dataStore,
+          request.body.dataSource,
+          request.body.folderId,
+          user
+        )
+      } yield Ok(Json.obj("newDatasetId" -> dataset._id))
+    }
Comment on lines +162 to +181
⚠️ Potential issue

Missed enforcement: honor dataStore.onlyAllowedOrganization for virtual adds

Add the same restriction applied in upload flows so users cannot use a datastore restricted to a different organization to create virtual datasets.

Also, map path validation failures to a client error.

   def addVirtualDataset(name: String): Action[DataSourceRegistrationInfo] =
     sil.SecuredAction.async(validateJson[DataSourceRegistrationInfo]) { implicit request =>
       for {
         dataStore <- dataStoreDAO.findOneByName(request.body.dataStoreName) ?~> Messages(
           "datastore.notFound",
           request.body.dataStoreName) ~> NOT_FOUND
         user = request.identity
         isTeamManagerOrAdmin <- userService.isTeamManagerOrAdminOfOrg(user, user._organization)
         _ <- Fox.fromBool(isTeamManagerOrAdmin || user.isDatasetManager) ~> FORBIDDEN
         _ <- Fox.fromBool(request.body.dataSource.dataLayers.nonEmpty) ?~> "dataset.explore.zeroLayers"
-        _ <- datasetService.validatePaths(request.body.dataSource.allExplicitPaths, dataStore) ?~> "dataSource.add.pathsNotAllowed"
+        _ <- Fox.fromBool(dataStore.onlyAllowedOrganization.forall(_ == user._organization)) ?~> "dataset.upload.Datastore.restricted" ~> FORBIDDEN
+        _ <- datasetService.validatePaths(request.body.dataSource.allExplicitPaths, dataStore) ?~> "dataSource.add.pathsNotAllowed" ~> BAD_REQUEST
         dataset <- datasetService.createVirtualDataset(
           name,
           dataStore,
           request.body.dataSource,
           request.body.folderId,
           user
         )
       } yield Ok(Json.obj("newDatasetId" -> dataset._id))
     }
🤖 Prompt for AI Agents
In app/controllers/DatasetController.scala around lines 162–181, enforce dataStore.onlyAllowedOrganization for virtual dataset creation and map path validation failures to a client error: after loading the dataStore and resolving the team-manager/admin check, add a guard that fails with FORBIDDEN if dataStore.onlyAllowedOrganization is defined and differs from user._organization; then change the datasetService.validatePaths line to map failures to BAD_REQUEST (using the same pattern as the other checks: ?~> "dataSource.add.pathsNotAllowed" ~> BAD_REQUEST) so path validation returns a 4xx instead of an internal server error.


// List all accessible datasets (list of json objects, one per dataset)
def list(
// Optional filtering: If true, list only active datasets, if false, list only inactive datasets
@@ -491,6 +518,17 @@ class DatasetController @Inject()(userService: UserService,
}
}

+  def deleteOnDisk(datasetId: ObjectId): Action[AnyContent] =
+    sil.SecuredAction.async { implicit request =>
+      for {
+        dataset <- datasetDAO.findOne(datasetId) ?~> notFoundMessage(datasetId.toString) ~> NOT_FOUND
+        _ <- Fox.fromBool(conf.Features.allowDeleteDatasets) ?~> "dataset.delete.disabled"
+        _ <- Fox.assertTrue(datasetService.isEditableBy(dataset, Some(request.identity))) ?~> "notAllowed" ~> FORBIDDEN
+        _ <- Fox.fromBool(request.identity.isAdminOf(dataset._organization)) ~> FORBIDDEN
+        _ <- datasetService.deleteVirtualOrDiskDataset(dataset)
+      } yield Ok
+    }

def compose(): Action[ComposeRequest] =
sil.SecuredAction.async(validateJson[ComposeRequest]) { implicit request =>
for {
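For orientation, the body the new `addVirtualDataset` action deserializes is the `DataSourceRegistrationInfo` defined above. Below is a minimal sketch of such a payload; only `dataStoreName`, `folderId`, and `dataSource` come from this PR, while the fields inside `dataSource` are illustrative assumptions about the usual webknossos datasource JSON shape:

```typescript
// Hypothetical request body for POST /api/datasets/addVirtualDataset/:name.
// The inner dataSource fields are assumptions, not part of this diff.
const registrationInfo = {
  dataStoreName: "localhost", // resolved via dataStoreDAO.findOneByName
  folderId: null, // optional; falls back to the organization's root folder
  dataSource: {
    id: { name: "my_virtual_dataset", team: "sample_organization" },
    dataLayers: [
      // must be non-empty, or the request fails with "dataset.explore.zeroLayers"
    ],
    scale: [11.24, 11.24, 25],
  },
};
```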
17 changes: 4 additions & 13 deletions app/controllers/WKRemoteDataStoreController.scala
@@ -9,7 +9,7 @@ import com.scalableminds.webknossos.datastore.helpers.{LayerMagLinkInfo, MagLink
import com.scalableminds.webknossos.datastore.models.UnfinishedUpload
import com.scalableminds.webknossos.datastore.models.datasource.{AbstractDataLayer, DataSource, DataSourceId}
import com.scalableminds.webknossos.datastore.models.datasource.inbox.{InboxDataSourceLike => InboxDataSource}
-import com.scalableminds.webknossos.datastore.services.{DataSourcePathInfo, DataSourceRegistrationInfo, DataStoreStatus}
+import com.scalableminds.webknossos.datastore.services.{DataSourcePathInfo, DataStoreStatus}
import com.scalableminds.webknossos.datastore.services.uploading.{
LinkedLayerIdentifier,
ReserveAdditionalInformation,
@@ -226,6 +226,9 @@ class WKRemoteDataStoreController @Inject()(
}
}

+  /**
+    * Called by the datastore after a dataset has been deleted on disk.
+    */
def deleteDataset(name: String, key: String): Action[DataSourceId] = Action.async(validateJson[DataSourceId]) {
implicit request =>
dataStoreService.validateAccess(name, key) { _ =>
@@ -245,17 +248,6 @@
}
}

-  def deleteVirtualDataset(name: String, key: String): Action[ObjectId] =
-    Action.async(validateJson[ObjectId]) { implicit request =>
-      dataStoreService.validateAccess(name, key) { _ =>
-        for {
-          dataset <- datasetDAO.findOne(request.body)(GlobalAccessContext) ~> NOT_FOUND
-          _ <- Fox.fromBool(dataset.isVirtual) ?~> "dataset.delete.notVirtual" ~> FORBIDDEN
-          _ <- datasetDAO.deleteDataset(dataset._id, onlyMarkAsDeleted = true)
-        } yield Ok
-      }
-    }

def findDatasetId(name: String,
key: String,
datasetDirectoryName: String,
@@ -316,7 +308,6 @@ class WKRemoteDataStoreController @Inject()(
_ <- Fox.fromBool(organization._id == user._organization) ?~> "notAllowed" ~> FORBIDDEN
dataset <- datasetService.createVirtualDataset(
directoryName,
-organizationId,
dataStore,
request.body.dataSource,
request.body.folderId,
1 change: 0 additions & 1 deletion app/models/dataset/ComposeService.scala
@@ -44,7 +44,6 @@ class ComposeService @Inject()(datasetDAO: DatasetDAO, dataStoreDAO: DataStoreDA
dataSource <- createDatasource(composeRequest, composeRequest.newDatasetName, composeRequest.organizationId)
dataStore <- dataStoreDAO.findOneWithUploadsAllowed
dataset <- datasetService.createVirtualDataset(composeRequest.newDatasetName,
-composeRequest.organizationId,
dataStore,
dataSource,
Some(composeRequest.targetFolderId.toString),
33 changes: 29 additions & 4 deletions app/models/dataset/DatasetService.scala
@@ -39,6 +39,7 @@ import models.team._
import models.user.{User, UserService}
import com.scalableminds.util.tools.Box.tryo
import com.scalableminds.util.tools.{Empty, EmptyBox, Full}
+import com.scalableminds.webknossos.datastore.controllers.PathValidationResult
import play.api.libs.json.{JsObject, Json}
import security.RandomIDGenerator
import utils.WkConf
@@ -101,17 +102,16 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO,
}

def createVirtualDataset(datasetName: String,
-organizationId: String,
dataStore: DataStore,
dataSource: DataSource,
folderId: Option[String],
user: User): Fox[Dataset] =
for {
_ <- assertValidDatasetName(datasetName)
-isDatasetNameAlreadyTaken <- datasetDAO.doesDatasetDirectoryExistInOrganization(datasetName, organizationId)(
+isDatasetNameAlreadyTaken <- datasetDAO.doesDatasetDirectoryExistInOrganization(datasetName, user._organization)(
GlobalAccessContext)
_ <- Fox.fromBool(!isDatasetNameAlreadyTaken) ?~> "dataset.name.alreadyTaken"
-organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> "organization.notFound"
+organization <- organizationDAO.findOne(user._organization)(GlobalAccessContext) ?~> "organization.notFound"
folderId <- ObjectId.fromString(folderId.getOrElse(organization._rootFolder.toString)) ?~> "dataset.upload.folderId.invalid"
_ <- folderDAO.assertUpdateAccess(folderId)(AuthorizedAccessContext(user)) ?~> "folder.noWriteAccess"
newDatasetId = ObjectId.generate
@@ -155,7 +155,7 @@

val dataSourceHash = if (dataSource.isUsable) Some(dataSource.hashCode()) else None
for {
-organization <- organizationDAO.findOne(dataSource.id.organizationId)
+organization <- organizationDAO.findOne(dataSource.id.organizationId) ?~> "organization.notFound"
organizationRootFolder <- folderDAO.findOne(organization._rootFolder)
dataset = Dataset(
datasetId,
@@ -657,6 +657,31 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO,
})
} yield magInfosAndLinkedMags

+  def validatePaths(paths: Seq[String], dataStore: DataStore): Fox[Unit] =
+    for {
+      _ <- Fox.successful(())
+      client = new WKRemoteDataStoreClient(dataStore, rpc)
+      pathValidationResults <- client.validatePaths(paths)
+      _ <- Fox.serialCombined(pathValidationResults)({
+        case PathValidationResult(_, true)     => Fox.successful(())
+        case PathValidationResult(path, false) => Fox.failure(s"Path validation failed for path: $path")
+      })
+    } yield ()

+  def deleteVirtualOrDiskDataset(dataset: Dataset)(implicit ctx: DBAccessContext): Fox[Unit] =
+    for {
+      _ <- if (dataset.isVirtual) {
+        // At this point, we should also free space in S3 once implemented.
+        // Right now, we can just mark the dataset as deleted in the database.
+        datasetDAO.deleteDataset(dataset._id, onlyMarkAsDeleted = true)
+      } else {
+        for {
+          datastoreClient <- clientFor(dataset)
+          _ <- datastoreClient.deleteOnDisk(dataset._id)
+        } yield ()
+      } ?~> "dataset.delete.failed"
+    } yield ()

def publicWrites(dataset: Dataset,
requestingUserOpt: Option[User],
organization: Option[Organization] = None,
13 changes: 13 additions & 0 deletions app/models/dataset/WKRemoteDataStoreClient.scala
@@ -4,6 +4,7 @@ import com.scalableminds.util.cache.AlfuCache
import com.scalableminds.util.geometry.{BoundingBox, Vec3Int}
import com.scalableminds.util.objectid.ObjectId
import com.scalableminds.util.tools.Fox
+import com.scalableminds.webknossos.datastore.controllers.PathValidationResult
import com.scalableminds.webknossos.datastore.explore.{
ExploreRemoteDatasetRequest,
ExploreRemoteDatasetResponse,
@@ -98,11 +99,23 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin
.postJsonWithJsonResponse[ExploreRemoteDatasetRequest, ExploreRemoteDatasetResponse](
ExploreRemoteDatasetRequest(layerParameters, organizationId))

+  def validatePaths(paths: Seq[String]): Fox[List[PathValidationResult]] =
+    rpc(s"${dataStore.url}/data/datasets/validatePaths")
+      .addQueryString("token" -> RpcTokenHolder.webknossosToken)
+      .postJsonWithJsonResponse[Seq[String], List[PathValidationResult]](paths)

def updateDatasetInDSCache(datasetId: String): Fox[Unit] =
for {
_ <- rpc(s"${dataStore.url}/data/datasets/$datasetId")
.addQueryString("token" -> RpcTokenHolder.webknossosToken)
.delete()
} yield ()

+  def deleteOnDisk(datasetId: ObjectId): Fox[Unit] =
+    for {
+      _ <- rpc(s"${dataStore.url}/data/datasets/$datasetId/deleteOnDisk")
+        .addQueryString("token" -> RpcTokenHolder.webknossosToken)
+        .delete()
+    } yield ()

}
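The two new client methods imply matching routes on the datastore side. The sketch below spells out the wire contract they appear to assume; the `isValid` field name is a guess (the Scala pattern match only shows a path plus a boolean), and plain `fetch` stands in for the RPC plumbing:

```typescript
// Assumed wire contract of POST {dataStoreUrl}/data/datasets/validatePaths.
// Everything here is inferred from the RPC calls above; names are assumptions.
type PathValidationResult = {
  path: string;
  isValid: boolean; // field name assumed from the Scala pattern match
};

async function validatePathsSketch(
  dataStoreUrl: string,
  token: string,
  paths: string[],
): Promise<PathValidationResult[]> {
  // Posts the raw path list and expects one result per path back.
  const response = await fetch(`${dataStoreUrl}/data/datasets/validatePaths?token=${token}`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(paths),
  });
  return response.json();
}
```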
2 changes: 0 additions & 2 deletions app/models/dataset/explore/WKExploreRemoteLayerService.scala
@@ -111,11 +111,9 @@
folderId: Option[ObjectId])(implicit ctx: DBAccessContext): Fox[Unit] =
for {
dataStore <- dataStoreDAO.findOneWithUploadsAllowed
-organizationId = user._organization
_ <- datasetService.assertValidDatasetName(datasetName)
_ <- datasetService.createVirtualDataset(
datasetName,
-organizationId,
dataStore,
dataSource,
folderId.map(_.toString),
4 changes: 2 additions & 2 deletions conf/webknossos.latest.routes
@@ -88,6 +88,7 @@ GET /datasets
POST /datasets controllers.DatasetController.create(typ: String)
POST /datasets/exploreRemote controllers.DatasetController.exploreRemoteDataset()
POST /datasets/exploreAndAddRemote controllers.DatasetController.exploreAndAddRemoteDataset()
+POST /datasets/addVirtualDataset/:name controllers.DatasetController.addVirtualDataset(name: String)
GET /datasets/disambiguate/:datasetName/toNew controllers.DatasetController.getOrganizationForDataset(datasetName: String, sharingToken: Option[String])
GET /datasets/disambiguate/:organizationId/:datasetName/toId controllers.DatasetController.getDatasetIdFromNameAndOrganization(datasetName: String, organizationId: String, sharingToken: Option[String])
GET /datasets/:datasetId/health controllers.DatasetController.health(datasetId: ObjectId, sharingToken: Option[String])
@@ -102,6 +103,7 @@ POST /datasets/:datasetId/layers/:layer/segmentAnythingMask
PUT /datasets/:datasetId/clearThumbnailCache controllers.DatasetController.removeFromThumbnailCache(datasetId: ObjectId)
GET /datasets/:datasetName/isValidNewName controllers.DatasetController.isValidNewName(datasetName: String)
GET /datasets/:datasetId controllers.DatasetController.read(datasetId: ObjectId, sharingToken: Option[String])
+DELETE /datasets/:datasetId/deleteOnDisk controllers.DatasetController.deleteOnDisk(datasetId: ObjectId)
POST /datasets/compose controllers.DatasetController.compose()

# Folders
@@ -120,14 +122,12 @@ PUT /datastores/:name/datasources
PUT /datastores/:name/datasources/paths controllers.WKRemoteDataStoreController.updatePaths(name: String, key: String)
GET /datastores/:name/datasources/:datasetId/paths controllers.WKRemoteDataStoreController.getPaths(name: String, key: String, datasetId: ObjectId)
GET /datastores/:name/datasources/:datasetId controllers.WKRemoteDataStoreController.getDataSource(name: String, key: String, datasetId: ObjectId)
-POST /datastores/:name/datasources/:organizationId/:directoryName controllers.WKRemoteDataStoreController.registerDataSource(name: String, key: String, organizationId: String, directoryName: String, token: String)
PUT /datastores/:name/datasources/:datasetId controllers.WKRemoteDataStoreController.updateDataSource(name: String, key: String, datasetId: ObjectId, allowNewPaths: Boolean)
PATCH /datastores/:name/status controllers.WKRemoteDataStoreController.statusUpdate(name: String, key: String)
POST /datastores/:name/reserveUpload controllers.WKRemoteDataStoreController.reserveDatasetUpload(name: String, key: String, token: String)
GET /datastores/:name/getUnfinishedUploadsForUser controllers.WKRemoteDataStoreController.getUnfinishedUploadsForUser(name: String, key: String, token: String, organizationName: String)
POST /datastores/:name/reportDatasetUpload controllers.WKRemoteDataStoreController.reportDatasetUpload(name: String, key: String, token: String, datasetDirectoryName: String, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean)
POST /datastores/:name/deleteDataset controllers.WKRemoteDataStoreController.deleteDataset(name: String, key: String)
-POST /datastores/:name/deleteVirtualDataset controllers.WKRemoteDataStoreController.deleteVirtualDataset(name: String, key: String)
GET /datastores/:name/findDatasetId controllers.WKRemoteDataStoreController.findDatasetId(name: String, key: String, datasetDirectoryName: String, organizationId: String)
GET /datastores/:name/jobExportProperties controllers.WKRemoteDataStoreController.jobExportProperties(name: String, key: String, jobId: ObjectId)
GET /datastores/:name/findCredential controllers.WKRemoteDataStoreController.findCredential(name: String, key: String, credentialId: ObjectId)
@@ -146,11 +146,11 @@ function DatasetAddRemoteView(props: Props) {
      if (nameValidationResult) {
        throw new Error(nameValidationResult);
      }
+      const dataSourceJson = JSON.parse(dataSourceJsonStr);
      const { newDatasetId } = await storeRemoteDataset(
-        datastoreToUse.url,
+        datastoreToUse.name,
        datasetName,
-        activeUser.organization,
-        dataSourceJsonStr,
+        dataSourceJson,
        targetFolderId,
      );
      onAdded(newDatasetId, datasetName);
45 changes: 22 additions & 23 deletions frontend/javascripts/admin/rest_api.ts
@@ -1256,27 +1256,29 @@ export async function exploreRemoteDataset(
return { dataSource, report };
}

+type StoreRemoteDatasetArgs = {
+  dataStoreName: string;
+  dataSource: APIDataSource;
+  folderId?: string | null;
+};

export async function storeRemoteDataset(
-  datastoreUrl: string,
+  dataStoreName: string,
  datasetName: string,
-  organizationId: string,
-  datasource: string,
+  dataSource: APIDataSource,
  folderId: string | null,
): Promise<NewDatasetReply> {
-  return doWithToken((token) => {
-    const params = new URLSearchParams();
-    params.set("token", token);
-    if (folderId) {
-      params.set("folderId", folderId);
-    }
+  const payload: StoreRemoteDatasetArgs = {
+    dataSource,
+    dataStoreName: dataStoreName,
+  };
+  if (folderId) {
+    payload["folderId"] = folderId;
+  }

-    return Request.sendJSONReceiveJSON(
-      `${datastoreUrl}/data/datasets/${organizationId}/${datasetName}?${params}`,
-      {
-        method: "POST",
-        data: datasource,
-      },
-    );
-  });
+  return Request.sendJSONReceiveJSON(`/api/datasets/addVirtualDataset/${datasetName}`, {
+    method: "POST",
+    data: payload,
+  });
}

@@ -1343,13 +1345,10 @@ export async function triggerDatasetClearCache(
});
}

-export async function deleteDatasetOnDisk(datastoreHost: string, datasetId: string): Promise<void> {
-  await doWithToken((token) =>
-    Request.triggerRequest(`/data/datasets/${datasetId}/deleteOnDisk?token=${token}`, {
-      host: datastoreHost,
-      method: "DELETE",
-    }),
-  );
+export async function deleteDatasetOnDisk(datasetId: string): Promise<void> {
+  await Request.triggerRequest(`/api/datasets/${datasetId}/deleteOnDisk`, {
+    method: "DELETE",
+  });
}

export async function triggerDatasetClearThumbnailCache(datasetId: string): Promise<void> {
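A usage sketch of the two reworked helpers, assuming a datastore record, a datasource JSON string, and a target folder id are already in scope (all names below are illustrative):

```typescript
// Hypothetical end-to-end flow with the new signatures.
const dataSource: APIDataSource = JSON.parse(dataSourceJsonStr); // callers parse upfront now
const { newDatasetId } = await storeRemoteDataset(
  datastore.name, // the datastore's name, no longer its URL
  "my_virtual_dataset",
  dataSource,
  targetFolderId, // or null for the organization's root folder
);
// Deletion now goes through /api directly and covers virtual and on-disk
// datasets alike (admins only, and only if allowDeleteDatasets is enabled).
await deleteDatasetOnDisk(newDatasetId);
```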
@@ -163,7 +163,7 @@ function DatasetActionView(props: Props) {
return;
}

-      await deleteDatasetOnDisk(dataset.dataStore.url, dataset.id);
+      await deleteDatasetOnDisk(dataset.id);

Toast.success(
messages["dataset.delete_success"]({
@@ -31,7 +31,7 @@ const DatasetSettingsDeleteTab = () => {
}

setIsDeleting(true);
-    await deleteDatasetOnDisk(dataset.dataStore.url, dataset.id);
+    await deleteDatasetOnDisk(dataset.id);
Toast.success(
messages["dataset.delete_success"]({
datasetName: dataset.name,