Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,5 +26,6 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- Fixed the dataset search which was broken when only the root folder existed. [#7177](https://github.com/scalableminds/webknossos/pull/7177)

### Removed
- Removed the "Globalize Floodfill" feature that could extend partial floodfills across an entire dataset. Please use the fill tool multiple times instead or make use of the proofreading tool when correcting large structures. [#7173](https://github.com/scalableminds/webknossos/pull/7173)

### Breaking Changes
39 changes: 0 additions & 39 deletions app/controllers/JobsController.scala
Original file line number Diff line number Diff line change
Expand Up @@ -185,45 +185,6 @@ class JobsController @Inject()(jobDAO: JobDAO,
}
}

/** Submits a long-running worker job that extends ("globalizes") the partial floodfill
  * operations of a volume annotation across the whole dataset. The worker writes its
  * result into a new dataset named `newDatasetName`.
  *
  * Only members of the dataset's own organization may start the job (403 otherwise).
  *
  * @param organizationName  name of the organization that owns the dataset
  * @param dataSetName       name of the source dataset
  * @param fallbackLayerName name of the segmentation layer backing the volume annotation
  * @param annotationId      id of the annotation containing the partial floodfills
  * @param annotationType    type string of that annotation
  * @param newDatasetName    name of the dataset the worker will create
  * @param volumeLayerName   optional name of the volume layer inside the annotation
  */
def runGlobalizeFloodfills(
    organizationName: String,
    dataSetName: String,
    fallbackLayerName: String,
    annotationId: String,
    annotationType: String,
    newDatasetName: String,
    volumeLayerName: Option[String]
): Action[AnyContent] =
  sil.SecuredAction.async { implicit request =>
    // Failed job requests are additionally reported via slack notification.
    log(Some(slackNotificationService.noticeFailedJobRequest)) {
      for {
        organization <- organizationDAO.findOneByName(organizationName)(GlobalAccessContext) ?~> Messages(
          "organization.notFound",
          organizationName)
        // Cross-organization job submission is forbidden.
        _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.globalizeFloodfill.notAllowed.organization" ~> FORBIDDEN
        // Token the worker uses to talk to WEBKNOSSOS on behalf of the requesting user.
        userAuthToken <- wkSilhouetteEnvironment.combinedAuthenticatorService.findOrCreateToken(
          request.identity.loginInfo)
        dataSet <- dataSetDAO.findOneByNameAndOrganization(dataSetName, organization._id) ?~> Messages(
          "dataSet.notFound",
          dataSetName) ~> NOT_FOUND
        command = JobCommand.globalize_floodfills
        // Arguments are forwarded verbatim to the worker process (snake_case keys).
        commandArgs = Json.obj(
          "organization_name" -> organizationName,
          "dataset_name" -> dataSetName,
          "fallback_layer_name" -> fallbackLayerName,
          "webknossos_token" -> RpcTokenHolder.webKnossosToken,
          "user_auth_token" -> userAuthToken.id,
          "annotation_id" -> annotationId,
          "annotation_type" -> annotationType,
          "new_dataset_name" -> newDatasetName,
          "volume_layer_name" -> volumeLayerName
        )
        // The job is queued on the datastore that hosts the dataset.
        job <- jobService.submitJob(command, commandArgs, request.identity, dataSet._dataStore) ?~> "job.couldNotRunGlobalizeFloodfills"
        js <- jobService.publicWrites(job)
      } yield Ok(js)
    }
  }

def runExportTiffJob(organizationName: String,
dataSetName: String,
bbox: String,
Expand Down
8 changes: 1 addition & 7 deletions app/models/job/Job.scala
Original file line number Diff line number Diff line change
Expand Up @@ -354,12 +354,6 @@ class JobService @Inject()(wkConf: WkConf,
"Volume Annotation Merged",
"Your volume annotation has been successfully merged with the existing segmentation. The result is available as a new dataset in your dashboard."
))
case JobCommand.globalize_floodfills =>
Some(
genericEmailTemplate(
"Globalize Flood Fill",
"The flood fill operations have been extended to the whole dataset. The result is available as a new dataset in your dashboard."
))
case JobCommand.compute_mesh_file =>
Some(
genericEmailTemplate(
Expand All @@ -368,7 +362,7 @@ class JobService @Inject()(wkConf: WkConf,
))
case _ => None
}) ?~> "job.emailNotifactionsDisabled"
// some jobs, e.g. "globalize flood fill"/"find largest segment ideas", do not require an email notification
// some jobs, e.g. "find largest segment id", do not require an email notification
_ = Mailer ! Send(emailTemplate)
} yield ()

Expand Down
1 change: 0 additions & 1 deletion conf/webknossos.latest.routes
Original file line number Diff line number Diff line change
Expand Up @@ -257,7 +257,6 @@ POST /jobs/run/computeMeshFile/:organizationName/:dataSetName
POST /jobs/run/exportTiff/:organizationName/:dataSetName controllers.JobsController.runExportTiffJob(organizationName: String, dataSetName: String, bbox: String, layerName: Option[String], mag: Option[String], annotationLayerName: Option[String], annotationId: Option[String], asOmeTiff: Boolean)
POST /jobs/run/inferNuclei/:organizationName/:dataSetName controllers.JobsController.runInferNucleiJob(organizationName: String, dataSetName: String, layerName: String, newDatasetName: String)
POST /jobs/run/inferNeurons/:organizationName/:dataSetName controllers.JobsController.runInferNeuronsJob(organizationName: String, dataSetName: String, layerName: String, bbox: String, newDatasetName: String)
POST /jobs/run/globalizeFloodfills/:organizationName/:dataSetName controllers.JobsController.runGlobalizeFloodfills(organizationName: String, dataSetName: String, fallbackLayerName: String, annotationId: String, annotationType: String, newDatasetName: String, volumeLayerName: Option[String])
POST /jobs/run/materializeVolumeAnnotation/:organizationName/:dataSetName controllers.JobsController.runMaterializeVolumeAnnotationJob(organizationName: String, dataSetName: String, fallbackLayerName: String, annotationId: String, annotationType: String, newDatasetName: String, outputSegmentationLayerName: String, mergeSegments: Boolean, volumeLayerName: Option[String])
POST /jobs/run/findLargestSegmentId/:organizationName/:dataSetName controllers.JobsController.runFindLargestSegmentIdJob(organizationName: String, dataSetName: String, layerName: String)
GET /jobs/:id controllers.JobsController.get(id: String)
Expand Down
5 changes: 2 additions & 3 deletions docs/volume_annotation.md
Original file line number Diff line number Diff line change
Expand Up @@ -101,9 +101,8 @@ WEBKNOSSOS supports volumetric flood fills (3D) to relabel a segment with a new
- For split errors: Combine two segments by relabeling one segment with the ID of the other. Since this operation is fairly compute-intensive you might be better off with the `Merger Mode`, explained above.
- For merge errors: You have to manually split two segments at their intersection/border, e.g. a cell boundary. Use the eraser brush and make sure to establish a clear cut between both segments on a slice-by-slice basis. Both segments must not touch any longer. Create a new segment ID from the toolbar and apply it to one of the partial segments that you just divided.

Due to performance reasons, 3D flood-fills only work in a small, local bounding box. WEBKNOSSOS will add a bounding box around the affected area. To truly propagate the new segment ID(s) throughout a whole dataset, you can trigger a WEBKNOSSOS job to apply this change globally. From the `BBox` tab in the right-hand menu, press the "Globalize Flood-Fill" button. Make sure to do all local fill operations first and apply them all at once.

Check the [`Processing Jobs` page](./jobs.md) from the `Admin` menu at the top of the screen to track progress or cancel the operation. The finished, processed dataset will appear as a new dataset in your dashboard.
Note that due to performance reasons, 3D flood-fills only work in a small, local bounding box.
For larger areas we recommend working with the [proofreading tool](./proof_reading.md) instead.

### Mappings / On-Demand Agglomeration

Expand Down
21 changes: 0 additions & 21 deletions frontend/javascripts/admin/admin_rest_api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1290,27 +1290,6 @@ function startSegmentationAnnotationDependentJob(
});
}

/**
 * Starts the "globalizeFloodfills" worker job for the given dataset and annotation.
 * Thin wrapper around startSegmentationAnnotationDependentJob that only pins the
 * job name; all other arguments are forwarded unchanged.
 */
export function startGlobalizeFloodfillsJob(
  organizationName: string,
  datasetName: string,
  fallbackLayerName: string,
  volumeLayerName: string | null | undefined,
  newDatasetName: string,
  annotationId: string,
  annotationType: APIAnnotationType,
): Promise<APIJob> {
  const jobName = "globalizeFloodfills";
  // The generic helper performs the actual request against the jobs endpoint.
  return startSegmentationAnnotationDependentJob(
    jobName,
    organizationName,
    datasetName,
    fallbackLayerName,
    volumeLayerName,
    newDatasetName,
    annotationId,
    annotationType,
  );
}

export function startMaterializingVolumeAnnotationJob(
organizationName: string,
datasetName: string,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,6 @@ import {
import VolumeLayer from "oxalis/model/volumetracing/volumelayer";
import { Model } from "oxalis/singletons";
import type { Flycam, SegmentMap, VolumeTracing } from "oxalis/store";
import { getBBoxNameForPartialFloodfill } from "oxalis/view/right-border-tabs/bounding_box_tab";
import React from "react";
import { actionChannel, call, fork, put, takeEvery, takeLatest } from "typed-redux-saga";
import {
Expand Down Expand Up @@ -463,7 +462,9 @@ export function* floodFill(): Saga<void> {
yield* put(
addUserBoundingBoxAction({
boundingBox: coveredBoundingBox,
name: getBBoxNameForPartialFloodfill(oldSegmentIdAtSeed, activeCellId, seedPosition),
name: `Limits of flood-fill (source_id=${oldSegmentIdAtSeed}, target_id=${activeCellId}, seed=${seedPosition.join(
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I am correctly assuming that this code is for the local, frontend only flood-fill operation?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Exactly 👍

",",
)}, timestamp=${new Date().getTime()})`,
color: Utils.getRandomColor(),
isVisible: true,
}),
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { Button, Tooltip } from "antd";
import { Tooltip } from "antd";
import { PlusSquareOutlined } from "@ant-design/icons";
import { useSelector, useDispatch } from "react-redux";
import React, { useState } from "react";
Expand All @@ -10,40 +10,18 @@ import {
addUserBoundingBoxAction,
deleteUserBoundingBoxAction,
} from "oxalis/model/actions/annotation_actions";
import { StartGlobalizeFloodfillsModal } from "oxalis/view/right-border-tabs/starting_job_modals";
import { getActiveSegmentationTracingLayer } from "oxalis/model/accessors/volumetracing_accessor";
import { getSomeTracing } from "oxalis/model/accessors/tracing_accessor";
import { setPositionAction } from "oxalis/model/actions/flycam_actions";
import * as Utils from "libs/utils";
import features from "features";
import { OxalisState, UserBoundingBox } from "oxalis/store";
import { APISegmentationLayer, APIUser } from "types/api_flow_types";
import DownloadModalView from "../action-bar/download_modal_view";

// NOTE: The regexp and getBBoxNameForPartialFloodfill need to stay in sync.
// That way, bboxes created by the floodfill can be detected as such and
// a job for globalizing floodfills can be started.
const GLOBALIZE_FLOODFILL_REGEX =
/Limits of flood-fill \(source_id=(\d+), target_id=(\d+), seed=([\d,]+), timestamp=(\d+)\)/;
/**
 * Builds the canonical name for a bounding box that marks the extent of a
 * partial (local) floodfill, embedding source/target segment ids, the seed
 * position and a creation timestamp.
 * The produced string must remain parseable by GLOBALIZE_FLOODFILL_REGEX.
 */
export function getBBoxNameForPartialFloodfill(
  oldSegmentIdAtSeed: number,
  activeCellId: number,
  seedPosition: Vector3,
) {
  const seed = seedPosition.join(",");
  const createdAt = new Date().getTime();
  return `Limits of flood-fill (source_id=${oldSegmentIdAtSeed}, target_id=${activeCellId}, seed=${seed}, timestamp=${createdAt})`;
}

export default function BoundingBoxTab() {
const [selectedBoundingBoxForExport, setSelectedBoundingBoxForExport] =
useState<UserBoundingBox | null>(null);
const [isGlobalizeFloodfillsModalVisible, setIsGlobalizeFloodfillsModalVisible] = useState(false);
const tracing = useSelector((state: OxalisState) => state.tracing);
const allowUpdate = tracing.restrictions.allowUpdate;
const dataset = useSelector((state: OxalisState) => state.dataset);
const activeUser = useSelector((state: OxalisState) => state.activeUser);
const activeSegmentationTracingLayer = useSelector(getActiveSegmentationTracingLayer);
const { userBoundingBoxes } = getSomeTracing(tracing);
const dispatch = useDispatch();

Expand Down Expand Up @@ -101,12 +79,6 @@ export default function BoundingBoxTab() {
setPosition(center);
}

const globalizeFloodfillsButtonDisabledReason = getInfoForGlobalizeFloodfill(
userBoundingBoxes,
activeSegmentationTracingLayer,
activeUser,
);

const isViewMode = useSelector(
(state: OxalisState) => state.temporaryConfiguration.controlMode === ControlModeEnum.VIEW,
);
Expand All @@ -126,27 +98,6 @@ export default function BoundingBoxTab() {
minWidth: 300,
}}
>
<div
style={{
display: "flex",
justifyContent: "flex-end",
}}
>
<Tooltip title={globalizeFloodfillsButtonDisabledReason.title}>
<Button
size="small"
style={{
marginBottom: 8,
}}
disabled={globalizeFloodfillsButtonDisabledReason.disabled}
onClick={() => setIsGlobalizeFloodfillsModalVisible(true)}
>
<i className="fas fa-fill-drip" />
Globalize Flood-Fills
</Button>
</Tooltip>
</div>

{/* In view mode, it's okay to render an empty list, since there will be
an explanation below, anyway.
*/}
Expand Down Expand Up @@ -198,46 +149,6 @@ export default function BoundingBoxTab() {
initialTab="export"
/>
) : null}
{isGlobalizeFloodfillsModalVisible ? (
<StartGlobalizeFloodfillsModal
handleClose={() => setIsGlobalizeFloodfillsModalVisible(false)}
/>
) : null}
</div>
);
}

/**
 * Decides whether the "Globalize Flood-Fills" button is enabled and which
 * tooltip to show. Returns { disabled, title } where title explains either the
 * first violated precondition or, if all are met, what the job will do.
 */
function getInfoForGlobalizeFloodfill(
  userBoundingBoxes: UserBoundingBox[],
  activeSegmentationTracingLayer: APISegmentationLayer | null | undefined,
  activeUser: APIUser | null | undefined,
) {
  // Preconditions are evaluated lazily and in order; the first violated one
  // yields the tooltip for the disabled button.
  const preconditions: Array<{ isViolated: () => boolean; explanation: string }> = [
    {
      // A floodfill-created bbox (recognized by its canonical name) must exist.
      isViolated: () =>
        !userBoundingBoxes.some((bbox) => bbox.name.match(GLOBALIZE_FLOODFILL_REGEX) != null),
      explanation: "No partial floodfills to globalize.",
    },
    {
      isViolated: () => activeSegmentationTracingLayer == null,
      explanation:
        "Partial floodfills can only be globalized when a segmentation annotation layer exists.",
    },
    {
      isViolated: () => activeUser == null,
      explanation: "Partial floodfills can only be globalized as a registered user.",
    },
    {
      isViolated: () => !features().jobsEnabled,
      explanation: "Partial floodfills can only be globalized when a WEBKNOSSOS worker was set up.",
    },
  ];
  for (const { isViolated, explanation } of preconditions) {
    if (isViolated()) {
      return { disabled: true, title: explanation };
    }
  }
  return {
    disabled: false,
    title:
      "For this annotation some floodfill operations have not run to completion, because they covered a too large volume. WEBKNOSSOS can finish these operations via a long-running job.",
  };
}
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ import {
startNucleiInferralJob,
startNeuronInferralJob,
startMaterializingVolumeAnnotationJob,
startGlobalizeFloodfillsJob,
} from "admin/admin_rest_api";
import { useSelector } from "react-redux";
import { DatasetNameFormItem } from "admin/dataset/dataset_components";
Expand Down Expand Up @@ -33,13 +32,11 @@ const enum JobNames {
NEURON_INFERRAL = "neuron inferral",
NUCLEI_INFERRAL = "nuclei inferral",
MATERIALIZE_VOLUME_ANNOTATION = "materialize volume annotation",
GLOBALIZE_FLODDFILLS = "globalization of the floodfill operation(s)",
}
const jobNameToImagePath: Record<JobNames, string | null> = {
"neuron inferral": "neuron_inferral_example.jpg",
"nuclei inferral": "nuclei_inferral_example.jpg",
"materialize volume annotation": "materialize_volume_annotation_example.jpg",
"globalization of the floodfill operation(s)": null,
};
type Props = {
handleClose: () => void;
Expand Down Expand Up @@ -638,39 +635,3 @@ export function MaterializeVolumeAnnotationModal({
/>
);
}

/**
 * Modal that lets the user start the "globalize floodfills" worker job for the
 * currently opened dataset/annotation. Delegates rendering and form handling to
 * the generic StartingJobModal and only supplies the job-specific API call,
 * texts, and dataset-name suffix.
 */
export function StartGlobalizeFloodfillsModal({ handleClose }: Props) {
  const dataset = useSelector((state: OxalisState) => state.dataset);
  const tracing = useSelector((state: OxalisState) => state.tracing);
  return (
    <StartingJobModal
      handleClose={handleClose}
      title="Start Globalizing of the Floodfill Operation(s)"
      // NOTE(review): the enum key is misspelled ("FLODDFILLS") at its declaration
      // site; a rename must change declaration and this usage together.
      jobName={JobNames.GLOBALIZE_FLODDFILLS}
      suggestedDatasetSuffix="with_floodfills"
      // Let the user pick the segmentation layer the job should operate on.
      chooseSegmentationLayer
      jobApiCall={async ({ newDatasetName, selectedLayer: segmentationLayer }) => {
        // Resolve the readable volume-layer name (if any) and the underlying
        // segmentation name before handing off to the REST API wrapper.
        const volumeLayerName = getReadableNameOfVolumeLayer(segmentationLayer, tracing);
        const baseSegmentationName = getBaseSegmentationName(segmentationLayer);
        return startGlobalizeFloodfillsJob(
          dataset.owningOrganization,
          dataset.name,
          baseSegmentationName,
          volumeLayerName,
          newDatasetName,
          tracing.annotationId,
          tracing.annotationType,
        );
      }}
      description={
        <p>
          For this annotation some floodfill operations have not run to completion, because they
          covered a too large volume. WEBKNOSSOS can finish these operations via a long-running job.
          This job will copy the current dataset, apply the changes of the current volume annotation
          into the volume layer and use the existing bounding boxes as seeds to continue the
          remaining floodfill operations (i.e., &quot;globalize&quot; them).
        </p>
      }
    />
  );
}
Original file line number Diff line number Diff line change
Expand Up @@ -230,7 +230,6 @@ class EditableMappingService @Inject()(
remoteFallbackLayer: RemoteFallbackLayer,
userToken: Option[String]): Fox[EditableMappingInfo] =
for {
before <- Fox.successful(Instant.now)
closestMaterializedWithVersion <- getClosestMaterialized(editableMappingId, desiredVersion)
updatedEditableMappingInfo: EditableMappingInfo <- if (desiredVersion == closestMaterializedWithVersion.version)
Fox.successful(closestMaterializedWithVersion.value)
Expand Down