diff --git a/README.md b/README.md index 83c607fb3..be392cec2 100644 --- a/README.md +++ b/README.md @@ -155,7 +155,19 @@ node.js is required to build the viewer. `npm test -- --pattern='src/neuroglancer/util/uint64*'` -6. See [package.json](package.json) for other commands available. +6. To run python scripts for the local server: + + Run + + `npm run build-python` + + `pip install .` + + from the neuroglancer folder. + Now python scripts can run for the local server. + + +7. See [package.json](package.json) for other commands available. # Creating a dependent project diff --git a/python/neuroglancer/__init__.py b/python/neuroglancer/__init__.py index 996270585..67791eb28 100644 --- a/python/neuroglancer/__init__.py +++ b/python/neuroglancer/__init__.py @@ -49,6 +49,7 @@ BlendTool, # noqa: F401 OpacityTool, # noqa: F401 VolumeRenderingDepthSamplesTool, # noqa: F401 + VolumeRenderingModeTool, # noqa: F401 CrossSectionRenderScaleTool, # noqa: F401 SelectedAlphaTool, # noqa: F401 NotSelectedAlphaTool, # noqa: F401 diff --git a/python/neuroglancer/viewer_state.py b/python/neuroglancer/viewer_state.py index 37f27065b..4cba53d8a 100644 --- a/python/neuroglancer/viewer_state.py +++ b/python/neuroglancer/viewer_state.py @@ -170,6 +170,12 @@ class VolumeRenderingDepthSamplesTool(Tool): TOOL_TYPE = "volumeRenderingDepthSamples" +@export_tool +class VolumeRenderingModeTool(Tool): + __slots__ = () + TOOL_TYPE = "volumeRenderingMode" + + @export_tool class CrossSectionRenderScaleTool(Tool): __slots__ = () @@ -549,6 +555,9 @@ def __init__(self, *args, **kwargs): cross_section_render_scale = crossSectionRenderScale = wrapped_property( "crossSectionRenderScale", optional(float, 1) ) + volume_rendering_mode = volumeRenderingMode = wrapped_property( + "volumeRenderingMode", optional(str) + ) @staticmethod def interpolate(a, b, t): diff --git a/src/neuroglancer/image_user_layer.ts b/src/neuroglancer/image_user_layer.ts index fc6737856..dd836288a 100644 --- 
a/src/neuroglancer/image_user_layer.ts +++ b/src/neuroglancer/image_user_layer.ts @@ -27,13 +27,13 @@ import {MultiscaleVolumeChunkSource} from 'neuroglancer/sliceview/volume/fronten import {defineImageLayerShader, getTrackableFragmentMain, ImageRenderLayer} from 'neuroglancer/sliceview/volume/image_renderlayer'; import {trackableAlphaValue} from 'neuroglancer/trackable_alpha'; import {trackableBlendModeValue} from 'neuroglancer/trackable_blend'; -import {TrackableBoolean} from 'neuroglancer/trackable_boolean'; import {makeCachedDerivedWatchableValue, makeCachedLazyDerivedWatchableValue, registerNested, WatchableValue, WatchableValueInterface} from 'neuroglancer/trackable_value'; import {UserLayerWithAnnotationsMixin} from 'neuroglancer/ui/annotations'; import {setClipboard} from 'neuroglancer/util/clipboard'; import {Borrowed} from 'neuroglancer/util/disposable'; import {makeValueOrError} from 'neuroglancer/util/error'; import {verifyOptionalObjectProperty} from 'neuroglancer/util/json'; +import {trackableShaderModeValue, VOLUME_RENDERING_MODES} from 'neuroglancer/volume_rendering/trackable_volume_rendering_mode'; import {getVolumeRenderingDepthSamplesBoundsLogScale, VOLUME_RENDERING_DEPTH_SAMPLES_DEFAULT_VALUE, VolumeRenderingRenderLayer} from 'neuroglancer/volume_rendering/volume_render_layer'; import {makeWatchableShaderError, ParameterizedShaderGetterResult} from 'neuroglancer/webgl/dynamic_shader'; import {setControlsInShader, ShaderControlsBuilderState, ShaderControlState} from 'neuroglancer/webgl/shader_ui_controls'; @@ -42,7 +42,6 @@ import {makeCopyButton} from 'neuroglancer/widget/copy_button'; import {DependentViewContext} from 'neuroglancer/widget/dependent_view_widget'; import {makeHelpButton} from 'neuroglancer/widget/help_button'; import {addLayerControlToOptionsTab, LayerControlDefinition, registerLayerControl} from 'neuroglancer/widget/layer_control'; -import {checkboxLayerControl} from 'neuroglancer/widget/layer_control_checkbox'; import 
{enumLayerControl} from 'neuroglancer/widget/layer_control_enum'; import {rangeLayerControl} from 'neuroglancer/widget/layer_control_range'; import {makeMaximizeButton} from 'neuroglancer/widget/maximize_button'; @@ -57,8 +56,8 @@ const SHADER_JSON_KEY = 'shader'; const SHADER_CONTROLS_JSON_KEY = 'shaderControls'; const CROSS_SECTION_RENDER_SCALE_JSON_KEY = 'crossSectionRenderScale'; const CHANNEL_DIMENSIONS_JSON_KEY = 'channelDimensions'; -const VOLUME_RENDERING_JSON_KEY = 'volumeRendering'; const VOLUME_RENDERING_DEPTH_SAMPLES_JSON_KEY = 'volumeRenderingDepthSamples'; +const VOLUME_RENDERING_MODE_JSON_KEY = 'volumeRenderingMode'; export interface ImageLayerSelectionState extends UserLayerSelectionState { value: any; @@ -87,7 +86,7 @@ export class ImageUserLayer extends Base { channelSpace = this.registerDisposer(makeCachedLazyDerivedWatchableValue( channelCoordinateSpace => makeValueOrError(() => getChannelSpace(channelCoordinateSpace)), this.channelCoordinateSpace)); - volumeRendering = new TrackableBoolean(false, false); + volumeRenderingMode = trackableShaderModeValue(); shaderControlState = this.registerDisposer(new ShaderControlState( this.fragmentMain, @@ -127,9 +126,10 @@ export class ImageUserLayer extends Base { this.opacity.changed.add(this.specificationChanged.dispatch); this.fragmentMain.changed.add(this.specificationChanged.dispatch); this.shaderControlState.changed.add(this.specificationChanged.dispatch); + this.shaderControlState.changed.add(() => {console.log("shaderControlState changed");}); this.sliceViewRenderScaleTarget.changed.add(this.specificationChanged.dispatch); - this.volumeRendering.changed.add(this.specificationChanged.dispatch); this.volumeRenderingDepthSamplesTarget.changed.add(this.specificationChanged.dispatch); + this.volumeRenderingMode.changed.add(this.specificationChanged.dispatch); this.tabs.add( 'rendering', {label: 'Rendering', order: -100, getter: () => new RenderingOptionsTab(this)}); @@ -173,12 +173,13 @@ export class 
ImageUserLayer extends Base { chunkResolutionHistogram: this.volumeRenderingChunkResolutionHistogram, localPosition: this.localPosition, channelCoordinateSpace: this.channelCoordinateSpace, + mode: this.volumeRenderingMode, })); context.registerDisposer(loadedSubsource.messages.addChild(volumeRenderLayer.messages)); - context.registerDisposer(registerNested((context, volumeRendering) => { - if (!volumeRendering) return; + context.registerDisposer(registerNested((context, volumeRenderingMode) => { + if (volumeRenderingMode === VOLUME_RENDERING_MODES.OFF) return; context.registerDisposer(this.addRenderLayer(volumeRenderLayer.addRef())); - }, this.volumeRendering)); + }, this.volumeRenderingMode)); this.shaderError.changed.dispatch(); }); } @@ -195,9 +196,11 @@ export class ImageUserLayer extends Base { this.sliceViewRenderScaleTarget.restoreState( specification[CROSS_SECTION_RENDER_SCALE_JSON_KEY]); this.channelCoordinateSpace.restoreState(specification[CHANNEL_DIMENSIONS_JSON_KEY]); - this.volumeRendering.restoreState(specification[VOLUME_RENDERING_JSON_KEY]); this.volumeRenderingDepthSamplesTarget.restoreState( specification[VOLUME_RENDERING_DEPTH_SAMPLES_JSON_KEY]); + verifyOptionalObjectProperty( + specification, VOLUME_RENDERING_MODE_JSON_KEY, + mode => this.volumeRenderingMode.restoreState(mode)); } toJSON() { const x = super.toJSON(); @@ -207,8 +210,8 @@ export class ImageUserLayer extends Base { x[SHADER_CONTROLS_JSON_KEY] = this.shaderControlState.toJSON(); x[CROSS_SECTION_RENDER_SCALE_JSON_KEY] = this.sliceViewRenderScaleTarget.toJSON(); x[CHANNEL_DIMENSIONS_JSON_KEY] = this.channelCoordinateSpace.toJSON(); - x[VOLUME_RENDERING_JSON_KEY] = this.volumeRendering.toJSON(); x[VOLUME_RENDERING_DEPTH_SAMPLES_JSON_KEY] = this.volumeRenderingDepthSamplesTarget.toJSON(); + x[VOLUME_RENDERING_MODE_JSON_KEY] = this.volumeRenderingMode.toJSON(); return x; } @@ -318,20 +321,22 @@ const LAYER_CONTROLS: LayerControlDefinition[] = [ }, { label: 'Volume rendering 
(experimental)', - toolJson: VOLUME_RENDERING_JSON_KEY, - ...checkboxLayerControl(layer => layer.volumeRendering), + toolJson: VOLUME_RENDERING_MODE_JSON_KEY, + ...enumLayerControl(layer => layer.volumeRenderingMode), }, { label: 'Resolution (3D)', toolJson: VOLUME_RENDERING_DEPTH_SAMPLES_JSON_KEY, - isValid: layer => layer.volumeRendering, + isValid: layer => makeCachedDerivedWatchableValue( + volumeRendering => (volumeRendering !== VOLUME_RENDERING_MODES.OFF), + [layer.volumeRenderingMode]), ...renderScaleLayerControl( layer => ({ histogram: layer.volumeRenderingChunkResolutionHistogram, target: layer.volumeRenderingDepthSamplesTarget }), VolumeRenderingRenderScaleWidget, - ) + ), }, ]; diff --git a/src/neuroglancer/layer.ts b/src/neuroglancer/layer.ts index 90ef9607b..f31f07adc 100644 --- a/src/neuroglancer/layer.ts +++ b/src/neuroglancer/layer.ts @@ -59,6 +59,8 @@ const SOURCE_JSON_KEY = 'source'; const TRANSFORM_JSON_KEY = 'transform'; const PICK_JSON_KEY = 'pick'; +const METACELL_TAG = ' 011223' + export interface UserLayerSelectionState { generation: number; @@ -269,7 +271,7 @@ export class UserLayer extends RefCounted { this.dataSourcesChanged.add(() => this.updateDataSubsourceActivations()); this.messages.changed.add(this.layersChanged.dispatch); this.tabs.add('source', { - label: 'Source', + label: `Source ${METACELL_TAG}`, order: -100, getter: () => new LayerDataSourcesTab(this), }); diff --git a/src/neuroglancer/sliceview/volume/image_renderlayer.ts b/src/neuroglancer/sliceview/volume/image_renderlayer.ts index bd53ecb98..344ffe276 100644 --- a/src/neuroglancer/sliceview/volume/image_renderlayer.ts +++ b/src/neuroglancer/sliceview/volume/image_renderlayer.ts @@ -26,8 +26,14 @@ import {ShaderBuilder, ShaderProgram} from 'neuroglancer/webgl/shader'; import {addControlsToBuilder, getFallbackBuilderState, parseShaderUiControls, setControlsInShader, ShaderControlsBuilderState, ShaderControlState} from 'neuroglancer/webgl/shader_ui_controls'; const 
DEFAULT_FRAGMENT_MAIN = `#uicontrol invlerp normalized +#uicontrol transferFunction colormap void main() { - emitGrayscale(normalized()); + if (!VOLUME_RENDERING) { + emitGrayscale(normalized()); + } + else { + emitRGBA(colormap()); + } } `; @@ -46,6 +52,8 @@ export function defineImageLayerShader( builder: ShaderBuilder, shaderBuilderState: ShaderControlsBuilderState) { builder.addFragmentCode(` #define VOLUME_RENDERING false +#define MAX_PROJECTION false +float maxIntensity = 0.0; void emitRGBA(vec4 rgba) { emit(vec4(rgba.rgb, rgba.a * uOpacity)); diff --git a/src/neuroglancer/volume_rendering/trackable_volume_rendering_mode.ts b/src/neuroglancer/volume_rendering/trackable_volume_rendering_mode.ts new file mode 100644 index 000000000..5d40114d7 --- /dev/null +++ b/src/neuroglancer/volume_rendering/trackable_volume_rendering_mode.ts @@ -0,0 +1,13 @@ +import {TrackableEnum} from 'neuroglancer/util/trackable_enum'; + +export enum VOLUME_RENDERING_MODES { + OFF = 0, + ON = 1, + MAX = 2 +} + +export type TrackableVolumeRenderingModeValue = TrackableEnum; + +export function trackableShaderModeValue(initialValue = VOLUME_RENDERING_MODES.OFF) { + return new TrackableEnum(VOLUME_RENDERING_MODES, initialValue); +} \ No newline at end of file diff --git a/src/neuroglancer/volume_rendering/volume_render_layer.ts b/src/neuroglancer/volume_rendering/volume_render_layer.ts index 186c095b1..fe816b827 100644 --- a/src/neuroglancer/volume_rendering/volume_render_layer.ts +++ b/src/neuroglancer/volume_rendering/volume_render_layer.ts @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - import {ChunkState} from 'neuroglancer/chunk_manager/base'; import {ChunkRenderLayerFrontend} from 'neuroglancer/chunk_manager/frontend'; import {CoordinateSpace} from 'neuroglancer/coordinate_transform'; @@ -29,15 +28,16 @@ import {SliceViewRenderLayer} from 'neuroglancer/sliceview/renderlayer'; import {ChunkFormat, defineChunkDataShaderAccess, MultiscaleVolumeChunkSource, VolumeChunk, VolumeChunkSource} from 'neuroglancer/sliceview/volume/frontend'; import {makeCachedDerivedWatchableValue, NestedStateManager, registerNested, WatchableValueInterface} from 'neuroglancer/trackable_value'; import {getFrustrumPlanes, mat4, vec3} from 'neuroglancer/util/geom'; +import {clampToInterval} from 'neuroglancer/util/lerp'; import {getObjectId} from 'neuroglancer/util/object_id'; import {forEachVisibleVolumeRenderingChunk, getVolumeRenderingNearFarBounds, HistogramInformation, VOLUME_RENDERING_RENDER_LAYER_RPC_ID, VOLUME_RENDERING_RENDER_LAYER_UPDATE_SOURCES_RPC_ID} from 'neuroglancer/volume_rendering/base'; +import {TrackableVolumeRenderingModeValue, VOLUME_RENDERING_MODES} from 'neuroglancer/volume_rendering/trackable_volume_rendering_mode'; import {drawBoxes, glsl_getBoxFaceVertexPosition} from 'neuroglancer/webgl/bounding_box'; import {glsl_COLORMAPS} from 'neuroglancer/webgl/colormaps'; import {ParameterizedContextDependentShaderGetter, parameterizedContextDependentShaderGetter, ParameterizedShaderGetterResult, shaderCodeWithLineDirective, WatchableShaderError} from 'neuroglancer/webgl/dynamic_shader'; import {ShaderModule, ShaderProgram} from 'neuroglancer/webgl/shader'; import {addControlsToBuilder, setControlsInShader, ShaderControlsBuilderState, ShaderControlState} from 'neuroglancer/webgl/shader_ui_controls'; import {defineVertexId, VertexIdHelper} from 'neuroglancer/webgl/vertex_id'; -import {clampToInterval} from 'src/neuroglancer/util/lerp'; export const VOLUME_RENDERING_DEPTH_SAMPLES_DEFAULT_VALUE = 64 const VOLUME_RENDERING_DEPTH_SAMPLES_LOG_SCALE_ORIGIN = 
1; @@ -59,6 +59,7 @@ export interface VolumeRenderingRenderLayerOptions { localPosition: WatchableValueInterface; depthSamplesTarget: WatchableValueInterface; chunkResolutionHistogram: RenderScaleHistogram; + mode: TrackableVolumeRenderingModeValue; } const tempMat4 = mat4.create(); @@ -78,6 +79,17 @@ function clampAndRoundResolutionTargetValue(value: number) { return clampToInterval(depthSamplesBounds, Math.round(value)) as number; } +interface VolumeRenderingShaderParameters { + numChannelDimensions: number; + mode: VOLUME_RENDERING_MODES; +} + +interface VolumeRenderingShaderSnippets { + colorEmissionFunctions: string; + intensityCalculation: string; + beforeColorEmission: string; +} + export class VolumeRenderingRenderLayer extends PerspectiveViewRenderLayer { multiscaleSource: MultiscaleVolumeChunkSource; transform: WatchableValueInterface; @@ -86,11 +98,13 @@ export class VolumeRenderingRenderLayer extends PerspectiveViewRenderLayer { shaderControlState: ShaderControlState; depthSamplesTarget: WatchableValueInterface; chunkResolutionHistogram: RenderScaleHistogram; + mode: TrackableVolumeRenderingModeValue; backend: ChunkRenderLayerFrontend; private vertexIdHelper: VertexIdHelper; private shaderGetter: ParameterizedContextDependentShaderGetter< - {emitter: ShaderModule, chunkFormat: ChunkFormat}, ShaderControlsBuilderState, number>; + {emitter: ShaderModule, chunkFormat: ChunkFormat, wireFrame: boolean}, + ShaderControlsBuilderState, VolumeRenderingShaderParameters>; get gl() { return this.multiscaleSource.chunkManager.gl; @@ -114,15 +128,22 @@ export class VolumeRenderingRenderLayer extends PerspectiveViewRenderLayer { this.depthSamplesTarget = options.depthSamplesTarget; this.chunkResolutionHistogram = options.chunkResolutionHistogram; this.registerDisposer(this.chunkResolutionHistogram.visibility.add(this.visibility)); - const numChannelDimensions = this.registerDisposer( - makeCachedDerivedWatchableValue(space => space.rank, 
[this.channelCoordinateSpace])); + this.mode = options.mode; + const extraParameters = this.registerDisposer(makeCachedDerivedWatchableValue( + (space: CoordinateSpace, mode: VOLUME_RENDERING_MODES) => + ({numChannelDimensions: space.rank, mode}), + [this.channelCoordinateSpace, this.mode])); + this.shaderGetter = parameterizedContextDependentShaderGetter(this, this.gl, { memoizeKey: 'VolumeRenderingRenderLayer', parameters: options.shaderControlState.builderState, - getContextKey: ({emitter, chunkFormat}) => `${getObjectId(emitter)}:${chunkFormat.shaderKey}`, + getContextKey: ({emitter, chunkFormat, wireFrame}) => + `${getObjectId(emitter)}:${chunkFormat.shaderKey}:${wireFrame}`, shaderError: options.shaderError, - extraParameters: numChannelDimensions, - defineShader: (builder, {emitter, chunkFormat}, shaderBuilderState, numChannelDimensions) => { + extraParameters: extraParameters, + defineShader: ( + builder, {emitter, chunkFormat, wireFrame}, shaderBuilderState, + shaderParametersState) => { if (shaderBuilderState.parseResult.errors.length !== 0) { throw new Error('Invalid UI control specification'); } @@ -130,7 +151,6 @@ export class VolumeRenderingRenderLayer extends PerspectiveViewRenderLayer { builder.addFragmentCode(` #define VOLUME_RENDERING true `); - emitter(builder); // Near limit in [0, 1] as fraction of full limit. builder.addUniform('highp float', 'uNearLimitFraction'); @@ -141,18 +161,18 @@ export class VolumeRenderingRenderLayer extends PerspectiveViewRenderLayer { // Specifies translation of the current chunk. builder.addUniform('highp vec3', 'uTranslation'); - // Matrix by which computed vertices will be transformed. builder.addUniform('highp mat4', 'uModelViewProjectionMatrix'); builder.addUniform('highp mat4', 'uInvModelViewProjectionMatrix'); // Chunk size in voxels. 
builder.addUniform('highp vec3', 'uChunkDataSize'); + builder.addUniform('highp float', 'uChunkNumber'); builder.addUniform('highp vec3', 'uLowerClipBound'); builder.addUniform('highp vec3', 'uUpperClipBound'); - builder.addUniform('highp float', 'uBrightnessFactor'); + builder.addUniform('highp float', 'uSamplingRatio'); builder.addVarying('highp vec4', 'vNormalizedPosition'); builder.addVertexCode(glsl_getBoxFaceVertexPosition); @@ -162,28 +182,85 @@ vec3 position = max(uLowerClipBound, min(uUpperClipBound, uTranslation + boxVert vNormalizedPosition = gl_Position = uModelViewProjectionMatrix * vec4(position, 1.0); gl_Position.z = 0.0; `); + // TODO (skm) build a UI instead of maxParameters - most likely shader widget builder.addFragmentCode(` vec3 curChunkPosition; vec4 outputColor; +float maxIntensity; +vec3 maxParameters; void userMain(); `); + const numChannelDimensions = shaderParametersState.numChannelDimensions; defineChunkDataShaderAccess(builder, chunkFormat, numChannelDimensions, `curChunkPosition`); - builder.addFragmentCode(` + let glslSnippets: VolumeRenderingShaderSnippets; + // TODO (skm) provide a switch for interpolated vs. 
nearest neighbor + switch (shaderParametersState.mode) { + case VOLUME_RENDERING_MODES.MAX: + builder.addFragmentCode(`#define MAX_PROJECTION true\n`) + glslSnippets = { + colorEmissionFunctions: ` void emitRGBA(vec4 rgba) { - float alpha = rgba.a * uBrightnessFactor; - outputColor += vec4(rgba.rgb * alpha, alpha); + outputColor = rgba; +} +void emitRGB(vec3 rgb) { + emitRGBA(vec4(rgb, 1.0)); +} +void emitGrayscale(float ignored) { + emitRGBA(vec4(maxIntensity, maxIntensity, maxIntensity, maxIntensity)); +} +void emitTransparent() { + emitRGBA(vec4(0.0, 0.0, 0.0, 0.0)); +} +`, + intensityCalculation: ` + float normChunkValue = float(toRaw(getInterpolatedDataValue(0))); + maxIntensity = max(maxIntensity, normChunkValue); +`, + beforeColorEmission: ` + userMain(); +` + }; + break; + default: + builder.addFragmentCode(`#define MAX_PROJECTION false\n`) + glslSnippets = { + colorEmissionFunctions: ` +void emitRGBA(vec4 rgba) { + float alpha = 1.0 - (pow(clamp(1.0 - rgba.a, 0.0, 1.0), uSamplingRatio)); + outputColor.rgb += (1.0 - outputColor.a) * alpha * rgba.rgb; + outputColor.a += (1.0 - outputColor.a) * alpha; } void emitRGB(vec3 rgb) { emitRGBA(vec4(rgb, 1.0)); } void emitGrayscale(float value) { - emitRGB(vec3(value, value, value)); + emitRGBA(vec4(value, value, value, value)); } void emitTransparent() { emitRGBA(vec4(0.0, 0.0, 0.0, 0.0)); } -`); - builder.setFragmentMainFunction(` +`, + intensityCalculation: ` + userMain(); + if (outputColor.a > 0.99) { + break; + } +`, + beforeColorEmission: `` + }; + break; + }; + builder.addFragmentCode(glslSnippets.colorEmissionFunctions); + if (wireFrame) { + builder.setFragmentMainFunction(` + void main() { + outputColor = vec4(uChunkNumber, uChunkNumber, uChunkNumber, 1.0); + emit(outputColor, 0u); + } + `) +} + else { + builder.setFragmentMainFunction(` void main() { vec2 normalizedPosition = vNormalizedPosition.xy / vNormalizedPosition.w; vec4 nearPointH = uInvModelViewProjectionMatrix * vec4(normalizedPosition, -1.0, 
1.0); @@ -220,14 +297,18 @@ void main() { int startStep = int(floor((intersectStart - uNearLimitFraction) / stepSize)); int endStep = min(uMaxSteps, int(floor((intersectEnd - uNearLimitFraction) / stepSize)) + 1); outputColor = vec4(0, 0, 0, 0); + maxIntensity = 0.0; + maxParameters = vec3(1.0, 0.0, 0.0); for (int step = startStep; step < endStep; ++step) { vec3 position = mix(nearPoint, farPoint, uNearLimitFraction + float(step) * stepSize); curChunkPosition = position - uTranslation; - userMain(); + ${glslSnippets.intensityCalculation} } + ${glslSnippets.beforeColorEmission} emit(outputColor, 0u); -} -`); +} +`) +}; builder.addFragmentCode(glsl_COLORMAPS); addControlsToBuilder(shaderBuilderState, builder); builder.addFragmentCode( @@ -241,6 +322,7 @@ void main() { this.registerDisposer(this.shaderControlState.changed.add(this.redrawNeeded.dispatch)); this.registerDisposer(this.localPosition.changed.add(this.redrawNeeded.dispatch)); this.registerDisposer(this.transform.changed.add(this.redrawNeeded.dispatch)); + this.registerDisposer(this.mode.changed.add(this.redrawNeeded.dispatch)); this.registerDisposer( this.shaderControlState.fragmentMain.changed.add(this.redrawNeeded.dispatch)); const {chunkManager} = this.multiscaleSource; @@ -310,7 +392,8 @@ void main() { }; let shader: ShaderProgram|null = null; let prevChunkFormat: ChunkFormat|undefined|null; - let shaderResult: ParameterizedShaderGetterResult; + let shaderResult: ParameterizedShaderGetterResult< + ShaderControlsBuilderState, VolumeRenderingShaderParameters>; // Size of chunk (in voxels) in the "display" subspace of the chunk coordinate space. 
const chunkDataDisplaySize = vec3.create(); @@ -351,6 +434,7 @@ void main() { let chunks: Map; let presentCount = 0, notPresentCount = 0; let chunkDataSize: Uint32Array|undefined; + let chunkNumber = 1; const chunkRank = this.multiscaleSource.rank; const chunkPosition = vec3.create(); @@ -379,11 +463,15 @@ void main() { if (chunkFormat !== prevChunkFormat) { prevChunkFormat = chunkFormat; endShader(); - shaderResult = - this.shaderGetter({emitter: renderContext.emitter, chunkFormat: chunkFormat!}); + shaderResult = this.shaderGetter({ + emitter: renderContext.emitter, + chunkFormat: chunkFormat!, + wireFrame: renderContext.wireFrame + }); shader = shaderResult.shader; if (shader !== null) { shader.bind(); + if (chunkFormat !== null) { setControlsInShader( gl, shader, this.shaderControlState, @@ -412,9 +500,13 @@ void main() { const {near, far, adjustedNear, adjustedFar} = getVolumeRenderingNearFarBounds( clippingPlanes, transformedSource.lowerClipDisplayBound, transformedSource.upperClipDisplayBound); - const step = (adjustedFar - adjustedNear) / (this.depthSamplesTarget.value - 1); - const brightnessFactor = step / (far - near); - gl.uniform1f(shader.uniform('uBrightnessFactor'), brightnessFactor); + // The sampling ratio is used for opacity correction. + // arguably, the reference sampling rate should be at the nyquist frequency + // to avoid aliasing, but this is not always practical for large datasets. 
+ const actualSamplingRate = (adjustedFar - adjustedNear) / (this.depthSamplesTarget.value - 1); + const referenceSamplingRate = (adjustedFar - adjustedNear) / (optimalSamples - 1); + const samplingRatio = actualSamplingRate / referenceSamplingRate; + gl.uniform1f(shader.uniform('uSamplingRatio'), samplingRatio); const nearLimitFraction = (adjustedNear - near) / (far - near); const farLimitFraction = (adjustedFar - near) / (far - near); gl.uniform1f(shader.uniform('uNearLimitFraction'), nearLimitFraction); @@ -436,6 +528,11 @@ void main() { chunkTransform: {channelToChunkDimensionIndices} } = transformedSource; const {} = transformedSource; + if (renderContext.wireFrame) { + const normChunkNumber = chunkNumber / chunks.size; + gl.uniform1f(shader.uniform('uChunkNumber'), normChunkNumber); + ++chunkNumber; + } if (newChunkDataSize !== chunkDataSize) { chunkDataSize = newChunkDataSize; diff --git a/src/neuroglancer/webgl/shader_ui_controls.ts b/src/neuroglancer/webgl/shader_ui_controls.ts index ea1f7ab90..06ccd8618 100644 --- a/src/neuroglancer/webgl/shader_ui_controls.ts +++ b/src/neuroglancer/webgl/shader_ui_controls.ts @@ -14,22 +14,24 @@ * limitations under the License. 
*/ + import {CoordinateSpaceCombiner} from 'neuroglancer/coordinate_transform'; import {TrackableBoolean} from 'neuroglancer/trackable_boolean'; import {constantWatchableValue, makeCachedDerivedWatchableValue, makeCachedLazyDerivedWatchableValue, TrackableValue, TrackableValueInterface, WatchableValueInterface} from 'neuroglancer/trackable_value'; import {arraysEqual, arraysEqualWithPredicate} from 'neuroglancer/util/array'; -import {parseRGBColorSpecification, TrackableRGB} from 'neuroglancer/util/color'; +import {parseRGBColorSpecification, serializeColor, TrackableRGB} from 'neuroglancer/util/color'; import {DataType} from 'neuroglancer/util/data_type'; import {RefCounted} from 'neuroglancer/util/disposable'; -import {vec3} from 'neuroglancer/util/geom'; -import {parseFixedLengthArray, verifyFiniteFloat, verifyInt, verifyObject, verifyOptionalObjectProperty, verifyString} from 'neuroglancer/util/json'; -import {convertDataTypeInterval, DataTypeInterval, dataTypeIntervalToJson, defaultDataTypeRange, normalizeDataTypeInterval, parseDataTypeInterval, parseUnknownDataTypeInterval, validateDataTypeInterval} from 'neuroglancer/util/lerp'; +import {vec3, vec4} from 'neuroglancer/util/geom'; +import {parseArray, parseFixedLengthArray, verifyFiniteFloat, verifyInt, verifyObject, verifyOptionalObjectProperty, verifyString} from 'neuroglancer/util/json'; +import {computeLerp, convertDataTypeInterval, DataTypeInterval, dataTypeIntervalToJson, defaultDataTypeRange, normalizeDataTypeInterval, parseDataTypeInterval, parseUnknownDataTypeInterval, validateDataTypeInterval} from 'neuroglancer/util/lerp'; import {NullarySignal} from 'neuroglancer/util/signal'; import {Trackable} from 'neuroglancer/util/trackable'; import {GL} from 'neuroglancer/webgl/context'; import {HistogramChannelSpecification, HistogramSpecifications} from 'neuroglancer/webgl/empirical_cdf'; import {defineInvlerpShaderFunction, enableLerpShaderFunction} from 'neuroglancer/webgl/lerp'; import {ShaderBuilder, 
ShaderProgram} from 'neuroglancer/webgl/shader'; +import {ControlPoint, defineTransferFunctionShader, enableTransferFunctionShader, TRANSFER_FUNCTION_LENGTH} from 'neuroglancer/widget/transfer_function' export interface ShaderSliderControl { type: 'slider'; @@ -69,8 +71,15 @@ export interface ShaderCheckboxControl { default: boolean; } -export type ShaderUiControl = - ShaderSliderControl|ShaderColorControl|ShaderImageInvlerpControl|ShaderPropertyInvlerpControl|ShaderCheckboxControl; +// TODO (skm) make control points a class potentially +export interface ShaderTransferFunctionControl { + type: 'transferFunction'; + dataType: DataType; + default: TransferFunctionParameters; +} + +export type ShaderUiControl = ShaderSliderControl|ShaderColorControl|ShaderImageInvlerpControl| + ShaderPropertyInvlerpControl|ShaderCheckboxControl|ShaderTransferFunctionControl; export interface ShaderControlParseError { line: number; @@ -488,6 +497,81 @@ function parsePropertyInvlerpDirective( }; } +function parseTransferFunctionDirective( + valueType: string, parameters: DirectiveParameters, + dataContext: ShaderDataContext): DirectiveParseResult { + const imageData = dataContext.imageData; + const dataType = imageData?.dataType; + const channelRank = imageData?.channelRank; + let errors = []; + let channel = new Array(channelRank).fill(0); + let color = vec3.fromValues(1.0, 1.0, 1.0); + let range: DataTypeInterval|undefined; + const controlPoints = new Array(); + if (valueType !== 'transferFunction') { + errors.push('type must be transferFunction'); + } + if (dataType === undefined) { + errors.push('image data must be provided to use transfer function'); + } else { + range = defaultDataTypeRange[dataType]; + } + for (let [key, value] of parameters) { + try { + switch (key) { + case 'channel': { + channel = parseInvlerpChannel(value, channel.length); + break; + } + // TODO (skm) parse hex color values + case 'color': { + color = parseRGBColorSpecification(value); + break; + } + case 
'range': { + if (dataType !== undefined) { + range = validateDataTypeInterval(parseDataTypeInterval(value, dataType)); + } + break; + } + case 'points': { + if (dataType !== undefined) { + controlPoints.push(...convertTransferFunctionControlPoints(value, dataType)); + } + break; + } + default: + errors.push(`Invalid parameter: ${key}`); + break; + } + } catch (e) { + errors.push(`Invalid ${key} value: ${e.message}`); + } + } + if (errors.length > 0) { + return {errors}; + } + if (controlPoints.length === 0) { + const transferFunctionRange = [0, TRANSFER_FUNCTION_LENGTH - 1] as [number, number]; + const startPoint = computeLerp(transferFunctionRange, DataType.UINT16, 0.4) as number; + const endPoint = computeLerp(transferFunctionRange, DataType.UINT16, 0.7) as number; + // TODO (skm) when using texture, need to use normalized values + controlPoints.push({position: startPoint, color: vec4.fromValues(0, 0, 0, 0)}); + controlPoints.push({position: endPoint, color: vec4.fromValues(255, 255, 255, 255)}); + } + if (range === undefined && dataType !== undefined) { + range = defaultDataTypeRange[dataType]; + } else if (range === undefined) { + range = [0, 1] as [number, number]; + } + return { + control: + {type: 'transferFunction', dataType, default: {controlPoints, channel, color, range}} as + ShaderTransferFunctionControl, + errors: undefined, + }; +} + export interface ImageDataSpecification { dataType: DataType; channelRank: number; @@ -506,6 +590,7 @@ const controlParsers = new Map< ['color', parseColorDirective], ['invlerp', parseInvlerpDirective], ['checkbox', parseCheckboxDirective], + ['transferFunction', parseTransferFunctionDirective], ]); export function parseShaderUiControls( @@ -608,6 +693,12 @@ float ${uName}() { builder.addVertexCode(code); break; } + case 'transferFunction': { + builder.addFragmentCode(`#define ${name} ${uName}\n`) + builder.addFragmentCode( + defineTransferFunctionShader(builder, uName, control.dataType, builderValue.channel)); + break; + 
} default: { builder.addUniform(`highp ${control.valueType}`, uName); builder.addVertexCode(`#define ${name} ${uName}\n`); @@ -708,14 +799,13 @@ function parsePropertyInvlerpParameters( defaultValue: PropertyInvlerpParameters): PropertyInvlerpParameters { if (obj === undefined) return defaultValue; verifyObject(obj); - const property = - verifyOptionalObjectProperty(obj, 'property', property => { - property = verifyString(property); - if (!properties.has(property)) { - throw new Error(`Invalid value: ${JSON.stringify(property)}`); - } - return property; - }, defaultValue.property); + const property = verifyOptionalObjectProperty(obj, 'property', property => { + property = verifyString(property); + if (!properties.has(property)) { + throw new Error(`Invalid value: ${JSON.stringify(property)}`); + } + return property; + }, defaultValue.property); const dataType = properties.get(property)!; return { property, @@ -729,7 +819,8 @@ function parsePropertyInvlerpParameters( } class TrackablePropertyInvlerpParameters extends TrackableValue { - constructor(public properties: PropertiesSpecification, public defaultValue: PropertyInvlerpParameters) { + constructor( + public properties: PropertiesSpecification, public defaultValue: PropertyInvlerpParameters) { super(defaultValue, obj => parsePropertyInvlerpParameters(obj, properties, defaultValue)); } @@ -748,6 +839,118 @@ class TrackablePropertyInvlerpParameters extends TrackableValue; + channel: number[]; + color: vec3; + range: DataTypeInterval; +} + +function convertTransferFunctionControlPoints(value: unknown, dataType: DataType) { + dataType; + return parseArray(value, x => { + if (x.length !== 5) { + throw new Error( + `Expected array of length 5 (x, R, G, B, A), but received: ${JSON.stringify(x)}`); + } + // TODO (skm) implement proper validation of array elements + for (const val of x) { + if (typeof val !== 'number') { + throw new Error(`Expected number, but received: ${JSON.stringify(val)}`); + } + } + return 
{position: x[0], color: vec4.fromValues(x[1], x[2], x[3], x[4])}; + }); +} + +function parseTransferFunctionControlPoints(value: unknown, dataType: DataType) { + dataType; + return parseArray(value, x => { + if (x.position === undefined || x.color === undefined) { + throw new Error( + `Expected object with position and color properties, but received: ${JSON.stringify(x)}`); + } + if (typeof x.position !== 'number') { + // TODO (skm) might need to be of dataType depending on final implementation + throw new Error(`Expected number, but received: ${JSON.stringify(x.position)}`); + } + if (Object.keys(x.color).length !== 4) { + throw new Error( + `Expected array of length 4 (R, G, B, A), but received: ${JSON.stringify(x.color)}`); + } + if (typeof x.color[0] !== 'number' || typeof x.color[1] !== 'number' || + typeof x.color[2] !== 'number' || typeof x.color[3] !== 'number') { + throw new Error(`Expected number, but received: ${JSON.stringify(x.color)}`); + } + return { + position: x.position, + color: vec4.fromValues(x.color[0], x.color[1], x.color[2], x.color[3]) + }; + }); +} + +function parseTransferFunctionParameters( + obj: unknown, dataType: DataType, + defaultValue: TransferFunctionParameters): TransferFunctionParameters { + if (obj === undefined) return defaultValue; + verifyObject(obj); + return { + controlPoints: verifyOptionalObjectProperty( + obj, 'controlPoints', x => parseTransferFunctionControlPoints(x, dataType), + defaultValue.controlPoints), + channel: verifyOptionalObjectProperty( + obj, 'channel', x => parseInvlerpChannel(x, defaultValue.channel.length), + defaultValue.channel), + color: verifyOptionalObjectProperty( + obj, 'color', x => parseRGBColorSpecification(x), defaultValue.color), + range: verifyOptionalObjectProperty( + obj, 'range', x => validateDataTypeInterval(parseDataTypeInterval(x, dataType)), + defaultValue.range), + }; +} + +function deepCopyTransferFunctionParameters(defaultValue: TransferFunctionParameters) { + return { + 
controlPoints: + defaultValue.controlPoints.map(x => ({position: x.position, color: vec4.clone(x.color)})), + channel: defaultValue.channel, + color: vec3.clone(defaultValue.color), + range: [defaultValue.range[0], defaultValue.range[1]] as [number, number] + }; +} + +class TrackableTransferFunctionParameters extends TrackableValue { + constructor(public dataType: DataType, public defaultValue: TransferFunctionParameters) { + const defaultValueCopy = deepCopyTransferFunctionParameters(defaultValue); + super(defaultValueCopy, obj => parseTransferFunctionParameters(obj, dataType, defaultValue)); + } + + toJSON() { + const {value: {channel, controlPoints, color}, dataType, defaultValue} = this; + let range = this.value.range; + const rangeJson = dataTypeIntervalToJson(range, dataType, defaultValue.range); + const channelJson = arraysEqual(defaultValue.channel, channel) ? undefined : channel; + const colorJson = + arraysEqual(defaultValue.color, color) ? undefined : serializeColor(this.value.color); + const controlPointsJson = + arraysEqualWithPredicate( + defaultValue.controlPoints, controlPoints, + (a, b) => arraysEqual(a.color, b.color) && a.position == b.position) ? 
+ undefined : + this.value.controlPoints; + if (rangeJson === undefined && channelJson === undefined && colorJson === undefined && + controlPointsJson === undefined) { + return undefined; + } + return { + range: rangeJson, + channel: channelJson, + color: colorJson, + controlPoints: controlPointsJson + }; + } +} + function getControlTrackable(control: ShaderUiControl): {trackable: TrackableValueInterface, getBuilderValue: (value: any) => any} { switch (control.type) { @@ -789,6 +992,12 @@ function getControlTrackable(control: ShaderUiControl): trackable: new TrackableBoolean(control.default), getBuilderValue: value => ({value}), }; + case 'transferFunction': + return { + trackable: new TrackableTransferFunctionParameters(control.dataType, control.default), + getBuilderValue: (value: TransferFunctionParameters) => + ({channel: value.channel, dataType: control.dataType}), + }; } } @@ -1075,7 +1284,6 @@ export class ShaderControlState extends RefCounted implements let empty = true; for (const [key, value] of state) { const valueJson = value.trackable.toJSON(); - ; if (valueJson !== undefined) { obj[key] = valueJson; empty = false; @@ -1116,6 +1324,10 @@ function setControlInShader( case 'checkbox': // Value is hard-coded in shader. 
break; + case 'transferFunction': + enableTransferFunctionShader( + shader, uName, control.dataType, value.controlPoints, value.range); + break; } } diff --git a/src/neuroglancer/widget/invlerp.ts b/src/neuroglancer/widget/invlerp.ts index addf3c4f2..0ecd02fb2 100644 --- a/src/neuroglancer/widget/invlerp.ts +++ b/src/neuroglancer/widget/invlerp.ts @@ -492,11 +492,11 @@ function createRangeBoundInputs( return {container, inputs, spacers}; } -function updateInputBoundWidth(inputElement: HTMLInputElement) { +export function updateInputBoundWidth(inputElement: HTMLInputElement) { updateInputFieldWidth(inputElement, Math.max(1, inputElement.value.length + 0.1)); } -function updateInputBoundValue(inputElement: HTMLInputElement, bound: number|Uint64) { +export function updateInputBoundValue(inputElement: HTMLInputElement, bound: number|Uint64) { let boundString: string; if (bound instanceof Uint64 || Number.isInteger(bound)) { boundString = bound.toString(); diff --git a/src/neuroglancer/widget/render_scale_widget.ts b/src/neuroglancer/widget/render_scale_widget.ts index e92762180..09e733b81 100644 --- a/src/neuroglancer/widget/render_scale_widget.ts +++ b/src/neuroglancer/widget/render_scale_widget.ts @@ -26,11 +26,11 @@ import {hsvToRgb} from 'neuroglancer/util/colorspace'; import {RefCounted} from 'neuroglancer/util/disposable'; import {ActionEvent, EventActionMap, registerActionListener} from 'neuroglancer/util/event_action_map'; import {vec3} from 'neuroglancer/util/geom'; +import {clampToInterval} from 'neuroglancer/util/lerp'; import {MouseEventBinder} from 'neuroglancer/util/mouse_bindings'; import {numberToStringFixed} from 'neuroglancer/util/number_to_string'; import {formatScaleWithUnitAsString} from 'neuroglancer/util/si_units'; import {LayerControlFactory} from 'neuroglancer/widget/layer_control'; -import {clampToInterval} from 'src/neuroglancer/util/lerp'; const updateInterval = 200; diff --git a/src/neuroglancer/widget/shader_controls.ts 
b/src/neuroglancer/widget/shader_controls.ts index ef34642d0..5f2ca2726 100644 --- a/src/neuroglancer/widget/shader_controls.ts +++ b/src/neuroglancer/widget/shader_controls.ts @@ -32,6 +32,7 @@ import {colorLayerControl} from 'neuroglancer/widget/layer_control_color'; import {propertyInvlerpLayerControl} from 'neuroglancer/widget/layer_control_property_invlerp'; import {rangeLayerControl} from 'neuroglancer/widget/layer_control_range'; import {Tab} from 'neuroglancer/widget/tab_view'; +import {transferFunctionLayerControl} from 'neuroglancer/widget/transfer_function'; export interface LegendShaderOptions extends ParameterizedEmitterDependentShaderOptions { initializeShader: (shaderResult: ParameterizedShaderGetterResult) => void; @@ -92,6 +93,12 @@ function getShaderLayerControlFactory( legendShaderOptions: layerShaderControls.legendShaderOptions, })); } + case 'transferFunction': { + return transferFunctionLayerControl(() => ({ + dataType: control.dataType, + watchableValue: controlState.trackable, + })); + } } } diff --git a/src/neuroglancer/widget/transfer_function.css b/src/neuroglancer/widget/transfer_function.css new file mode 100644 index 000000000..e8037b4ff --- /dev/null +++ b/src/neuroglancer/widget/transfer_function.css @@ -0,0 +1,44 @@ +/** + * @license + * Copyright 2020 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +.neuroglancer-transfer-function-panel { + height: 60px; + border: 1px solid #666; +} + +.neuroglancer-transfer-function-padding { + padding-top: 15px; +} + +.neuroglancer-transfer-function-color-picker { + text-align: right; +} + +.neuroglancer-transfer-function-widget-bound { + background-color: transparent; + border-color: transparent; + box-shadow: none; + border: 0; + margin: 0; + font-family: monospace; + font-size: medium; + color: cyan; +} + +.neuroglancer-transfer-function-range-bounds { + display: flex; + justify-content: space-between; +} \ No newline at end of file diff --git a/src/neuroglancer/widget/transfer_function.ts b/src/neuroglancer/widget/transfer_function.ts new file mode 100644 index 000000000..dca3c4f2f --- /dev/null +++ b/src/neuroglancer/widget/transfer_function.ts @@ -0,0 +1,881 @@ +/** + * @license + * Copyright 2023 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import './transfer_function.css'; + +import {DisplayContext, IndirectRenderedPanel} from 'neuroglancer/display_context'; +import {UserLayer} from 'neuroglancer/layer'; +import {makeCachedDerivedWatchableValue, WatchableValueInterface} from 'neuroglancer/trackable_value'; +import {ToolActivation} from 'neuroglancer/ui/tool'; +import {DataType} from 'neuroglancer/util/data_type'; +import {RefCounted} from 'neuroglancer/util/disposable'; +import {EventActionMap, registerActionListener} from 'neuroglancer/util/event_action_map'; +import {vec3, vec4} from 'neuroglancer/util/geom'; +import {computeLerp, DataTypeInterval, parseDataTypeValue} from 'neuroglancer/util/lerp'; +import {MouseEventBinder} from 'neuroglancer/util/mouse_bindings'; +import {startRelativeMouseDrag} from 'neuroglancer/util/mouse_drag'; +import {WatchableVisibilityPriority} from 'neuroglancer/visibility_priority/frontend'; +import {Buffer, getMemoizedBuffer} from 'neuroglancer/webgl/buffer'; +import {GL} from 'neuroglancer/webgl/context'; +import {defineInvlerpShaderFunction} from 'neuroglancer/webgl/lerp'; +import {defineLineShader, drawLines, initializeLineShader, VERTICES_PER_LINE} from 'neuroglancer/webgl/lines'; +import {VERTICES_PER_QUAD} from 'neuroglancer/webgl/quad'; +import {ShaderBuilder, ShaderCodePart, ShaderProgram} from 'neuroglancer/webgl/shader'; +import {getShaderType} from 'neuroglancer/webgl/shader_lib'; +import {TransferFunctionParameters} from 'neuroglancer/webgl/shader_ui_controls'; +import {setRawTextureParameters} from 'neuroglancer/webgl/texture'; +import {ColorWidget} from 'neuroglancer/widget/color'; +import {getUpdatedRangeAndWindowParameters, updateInputBoundValue, updateInputBoundWidth} from 'neuroglancer/widget/invlerp'; +import {LayerControlFactory, LayerControlTool} from 'neuroglancer/widget/layer_control'; +import {Tab} from 'neuroglancer/widget/tab_view'; + +const NUM_COLOR_CHANNELS = 4; +const POSITION_VALUES_PER_LINE = 4; // x1, y1, x2, y2 +export 
const TRANSFER_FUNCTION_LENGTH = 512; +const CONTROL_POINT_GRAB_DISTANCE = TRANSFER_FUNCTION_LENGTH / 40; +const TRANSFER_FUNCTION_BORDER_WIDTH = 23; + +const transferFunctionSamplerTextureUnit = Symbol('transferFunctionSamplerTexture'); + +export interface ControlPoint { + position: number; + color: vec4; +} + +export interface TransferFunctionTextureOptions { + controlPoints: ControlPointsLookupTable; + textureUnit: number; +} + +interface CanvasPosition { + normalizedX: number; + normalizedY: number; +} + +/** + * Fill a lookup table with color values between control points via linear interpolation. Everything + * before the first point is transparent, everything after the last point has the color of the last + * point. + * @param out The lookup table to fill + * @param controlPoints The control points to interpolate between + */ +function lerpBetweenControlPoints(out: Int32Array|Uint8Array, controlPoints: Array) { + function addLookupValue(index: number, color: vec4) { + out[index] = color[0]; + out[index + 1] = color[1]; + out[index + 2] = color[2]; + out[index + 3] = color[3]; + } + + if (controlPoints.length === 0) { + out.fill(0); + return; + } + const firstPoint = controlPoints[0]; + + if (firstPoint.position > 0) { + const transparent = vec4.fromValues(0, 0, 0, 0); + for (let i = 0; i < firstPoint.position; ++i) { + const index = i * NUM_COLOR_CHANNELS; + addLookupValue(index, transparent); + } + } + + let controlPointIndex = 0; + for (let i = firstPoint.position; i < TRANSFER_FUNCTION_LENGTH; ++i) { + const currentPoint = controlPoints[controlPointIndex]; + const nextPoint = controlPoints[Math.min(controlPointIndex + 1, controlPoints.length - 1)]; + const lookupIndex = i * NUM_COLOR_CHANNELS; + if (currentPoint === nextPoint) { + addLookupValue(lookupIndex, currentPoint.color); + } else if (i < nextPoint.position) { + const t = (i - currentPoint.position) / (nextPoint.position - currentPoint.position); + const lerpedColor = 
lerpUint8Color(currentPoint.color, nextPoint.color, t); + addLookupValue(lookupIndex, lerpedColor); + } else { + addLookupValue(lookupIndex, nextPoint.color); + controlPointIndex++; + } + } +} + +// TODO (skm) move this to a more general location +function findClosestValueIndexInSortedArray(array: Array, value: number) { + if (array.length === 0) { + return -1; + } + + let start = 0; + let end = array.length - 1; + + while (start <= end) { + const mid = Math.floor((start + end) / 2); + if (array[mid] === value) { + return mid; + } else if (array[mid] < value) { + start = mid + 1; + } else { + end = mid - 1; + } + } + + start = Math.min(start, array.length - 1); + end = Math.max(end, 0); + const startDiff = Math.abs(array[start] - value); + const endDiff = Math.abs(array[end] - value); + return startDiff < endDiff ? start : end; +} + +/** + * Convert a [0, 1] float to a uint8 value between 0 and 255 + */ +function floatToUint8(float: number) { + return Math.min(255, Math.max(Math.round(float * 255), 0)); +} + +/** + * Linearly interpolate between each component of two vec4s (color values) + */ +function lerpUint8Color(startColor: vec4, endColor: vec4, t: number) { + const color = vec4.create(); + for (let i = 0; i < 4; ++i) { + color[i] = computeLerp([startColor[i], endColor[i]], DataType.UINT8, t) as number; + } + return color; +} + +/** + * Create a Float32Array of vertices for a canvas filling rectangle with the given number of grids + * in the x direction + */ +function griddedRectangleArray(numGrids: number): Float32Array { + const result = new Float32Array(numGrids * VERTICES_PER_QUAD * 2); + const width = 2; + const height = 1; + let start = -width / 2; + const step = width / numGrids; + for (let i = 0; i < numGrids; ++i) { + const end = start + step; + const index = i * VERTICES_PER_QUAD * 2; + + // Triangle 1 - top-left, top-right, bottom-right + result[index] = start; // top-left x + result[index + 1] = height; // top-left y + result[index + 2] = end // 
top-right x + result[index + 3] = height; // top-right y + result[index + 4] = end; // bottom-right x + result[index + 5] = -height; // bottom-right y + + // Triangle 2 - top-left, bottom-right, bottom-left + result[index + 6] = start; // top-left x + result[index + 7] = height; // top-left y + result[index + 8] = end; // bottom-right x + result[index + 9] = -height; // bottom-right y + result[index + 10] = start; // bottom-left x + result[index + 11] = -height; // bottom-left y + start += step; + } + return result; +} + +/** + * Represent the underlying transfer function as a texture + */ +class TransferFunctionTexture extends RefCounted { + texture: WebGLTexture|null = null; + width: number = TRANSFER_FUNCTION_LENGTH; + height: number = 1; + private priorOptions: TransferFunctionTextureOptions|undefined = undefined; + + constructor(public gl: GL) { + super(); + } + + updateAndActivate(options: TransferFunctionTextureOptions) { + const {gl} = this; + let {texture} = this; + // TODO (skm) might be able to do more efficient updates + if (texture !== null && options === this.priorOptions) { + gl.activeTexture(WebGL2RenderingContext.TEXTURE0 + options.textureUnit); + gl.bindTexture(WebGL2RenderingContext.TEXTURE_2D, texture); + return; + } + if (texture === null) { + texture = this.texture = gl.createTexture(); + } + gl.activeTexture(WebGL2RenderingContext.TEXTURE0 + options.textureUnit); + gl.bindTexture(WebGL2RenderingContext.TEXTURE_2D, texture); + setRawTextureParameters(gl); + gl.texImage2D( + WebGL2RenderingContext.TEXTURE_2D, 0, WebGL2RenderingContext.RGBA, this.width, 1, 0, + WebGL2RenderingContext.RGBA, WebGL2RenderingContext.UNSIGNED_BYTE, + options.controlPoints.lookupTable); + this.priorOptions = options; + } + + disposed() { + this.gl.deleteTexture(this.texture); + this.texture = null; + super.disposed(); + } +} + +/** + * Display the UI canvas for the transfer function widget and handle shader updates for elements of + * the canvas + */ +class 
TransferFunctionPanel extends IndirectRenderedPanel { + texture: TransferFunctionTexture; + private vertexBuffer: Buffer; + private controlPointsVertexBuffer: Buffer; + private controlPointsColorBuffer: Buffer; + private controlPointsPositionArray = new Float32Array(); + private controlPointsColorArray = new Float32Array(); + private linePositionBuffer: Buffer; + private linePositionArray = new Float32Array(); + get drawOrder() { + return 1; + } + controlPointsLookupTable = this.registerDisposer( + new ControlPointsLookupTable(this.parent.dataType, this.parent.trackable)); + controller = this.registerDisposer(new TransferFunctionController( + this.element, this.parent.dataType, this.controlPointsLookupTable, + () => this.parent.trackable.value, (value: TransferFunctionParameters) => { + this.parent.trackable.value = value; + })); + constructor(public parent: TransferFunctionWidget) { + super(parent.display, document.createElement('div'), parent.visibility); + const {element} = this; + element.classList.add('neuroglancer-transfer-function-panel'); + this.texture = this.registerDisposer(new TransferFunctionTexture(this.gl)); + this.vertexBuffer = this.registerDisposer(getMemoizedBuffer( + this.gl, WebGL2RenderingContext.ARRAY_BUFFER, + griddedRectangleArray, TRANSFER_FUNCTION_LENGTH)) + .value; + this.controlPointsVertexBuffer = + this.registerDisposer(getMemoizedBuffer( + this.gl, WebGL2RenderingContext.ARRAY_BUFFER, + () => this.controlPointsPositionArray)) + .value; + this.controlPointsColorBuffer = + this + .registerDisposer(getMemoizedBuffer( + this.gl, WebGL2RenderingContext.ARRAY_BUFFER, () => this.controlPointsColorArray)) + .value; + this.linePositionBuffer = + this + .registerDisposer(getMemoizedBuffer( + this.gl, WebGL2RenderingContext.ARRAY_BUFFER, () => this.linePositionArray)) + .value; + } + + updateTransferFunctionPanelLines() { + function normalizePosition(position: number) { + return (position / (TRANSFER_FUNCTION_LENGTH - 1)) * 2 - 1; + } + 
function normalizeOpacity(opacity: number) { + return (opacity / 255) * 2 - 1; + } + function normalizeColor(colorComponent: number) { + return (colorComponent / 255); + } + + function createLinePoints(array: Float32Array, index: number, positions: vec4): number { + for (let i = 0; i < VERTICES_PER_LINE; ++i) { + array[index++] = normalizePosition(positions[0]); + array[index++] = normalizeOpacity(positions[1]); + array[index++] = normalizePosition(positions[2]); + array[index++] = normalizeOpacity(positions[3]); + } + return index + } + + const colorChannels = NUM_COLOR_CHANNELS - 1; // ignore alpha + const controlPoints = this.controlPointsLookupTable.trackable.value.controlPoints; + const colorArray = new Float32Array(controlPoints.length * colorChannels); + const positionArray = new Float32Array(controlPoints.length * 2); + let numLines = controlPoints.length - 1; + let startAdd = null; + let endAdd = null; + let lineIndex = 0; + if (controlPoints.length > 0) { + if (controlPoints[0].position > 0) { + numLines += 1; + startAdd = {position: controlPoints[0].position, color: vec4.fromValues(0, 0, 0, 0)}; + } + if (controlPoints[controlPoints.length - 1].position < TRANSFER_FUNCTION_LENGTH - 1) { + numLines += 1; + endAdd = { + position: TRANSFER_FUNCTION_LENGTH - 1, + color: controlPoints[controlPoints.length - 1].color + }; + } + } else { + numLines = 0; + } + + const linePositionArray = + new Float32Array(numLines * VERTICES_PER_LINE * POSITION_VALUES_PER_LINE); + if (startAdd !== null) { + const linePosition = vec4.fromValues( + startAdd.position, startAdd.color[3], controlPoints[0].position, + controlPoints[0].color[3]); + lineIndex = createLinePoints(linePositionArray, lineIndex, linePosition); + } + + for (let i = 0; i < controlPoints.length; ++i) { + const colorIndex = i * colorChannels; + const positionIndex = i * 2; + const {color, position} = controlPoints[i]; + colorArray[colorIndex] = normalizeColor(color[0]); + colorArray[colorIndex + 1] = 
normalizeColor(color[1]); + colorArray[colorIndex + 2] = normalizeColor(color[2]); + positionArray[positionIndex] = normalizePosition(position); + positionArray[positionIndex + 1] = normalizeOpacity(color[3]); + if (i < controlPoints.length - 1) { + const linePosition = vec4.fromValues( + position, color[3], controlPoints[i + 1].position, controlPoints[i + 1].color[3]); + lineIndex = createLinePoints(linePositionArray, lineIndex, linePosition); + } + } + + if (endAdd !== null) { + const linePosition = vec4.fromValues( + controlPoints[controlPoints.length - 1].position, + controlPoints[controlPoints.length - 1].color[3], endAdd.position, endAdd.color[3]); + lineIndex = createLinePoints(linePositionArray, lineIndex, linePosition); + } + + this.controlPointsColorArray = colorArray; + this.controlPointsPositionArray = positionArray; + this.linePositionArray = linePositionArray; + this.controlPointsVertexBuffer.setData(this.controlPointsPositionArray); + this.controlPointsColorBuffer.setData(this.controlPointsColorArray); + this.linePositionBuffer.setData(this.linePositionArray); + } + + private transferFunctionLineShader = this.registerDisposer((() => { + const builder = new ShaderBuilder(this.gl); + defineLineShader(builder); + builder.addAttribute('vec4', 'aLineStartEnd'); + builder.addOutputBuffer('vec4', 'out_color', 0); + builder.addVarying('float', 'vColor'); + builder.setVertexMain(` +vec4 start = vec4(aLineStartEnd[0], aLineStartEnd[1], 0.0, 1.0); +vec4 end = vec4(aLineStartEnd[2], aLineStartEnd[3], 0.0, 1.0); +emitLine(start, end, 1.0); +`); + builder.setFragmentMain(` +out_color = vec4(0.0, 1.0, 1.0, getLineAlpha()); +`); + return builder.build(); + })()); + + private transferFunctionShader = this.registerDisposer((() => { + const builder = new ShaderBuilder(this.gl); + builder.addAttribute('vec2', 'aVertexPosition'); + builder.addVarying('vec2', 'vTexCoord'); + builder.addOutputBuffer('vec4', 'out_color', 0); + builder.addTextureSampler('sampler2D', 
'uSampler', transferFunctionSamplerTextureUnit); + builder.addUniform('float', 'uTransferFunctionEnd'); + builder.setVertexMain(` +gl_Position = vec4(aVertexPosition, 0.0, 1.0); +vTexCoord = (aVertexPosition + 1.0) / 2.0; +`); + builder.setFragmentMain(` +ivec2 texel = ivec2(floor(vTexCoord.x * uTransferFunctionEnd), 0); +out_color = texelFetch(uSampler, texel, 0); +`); + return builder.build(); + })()); + + private controlPointsShader = this.registerDisposer((() => { + const builder = new ShaderBuilder(this.gl); + builder.addAttribute('vec2', 'aVertexPosition'); + builder.addAttribute('vec3', 'aVertexColor'); + builder.addVarying('vec3', 'vColor'); + builder.addOutputBuffer('vec4', 'out_color', 0); + builder.setVertexMain(` +gl_Position = vec4(aVertexPosition, 0.0, 1.0); +gl_PointSize = 14.0; +vColor = aVertexColor; +`); + builder.setFragmentMain(` +float vColorSum = vColor.r + vColor.g + vColor.b; +vec3 bordercolor = vec3(0.0, 0.0, 0.0); +if (vColorSum < 0.4) { + bordercolor = vec3(1.0, 1.0, 1.0); +} +float dist = distance(gl_PointCoord, vec2(0.5, 0.5)); +float alpha = smoothstep(0.25, 0.4, dist); +vec4 tempColor = vec4(mix(vColor, bordercolor, alpha), 1.0); +alpha = 1.0 - smoothstep(0.4, 0.5, dist); +out_color = tempColor * alpha; +`); + return builder.build(); + })()); + + drawIndirect() { + const {transferFunctionLineShader, gl, transferFunctionShader, controlPointsShader} = this; + this.setGLLogicalViewport(); + gl.clearColor(0.0, 0.0, 0.0, 0.0); + gl.clear(WebGL2RenderingContext.COLOR_BUFFER_BIT); + gl.enable(WebGL2RenderingContext.BLEND); + gl.blendFunc(WebGL2RenderingContext.SRC_ALPHA, WebGL2RenderingContext.ONE_MINUS_SRC_ALPHA); + gl.disable(WebGL2RenderingContext.DEPTH_TEST); + gl.disable(WebGL2RenderingContext.STENCIL_TEST); + { + transferFunctionShader.bind(); + const aVertexPosition = transferFunctionShader.attribute('aVertexPosition'); + gl.uniform1f( + transferFunctionShader.uniform('uTransferFunctionEnd'), TRANSFER_FUNCTION_LENGTH - 1); + 
this.vertexBuffer.bindToVertexAttrib( + aVertexPosition, /*components=*/ 2, /*attributeType=*/ WebGL2RenderingContext.FLOAT); + const textureUnit = transferFunctionShader.textureUnit(transferFunctionSamplerTextureUnit); + this.texture.updateAndActivate({controlPoints: this.controlPointsLookupTable, textureUnit}); + gl.drawArrays(gl.TRIANGLES, 0, TRANSFER_FUNCTION_LENGTH * VERTICES_PER_QUAD); + gl.disableVertexAttribArray(aVertexPosition); + gl.bindTexture(WebGL2RenderingContext.TEXTURE_2D, null); + } + if (this.controlPointsPositionArray.length > 0) { + const {renderViewport} = this; + transferFunctionLineShader.bind(); + const aLineStartEnd = transferFunctionLineShader.attribute('aLineStartEnd'); + this.linePositionBuffer.bindToVertexAttrib( + aLineStartEnd, /*components=*/ 4, /*attributeType=*/ WebGL2RenderingContext.FLOAT); + initializeLineShader( + transferFunctionLineShader, + {width: renderViewport.logicalWidth, height: renderViewport.logicalHeight}, + /*featherWidthInPixels=*/ 1); + drawLines( + gl, this.linePositionArray.length / (VERTICES_PER_LINE * POSITION_VALUES_PER_LINE), 1); + gl.disableVertexAttribArray(aLineStartEnd); + + controlPointsShader.bind(); + const aVertexPosition = controlPointsShader.attribute('aVertexPosition'); + this.controlPointsVertexBuffer.bindToVertexAttrib( + aVertexPosition, /*components=*/ 2, /*attributeType=*/ WebGL2RenderingContext.FLOAT); + const aVertexColor = controlPointsShader.attribute('aVertexColor'); + this.controlPointsColorBuffer.bindToVertexAttrib( + aVertexColor, /*components=*/ 3, /*attributeType=*/ WebGL2RenderingContext.FLOAT); + gl.drawArrays(gl.POINTS, 0, this.controlPointsPositionArray.length / 2); + gl.disableVertexAttribArray(aVertexPosition); + gl.disableVertexAttribArray(aVertexColor); + } + gl.disable(WebGL2RenderingContext.BLEND); + } + update() { + this.controlPointsLookupTable.lookupTableFromControlPoints(); + this.updateTransferFunctionPanelLines(); + } + isReady() { + return true; + } +} + +// TODO 
(skm) control points might need two positions, one for the actual position and one for the +// display position +// TODO (skm) however, this might make it a bit awkward for texturing +// TODO (skm) does this need data type? +/** + * Lookup table for control points. Handles adding, removing, and updating control points as well as + * consequent updates to the underlying lookup table formed from the control points. + */ +class ControlPointsLookupTable extends RefCounted { + lookupTable: Uint8Array; + constructor( + public dataType: DataType, + public trackable: WatchableValueInterface) { + super(); + this.lookupTable = new Uint8Array(TRANSFER_FUNCTION_LENGTH * NUM_COLOR_CHANNELS).fill(0); + } + positionToIndex(position: number) { + return Math.floor(position * (TRANSFER_FUNCTION_LENGTH - 1)); + } + opacityToIndex(opacity: number) { + let opacityAsUint8 = floatToUint8(opacity); + if (opacityAsUint8 <= TRANSFER_FUNCTION_BORDER_WIDTH) { + opacityAsUint8 = 0; + } else if (opacityAsUint8 >= 255 - TRANSFER_FUNCTION_BORDER_WIDTH) { + opacityAsUint8 = 255; + } + return opacityAsUint8; + } + findNearestControlPointIndex(position: number) { + return findClosestValueIndexInSortedArray( + this.trackable.value.controlPoints.map((point) => point.position), + this.positionToIndex(position)); + } + grabControlPoint(position: number) { + const nearestIndex = this.findNearestControlPointIndex(position); + if (nearestIndex === -1) { + return -1; + } + const nearestPosition = this.trackable.value.controlPoints[nearestIndex].position; + const desiredPosition = this.positionToIndex(position); + if (Math.abs(nearestPosition - desiredPosition) < CONTROL_POINT_GRAB_DISTANCE) { + return nearestIndex; + } else { + return -1; + } + } + addPoint(position: number, opacity: number, color: vec3) { + const colorAsUint8 = + vec3.fromValues(floatToUint8(color[0]), floatToUint8(color[1]), floatToUint8(color[2])); + let opacityAsUint8 = this.opacityToIndex(opacity); + const controlPoints = 
// NOTE(review): the first statement below is the tail of a method whose
// signature lies above this chunk (it completes an expression such as
// `const controlPoints = this.trackable.value.controlPoints;`).
// It is reproduced unchanged apart from formatting.
        this.trackable.value.controlPoints;
    const positionAsIndex = this.positionToIndex(position);
    // Replace any existing point at the same quantized position.
    const existingIndex =
        controlPoints.findIndex((point) => point.position === positionAsIndex);
    if (existingIndex !== -1) {
      controlPoints.splice(existingIndex, 1);
    }
    controlPoints.push({
      position: positionAsIndex,
      color: vec4.fromValues(colorAsUint8[0], colorAsUint8[1], colorAsUint8[2], opacityAsUint8)
    });
    // Keep control points ordered by position so interpolation can scan linearly.
    controlPoints.sort((a, b) => a.position - b.position);
  }
  /**
   * Regenerate the dense lookup table from the current control points.
   */
  lookupTableFromControlPoints() {
    const {lookupTable} = this;
    const {controlPoints} = this.trackable.value;
    lerpBetweenControlPoints(lookupTable, controlPoints);
  }
  /**
   * Move an existing control point to a new (position, opacity), keeping its color.
   * @param index index of the point to move in the control-point list.
   * @param position normalized [0, 1] x position.
   * @param opacity normalized [0, 1] opacity, stored as the uint8 alpha channel.
   * @returns the index of the moved point after re-sorting by position.
   */
  updatePoint(index: number, position: number, opacity: number) {
    const {controlPoints} = this.trackable.value;
    const positionAsIndex = this.positionToIndex(position);
    const opacityAsUint8 = floatToUint8(opacity);
    const color = controlPoints[index].color;
    controlPoints[index] = {
      position: positionAsIndex,
      color: vec4.fromValues(color[0], color[1], color[2], opacityAsUint8)
    };
    controlPoints.sort((a, b) => a.position - b.position);
    // The sort may have moved the point; report its new index to the caller.
    return controlPoints.findIndex((point) => point.position === positionAsIndex);
  }
  /**
   * Set the RGB color of a control point, preserving its existing opacity (alpha).
   */
  setPointColor(index: number, color: vec3) {
    const {controlPoints} = this.trackable.value;
    const colorAsUint8 =
        vec3.fromValues(floatToUint8(color[0]), floatToUint8(color[1]), floatToUint8(color[2]));
    controlPoints[index].color = vec4.fromValues(
        colorAsUint8[0], colorAsUint8[1], colorAsUint8[2], controlPoints[index].color[3]);
  }
}


/**
 * Create the bounds on the UI range inputs for the transfer function widget.
 * Returns the container element and the two bound inputs (lower, upper).
 */
function createRangeBoundInputs(
    dataType: DataType,
    // NOTE(review): type argument appears stripped by extraction; restored as
    // WatchableValueInterface<TransferFunctionParameters> — confirm upstream.
    model: WatchableValueInterface<TransferFunctionParameters>) {
  function createRangeBoundInput(endpoint: number): HTMLInputElement {
    const e = document.createElement('input');
    e.addEventListener('focus', () => {
      e.select();
    });
    e.classList.add('neuroglancer-transfer-function-widget-bound');
    e.type = 'text';
    e.spellcheck = false;
    e.autocomplete = 'off';
    e.title = `${endpoint === 0 ? 'Lower' : 'Upper'} bound for transfer function range`;
    return e;
  }

  const container = document.createElement('div');
  container.classList.add('neuroglancer-transfer-function-range-bounds');
  const inputs = [createRangeBoundInput(0), createRangeBoundInput(1)];
  for (let endpointIndex = 0; endpointIndex < 2; ++endpointIndex) {
    const input = inputs[endpointIndex];
    input.addEventListener('input', () => {
      updateInputBoundWidth(input);
    });
    input.addEventListener('change', () => {
      const existingBounds = model.value.range;
      const intervals = {range: existingBounds, window: existingBounds};
      try {
        const value = parseDataTypeValue(dataType, input.value);
        const range = getUpdatedRangeAndWindowParameters(
                          intervals, 'window', endpointIndex, value,
                          /*fitRangeInWindow=*/ true)
                          .window;
        model.value = {...model.value, range};
      } catch {
        // Unparseable text: restore the displayed value from the model.
        updateInputBoundValue(input, existingBounds[endpointIndex]);
      }
    });
  }
  container.appendChild(inputs[0]);
  container.appendChild(inputs[1]);
  return {container, inputs};
}

const inputEventMap = EventActionMap.fromObject({
  'shift?+mousedown0': {action: 'add-or-drag-point'},
  'shift?+dblclick0': {action: 'remove-point'},
  'shift?+mousedown2': {action: 'change-point-color'},
});

/**
 * Controller for the transfer function widget. Handles mouse events and updates to the model.
 */
class TransferFunctionController extends RefCounted {
  // Index of the point currently being dragged, or -1 if none.
  private currentGrabbedControlPointIndex: number = -1;
  constructor(
      public element: HTMLElement, public dataType: DataType,
      private controlPointsLookupTable: ControlPointsLookupTable,
      public getModel: () => TransferFunctionParameters,
      public setModel: (value: TransferFunctionParameters) => void) {
    super();
    element.title = inputEventMap.describe();
    this.registerDisposer(new MouseEventBinder(element, inputEventMap));
    // NOTE(review): the <MouseEvent> type arguments below appear to have been
    // stripped by extraction and are restored here — confirm against upstream.
    registerActionListener<MouseEvent>(element, 'add-or-drag-point', actionEvent => {
      const mouseEvent = actionEvent.detail;
      this.updateValue(this.addControlPoint(mouseEvent));
      startRelativeMouseDrag(mouseEvent, (newEvent: MouseEvent) => {
        this.updateValue(this.moveControlPoint(newEvent));
      });
    });
    registerActionListener<MouseEvent>(element, 'remove-point', actionEvent => {
      const mouseEvent = actionEvent.detail;
      const nearestIndex = this.findNearestControlPointIndex(mouseEvent);
      if (nearestIndex !== -1) {
        this.controlPointsLookupTable.trackable.value.controlPoints.splice(nearestIndex, 1);
        this.updateValue({
          ...this.getModel(),
          controlPoints: this.controlPointsLookupTable.trackable.value.controlPoints
        });
      }
    });
    registerActionListener<MouseEvent>(element, 'change-point-color', actionEvent => {
      const mouseEvent = actionEvent.detail;
      const nearestIndex = this.findNearestControlPointIndex(mouseEvent);
      if (nearestIndex !== -1) {
        const color = this.controlPointsLookupTable.trackable.value.color;
        this.controlPointsLookupTable.setPointColor(nearestIndex, color);
        this.updateValue({
          ...this.getModel(),
          controlPoints: this.controlPointsLookupTable.trackable.value.controlPoints
        });
      }
    });
  }
  /** Push a new parameter value to the model, ignoring undefined (no-op) results. */
  updateValue(value: TransferFunctionParameters|undefined) {
    if (value === undefined) return;
    this.setModel(value);
  }
  /** @returns index of the control point near the event, or -1 if none / outside canvas. */
  findNearestControlPointIndex(event: MouseEvent) {
    const position = this.getControlPointPosition(event);
    // Outside the canvas there is nothing to grab.
    if (position === undefined) return -1;
    return this.controlPointsLookupTable.grabControlPoint(position.normalizedX);
  }
  /**
   * Grab the nearest point, or add a new point if none is nearby.
   * @returns updated parameters when a point was added, otherwise undefined.
   */
  addControlPoint(event: MouseEvent): TransferFunctionParameters|undefined {
    const color = this.controlPointsLookupTable.trackable.value.color;
    const nearestIndex = this.findNearestControlPointIndex(event);
    if (nearestIndex !== -1) {
      // Grab the existing point instead of creating a new one.
      this.currentGrabbedControlPointIndex = nearestIndex;
      return undefined;
    }
    this.addPoint(event, color);
    this.currentGrabbedControlPointIndex = this.findNearestControlPointIndex(event);
    return {
      ...this.getModel(),
      controlPoints: this.controlPointsLookupTable.trackable.value.controlPoints
    };
  }
  /** Add a point at the event position; no-op when the event is outside the canvas. */
  addPoint(event: MouseEvent, color: vec3) {
    const position = this.getControlPointPosition(event);
    if (position === undefined) return;
    this.controlPointsLookupTable.addPoint(position.normalizedX, position.normalizedY, color);
  }
  /**
   * Drag the currently grabbed point to the event position.
   * @returns updated parameters, or undefined when no point is grabbed or the
   *     pointer left the canvas.
   */
  moveControlPoint(event: MouseEvent): TransferFunctionParameters|undefined {
    if (this.currentGrabbedControlPointIndex === -1) return undefined;
    const position = this.getControlPointPosition(event);
    if (position === undefined) return undefined;
    const {normalizedX, normalizedY} = position;
    this.currentGrabbedControlPointIndex = this.controlPointsLookupTable.updatePoint(
        this.currentGrabbedControlPointIndex, normalizedX, normalizedY);
    return {
      ...this.getModel(),
      controlPoints: this.controlPointsLookupTable.trackable.value.controlPoints
    };
  }
  /**
   * Convert a mouse event to normalized [0, 1] canvas coordinates (y up).
   * @returns undefined when the event lies outside the element bounds.
   */
  getControlPointPosition(event: MouseEvent): CanvasPosition|undefined {
    const clientRect = this.element.getBoundingClientRect();
    const normalizedX = (event.clientX - clientRect.left) / clientRect.width;
    const normalizedY = (clientRect.bottom - event.clientY) / clientRect.height;
    if (normalizedX < 0 || normalizedX > 1 || normalizedY < 0 || normalizedY > 1) return undefined;
    return {normalizedX, normalizedY};
  }
}
TODO (skm) the widget needs to have a controller for bindings +/** + * Widget for the transfer function. Creates the UI elements required for the transfer function. + */ +class TransferFunctionWidget extends Tab { + private transferFunctionPanel = this.registerDisposer(new TransferFunctionPanel(this)); + + range = createRangeBoundInputs(this.dataType, this.trackable); + constructor( + visibility: WatchableVisibilityPriority, public display: DisplayContext, + public dataType: DataType, + public trackable: WatchableValueInterface) { + super(visibility); + const {element} = this; + element.classList.add('neuroglancer-transfer-function-widget'); + element.appendChild(this.transferFunctionPanel.element); + + // Range bounds element + element.appendChild(this.range.container); + this.range.container.dispatchEvent(new Event('change')) + + // Color picker element + const colorPickerDiv = document.createElement('div'); + colorPickerDiv.classList.add('neuroglancer-transfer-function-color-picker'); + colorPickerDiv.addEventListener('mouseenter', (event: MouseEvent) => { + event.stopPropagation(); + event.preventDefault(); + colorPicker.element.disabled = false; + }) + colorPickerDiv.addEventListener('mouseleave', (event: MouseEvent) => { + event.stopPropagation(); + event.preventDefault(); + colorPicker.element.disabled = true; + }) + const colorPicker = this.registerDisposer(new ColorWidget( + makeCachedDerivedWatchableValue((x: TransferFunctionParameters) => x.color, [trackable]), + () => vec3.fromValues(1, 1, 1))); + colorPicker.element.disabled = true; + colorPicker.element.title = 'Transfer Function Color Picker' + colorPicker.element.id = 'neuroglancer-tf-color-widget'; + colorPicker.element.addEventListener('change', () => { + trackable.value = {...this.trackable.value, color: colorPicker.model.value}; + }); + colorPicker.element.addEventListener('input', () => { + trackable.value = {...this.trackable.value, color: colorPicker.model.value}; + }); + 
colorPickerDiv.appendChild(colorPicker.element); + + const colorLabel = document.createElement('label'); + colorLabel.setAttribute('for', 'neuroglancer-tf-color-widget'); + colorPickerDiv.appendChild(colorLabel); + element.appendChild(colorPickerDiv); + this.updateControlPointsAndDraw(); + this.registerDisposer(this.trackable.changed.add(() => { + this.updateControlPointsAndDraw(); + })); + updateInputBoundValue(this.range.inputs[0], this.trackable.value.range[0]); + updateInputBoundValue(this.range.inputs[1], this.trackable.value.range[1]); + }; + updateView() { + this.transferFunctionPanel.scheduleRedraw(); + } + updateControlPointsAndDraw() { + this.transferFunctionPanel.update(); + this.updateView(); + } +} +// TODO (skm) may need to follow the VariableDataTypeInvlerpWidget pattern + +/** + * Create a shader function for the transfer function to grab the nearest lookup table value + */ +export function defineTransferFunctionShader( + builder: ShaderBuilder, name: string, dataType: DataType, channel: number[]) { + builder.addUniform(`highp ivec4`, `uTransferFunctionParams_${name}`, TRANSFER_FUNCTION_LENGTH); + builder.addUniform(`float`, `uTransferFunctionGridSize_${name}`); + const invlerpShaderCode = + defineInvlerpShaderFunction(builder, name, dataType, true) as ShaderCodePart[]; + const shaderType = getShaderType(dataType); + // TODO (SKM) - bring in intepolation code option + // TODO (SKM) - use invlerp code to help this + let code = ` +vec4 ${name}(float inputValue) { + float gridMultiplier = uTransferFunctionGridSize_${name} - 1.0; + int index = clamp(int(round(inputValue * gridMultiplier)), 0, int(gridMultiplier)); + return vec4(uTransferFunctionParams_${name}[index]) / 255.0; +} +vec4 ${name}(${shaderType} inputValue) { + float v = computeInvlerp(inputValue, uLerpParams_${name}); + return ${name}(clamp(v, 0.0, 1.0)); +} +vec4 ${name}() { + if (!MAX_PROJECTION) { + return ${name}(getInterpolatedDataValue(${channel.join(',')})); + } + else { + float v = 
computeInvlerp(maxIntensity, uLerpParams_${name}); + return ${name}(clamp(v, 0.0, 1.0)); + } +} +`; + return [invlerpShaderCode[0], invlerpShaderCode[1], invlerpShaderCode[2], code] +} + +// TODO (skm) can likely optimize this +/** + * Create a lookup table and bind that lookup table to a shader via uniforms + */ +export function enableTransferFunctionShader( + shader: ShaderProgram, name: string, dataType: DataType, controlPoints: Array, + interval: DataTypeInterval) { + const {gl} = shader; + const transferFunction = new Int32Array(TRANSFER_FUNCTION_LENGTH * NUM_COLOR_CHANNELS); + lerpBetweenControlPoints(transferFunction, controlPoints); + switch (dataType) { + case DataType.UINT8: + case DataType.UINT16: + case DataType.INT8: + case DataType.INT16: + case DataType.FLOAT32: + gl.uniform4iv(shader.uniform(`uTransferFunctionParams_${name}`), transferFunction); + gl.uniform1f(shader.uniform(`uTransferFunctionGridSize_${name}`), TRANSFER_FUNCTION_LENGTH); + gl.uniform2f( + shader.uniform(`uLerpParams_${name}`), interval[0] as number, + 1 / ((interval[1] as number) - (interval[0] as number))); + break; + // TODO (skm) add support for other data types + // TODO (skm) easiest way might be to use the invlerp function + // TODO (skm) might be able to use a texture for this + default: + throw new Error(`Data type for transfer function not yet implemented: ${dataType}`); + } +} + +/** + * Describe the transfer function widget in the popup window for a tool + */ +export function activateTransferFunctionTool( + activation: ToolActivation, control: TransferFunctionWidget) { + activation.bindInputEventMap(inputEventMap); + control; +} + +/** + * Create a layer control factory for the transfer function widget + */ +export function transferFunctionLayerControl( + getter: (layer: LayerType) => { + watchableValue: WatchableValueInterface, + dataType: DataType, + }): LayerControlFactory { + return { + makeControl: (layer, context, options) => { + const {watchableValue, dataType} = 
getter(layer); + const control = context.registerDisposer(new TransferFunctionWidget( + options.visibility, options.display, dataType, watchableValue)); + return {control, controlElement: control.element}; + }, + activateTool: (activation, control) => { + activateTransferFunctionTool(activation, control); + }, + }; +} \ No newline at end of file