Need help implementing CPR with vtk.js and OHIF

Hello,

I’m trying to implement CPR (curved planar reformation) visualization using vtk.js in OHIF. I am following the example here:

I have implemented my own viewport in OHIF to display the CPR result:

import React, { useEffect, useRef } from 'react';

import { eventTarget, Enums as CoreEnums } from '@cornerstonejs/core';

import { Enums } from '@cornerstonejs/tools';

import vtkRenderer from '@kitware/vtk.js/Rendering/Core/Renderer';

import vtkRenderWindow from '@kitware/vtk.js/Rendering/Core/RenderWindow';

import vtkOpenGLRenderWindow from '@kitware/vtk.js/Rendering/OpenGL/RenderWindow';

import vtkRenderWindowInteractor from '@kitware/vtk.js/Rendering/Core/RenderWindowInteractor';

import vtkImageCPRMapper from '@kitware/vtk.js/Rendering/Core/ImageCPRMapper';

import vtkImageSlice from '@kitware/vtk.js/Rendering/Core/ImageSlice';

import vtkPolyData from '@kitware/vtk.js/Common/DataModel/PolyData';

import vtkPoints from '@kitware/vtk.js/Common/Core/Points';

import { ProjectionMode } from '@kitware/vtk.js/Rendering/Core/ImageCPRMapper/Constants';

import vtkDataArray from '@kitware/vtk.js/Common/Core/DataArray';




/**
 * convertPolylineToPolyData
 *
 * Bridges Cornerstone3D (Point3[]) to vtk.js (vtkPolyData).
 *
 * 1. THE DATA STRUCTURES:
 *    Cornerstone uses a JavaScript array of arrays: [[x,y,z], [x,y,z], ...]
 *    vtk.js uses "Points" (raw coordinates) and "Cells" (how points connect).
 *
 * 2. THE POINTS (the "where"):
 *    VTK requires a single flat TypedArray of numbers.
 *    We turn [[1,1,1], [2,2,2]] into [1,1,1,2,2,2], which is much faster for the
 *    GPU to process than nested arrays.
 *
 * 3. THE TOPOLOGY/CELLS (the "how"):
 *    Points alone don't make a line; VTK needs to know the traversal order.
 *    We define a single "line cell" in the format [count, index0, index1, index2, ...].
 *    Example: [4, 0, 1, 2, 3] means "one polyline connecting these 4 point indices".
 */




export function convertPolylineToPolyData(polyline: number[][]) {

  // --- PART 1: POINTS ---

  const vtkPts = vtkPoints.newInstance();




  // Create a high-performance buffer (Float32Array)

  // Length is polyline length * 3 (for x, y, and z)

  const flatPoints = new Float32Array(polyline.length * 3);




  for (let i = 0; i < polyline.length; i++) {

    flatPoints[i * 3]     = polyline[i][0]; // x

    flatPoints[i * 3 + 1] = polyline[i][1]; // y

    flatPoints[i * 3 + 2] = polyline[i][2]; // z

  }




  // Load the flattened numbers into the VTK points object

  // The '3' tells VTK to group every 3 numbers as one point

  vtkPts.setData(flatPoints, 3);




  // --- PART 2: CONNECTIVITY (CELLS) ---

  const polyData = vtkPolyData.newInstance();




  // We define one single "Cell" (the polyline)

  // Format: [numPoints, ptId1, ptId2, ptId3...]

  const numPoints = polyline.length;

  const lineCells = new Uint32Array(numPoints + 1);




  lineCells[0] = numPoints; // The first number is the count of points in the line

  for (let i = 0; i < numPoints; i++) {

    lineCells[i + 1] = i; // The following numbers are the IDs (0, 1, 2, 3...)

  }




  // --- PART 3: ASSEMBLY ---

  // Connect the points and the topology into one PolyData object

  polyData.setPoints(vtkPts);

  polyData.getLines().setData(lineCells);




  // Build the cell links so downstream consumers can traverse the topology
  polyData.buildLinks();




  return polyData;

}
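
// Quick sanity check (illustrative only, not required by the viewport): for a
// 3-point polyline the flat points buffer has 9 values and the single line cell
// reads [3, 0, 1, 2].
// const test = convertPolylineToPolyData([[0, 0, 0], [0, 0, 10], [0, 0, 20]]);
// test.getPoints().getNumberOfPoints(); // 3
// test.getLines().getData();            // Uint32Array [3, 0, 1, 2]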




function OHIFVtkCPRViewport({ displaySets, viewportOptions, servicesManager }) {

  const containerRef = useRef(null);




  useEffect(() => {

    // const imageData = volume.imageData;




    // 1. Initialize VTK Generic Render Window

    const renderer = vtkRenderer.newInstance();

    const renderWindow = vtkRenderWindow.newInstance();

    renderWindow.addRenderer(renderer);




    const openGLRenderWindow = vtkOpenGLRenderWindow.newInstance();

    renderWindow.addView(openGLRenderWindow);

    openGLRenderWindow.setContainer(containerRef.current);




    const interactor = vtkRenderWindowInteractor.newInstance();

    interactor.setView(openGLRenderWindow);

    interactor.initialize();
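
    // Suggested addition (assumption, not in the original post): setContainer() only
    // attaches the canvas; it does not size the view to the container, and without a
    // non-zero size the CPR image may never appear. A minimal sketch:
    const { width, height } = containerRef.current.getBoundingClientRect();
    openGLRenderWindow.setSize(Math.max(1, Math.floor(width)), Math.max(1, Math.floor(height)));
    // Optional: bind the interactor to the container if mouse interaction is wanted
    interactor.bindEvents(containerRef.current);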





    // 2. Set up the CPR mapper and actor

    const mapper = vtkImageCPRMapper.newInstance();

    mapper.useStraightenedMode();

    mapper.setUseUniformOrientation(true);

    // Set the width (if 0, nothing renders)

    mapper.setWidth(100);




    // Provide an identity matrix as the base orientation
    // (a 16-element array, i.e. a 4x4 matrix)

    mapper.setUniformOrientation([

      1, 0, 0, 0,

      0, 1, 0, 0,

      0, 0, 1, 0,

      0, 0, 0, 1

    ]);




    const actor = vtkImageSlice.newInstance();

    // mapper.setImageData(imageData)




    actor.setMapper(mapper as any);

    renderer.addActor(actor);

    renderer.setBackground(0.0, 0.0, 0.0);




    // 3. Load Volume Data (Simplified)

    // const displaySet = displaySets[0];

    // const { imageData } = displaySet; // Assuming displaySet has vtkImageData

    // mapper.setInputData(imageData);




    const onAnnotationModified = (evt) => {

      const { annotation } = evt.detail;




      // Ensure we only update if it's our CenterlineTool

      if (annotation.metadata.toolName === 'CenterlineTool') {

        const polyline = annotation.data.polyline;





        // Only update if we have at least two points
        if (polyline && polyline.length >= 2) {

          // Convert to vtkPolyData

          const centerlinePolyData = convertPolylineToPolyData(polyline);




          // getInputData() with no argument returns input port 0 (the centerline);
          // the volume set via setImageData() is assumed to live on port 1
          if (!mapper.getInputData(1)) {
            console.warn("Mapper has no volume data yet. Skipping render.");
            return;
          }




          // Feed the centerline to the mapper
          mapper.setCenterlineData(centerlinePolyData);
          console.log("Centerline data set on mapper", centerlinePolyData.get());

          // Reset the camera so it fits the mapper's new bounds
          renderer.resetCamera();




          try {

            renderWindow.render();

          } catch (e) {

            console.error("VTK Render failed:", e);

          }

        }
      }
    };




    const handleVolumeLoaded = (evt) => {

      const { volume } = evt.detail;




      const targetVolumeId = `cornerstoneStreamingImageVolume:${displaySets[0].displaySetInstanceUID}`;

      if (volume && volume.volumeId === targetVolumeId) {

        const imageData = volume.imageData;

        // The fix from issue 1677:
        // 1. Get the raw scalar data using the VoxelManager

        const scalarData = volume.voxelManager.getCompleteScalarDataArray();




        // 2. Create a fresh vtkDataArray and force it into the vtkImageData

        const scalarArray = vtkDataArray.newInstance({

          name: 'Pixels', // Usually 'Scalars' or 'Pixels'

          numberOfComponents: 1,

          values: scalarData,

        });




        // 3. Manually set the scalars so VTK recognizes them as 'valid' for textures

        imageData.getPointData().setScalars(scalarArray);

        imageData.modified();

        mapper.setImageData(imageData);

        mapper.setProjectionMode(ProjectionMode.AVERAGE);

        mapper.setProjectionSlabNumberOfSamples(100);

        mapper.setProjectionSlabThickness(20);

        try {

          renderer.resetCamera();

          renderWindow.render();

        } catch (e) {

          console.warn("Initial VTK sync render skipped (expected if no centerline)");

        }

      }

    };




    // Listen for updates from Cornerstone

    eventTarget.addEventListener(Enums.Events.ANNOTATION_MODIFIED, onAnnotationModified);

    eventTarget.addEventListener(Enums.Events.ANNOTATION_COMPLETED, onAnnotationModified);

    eventTarget.addEventListener(CoreEnums.Events.VOLUME_LOADED, handleVolumeLoaded);
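
    // Suggested addition (assumption): if the target volume finished loading before
    // this effect ran, VOLUME_LOADED will never fire for it. A sketch that also checks
    // the cache ('cache' is exported by @cornerstonejs/core):
    // const cached = cache.getVolume(
    //   `cornerstoneStreamingImageVolume:${displaySets[0].displaySetInstanceUID}`
    // );
    // if (cached?.imageData) {
    //   handleVolumeLoaded({ detail: { volume: cached } });
    // }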




    return () => {

      // Cleanup

      interactor.delete();

      openGLRenderWindow.delete();

      renderWindow.delete();

      eventTarget.removeEventListener(Enums.Events.ANNOTATION_MODIFIED, onAnnotationModified);

      eventTarget.removeEventListener(Enums.Events.ANNOTATION_COMPLETED, onAnnotationModified);

      eventTarget.removeEventListener(CoreEnums.Events.VOLUME_LOADED, handleVolumeLoaded);

    };

  }, []);




  return <div id="cpr-result" ref={containerRef} style={{ width: '100%', height: '100%' }} />;

}




OHIFVtkCPRViewport.displayName = 'OHIFVtkCPRViewport';

export default OHIFVtkCPRViewport;

I also implemented my own CenterlineTool, which I use to draw the centerline on the MPR planes:

import { AnnotationTool, annotation, drawing, utilities } from '@cornerstonejs/tools';

import { getEnabledElement, getEnabledElements, eventTarget } from '@cornerstonejs/core';

import { Events } from '@cornerstonejs/tools/enums';

import { Point3 } from '@cornerstonejs/core/types/Point3';




// Destructure the correct sub-modules

const { state } = annotation;

const { drawHandles, drawPolyline } = drawing;

// Accessing math and spline utilities correctly

const { lineSegment } = utilities.math;

const { triggerAnnotationRenderForViewportIds } = utilities;

const { getViewportIdsWithToolToRender } = utilities.viewportFilters;




class CenterlineTool extends AnnotationTool {

  static toolName = 'CenterlineTool';

  private onAnnotationUpdate: Function;

  private element;




  constructor(

    toolProps = {},

    defaultToolProps = {

      supportedInteractionTypes: ['Mouse', 'Touch'],

      configuration: {

        // Spline resolution: points between control handles

        resolution: 20,

      },

    }

  ) {

    super(toolProps, defaultToolProps);

    // Fall back to a no-op so later calls don't throw when no callback is configured
    this.onAnnotationUpdate = this.configuration.onAnnotationUpdate ?? (() => {});

    // Listen for when annotations are removed (happens during Undo)

    // eventTarget.addEventListener(Events.ANNOTATION_REMOVED, this._onAnnotationRemoved);

  }




  // _onAnnotationRemoved = evt => {

  //   console.log('Annotation removed event detected:', evt);

  //   const { annotation } = evt.detail;




  //   // 1. Only act if it's our tool

  //   if (annotation.metadata.toolName !== CenterlineTool.toolName) {

  //     return;

  //   }




  //   // Get the Frame of Reference from the deleted annotation's metadata

  //   const { FrameOfReferenceUID } = annotation.metadata;




  //   // Corrected: Pass the toolName AND the FrameOfReferenceUID

  //   const remainingAnnotations = state.getAnnotations(CenterlineTool.toolName, FrameOfReferenceUID);




  //   // 3. Only clear the CPR if that was the last one

  //   if (!remainingAnnotations || remainingAnnotations.length === 0) {

  //     this._forceResetCPR();

  //   }

  // };




  // _forceResetCPR() {

  //   // We need to tell the CPR Viewport there is no path left.

  //   // We do this by triggering a modified event with a "null" or empty object

  //   // that the CPR mapper is listening for.

  //   const emptyData = {

  //     metadata: { toolName: CenterlineTool.toolName },

  //     data: { polyline: [] },

  //     isClearEvent: true, // Custom flag your CPR mapper can check

  //   };




  //   state.triggerAnnotationModified(emptyData);




  //   // 1. Get all viewports currently "alive" in the browser

  //   const allEnabledElements = getEnabledElements();




  //   if (allEnabledElements.length === 0) return;




  //   // 2. Use the first one to find the ToolGroup viewports

  //   // We use the .viewport.element property

  //   const firstElement = allEnabledElements[0].viewport.element;




  //   const viewportIds = getViewportIdsWithToolToRender(firstElement, CenterlineTool.toolName);




  //   // 3. Trigger the render

  //   if (viewportIds.length > 0) {

  //     triggerAnnotationRenderForViewportIds(viewportIds);

  //   }

  // }




  // --- IMPLEMENTING ABSTRACT METHODS ---




  isPointNearTool(element, annotation, canvasCoords, proximity) {

    const enabledElement = getEnabledElement(element);

    const { viewport } = enabledElement;

    const { points } = annotation.data.handles;




    for (let i = 0; i < points.length - 1; i++) {

      const p1 = viewport.worldToCanvas(points[i]);

      const p2 = viewport.worldToCanvas(points[i + 1]);




      // Corrected utility path: utilities.math.lineSegment

      const distance = lineSegment.distanceToPoint(

        [p1[0], p1[1]],

        [p2[0], p2[1]],

        [canvasCoords[0], canvasCoords[1]]

      );

      if (distance <= proximity) return true;

    }

    return false;

  }




  toolSelectedCallback(evt, annotation) {

    annotation.highlighted = true;

    evt.preventDefault();

  }




  handleSelectedCallback(evt, annotation, handle) {

    const { element } = evt.detail;

    const { points } = annotation.data.handles;

    const handleIndex = points.indexOf(handle);




    annotation.highlighted = true;

    this.editData = {

      annotation,

      viewportIdsToRender: [getEnabledElement(element).viewport.id],

      handleIndex,

      // We remove movingPoint as it's not in the standard interface

      newAnnotation: false,

      hasMoved: false,

    };




    this._activateModify(element);

    evt.preventDefault();

  }




  cancel(element) {

    if (this.editData) {

      const { annotation } = this.editData;

      if (this.editData.newAnnotation) {

        state.removeAnnotation(annotation.annotationUID);

      }

    }

    this.editData = null;

  }




  // --- CORE LOGIC: DRAWING & INTERPOLATION ---




  addNewAnnotation = evt => {

    const eventDetail = evt.detail;

    const { currentPoints, element } = eventDetail;

    const worldPos = currentPoints.world;

    const enabledElement = getEnabledElement(element);

    const { viewport } = enabledElement;




    this.isDrawing = true; // Track drawing state




    // Create a new centerline annotation anchored at the clicked world position

    const annotate = CenterlineTool.createAnnotationForViewport(viewport, {

      highlighted: true,

      invalidated: true,

      metadata: {

        toolName: CenterlineTool.toolName,

        viewPlaneNormal: [...viewport.getCamera().viewPlaneNormal],

        FrameOfReferenceUID: viewport.getFrameOfReferenceUID(),

        referencedImageId: viewport.getCurrentImageId(),

      },

      data: {

        handles: {

          points: [

            [worldPos[0], worldPos[1], worldPos[2]] as Point3,

            [worldPos[0], worldPos[1], worldPos[2]] as Point3,

          ],

          activeHandleIndex: null,

        },

        polyline: [[...worldPos], [...worldPos]],

      },

    });




    state.addAnnotation(annotate, element);




    const viewportIdsToRender = getViewportIdsWithToolToRender(element, this.getToolName());




    this.editData = {

      annotation: annotate,

      viewportIdsToRender,

      handleIndex: 1,

      newAnnotation: true,

      hasMoved: false,

    };




    this._activateDraw(element);

    // Start the memo for undo/redo immediately

    this.createMemo(element, annotate, { newAnnotation: true });

    evt.preventDefault();




    triggerAnnotationRenderForViewportIds(viewportIdsToRender);




    return annotate;

  };




  /**

   * Generates a smooth curve between control points.

   */

  _getCustomSplinePoints(canvasPoints: any[], resolution: number) {

    if (canvasPoints.length < 2) return canvasPoints;




    const newPoints = [];

    // We iterate through each segment between handles

    for (let i = 0; i < canvasPoints.length - 1; i++) {

      const p0 = i > 0 ? canvasPoints[i - 1] : canvasPoints[i];

      const p1 = canvasPoints[i];

      const p2 = canvasPoints[i + 1];

      const p3 = i < canvasPoints.length - 2 ? canvasPoints[i + 2] : p2;




      for (let t = 0; t < resolution; t++) {

        const alpha = t / resolution;

        const alpha2 = alpha * alpha;

        const alpha3 = alpha2 * alpha;




        // Catmull-Rom formula

        const x =

          0.5 *

          (2 * p1[0] +

            (-p0[0] + p2[0]) * alpha +

            (2 * p0[0] - 5 * p1[0] + 4 * p2[0] - p3[0]) * alpha2 +

            (-p0[0] + 3 * p1[0] - 3 * p2[0] + p3[0]) * alpha3);




        const y =

          0.5 *

          (2 * p1[1] +

            (-p0[1] + p2[1]) * alpha +

            (2 * p0[1] - 5 * p1[1] + 4 * p2[1] - p3[1]) * alpha2 +

            (-p0[1] + 3 * p1[1] - 3 * p2[1] + p3[1]) * alpha3);




        newPoints.push([x, y]);

      }

    }

    // Add the very last point

    newPoints.push(canvasPoints[canvasPoints.length - 1]);

    return newPoints;

  }
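
  // Sanity note (illustrative): at alpha = 0 the Catmull-Rom formula reduces to
  // 0.5 * (2 * p1) = p1, so each segment starts exactly at its control handle and
  // consecutive segments join without gaps. For 3 handles and resolution = 4 this
  // produces 2 * 4 + 1 = 9 interpolated canvas points.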




  renderAnnotation(enabledElement, svgDrawingHelper) {

    const { viewport } = enabledElement;

    const { element } = viewport;

    const annotations = state.getAnnotations(CenterlineTool.toolName, element);




    if (!annotations?.length) return;




    annotations.forEach(annotation => {

      const { points } = annotation.data.handles;

      const canvasPoints = points.map(p => viewport.worldToCanvas(p));

      const { annotationUID } = annotation;




      if (canvasPoints.length >= 2) {

        // Generate the smooth path with the custom Catmull-Rom interpolation (_getCustomSplinePoints)

        const interpolatedCanvasPoints = this._getCustomSplinePoints(

          canvasPoints,

          this.configuration.resolution

        );




        drawPolyline(svgDrawingHelper, annotationUID, 'centerline', interpolatedCanvasPoints, {

          color: 'red',

          lineWidth: 2,

        });




        // Store the smooth path in world coordinates for vtkImageCPRMapper

        annotation.data.polyline = interpolatedCanvasPoints.map(p => viewport.canvasToWorld(p));
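        // Note: canvasToWorld() maps canvas points back onto the viewport's current
        // image/focal plane, so this world-space polyline is planar (it lies in the
        // plane the centerline was drawn on).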

      }




      drawHandles(svgDrawingHelper, annotationUID, 'handles', canvasPoints, {

        color: 'rgb(255, 255, 0)', // Yellow border

        fill: 'rgb(255, 255, 0)', // Yellow fill

        fillOpacity: 1.0, // Ensure it is fully opaque/filled

        handleRadius: 3, // Smaller size (default is usually 6)

        type: 'rect',

      });

    });

  }




  // --- EVENT HELPERS ---

  _activateDraw(element) {

    // We switch from MOUSE_DRAG to MOUSE_MOVE for the follower effect

    element.addEventListener(Events.MOUSE_MOVE, this._mouseMoveCallback);

    element.addEventListener(Events.MOUSE_CLICK, this._clickCallback);

    element.addEventListener(Events.MOUSE_DOUBLE_CLICK, this._endCallback);




    // Store element in editData so the keydown callback can find it

    this.element = element;

    // 2. Use 'true' for the useCapture parameter.

    // This lets your tool "intercept" the key before OHIF sees it.

    window.addEventListener('keydown', this._keyDownCallback, true);

  }




  _deactivateDraw(element) {

    element.removeEventListener(Events.MOUSE_MOVE, this._mouseMoveCallback);

    element.removeEventListener(Events.MOUSE_CLICK, this._clickCallback);

    element.removeEventListener(Events.MOUSE_DOUBLE_CLICK, this._endCallback);

    // The 'true' (capture) flag must match the one used in addEventListener

    this.element = null;

    window.removeEventListener('keydown', this._keyDownCallback, true);

  }




  _clickCallback = evt => {

    const { currentPoints, element } = evt.detail;

    const worldPos = currentPoints.world;

    const { annotation } = this.editData;




    // 1. Commit the memo for the segment you just finished positioning

    this.doneEditMemo();

    this.onAnnotationUpdate();




    // Add point and advance the index

    annotation.data.handles.points.push([...worldPos]);

    annotation.invalidated = true;

    this.editData.handleIndex++;




    // 3. Start a NEW memo for the next segment that will follow the mouse

    // This ensures that if you undo, you only remove the last point, not the whole line

    this.createMemo(element, annotation, { newAnnotation: false });




    // IMPORTANT: tell other viewports (like the CPR viewport) that the data changed

    state.triggerAnnotationModified(annotation, element);

  };




  _keyDownCallback = evt => {

    // Use evt.key for modern browsers

    if (evt.key === 'Escape') {

      console.log('CenterlineTool: Escape pressed');

      this._endCallback({ detail: { element: this.element } });

    }

  };




  _mouseMoveCallback = evt => {

    const { currentPoints, element } = evt.detail;

    const worldPos = currentPoints.world;

    const { annotation, viewportIdsToRender, handleIndex } = this.editData;




    const { points } = annotation.data.handles;




    // Update the follower point to the current mouse position

    points[handleIndex] = [...worldPos];

    annotation.invalidated = true;

    this.editData.hasMoved = true;




    triggerAnnotationRenderForViewportIds(viewportIdsToRender);




    // Notifying other viewports on every mouse move is disabled here to avoid
    // spamming the CPR viewport; the update is triggered on click instead

    // state.triggerAnnotationModified(annotation, element);

  };




  _endCallback = evt => {

    const eventDetail = evt.detail;

    const { element } = eventDetail;




    const { annotation, viewportIdsToRender, newAnnotation, hasMoved } = this.editData;




    this._deactivateModify(element);

    this._deactivateDraw(element);




    // Commit the history snapshot (for undo/redo)

    this.doneEditMemo();

    this.isDrawing = false;

    this.onAnnotationUpdate();




    // 2. Trigger completion (This tells the Undo system the tool is "done")

    if (newAnnotation) {

      state.triggerAnnotationCompleted(annotation);

    }




    // 3. IMPORTANT: Manually tell the annotation manager the data has changed

    // one last time. This often nudges the HistoryService to refresh its 'canUndo' state.

    state.triggerAnnotationModified(annotation, element);




    this.editData = null;

    this.isDrawing = false;




    // Re-render to ensure visual state is clean

    triggerAnnotationRenderForViewportIds(viewportIdsToRender);





  };




  _activateModify(element) {

    /* implementation for moving handles */

  }

  _deactivateModify(element) {

    /* implementation for moving handles */

  }




  _clearCPRViewports(annotation) {

    // Create a dummy "empty" version of the data

    const emptyAnnotation = {

      ...annotation,

      data: {

        ...annotation.data,

        polyline: [], // Clear the path

        handles: { points: [] },

      },

      isDeleted: true, // Custom flag if your CPR logic checks for it

    };




    // Trigger modified one last time to force the CPR viewport to re-render with no points

    state.triggerAnnotationModified(emptyAnnotation);

  }

}

export default CenterlineTool;

The problem is that the final straightened image is not what I expect, and sometimes the image doesn’t show at all. I’m not entirely sure where the problem is.

I suggest you try simple cases where you change the configuration of the provided centerline (e.g. permute some columns or rows in the provided centerline matrices).
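
For example (a rough sketch that reuses the convertPolylineToPolyData helper from the viewport code above, with made-up coordinates), you can bypass the annotation events entirely and feed the mapper a hard-coded straight centerline, to check that the volume/mapper wiring renders at all before debugging the drawn centerline:

// Hard-coded test centerline; adjust the coordinates so they fall inside the
// volume's world-space bounds.
const testPolyline = [
  [0, 0, 0],
  [0, 0, 50],
  [0, 0, 100],
];
mapper.setCenterlineData(convertPolylineToPolyData(testPolyline));
renderer.resetCamera();
renderWindow.render();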