Hi Julien,
Thanks for that tip. I ran into the same error when I copied both of the lines you provided; however, it worked when I assigned reader.GetOutput() to outDS instead.
However, I’m running into another issue now. When I create the VTKHDF file with 100 png image slices as a 3D stack, the rendering works as expected. However, if I consider 500 png image slices, I get an error from the hdf5 library. My datasets are much larger than that, so this is a concern.
I wrote this script that creates a VTKHDF file from a folder containing png images and tries to render one slice. If I set the num_images variable to 100, it works, however, I get an error if I change it to 500.
import h5py
from PIL import Image
import numpy as np
from tqdm import tqdm
import matplotlib.pyplot as plt
import glob
import vtk
def create_vtkhdf_dataset(output_file, image_dir, image_height, image_width,
                          num_images, pixel_size_xy, pixel_size_z):
    """Write a stack of PNG slices into a VTKHDF ImageData file.

    Parameters
    ----------
    output_file : str
        Path of the .hdf file to create (overwritten if it exists).
    image_dir : str
        Folder containing the PNG slices (matched as image_dir\\*.png).
    image_height, image_width : int
        Pixel dimensions of every slice; all PNGs are assumed to match.
    num_images : int
        Number of slices to write (extra files in the folder are ignored).
    pixel_size_xy, pixel_size_z : float
        Physical voxel spacing written to the VTKHDF "Spacing" attribute.
    """
    with h5py.File(output_file, 'w') as hdffile:
        # Root group + attributes required by the VTKHDF ImageData layout.
        vtkhdf_group = hdffile.create_group("VTKHDF")
        vtkhdf_group.attrs.create("Version", [1, 0])
        vtkhdf_group.attrs.create("Type", np.string_("ImageData"))
        whole_extent = (0, image_width - 1, 0, image_height - 1, 0, num_images - 1)
        vtkhdf_group.attrs.create("WholeExtent", whole_extent)
        vtkhdf_group.attrs.create("Origin", (0.0, 0.0, 0.0))
        vtkhdf_group.attrs.create("Spacing", (pixel_size_xy, pixel_size_xy, pixel_size_z))
        vtkhdf_group.attrs.create("Direction", (1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0))
        # PointData group holding the scalar field the reader exposes.
        field_data_group = vtkhdf_group.create_group("PointData")
        field_data_group.attrs.create('Scalars', np.string_("PNGImage"))
        # Chunk per slice. With the default contiguous storage, reading the
        # whole array triggers one enormous low-level read (num_images * H * W
        # bytes; 4.5 GB at 500 slices), and the HDF5 sec2 driver on Windows
        # fails on a single read of >= 2 GiB (errno 22, 'Invalid argument' --
        # exactly the H5FD__sec2_read error in the pasted log). Chunked
        # storage makes the library read one slice-sized block at a time.
        dset = field_data_group.create_dataset(
            'PNGImage',
            dtype=np.uint8,
            shape=(num_images, image_height, image_width),
            chunks=(1, image_height, image_width))
        # Sort the filenames: glob.glob() returns files in arbitrary order on
        # some platforms, which would scramble the slice order in the stack.
        image_filenames = sorted(glob.glob(image_dir + '\\*.png'))
        # Copy each PNG into its z-slice of the dataset.
        for i, filename in enumerate(tqdm(image_filenames[:num_images])):
            with Image.open(filename) as img:
                dset[i] = np.asarray(img)
# To access individual 2D image slices from the HDF5 file, we can define a method like this:
def get_image_slice(hdf5_file, index):
    """Load one 2D slice of the PNGImage stack from a VTKHDF file.

    Parameters
    ----------
    hdf5_file : str
        Path to the VTKHDF file produced by create_vtkhdf_dataset().
    index : int
        Z index of the slice to read.

    Returns
    -------
    numpy.ndarray
        The (height, width) uint8 image data for that slice.
    """
    with h5py.File(hdf5_file, "r") as f:
        # h5py reads the selected slice eagerly, so returning inside the
        # `with` is safe -- the data is already in memory.
        return f["VTKHDF/PointData/PNGImage"][index]
def render_data(file):
    """Read a VTKHDF image stack and interactively display its first XY slice.

    Opens a render window and blocks in the interactor event loop until the
    user closes it. Assumes each slice is 3000x3000 pixels (see SetVOI below).
    """
    reader = vtk.vtkHDFReader()
    reader.SetFileName(file)
    reader.Update()
    # Instead, this worked when I took the reader output, but only when num_images = 100, not 500
    # Promote the named array to the active scalars so downstream filters
    # (vtkExtractVOI / vtkImageActor) know which field to render.
    outDS = reader.GetOutput()
    outDS.GetPointData().SetScalars(outDS.GetPointData().GetArray("PNGImage"))
    # Extract the z = 0 slice of the volume.
    imageXY = vtk.vtkExtractVOI()
    imageXY.SetInputConnection(reader.GetOutputPort())
    # NOTE(review): extents are hard-coded to a 3000x3000 slice -- should be
    # derived from the file's WholeExtent instead.
    imageXY.SetVOI(0, 2999, 0, 2999, 0, 0)
    # This did not work - got the same error 'No scalar Field has been specified - assuming 1 component!'
    # outDS = imageXY.GetOutput()
    # outDS.GetPointData().SetScalars(outDS.GetPointData().GetArray("PNGImage"))
    imageXY.Update()
    # Actor showing the extracted slice, shifted so the volume is centered
    # at the origin (half of 3000 in x/y, half of ~1000 in z).
    XYSliceActor = vtk.vtkImageActor()
    XYSliceActor.SetPosition(-1500, -1500, -500)
    XYSliceActor.GetMapper().SetInputConnection(imageXY.GetOutputPort())
    # Grayscale window/level mapping over the full uint8 range.
    ip = vtk.vtkImageProperty()
    ip.SetColorWindow(255)
    ip.SetColorLevel(128)
    ip.SetAmbient(0.0)
    ip.SetDiffuse(1.0)
    ip.SetOpacity(1.0)
    ip.SetInterpolationTypeToLinear()
    XYSliceActor.SetProperty(ip)
    XYSliceActor.Update()
    colors = vtk.vtkNamedColors()
    # Create the Renderer
    renderer = vtk.vtkRenderer()
    renderer.AddActor(XYSliceActor)
    renderer.ResetCamera()
    renderer.SetBackground(colors.GetColor3d('Silver'))
    # Create the RendererWindow
    renderer_window = vtk.vtkRenderWindow()
    renderer_window.AddRenderer(renderer)
    renderer_window.SetWindowName('ReadImageData')
    # Create the RendererWindowInteractor and display the VTKHDF file
    interactor = vtk.vtkRenderWindowInteractor()
    interactor.SetRenderWindow(renderer_window)
    interactor.Initialize()
    interactor.Start()
if __name__ == '__main__':
    # Path to the folder containing the PNG slices.
    folder_path = r'path\to\data'
    image_dir = folder_path
    # Use a raw string for the path suffix: in '\stack.hdf' the sequence
    # '\s' is an invalid escape (DeprecationWarning today, a SyntaxError in
    # future Python versions).
    output_file = folder_path + r'\stack.hdf'
    image_height = 3000
    image_width = 3000
    # Works when this is 100, but not 500
    num_images = 500
    pixel_size = 1
    pixel_size_z = 1
    # Create the dataset
    create_vtkhdf_dataset(output_file, image_dir, image_height, image_width,
                          num_images, pixel_size, pixel_size_z)
    # Sanity-check: read one slice straight back from the file and show it
    # with matplotlib before handing the file to VTK.
    img_slice = get_image_slice(output_file, num_images - 10)
    plt.imshow(img_slice)
    plt.show()
    render_data(output_file)
I get the following error when num_images is set to 500.
HDF5-DIAG: Error detected in HDF5 (1.13.1) thread 0:
#000: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5D.c line 1021 in vtkhdf5_H5Dread(): can't synchronously read data
major: Dataset
minor: Read failed
#001: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5D.c line 970 in H5D__read_api_common(): can't read data
major: Dataset
minor: Read failed
#002: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5VLcallback.c line 2079 in vtkhdf5_H5VL_dataset_read(): dataset read failed
major: Virtual Object Layer
minor: Read failed
#003: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5VLcallback.c line 2046 in H5VL__dataset_read(): dataset read failed
major: Virtual Object Layer
minor: Read failed
#004: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5VLnative_dataset.c line 294 in vtkhdf5_H5VL__native_dataset_read(): can't read data
major: Dataset
minor: Read failed
#005: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5Dio.c line 262 in vtkhdf5_H5D__read(): can't read data
major: Dataset
minor: Read failed
#006: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5Dcontig.c line 610 in vtkhdf5_H5D__contig_read(): contiguous read failed
major: Dataset
minor: Read failed
#007: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5Dselect.c line 465 in vtkhdf5_H5D__select_read(): read error
major: Dataspace
minor: Read failed
#008: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5Dselect.c line 220 in H5D__select_io(): read error
major: Dataspace
minor: Read failed
#009: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5Dcontig.c line 934 in H5D__contig_readvv(): can't perform vectorized sieve buffer read
major: Dataset
minor: Can't operate on object
#010: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5VM.c line 1401 in vtkhdf5_H5VM_opvv(): can't perform operation
major: Internal error (too specific to document in detail)
minor: Can't operate on object
#011: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5Dcontig.c line 739 in H5D__contig_readvv_sieve_cb(): block read failed
major: Dataset
minor: Read failed
#012: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5Fio.c line 105 in vtkhdf5_H5F_shared_block_read(): read through page buffer failed
major: Low-level I/O
minor: Read failed
#013: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5PB.c line 718 in vtkhdf5_H5PB_read(): read through metadata accumulator failed
major: Page Buffering
minor: Read failed
#014: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5Faccum.c line 252 in vtkhdf5_H5F__accum_read(): driver read request failed
major: Low-level I/O
minor: Read failed
#015: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5FDint.c line 205 in vtkhdf5_H5FD_read(): driver read request failed
major: Virtual File Layer
minor: Read failed
#016: C:\glr\builds\vtk\vtk-ci-ext\0\ThirdParty\hdf5\vtkhdf5\src\H5FDsec2.c line 743 in H5FD__sec2_read(): file read failed: time = Wed Apr 19 20:06:35 2023
, filename = 'D:\data\example\stack.hdf', file descriptor = 4, errno = 22, error message = 'Invalid argument', buf = 00000237C0218040, total read size = 4500000000, bytes this sub-read = 2147483647, bytes actually read = 18446744073709551615, offset = 4112
major: Low-level I/O
minor: Read failed
2023-04-19 20:06:35.254 ( 21.489s) [ ]vtkHDFReaderImplementat:942 ERR| vtkHDFReader (00000237BBAD7EA0): Error H5Dread start: 0, 0, 0 count: 500, 3000, 3000
2023-04-19 20:06:35.313 ( 21.549s) [ ] vtkHDFReader.cxx:391 ERR| vtkHDFReader (00000237BBAD7EA0): Error reading array PNGImage
2023-04-19 20:06:35.322 ( 21.558s) [ ] vtkExecutive.cxx:741 ERR| vtkCompositeDataPipeline (00000237B8F5C6B0): Algorithm vtkHDFReader (00000237BBAD7EA0) returned failure for request: vtkInformation (00000237B8D7E810)
Debug: Off
Modified Time: 171
Reference Count: 1
Registered Events: (none)
Request: REQUEST_DATA
FORWARD_DIRECTION: 0
ALGORITHM_AFTER_FORWARD: 1
FROM_OUTPUT_PORT: 0
2023-04-19 20:06:35.879 ( 22.114s) [ ] vtkImageData.cxx:1412 ERR| vtkImageData (00000237B3692970): No Scalar Field has been specified - assuming 1 component!
I’m also attaching an example png file here, which can be duplicated using shutil.copy2() to quickly create as large an image stack as desired. The variable folder_path needs to be modified to point to that directory if you’d like to test it at your end.
Do you have any thoughts on why this is happening? I really appreciate all your help so far.
Thanks,
Chaitanya