Commit 89bb8a3

feat: many more examples

1 parent: 9cb5e18

9 files changed: +165 -40 lines

new_examples/compare_decimated.py (new file, +52)
@@ -0,0 +1,52 @@
+import logging
+from pathlib import Path
+
+import trimesh
+from cryoet_data_portal_neuroglancer.precompute.instance_mesh import (
+    scale_and_decimate_mesh,
+)
+from cryoet_data_portal_neuroglancer.io import load_glb_file
+from cryoet_data_portal_neuroglancer.precompute.mesh import (
+    generate_standalone_sharded_multiresolution_mesh,
+)
+
+
+JSON_PATH = Path(
+    r"/media/starfish/LargeSSD/data/cryoET/data/Annotations-test-run/100-proton_transporting_atp_synthase_complex-1.0.json"
+)
+
+MESH_PATH = Path(
+    r"/media/starfish/LargeSSD/data/cryoET/data/meshes-oriented/atpase.glb"
+)
+
+OUTPUT_PATH = Path(r"/media/starfish/LargeSSD/data/cryoET/data/converted-01122021/")
+
+logging.basicConfig(level=logging.INFO, force=True)
+scene = load_glb_file(MESH_PATH)
+scaled, decimated_meshes = scale_and_decimate_mesh(scene, 10, 4.5)
+
+for i, mesh in enumerate(decimated_meshes):
+    print(i, len(mesh.faces))
+
+for i, mesh in enumerate(decimated_meshes):
+    mesh.export(OUTPUT_PATH / f"mesh_lod{i}.glb")
+
+new_scene = trimesh.Scene()
+for i, mesh in enumerate(decimated_meshes):
+    new_scene.add_geometry(mesh.copy().apply_translation([i * 20, 0, 0]))
+
+
+new_scene.export(OUTPUT_PATH / "meshoutput.glb")
+
+# new_scene.show()
+
+min_lod_mesh = decimated_meshes[0]
+max_lod_mesh = decimated_meshes[-1]
+print(len(decimated_meshes))
+
+generate_standalone_sharded_multiresolution_mesh(
+    trimesh.Scene(min_lod_mesh), OUTPUT_PATH / "min_lod_mesh", 0
+)
+generate_standalone_sharded_multiresolution_mesh(
+    trimesh.Scene(max_lod_mesh), OUTPUT_PATH / "max_lod_mesh", 0
+)
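Not part of the commit: a minimal sketch of checking the decimation numerically rather than by eye, assuming the decimated_meshes list of trimesh.Trimesh objects from compare_decimated.py above (only standard trimesh attributes are used).

# Illustrative only: summarize each decimated LOD produced by compare_decimated.py.
for lod, mesh in enumerate(decimated_meshes):
    print(
        f"LOD {lod}: {len(mesh.faces)} faces, {len(mesh.vertices)} vertices, "
        f"area {mesh.area:.1f}, extents {mesh.extents}"
    )

The face counts show how aggressively each LOD was decimated, while the extents should stay roughly constant across LODs, a quick check that decimation reduced detail without changing the overall shape.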

new_examples/convert_mesh_seg.py (+7 -7)
@@ -1,21 +1,21 @@
 from pathlib import Path
+import logging
 from cryoet_data_portal_neuroglancer.precompute.segmentation_mask import (
     encode_segmentation,
 )
 
-INPUT_FILENAME = (
-    r"/media/starfish/LargeSSD/data/cryoET/data/00004_actin_ground_truth_zarr"
-)
-OUTPUT_PATH = Path(
-    r"/media/starfish/LargeSSD/data/cryoET/data/new_actin_converted_mesh_no_res/"
-)
+# Set up logging at INFO level
+logging.basicConfig(level=logging.INFO, force=True)
+
+INPUT_FILENAME = r"/media/starfish/LargeSSD/data/cryoET/data/00004_MT_ground_truth_zarr"
+OUTPUT_PATH = Path(r"/media/starfish/LargeSSD/data/cryoET/data/new_MT_converted_mesh/")
 
 
 encode_segmentation(
     filename=INPUT_FILENAME,
     output_path=OUTPUT_PATH,
     resolution=(1.048, 1.048, 1.048),
-    num_lod=3,
+    max_lod=2,
     include_mesh=True,
     delete_existing=True,
 )
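Not part of the commit: a quick sanity check of the converted output, assuming encode_segmentation writes a standard Neuroglancer precomputed info file at the top of OUTPUT_PATH (the path and keys below are the usual precomputed layout, not confirmed against this repository).

# Illustrative only: inspect the precomputed metadata under OUTPUT_PATH.
import json
from pathlib import Path

OUTPUT_PATH = Path(r"/media/starfish/LargeSSD/data/cryoET/data/new_MT_converted_mesh/")
info = json.loads((OUTPUT_PATH / "info").read_text())  # assumed location of the info file
print(info.get("type"), info.get("data_type"))
for scale in info.get("scales", []):
    print(scale.get("key"), scale.get("resolution"), scale.get("size"))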

new_examples/convert_oriented_annotation-egczi1-mesh.py (+12 -10)
@@ -1,8 +1,7 @@
 import json
+import logging
 
-from cryoet_data_portal_neuroglancer.precompute.points import (
-    encode_annotation,
-)
+from cryoet_data_portal_neuroglancer.precompute.points import encode_annotation
 from cryoet_data_portal_neuroglancer.precompute.instance_mesh import (
     encode_oriented_mesh,
 )
@@ -13,8 +12,8 @@
 )
 from cryoet_data_portal_neuroglancer.io import load_glb_file, load_oriented_point_data
 from pathlib import Path
-from cryoet_data_portal_neuroglancer.precompute.glb_meshes import (
-    generate_standalone_sharded_multiresolution_mesh,
+from cryoet_data_portal_neuroglancer.precompute.mesh import (
+    generate_sharded_mesh_from_lods,
 )
 
 
@@ -35,13 +34,16 @@
 encode_annotation(
     data, metadata, OUTPUT_PATH, 0.784 * 1e-9, shard_by_id=(0, 10), is_oriented=True
 )
-mesh = load_glb_file(MESH_PATH)
+logging.basicConfig(level=logging.INFO, force=True)
+scene = load_glb_file(MESH_PATH)
 
-sub_result = encode_oriented_mesh(mesh, data, metadata, OUTPUT_PATH, 2)
-generate_standalone_sharded_multiresolution_mesh(
-    sub_result,
-    OUTPUT_PATH / "meshoutput",
+copy_pasted_lods = encode_oriented_mesh(
+    scene,
+    data,
+    num_lods=3,
+    max_faces_for_first_lod=10e10,
 )
+generate_sharded_mesh_from_lods(copy_pasted_lods, OUTPUT_PATH / "meshoutput")
 
 SOURCE = "http://127.0.0.1:9000/converted-01122021/"
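Not part of the commit: given the SOURCE URL at the end of convert_oriented_annotation-egczi1-mesh.py above, a Neuroglancer JSON state for the two outputs might look roughly like the sketch below. The layer names and the annotation sub-path are assumptions; only "meshoutput" comes from the script, and the precomputed:// scheme and layer type fields are standard Neuroglancer.

# Illustrative only: a minimal Neuroglancer state pointing at the served outputs.
import json

SOURCE = "http://127.0.0.1:9000/converted-01122021/"
state = {
    "layers": [
        {
            "type": "annotation",
            "source": f"precomputed://{SOURCE}annotations",  # assumed sub-path for the encoded points
            "name": "oriented points",
        },
        {
            "type": "segmentation",
            "source": f"precomputed://{SOURCE}meshoutput",  # "meshoutput" matches the script above
            "name": "instance meshes",
        },
    ],
}
print(json.dumps(state, indent=2))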

@@ -1,6 +1,6 @@
 from cloudvolume import CloudVolume
 
-FILEPATH = r"file:///media/starfish/LargeSSD/data/cryoET/data/new_actin_converted_mesh"
+FILEPATH = r"file:///media/starfish/LargeSSD/data/cryoET/data/new_MT_converted_mesh/"
 
 cv = CloudVolume(FILEPATH)
-cv.viewer()
+cv.viewer(port=1030)

new_examples/serve_actin_mesh.py (new file, +8)
@@ -0,0 +1,8 @@
+from cloudvolume import CloudVolume
+
+FILEPATH = (
+    r"file:///media/starfish/LargeSSD/data/cryoET/data/new_actin_converted_mesh_no_res/"
+)
+
+cv = CloudVolume(FILEPATH)
+cv.viewer(port=1031)

new_examples/serve_max_lod.py (new file, +8)
@@ -0,0 +1,8 @@
+from cloudvolume import CloudVolume
+
+FILEPATH = (
+    r"file:///media/starfish/LargeSSD/data/cryoET/data/converted-01122021/max_lod_mesh"
+)
+
+cv = CloudVolume(FILEPATH)
+cv.viewer(port=1032)

new_examples/serve_min_lod.py (new file, +8)
@@ -0,0 +1,8 @@
+from cloudvolume import CloudVolume
+
+FILEPATH = (
+    r"file:///media/starfish/LargeSSD/data/cryoET/data/converted-01122021/min_lod_mesh"
+)
+
+cv = CloudVolume(FILEPATH)
+cv.viewer(port=1031)

new_examples/serve_oriented_mesh.py (new file, +8)
@@ -0,0 +1,8 @@
+from cloudvolume import CloudVolume
+
+FILEPATH = (
+    r"file:///media/starfish/LargeSSD/data/cryoET/data/converted-01122021/meshoutput/"
+)
+
+cv = CloudVolume(FILEPATH)
+cv.viewer()
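Not part of the commit: each serve script starts a local CloudVolume server, which here appears to expose the precomputed data over HTTP on the given port. A hedged sketch of pointing the neuroglancer Python client at one of them; the port 1032 and the layer name are taken from serve_max_lod.py above, everything else is illustrative.

# Illustrative only: open the locally served max-LOD mesh in Neuroglancer.
import neuroglancer

viewer = neuroglancer.Viewer()
with viewer.txn() as state:
    state.layers["max_lod_mesh"] = neuroglancer.SegmentationLayer(
        source="precomputed://http://127.0.0.1:1032"  # assumes cv.viewer(port=1032) is running
    )
print(viewer)  # prints a viewer URL to open in a browser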

new_examples/volume_rendering.py (+60 -21)
@@ -1,3 +1,4 @@
+import itertools
 import matplotlib.pyplot as plt
 
 from cryoet_data_portal_neuroglancer.io import load_omezarr_data
@@ -9,27 +10,65 @@
 
 FILEPATH = r"/media/starfish/LargeSSD/data/cryoET/data/0004_image/Tomograms/VoxelSpacing13.48/CanonicalTomogram/TS_0004.zarr"
 
-dask_data = load_omezarr_data(FILEPATH, resolution_level=2)
+dask_data = load_omezarr_data(FILEPATH, resolution_level=1)
 data_array = dask_data.compute()
-contrast_calculator = ContrastLimitCalculator(data_array)
-contrast_calculator.set_volume_and_z_limits(data_array, central_z_slice=10, z_radius=5)
-contrast_limits = contrast_calculator.contrast_limits_from_percentiles(0.0, 100.0)
-print(contrast_limits)
 
-rendered_mem, image_shape = volume_render(
-    data_array, contrast_limits=contrast_limits, depth_samples=256
-)
-volume_rendered_image = np.ndarray(
-    image_shape, dtype=np.float32, buffer=rendered_mem.buf
-)
+# def rms(data):
+#     return np.sqrt(np.mean(data**2))
+
+
+# standard_deviation_per_z_slice = np.std(data_array, axis=(1, 2))
+# standard_deviation_per_z_slice = np.nan_to_num(
+#     standard_deviation_per_z_slice, copy=False
+# )
+# for i, std in enumerate(standard_deviation_per_z_slice):
+#     print(f"Standard deviation for z-slice {i}: {std:.2f}")
+
+# lowest_points = find_peaks(-standard_deviation_per_z_slice, prominence=0.1)
+# print(lowest_points)
+
+# fig, ax = plt.subplots()
+# ax.plot(standard_deviation_per_z_slice)
+# ax.set_xlabel("Z-slice")
+# ax.set_ylabel("Standard deviation")
+# plt.show()
+
+# exit(-1)
+
+contrast_calculator = ContrastLimitCalculator()
+
+percentile_thresholds = [(0.0, 100.0), (45.0, 99.5), (5.0, 95.0)]
+for (low_percentile, high_percentile), clip in itertools.product(
+    percentile_thresholds, [True, False]
+):
+    contrast_calculator.volume = data_array
+    if clip:
+        contrast_calculator.trim_volume_around_central_zslice()
+
+    contrast_limits = contrast_calculator.contrast_limits_from_percentiles(
+        low_percentile, high_percentile
+    )
+
+    rendered_mem, image_shape = volume_render(
+        data_array, contrast_limits=contrast_limits, depth_samples=256
+    )
+    volume_rendered_image = np.ndarray(
+        image_shape, dtype=np.float32, buffer=rendered_mem.buf
+    )
 
-try:
-    # Save the RGBA image
-    print("Saving the volume rendered image to volume_rendered_image.png")
-    plt.imsave("volume_rendered_image.png", volume_rendered_image)
-    del volume_rendered_image
-except Exception as e:
-    print(f"Error saving the volume rendered image: {e}")
-finally:
-    rendered_mem.close()
-    rendered_mem.unlink()
+    try:
+        # Save the RGBA image
+        print(
+            f"Saving volume rendered image with contrast limits {contrast_limits} and clipping {clip}"
+        )
+        clip_string = "clipped" if clip else "unclipped"
+        plt.imsave(
+            f"volume_rendered_image_{low_percentile}_{high_percentile}_{clip_string}_{contrast_limits[0]:.2f}_{contrast_limits[1]:.2f}.png",
+            volume_rendered_image,
+        )
+        del volume_rendered_image
+    except Exception as e:
+        print(f"Error saving the volume rendered image: {e}")
    finally:
        rendered_mem.close()
        rendered_mem.unlink()
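Not part of the commit: the new loop sweeps three percentile pairs, each with and without trimming the volume around its central z-slice before computing limits. As a rough illustration of what a percentile-based contrast limit amounts to (not the library's actual implementation), the limits can be computed directly with NumPy:

# Illustrative only: percentile contrast limits computed directly with NumPy.
# The +/-5 slice radius mimics trimming around the central z-slice and is an
# arbitrary choice for this sketch.
import numpy as np

def percentile_contrast_limits(volume, low, high, trim_z=False):
    data = volume
    if trim_z:
        central = volume.shape[0] // 2
        data = volume[max(central - 5, 0) : central + 5]
    return float(np.percentile(data, low)), float(np.percentile(data, high))

With (0.0, 100.0) the limits reduce to the raw minimum and maximum of the (possibly trimmed) volume, which is why that pair tends to give a flatter rendering than tighter pairs such as (5.0, 95.0).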
