Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"name": "ouroboros",
"productName": "Ouroboros",
"version": "1.0.1",
"version": "1.0.2",
"description": "Quickly extract ROIs from cloud-hosted medical scans.",
"main": "./out/main/index.js",
"author": "Weaver Goldman <we.goldm@gmail.com>",
Expand Down
6 changes: 3 additions & 3 deletions python/coverage.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
28 changes: 24 additions & 4 deletions python/ouroboros/cli.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from multiprocessing import freeze_support

import argparse
import sys

from ouroboros.common.pipelines import backproject_pipeline, slice_pipeline
from ouroboros.helpers.models import pretty_json_output
Expand Down Expand Up @@ -73,14 +73,20 @@ def main():


def handle_slice(args):
print(f"Loading slice options from: {args.options}")
slice_options = SliceOptions.load_from_json(args.options)

if isinstance(slice_options, str):
print("Exiting due to errors loading slice options.", file=sys.stderr)
sys.exit(1)

print("Slice options loaded successfully.")
pipeline, input_data = slice_pipeline(slice_options, True)

_, error = pipeline.process(input_data)

if error:
print(error)
print(f"Pipeline Error: {error}", file=sys.stderr)

if args.verbose:
print("\nCalculation Statistics:\n")
Expand All @@ -90,16 +96,30 @@ def handle_slice(args):


def handle_backproject(args):
print(f"Loading backproject options from: {args.options}")
backproject_options = BackprojectOptions.load_from_json(args.options)

if isinstance(backproject_options, str):
print("Exiting due to errors loading backproject options.", file=sys.stderr)
sys.exit(1)

print("Backproject options loaded successfully."
f"Loading slice options from: {backproject_options.slice_options_path}")

slice_options = SliceOptions.load_from_json(backproject_options.slice_options_path)


if isinstance(slice_options, str):
print("Exiting due to errors loading slice options file specified within backproject options"
f"({backproject_options.slice_options_path}).", file=sys.stderr)
sys.exit(1)

print("Slice options loaded successfully.")
pipeline, input_data = backproject_pipeline(backproject_options, slice_options, True)

_, error = pipeline.process(input_data)

if error:
print(error)
print(f"Pipeline Error: {error}", file=sys.stderr)

if args.verbose:
print("\nCalculation Statistics:\n")
Expand Down
6 changes: 3 additions & 3 deletions python/ouroboros/common/file_system.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,8 @@ def load_options_for_backproject(options_path: str) -> BackprojectOptions | str:

Returns
-------
BackprojectOptions | str
The options for backprojecting the volume.
BackprojectOptions | str
The options for backprojecting the volume, or a string of the exception if it could not be loaded.
"""

options = BackprojectOptions.load_from_json(options_path)
Expand Down Expand Up @@ -184,7 +184,7 @@ def load_options_for_slice(options_path: str) -> SliceOptions | str:
Returns
-------
SliceOptions | str
The options for slicing the volume, or an error message if the options could not be loaded.
The options for slicing the volume, or a string of the exception if the options could not be loaded.
"""

slice_options = SliceOptions.load_from_json(options_path)
Expand Down
4 changes: 2 additions & 2 deletions python/ouroboros/common/server_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,8 +116,8 @@ async def on_demand_slice_visualization(options: str):
result["error"] = f"Error loading options: {str(e)}"
return JSONResponse(result, status_code=400)

if isinstance(result, str):
result["error"] = slice_options
if isinstance(load_result, str):
result["error"] = load_result
return JSONResponse(result, status_code=400)

slice_options = load_result[0] if docker else load_result
Expand Down
16 changes: 16 additions & 0 deletions python/ouroboros/common/server_handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,11 @@ def handle_slice_core(task: SliceTask, slice_options):
def handle_slice(task: SliceTask):
slice_options = load_options_for_slice(task.options)

if isinstance(slice_options, str):
task.error = slice_options
task.status = "error"
return

slice_result = handle_slice_core(task, slice_options)

if isinstance(slice_result, str):
Expand Down Expand Up @@ -99,8 +104,19 @@ def handle_backproject_core(task: BackProjectTask, options, slice_options):

def handle_backproject(task: BackProjectTask):
options = load_options_for_backproject(task.options)

if isinstance(options, str):
task.error = options
task.status = "error"
return

slice_options = load_options_for_slice(options.slice_options_path)

if isinstance(slice_options, str):
task.error = slice_options
task.status = "error"
return

backproject_result = handle_backproject_core(task, options, slice_options)

if isinstance(backproject_result, str):
Expand Down
2 changes: 1 addition & 1 deletion python/ouroboros/helpers/memory_usage.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def calculate_gigabytes_from_dimensions(shape: tuple[int], dtype: np.dtype) -> f
num_elements = np.prod(shape)

# Calculate the total number of bytes
num_bytes = num_elements * dtype_size
num_bytes = np.multiply(num_elements, dtype_size, dtype=np.float64)

return num_bytes / GIGABYTE

Expand Down
90 changes: 71 additions & 19 deletions python/ouroboros/helpers/models.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import json
from pydantic import BaseModel, ValidationError
import sys


def pretty_json_output(obj: object) -> str:
Expand All @@ -26,9 +27,9 @@ def model_with_json(cls):
raise TypeError("model_with_json must be applied to a BaseModel type")

cls.to_dict = cls.model_dump
cls.from_dict = classmethod(cls.model_validate)
cls.from_dict = classmethod(from_dict)
cls.to_json = cls.model_dump_json
cls.from_json = classmethod(cls.model_validate_json)
cls.from_json = classmethod(from_json)
cls.save_to_json = save_to_json
cls.load_from_json = classmethod(load_from_json)
cls.copy_values_from_other = copy_values_from_other
Expand All @@ -37,28 +38,79 @@ def model_with_json(cls):


def save_to_json(self: BaseModel, json_path: str):
    """Serializes this model to a JSON file.

    Args:
        self: The Pydantic model instance to serialize.
        json_path: Destination path for the JSON file.
    """
    # Explicit utf-8 encoding for consistency with load_from_json.
    with open(json_path, "w", encoding='utf-8') as f:
        # NOTE(review): indent=4 is applied here for readable output;
        # consider whether this belongs in the Pydantic model config instead.
        f.write(self.to_json(indent=4))


@classmethod
def from_dict(cls: type[BaseModel], class_dict: dict) -> BaseModel | str:
    """Loads a Pydantic model from a dict.

    Args:
        cls: The Pydantic model class.
        class_dict: Dict of field values for the model.

    Returns:
        An instance of the model, or a string of the exception if
        validation fails.
    """
    try:
        result = cls.model_validate(class_dict)
        return result
    except (ValidationError, json.JSONDecodeError) as vse:
        # Catch specific Pydantic validation errors and JSON syntax errors
        print(f"Error in validation of dict data for {cls.__name__}:\n{vse}", file=sys.stderr)
        return str(vse)
    except Exception as e:
        # Catch other potential errors like permission denied, unicode issues etc.
        print(f"Error parsing dict data: {e}", file=sys.stderr)
        return str(e)


@classmethod
def from_json(cls: type[BaseModel], json: str) -> BaseModel | str:
    """Loads a Pydantic model from a JSON string.

    Args:
        cls: The Pydantic model class.
        json: JSON-format string of the object.

    Returns:
        An instance of the model, or a string of the exception if loading fails.
    """
    # BUG FIX: the parameter name `json` shadows the stdlib `json` module for
    # the whole function scope, so the previous `json.JSONDecodeError` in the
    # except clause would raise AttributeError on the string argument when an
    # exception propagated, masking the real error. An import statement
    # resolves the module name independently of the shadowing local.
    from json import JSONDecodeError

    try:
        result = cls.model_validate_json(json)
        return result
    except (ValidationError, JSONDecodeError) as vse:
        # Catch specific Pydantic validation errors and JSON syntax errors
        print(f"Error in validation of JSON for {cls.__name__}:\n{vse}", file=sys.stderr)
        return str(vse)
    except Exception as e:
        # Catch other potential errors like permission denied, unicode issues etc.
        print(f"Error parsing json: {e}", file=sys.stderr)
        return str(e)


@classmethod
def load_from_json(cls: type[BaseModel], json_path: str) -> BaseModel | str:
    """Loads a Pydantic model from a JSON file.

    Args:
        cls: The Pydantic model class.
        json_path: Path to the JSON file.

    Returns:
        An instance of the model, or a string of the exception if loading fails.
    """
    try:
        # Explicitly use utf-8 encoding
        with open(json_path, "r", encoding='utf-8') as f:
            # Use model_validate_json directly for better error context from Pydantic
            result = cls.model_validate_json(f.read())
        return result
    except FileNotFoundError as fe:
        print(f"Error: File not found at {json_path}", file=sys.stderr)
        return str(fe)
    except (ValidationError, json.JSONDecodeError) as vse:
        # Catch specific Pydantic validation errors and JSON syntax errors
        print(f"Error loading {cls.__name__} from JSON file '{json_path}':\n{vse}", file=sys.stderr)
        return str(vse)
    except Exception as e:
        # Catch other potential errors like permission denied, unicode issues etc.
        print(f"Error reading or parsing file '{json_path}': {e}", file=sys.stderr)
        return str(e)


def copy_values_from_other(self: BaseModel, other: BaseModel):
Expand Down
2 changes: 1 addition & 1 deletion python/pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "ouroboros"
version = "1.0.1"
version = "1.0.2"
description = "Extract ROIs (e.g. nerves, blood vessels) from multi-terabyte cloud-hosted medical scans."
authors = ["Weaver Goldman <we.goldm@gmail.com>"]
readme = "README.md"
Expand Down
4 changes: 2 additions & 2 deletions python/test/helpers/test_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,14 +55,14 @@ def test_invalid_load_json(tmp_path):
f.write("invalid json")

loaded_sample = SampleModel.load_from_json(json_path)
assert loaded_sample.startswith("Error loading SampleModel from JSON")
assert "Invalid JSON" in loaded_sample


def test_file_not_found(tmp_path):
json_path = tmp_path / "file_not_found.json"

loaded_sample = SampleModel.load_from_json(json_path)
assert loaded_sample.startswith("File not found at")
assert "No such file or directory" in loaded_sample


def test_pretty_json_output():
Expand Down
8 changes: 4 additions & 4 deletions python/test/helpers/test_volume_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def test_volume_cache_init(volume_cache, bounding_boxes):
assert volume_cache.link_rects == [0, 1]
assert volume_cache.cv.source_url == "test_source_url"
assert volume_cache.mip == 0
assert volume_cache.flush_cache == True
assert volume_cache.flush_cache is True
assert volume_cache.volumes == [None, None]
assert volume_cache.cache_volume == [False, False]

Expand Down Expand Up @@ -82,7 +82,7 @@ def test_volume_cache_from_dict(volume_cache):
assert new_volume_cache.link_rects == volume_cache.link_rects
assert new_volume_cache.cv.source_url == "test_source_url"
assert new_volume_cache.mip == 0
assert new_volume_cache.flush_cache == True
assert new_volume_cache.flush_cache is True
mock_from_dict.assert_called_once_with(volume_cache_dict["cv"])


Expand Down Expand Up @@ -142,8 +142,8 @@ def test_volume_cache_has_color_channels(volume_cache):


def test_volume_cache_should_cache_last_volume(volume_cache):
assert volume_cache.should_cache_last_volume([1, 2, 3, 1]) == True
assert volume_cache.should_cache_last_volume([1, 2, 3, 4]) == False
assert volume_cache.should_cache_last_volume([1, 2, 3, 1]) is True
assert volume_cache.should_cache_last_volume([1, 2, 3, 4]) is False


def test_volume_cache_get_dtype(volume_cache):
Expand Down