From 47c7de4a382a774978a2e09ef2f4e11d99a9faeb Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Fri, 16 Jan 2026 18:15:59 +0100 Subject: [PATCH 01/32] add crtical points, boundary and improve level paths --- lapy/tria_mesh.py | 344 +++++++++++++++++++++++++++++++++++++--------- 1 file changed, 280 insertions(+), 64 deletions(-) diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index 951555d..3559533 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -329,6 +329,24 @@ def is_manifold(self): """ return np.max(self.adj_sym.data) <= 2 + def is_boundary(self) -> np.ndarray: + """Check which vertices are on the boundary. + + Returns + ------- + np.ndarray + Boolean array of shape (n_vertices,) where True indicates the vertex + is on a boundary edge (an edge that belongs to only one triangle). + """ + # Boundary edges have value 1 in the symmetric adjacency matrix + boundary_edges = self.adj_sym == 1 + # Get vertices that are part of any boundary edge + boundary_vertices = np.zeros(self.v.shape[0], dtype=bool) + rows, cols = boundary_edges.nonzero() + boundary_vertices[rows] = True + boundary_vertices[cols] = True + return boundary_vertices + def is_oriented(self) -> bool: """Check if triangle mesh is oriented. @@ -1006,6 +1024,112 @@ def curvature_tria( # find orthogonal umin next? return tumin2, tumax2, tcmin, tcmax + def critical_points(self, vfunc: np.ndarray) -> tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """Compute critical points (extrema and saddles) of a function on mesh vertices. + + A minimum is a vertex where all neighbor values are larger. + A maximum is a vertex where all neighbor values are smaller. + A saddle is a vertex with at least two regions of neighbors with larger values, + and two with smaller values. Boundary vertices assume a virtual edge outside + the mesh that closes the boundary loop around the vertex. + + Parameters + ---------- + vfunc : np.ndarray + Real-valued function defined on mesh vertices, shape (n_vertices,). 
+ + Returns + ------- + minima : np.ndarray + Array of vertex indices that are local minima, shape (n_minima,). + maxima : np.ndarray + Array of vertex indices that are local maxima, shape (n_maxima,). + saddles : np.ndarray + Array of vertex indices that are saddles (all orders), shape (n_saddles,). + saddle_orders : np.ndarray + Array of saddle orders for each saddle vertex (same length as saddles), shape (n_saddles,). + Order 2 = simple saddle (4 sign flips), order 3+ = higher-order saddles. + + Notes + ----- + The algorithm works by: + 1. For each vertex, compute difference: neighbor_value - vertex_value + 2. Minima: all differences are positive (all neighbors higher) + 3. Maxima: all differences are negative (all neighbors lower) + 4. Saddles: count sign flips across opposite edges in triangles at vertex + - Regular point: 2 sign flips + - Simple saddle (order 2): 4 sign flips + - Higher-order saddle (order n): 2n sign flips, order = n_flips / 2 + 5. Tie-breaker: when two vertices have equal function values, the vertex + with the higher vertex ID is treated as slightly larger to remove ambiguity. + + The implementation is fully vectorized using sparse matrices and numpy operations. 
+ """ + vfunc = np.asarray(vfunc) + if len(vfunc) != self.v.shape[0]: + raise ValueError("vfunc length must match number of vertices") + n_vertices = self.v.shape[0] + + # Use SYMMETRIC adjacency matrix to get ALL edges (including boundary edges in both directions) + # COMPUTE EDGE SIGNS ONCE for all neighbor relationships + rows, cols = self.adj_sym.nonzero() + edge_diffs = vfunc[cols] - vfunc[rows] + edge_signs = np.sign(edge_diffs) + # Tie-breaker: when function values are equal, vertex with higher ID is larger + zero_mask = edge_signs == 0 + edge_signs[zero_mask] = np.sign(cols[zero_mask] - rows[zero_mask]) + # Create sparse matrix of edge signs for O(1) lookup + edge_sign_matrix = sparse.csr_matrix( + (edge_signs, (rows, cols)), + shape=(n_vertices, n_vertices) + ) + + # CLASSIFY MINIMA AND MAXIMA + # Compute min and max edge sign per vertex (row-wise) + # Note: edge_sign_matrix only contains +1/-1 (never 0 due to tie-breaker) + # Implicit zeros in sparse matrix represent non-neighbors and can be ignored + min_signs = edge_sign_matrix.min(axis=1).toarray().flatten() + max_signs = edge_sign_matrix.max(axis=1).toarray().flatten() + # Minimum: all neighbor signs are positive (+1) + # If min in {0,1} and max=1, all actual neighbors are +1 (zeros are non-neighbors) + is_minimum = (min_signs > -1) & (max_signs == 1) + # Maximum: all neighbor signs are negative (-1) + # If min=-1 and max in {-1,0}, all actual neighbors are -1 (zeros are non-neighbors) + is_maximum = (min_signs == -1) & (max_signs < 1) + minima = np.where(is_minimum)[0] + maxima = np.where(is_maximum)[0] + + # COUNT SIGN FLIPS at opposite edge for saddle detection + sign_flips = np.zeros(n_vertices, dtype=int) + t0 = self.t[:, 0] + t1 = self.t[:, 1] + t2 = self.t[:, 2] + # For vertex 0, opposite edge is (v1, v2) + sign0_1 = np.array(edge_sign_matrix[t0, t1]).flatten() + sign0_2 = np.array(edge_sign_matrix[t0, t2]).flatten() + sign1_2 = np.array(edge_sign_matrix[t1, t2]).flatten() + flip0 = (sign0_1 * 
sign0_2) < 0 + np.add.at(sign_flips, t0[flip0], 1) + # For vertex 1, opposite edge is (v2, v0) + # sign(v1→v0) = -sign(v0→v1) = -sign0_1 + flip1 = (sign1_2 * (-sign0_1)) < 0 + np.add.at(sign_flips, t1[flip1], 1) + # For vertex 2, opposite edge is (v0, v1) + # sign(v2→v0) = -sign(v0→v2) = -sign0_2 + # sign(v2→v1) = -sign(v1→v2) = -sign1_2 + flip2 = (sign0_2 * sign1_2) < 0 + np.add.at(sign_flips, t2[flip2], 1) + + # CLASSIFY SADDLES + # Saddles have 4+ flips (regular points have 2, minima/maxima have 0) + # Boundary vertices can have 3 flips (or more) to be a saddle, assuming an additional flip outside + # Order = n_flips / 2 + is_saddle = sign_flips >= 3 + saddles = np.where(is_saddle)[0] + saddle_orders = (sign_flips[saddles] + 1) // 2 + + return minima, maxima, saddles, saddle_orders + def normalize_(self) -> None: """Normalize TriaMesh to unit surface area and centroid at the origin. @@ -1554,92 +1678,169 @@ def __reduce_edges_to_path( edges: np.ndarray, start_idx: Optional[int] = None, get_edge_idx: bool = False, - ) -> Union[np.ndarray, tuple[np.ndarray, np.ndarray]]: - """Reduce undirected unsorted edges (index pairs) to path (index array). + ) -> Union[list, tuple[list, list]]: + """Reduce undirected unsorted edges to ordered path(s). - Converts an unordered list of edge pairs into an ordered path by finding - a traversal from one endpoint to the other. + Converts unordered edge pairs into ordered paths by finding traversals. + Handles single open paths, closed loops, and multiple disconnected components. + Always returns lists to handle multiple components uniformly. Parameters ---------- edges : np.ndarray Array of shape (n, 2) with pairs of positive integer node indices - representing undirected edges. All indices from 0 to max(edges) must - be used and graph needs to be connected. Nodes cannot have more than - 2 neighbors. Requires exactly two endnodes (nodes with only one - neighbor). 
For closed loops, cut it open by removing one edge before - passing to this function. + representing undirected edges. Node indices can have gaps (i.e., not all + indices from 0 to max need to appear in the edges). Only nodes that + actually appear in edges are processed. start_idx : int or None, default=None - Node with only one neighbor to start path. If None, the node with - the smaller index will be selected as start point. + Preferred start node. If None, selects an endpoint (degree 1) automatically, + or arbitrary node for closed loops. Must be a node that appears in edges. get_edge_idx : bool, default=False - If True, also return index of edge into edges for each path segment. - The returned edge index list has length n, while path has length n+1. + If True, also return edge indices for each path segment. Returns ------- - path : np.ndarray - Array of length n+1 containing node indices as single path from start - to endpoint. - edge_idx : np.ndarray - Array of length n containing corresponding edge idx into edges for each - path segment. Only returned if get_edge_idx is True. + paths : list[np.ndarray] + List of ordered paths, one per connected component. For closed loops, + first node is repeated at end. + edge_idxs : list[np.ndarray] + List of edge index arrays, one per component. Only returned if + get_edge_idx=True. Raises ------ ValueError - If graph does not have exactly two endpoints. - If start_idx is not one of the endpoints. - If graph is not connected. + If start_idx is invalid. + If graph has degree > 2 (branching or self-intersections). 
""" - from scipy.sparse.csgraph import shortest_path - - # Extract node indices and create a sparse adjacency matrix edges = np.array(edges) + if edges.shape[0] == 0: + return ([], []) if get_edge_idx else [] + + # Build adjacency matrix i = np.column_stack((edges[:, 0], edges[:, 1])).reshape(-1) j = np.column_stack((edges[:, 1], edges[:, 0])).reshape(-1) dat = np.ones(i.shape) n = edges.max() + 1 adj_matrix = sparse.csr_matrix((dat, (i, j)), shape=(n, n)) - # Find the degree of each node, sum over rows to get degree degrees = np.asarray(adj_matrix.sum(axis=1)).ravel() - endpoints = np.where(degrees == 1)[0] - if len(endpoints) != 2: - raise ValueError( - "The graph does not have exactly two endpoints; invalid input." - ) - if not start_idx: - start_idx = endpoints[0] - else: - if not np.isin(start_idx, endpoints): + + # Find connected components + n_comp, labels = sparse.csgraph.connected_components(adj_matrix, directed=False) + + # Build edge index lookup matrix + edge_dat = np.arange(edges.shape[0]) + 1 + edge_dat = np.column_stack((edge_dat, edge_dat)).reshape(-1) + eidx_matrix = sparse.csr_matrix((edge_dat, (i, j)), shape=(n, n)) + + paths = [] + edge_idxs = [] + + for comp_id in range(n_comp): + comp_nodes = np.where(labels == comp_id)[0] + if len(comp_nodes) == 0: + continue + + comp_degrees = degrees[comp_nodes] + + # Skip isolated nodes (degree 0) that exist only due to matrix sizing. + # When edges use indices [0, 5, 10], we create a matrix of size 11x11. + # Indices [1,2,3,4,6,7,8,9] don't appear in any edges (have degree 0). + # connected_components treats each as a separate component, but they're + # not real nodes - just phantom entries from sizing matrix to max_index+1. + if np.all(comp_degrees == 0): + continue + + # SAFETY CHECK: Reject graphs with nodes of degree > 2 + # This single check catches all problematic cases: + # - Branching structures (Y-shape, star graph) + # - Self-intersections (figure-8, etc.) 
+ # - Trees with multiple endpoints + # Valid graphs have only degree 1 (endpoints) and degree 2 (path nodes) + max_degree = comp_degrees.max() + if max_degree > 2: + high_degree_nodes = comp_nodes[comp_degrees > 2] raise ValueError( - f"start_idx {start_idx} must be one of the endpoints {endpoints}." + f"Component {comp_id}: found {len(high_degree_nodes)} node(s) with degree > 2 " + f"(max degree: {max_degree}). Degrees: {np.sort(comp_degrees)}. " + f"This indicates branching or self-intersecting structure. " + f"Only simple paths and simple closed loops are supported." ) - # Traverse the graph by computing shortest path - dist_matrix = shortest_path( - csgraph=adj_matrix, - directed=False, - indices=start_idx, - return_predecessors=False, - ) - if np.isinf(dist_matrix).any(): - raise ValueError( - "Ensure connected graph with indices from 0 to max_idx without gaps." - ) - # sort indices according to distance form start - path = dist_matrix.argsort() - # get edge idx of each segment from original list - enum = edges.shape[0] - dat = np.arange(enum) + 1 - dat = np.column_stack((dat, dat)).reshape(-1) - eidx_matrix = sparse.csr_matrix((dat, (i, j)), shape=(n, n)) - ei = path[0:-1] - ej = path[(np.arange(path.size - 1) + 1)] - eidx = np.asarray(eidx_matrix[ei, ej] - 1).ravel() + + # Determine if closed loop: all nodes have degree 2 + is_closed = np.all(comp_degrees == 2) + + # For closed loops: break one edge to convert to open path + if is_closed: + # Pick start node + if start_idx is not None and start_idx in comp_nodes: + start = start_idx + else: + start = comp_nodes[0] + + # Find neighbors of start node to break one edge + neighbors = adj_matrix[start, :].nonzero()[1] + neighbors_in_comp = [n for n in neighbors if n in comp_nodes] + + if len(neighbors_in_comp) < 2: + raise ValueError(f"Component {comp_id}: Closed loop node {start} should have 2 neighbors") + + # Break the edge from start to first neighbor (temporarily) + adj_matrix = adj_matrix.tolil() + 
break_neighbor = neighbors_in_comp[0] + adj_matrix[start, break_neighbor] = 0 + adj_matrix[break_neighbor, start] = 0 + adj_matrix = adj_matrix.tocsr() + + # Update degrees after breaking edge + degrees = np.asarray(adj_matrix.sum(axis=1)).ravel() + comp_degrees = degrees[comp_nodes] + + # Now handle as open path (both originally open and converted closed loops) + endpoints = comp_nodes[comp_degrees == 1] + + if len(endpoints) != 2: + raise ValueError( + f"Component {comp_id}: Expected 2 endpoints after breaking loop, found {len(endpoints)}" + ) + + # Select start node + if is_closed: + # For closed loops, start is already selected above + pass + elif start_idx is not None and start_idx in endpoints: + start = start_idx + else: + # For originally open paths, pick first endpoint + start = endpoints[0] + + # Use shortest_path to order nodes by distance from start + dist = sparse.csgraph.shortest_path(adj_matrix, indices=start, directed=False) + + if np.isinf(dist[comp_nodes]).any(): + raise ValueError(f"Component {comp_id} is not fully connected.") + + # Sort nodes by distance from start + path = comp_nodes[dist[comp_nodes].argsort()] + + # For closed loops: append start again to close the loop + if is_closed: + path = np.append(path, path[0]) + + paths.append(path) + + # Get edge indices if requested + if get_edge_idx: + ei = path[:-1] + ej = path[1:] + eidx = np.asarray(eidx_matrix[ei, ej] - 1).ravel() + edge_idxs.append(eidx) + + # Always return lists if get_edge_idx: - return path, eidx + return paths, edge_idxs else: - return path + return paths def level_path( @@ -1758,15 +1959,31 @@ def level_path( p2 = np.squeeze(p[edge_idxs[:, 1]]) llength = np.sqrt(((p1 - p2) ** 2).sum(1)).sum() # compute path from unordered, not-directed edge list - # and return path as list of points, and path length + # __reduce_edges_to_path now returns lists (can have multiple components) if get_tria_idx: - path, edge_idx = TriaMesh.__reduce_edges_to_path( - edge_idxs, 
get_edge_idx=get_tria_idx + paths, edge_idxs_list = TriaMesh.__reduce_edges_to_path( + edge_idxs, get_edge_idx=True ) + # level_path expects single component + if len(paths) != 1: + raise ValueError( + f"Found {len(paths)} disconnected level curves. " + f"Use extract_level_curves() for multiple components." + ) + path = paths[0] + edge_idx = edge_idxs_list[0] # translate local edge id to global tria id tria_idx = t_idx[edge_idx] else: - path = TriaMesh.__reduce_edges_to_path(edge_idxs, get_tria_idx) + paths = TriaMesh.__reduce_edges_to_path(edge_idxs, get_edge_idx=False) + # level_path expects single component + if len(paths) != 1: + raise ValueError( + f"Found {len(paths)} disconnected level curves. " + f"Use extract_level_curves() for multiple components." + ) + path = paths[0] + # remove duplicate vertices (happens when levelset hits a vertex almost # perfectly) path3d = p[path, :] @@ -1800,4 +2017,3 @@ def level_path( return path3d, llength, edges_vidxs, edges_relpos else: return path3d, llength - From d43040854d1d6804382a5e15cc2877eec3048b49 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Wed, 21 Jan 2026 17:49:17 +0100 Subject: [PATCH 02/32] allow to auto-determine if closed or not based on last/first point duplication when initialized --- lapy/polygon.py | 59 +++++++++++++++++++++++++++++++++++-------------- 1 file changed, 42 insertions(+), 17 deletions(-) diff --git a/lapy/polygon.py b/lapy/polygon.py index fbe10af..2185a5d 100644 --- a/lapy/polygon.py +++ b/lapy/polygon.py @@ -5,6 +5,7 @@ """ import logging import sys +from typing import Optional import numpy as np from scipy import sparse @@ -22,15 +23,21 @@ class Polygon: points : np.ndarray Array of shape (n, d) containing coordinates of polygon vertices in order, where d is 2 or 3 for 2D (x, y) or 3D (x, y, z) paths. - For closed polygons, the last point should not duplicate the first point. - closed : bool, default=False - If True, treats the path as a closed polygon. 
If False, treats it as - an open polyline. + If the last point duplicates the first point and closed is None, + the duplicate endpoint will be removed and the polygon will be + marked as closed automatically. + closed : bool or None, default=None + - If None (default): Auto-detect by checking if first and last points + are identical. If they are, removes duplicate endpoint and sets closed=True. + - If True: Treats the path as a closed polygon. If the last point duplicates + the first, it will be removed. + - If False: Treats the path as an open polyline regardless of endpoints. Attributes ---------- points : np.ndarray - Polygon vertex coordinates, shape (n_points, d). + Polygon vertex coordinates, shape (n_points, d). For closed polygons, + the last point does not duplicate the first. closed : bool Whether the polygon is closed or open. _is_2d : bool @@ -46,23 +53,22 @@ class Polygon: -------- >>> import numpy as np >>> from lapy.polygon import Polygon - >>> # Create a 2D closed polygon (square) - >>> square = np.array([[0, 0], [1, 0], [1, 1], [0, 1]]) - >>> poly = Polygon(square, closed=True) - >>> poly.is_2d() + >>> # Create a 2D closed polygon (square) - auto-detected + >>> square = np.array([[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]) + >>> poly = Polygon(square) # closed=None, will auto-detect and remove duplicate + >>> poly.closed True - >>> poly.length() - 4.0 - >>> # Create a 3D open path - >>> path_3d = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [1, 1, 1]]) - >>> poly3d = Polygon(path_3d, closed=False) - >>> poly3d.is_2d() + >>> poly.points.shape[0] + 4 + >>> # Explicitly open polygon + >>> path = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [1, 1, 1]]) + >>> poly3d = Polygon(path, closed=False) + >>> poly3d.closed False """ - def __init__(self, points: np.ndarray, closed: bool = False): + def __init__(self, points: np.ndarray, closed: Optional[bool] = None): self.points = np.array(points) - self.closed = closed # Validate non-empty polygon if self.points.size 
== 0: @@ -93,6 +99,25 @@ def __init__(self, points: np.ndarray, closed: bool = False): else: raise ValueError("Points should have 2 or 3 coordinates") + # Auto-detect closed polygons or handle explicit closed parameter + if closed is None: + # Auto-detect: check if first and last points are identical + if len(self.points) > 1 and np.allclose(self.points[0], self.points[-1]): + # Remove duplicate endpoint + self.points = self.points[:-1] + self.closed = True + else: + # Not closed (endpoints differ) + self.closed = False + elif closed: + # Explicitly closed: remove duplicate endpoint if present + if len(self.points) > 1 and np.allclose(self.points[0], self.points[-1]): + self.points = self.points[:-1] + self.closed = True + else: + # Explicitly open + self.closed = False + def is_2d(self) -> bool: """Check if the polygon is 2D. From aac8c8ef41c2f11b8eae2bcd948c714b682b57ec Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Wed, 21 Jan 2026 17:50:58 +0100 Subject: [PATCH 03/32] add extract_level_paths for multiple non-crossing/branching levelsets --- lapy/tria_mesh.py | 354 +++++++++++++++------- lapy/utils/tests/test_tria_mesh.py | 472 +++++++++++++++++++++++++++++ 2 files changed, 718 insertions(+), 108 deletions(-) diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index 3559533..77baa2a 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -1842,6 +1842,152 @@ def __reduce_edges_to_path( else: return paths + def extract_level_paths( + self, + vfunc: np.ndarray, + level: float, + ) -> list[polygon.Polygon]: + """Extract level set paths as Polygon objects with triangle/edge metadata. + + For a given scalar function on mesh vertices, extracts all paths where + the function equals the specified level. Returns polygons with embedded + metadata about which triangles and edges were intersected. + Handles single open paths, closed loops, and multiple disconnected components. 
+ + Parameters + ---------- + vfunc : np.ndarray + Scalar function values at vertices, shape (n_vertices,). Must be 1D. + level : float + Level set value to extract. + + Returns + ------- + list[polygon.Polygon] + List of Polygon objects, one for each connected level curve component. + Each polygon has the following additional attributes: + - points : np.ndarray of shape (n_points, 3) - 3D coordinates on level curve + (access via polygon.points or polygon.get_points()) + - closed : bool - whether the curve is closed + - tria_idx : np.ndarray of shape (n_segments,) - triangle index for each segment + - edge_vidx : np.ndarray of shape (n_points, 2) - mesh vertex indices for edge + - edge_bary : np.ndarray of shape (n_points,) - barycentric coordinate [0,1] + along edge where level set intersects (0=first vertex, 1=second vertex) + + Raises + ------ + ValueError + If vfunc is not 1-dimensional. + """ + if vfunc.ndim > 1: + raise ValueError(f"vfunc needs to be 1-dim, but is {vfunc.ndim}-dim!") + + # Get intersecting triangles + intersect = vfunc[self.t] > level + t_idx = np.where( + np.logical_or( + np.sum(intersect, axis=1) == 1, np.sum(intersect, axis=1) == 2 + ) + )[0] + + if t_idx.size == 0: + return [] + + # Reduce to triangles that intersect with level + t_level = self.t[t_idx, :] + intersect = intersect[t_idx, :] + + # Invert triangles with two true values so single vertex is true + intersect[np.sum(intersect, axis=1) > 1, :] = np.logical_not( + intersect[np.sum(intersect, axis=1) > 1, :] + ) + + # Get index within triangle with single vertex + idx_single = np.argmax(intersect, axis=1) + idx_o1 = (idx_single + 1) % 3 + idx_o2 = (idx_single + 2) % 3 + + # Get global indices + gidx0 = t_level[np.arange(t_level.shape[0]), idx_single] + gidx1 = t_level[np.arange(t_level.shape[0]), idx_o1] + gidx2 = t_level[np.arange(t_level.shape[0]), idx_o2] + + # Sort edge indices (rows are triangles, cols are the two vertex ids) + # This creates unique edge identifiers + gg1 = 
np.sort( + np.concatenate((gidx0[:, np.newaxis], gidx1[:, np.newaxis]), axis=1) + ) + gg2 = np.sort( + np.concatenate((gidx0[:, np.newaxis], gidx2[:, np.newaxis]), axis=1) + ) + + # Concatenate all edges and get unique ones + gg = np.concatenate((gg1, gg2), axis=0) + gg_unique = np.unique(gg, axis=0) + + # Generate level set intersection points for unique edges + # Barycentric coordinate (0=first vertex, 1=second vertex) + xl = ((level - vfunc[gg_unique[:, 0]]) + / (vfunc[gg_unique[:, 1]] - vfunc[gg_unique[:, 0]])) + + # 3D points on unique edges + p = ((1 - xl)[:, np.newaxis] * self.v[gg_unique[:, 0]] + + xl[:, np.newaxis] * self.v[gg_unique[:, 1]]) + + # Fill sparse matrix with new point indices (+1 to distinguish from zero) + a_mat = sparse.csc_matrix( + (np.arange(gg_unique.shape[0]) + 1, (gg_unique[:, 0], gg_unique[:, 1])) + ) + + # For each triangle, create edge pair via lookup in matrix + edge_idxs = (np.concatenate((a_mat[gg1[:, 0], gg1[:, 1]], + a_mat[gg2[:, 0], gg2[:, 1]]), axis=0).T - 1) + + # Reduce edges to paths (returns list of paths for multiple components) + paths, path_edge_idxs = self._TriaMesh__reduce_edges_to_path(edge_idxs, get_edge_idx=True) + + # Build polygon objects for each connected component + polygons = [] + for path_nodes, path_edge_idx in zip(paths, path_edge_idxs): + # Get 3D coordinates for this path + poly_v = p[path_nodes, :] + + # Remove duplicate vertices (when levelset hits a vertex almost perfectly) + dd = ((poly_v[0:-1, :] - poly_v[1:, :]) ** 2).sum(1) + dd = np.append(dd, 1) # Never delete last node + eps = 0.000001 + keep_ids = dd > eps + poly_v = poly_v[keep_ids, :] + + # Get triangle indices for each edge + poly_tria_idx = t_idx[path_edge_idx[keep_ids[:-1]]] + + # Get edge vertex indices + poly_edge_vidx = gg_unique[path_nodes[keep_ids], :] + + # Get barycentric coordinates + poly_edge_bary = xl[path_nodes[keep_ids]] + + # Create polygon with metadata + # Use closed=None to let Polygon auto-detect based on endpoints + # 
This will automatically remove duplicate endpoint if present + n_points_before = poly_v.shape[0] + poly = polygon.Polygon(poly_v, closed=None) + n_points_after = poly.points.shape[0] + + # If Polygon removed duplicate endpoint, adjust metadata + if n_points_after < n_points_before: + # Remove last entry from metadata to match polygon points + poly_edge_vidx = poly_edge_vidx[:n_points_after] + poly_edge_bary = poly_edge_bary[:n_points_after] + + poly.tria_idx = poly_tria_idx + poly.edge_vidx = poly_edge_vidx + poly.edge_bary = poly_edge_bary + + polygons.append(poly) + + return polygons def level_path( self, @@ -1859,10 +2005,23 @@ def level_path( The points are sorted and returned as an ordered array of 3D coordinates together with the length of the level set path. + This implementation uses extract_level_paths internally to compute the + level set, ensuring consistent handling of closed loops and metadata. + .. note:: - Only works for level sets that represent a single non-intersecting - path with exactly one start and one endpoint (e.g., not closed)! + Works for level sets that represent a single path or closed loop. + For closed loops, the first and last points are identical (duplicated) + so you can detect closure via ``np.allclose(path[0], path[-1])``. + For open paths, the path has two distinct endpoints. + This function is kept mainly for backward compatability. + + **For more advanced use cases, consider using extract_level_paths() directly:** + + - Multiple disconnected components: extract_level_pathss returns all components + - Custom resampling: Get Polygon objects and use Polygon.resample() directly + - Metadata analysis: Access triangle indices and edge information per component + - Closed loop detection: Polygon objects have a ``closed`` attribute Parameters ---------- @@ -1872,6 +2031,8 @@ def level_path( Level set value to extract. get_tria_idx : bool, default=False If True, also return array of triangle indices for each path segment. 
+ For closed loops with n points (including duplicate), returns n-1 triangle + indices. For open paths with n points, returns n-1 triangle indices. get_edges : bool, default=False If True, also return arrays of vertex indices (i,j) and relative positions for each 3D point along the intersecting edge. @@ -1883,12 +2044,15 @@ def level_path( ------- path : np.ndarray Array of shape (n, 3) containing 3D coordinates of vertices on the - level path. + level path. For closed loops, the last point duplicates the first point, + so ``np.allclose(path[0], path[-1])`` will be True. length : float - Length of the level set. + Total length of the level set path. For closed loops, includes the + closing segment from last to first point. tria_idx : np.ndarray Array of triangle indices for each segment on the path, shape (n-1,) - if the path has length n. Only returned if get_tria_idx is True. + where n is the number of points (including duplicate for closed loops). + Only returned if get_tria_idx is True. edges_vidxs : np.ndarray Array of shape (n, 2) with vertex indices (i,j) for each 3D point, defining the vertices of the original mesh edge intersecting the @@ -1904,116 +2068,90 @@ def level_path( ------ ValueError If vfunc is not 1-dimensional. + If multiple disconnected level paths are found (use extract_level_paths). If n_points is combined with get_tria_idx=True. If n_points is combined with get_edges=True. + + See Also + -------- + extract_level_paths : Extract multiple disconnected level paths as Polygon objects. + Recommended for advanced use cases with multiple components, custom resampling, + or when you need explicit closed/open information. 
+ + Examples + -------- + >>> # Extract a simple level curve + >>> path, length = mesh.level_path(vfunc, 0.5) + >>> print(f"Path has {path.shape[0]} points, length {length:.2f}") + + >>> # Check if the path is closed + >>> is_closed = np.allclose(path[0], path[-1]) + >>> print(f"Path is closed: {is_closed}") + + >>> # Get triangle indices for each segment + >>> path, length, tria_idx = mesh.level_path(vfunc, 0.5, get_tria_idx=True) + + >>> # Get complete edge metadata + >>> path, length, edges, relpos = mesh.level_path(vfunc, 0.5, get_edges=True) + + >>> # Resample to fixed number of points + >>> path, length = mesh.level_path(vfunc, 0.5, n_points=100) + + >>> # For multiple components, use extract_level_paths instead + >>> curves = mesh.extract_level_paths(vfunc, 0.5) + >>> for i, curve in enumerate(curves): + >>> print(f"Component {i}: {curve.points.shape[0]} points, closed={curve.closed}") """ - if vfunc.ndim > 1: - raise ValueError(f"vfunc needs to be 1-dim, but is {vfunc.ndim}-dim!") - # get intersecting triangles - intersect = vfunc[self.t] > level - t_idx = np.where( - np.logical_or( - np.sum(intersect, axis=1) == 1, np.sum(intersect, axis=1) == 2 - ) - )[0] - # reduce to triangles that intersect with level: - t_level = self.t[t_idx, :] - intersect = intersect[t_idx, :] - # trias have one vertex on one side and two on the other side of the level set - # invert trias with two true values, so that single vertex is true - intersect[np.sum(intersect, axis=1) > 1, :] = np.logical_not( - intersect[np.sum(intersect, axis=1) > 1, :] - ) - # get idx within tria with single vertex: - idx_single = np.argmax(intersect, axis=1) - idx_o1 = (idx_single + 1) % 3 - idx_o2 = (idx_single + 2) % 3 - # get global idx - gidx0 = t_level[np.arange(t_level.shape[0]), idx_single] - gidx1 = t_level[np.arange(t_level.shape[0]), idx_o1] - gidx2 = t_level[np.arange(t_level.shape[0]), idx_o2] - # sort edge indices (rows are trias, cols are the two vertex ids) - gg1 = np.sort( - 
np.concatenate((gidx0[:, np.newaxis], gidx1[:, np.newaxis]), axis=1) - ) - gg2 = np.sort( - np.concatenate((gidx0[:, np.newaxis], gidx2[:, np.newaxis]), axis=1) - ) - # concatenate all and get unique ones - gg = np.concatenate((gg1, gg2), axis=0) - gg_unique = np.unique(gg, axis=0) - # generate level set intersection points for unique edges - xl = ((level - vfunc[gg_unique[:, 0]]) - / ( vfunc[gg_unique[:, 1]] - vfunc[gg_unique[:, 0]])) - p = ((1 - xl)[:, np.newaxis] * self.v[gg_unique[:, 0]] - + xl[:, np.newaxis] * self.v[gg_unique[:, 1]]) - # fill sparse matrix with new point indices (+1 to distinguish from zero) - a_mat = sparse.csc_matrix( - (np.arange(gg_unique.shape[0]) + 1, (gg_unique[:, 0], gg_unique[:, 1])) - ) - # for each tria create one edge via lookup in matrix - edge_idxs = ( np.concatenate((a_mat[gg1[:, 0], gg1[:, 1]], - a_mat[gg2[:, 0], gg2[:, 1]]), axis=0).T - 1 ) - # lengths computation - p1 = np.squeeze(p[edge_idxs[:, 0]]) - p2 = np.squeeze(p[edge_idxs[:, 1]]) - llength = np.sqrt(((p1 - p2) ** 2).sum(1)).sum() - # compute path from unordered, not-directed edge list - # __reduce_edges_to_path now returns lists (can have multiple components) - if get_tria_idx: - paths, edge_idxs_list = TriaMesh.__reduce_edges_to_path( - edge_idxs, get_edge_idx=True + # Use extract_level_paths to get polygons + curves = self.extract_level_paths(vfunc, level) + + # level_path expects single component + if len(curves) != 1: + raise ValueError( + f"Found {len(curves)} disconnected level curves. " + f"Use extract_level_paths() for multiple components." ) - # level_path expects single component - if len(paths) != 1: - raise ValueError( - f"Found {len(paths)} disconnected level curves. " - f"Use extract_level_curves() for multiple components." 
- ) - path = paths[0] - edge_idx = edge_idxs_list[0] - # translate local edge id to global tria id - tria_idx = t_idx[edge_idx] - else: - paths = TriaMesh.__reduce_edges_to_path(edge_idxs, get_edge_idx=False) - # level_path expects single component - if len(paths) != 1: - raise ValueError( - f"Found {len(paths)} disconnected level curves. " - f"Use extract_level_curves() for multiple components." - ) - path = paths[0] - - # remove duplicate vertices (happens when levelset hits a vertex almost - # perfectly) - path3d = p[path, :] - dd = ((path3d[0:-1, :] - path3d[1:, :]) ** 2).sum(1) - # append 1 (never delete last node, if identical to the one before, we delete - # the one before) - dd = np.append(dd, 1) - eps = 0.000001 - keep_ids = dd > eps - path3d = path3d[keep_ids, :] - edges_vidxs = gg_unique[path, :] - edges_vidxs = edges_vidxs[keep_ids, :] - edges_relpos = xl[path] - edges_relpos = edges_relpos[keep_ids] - if get_tria_idx: - if n_points: + + # Get the single curve + curve = curves[0] + + # Extract data from polygon + path3d = curve.points + edges_vidxs = curve.edge_vidx + edges_relpos = curve.edge_bary + + # Compute length using polygon's length method + length = curve.length() + + # For closed loops, duplicate the last point to match first point + # This allows users to detect closure via np.allclose(path[0], path[-1]) + # and maintains backward compatibility with the original level_path behavior + if curve.closed: + path3d = np.vstack([path3d, path3d[0:1]]) + edges_vidxs = np.vstack([edges_vidxs, edges_vidxs[0:1]]) + edges_relpos = np.append(edges_relpos, edges_relpos[0]) + + # Handle optional resampling + if n_points: + if get_tria_idx: raise ValueError("n_points cannot be combined with get_tria_idx=True.") - tria_idx = tria_idx[dd[:-1] > eps] if get_edges: - return path3d, llength, tria_idx, edges_vidxs, edges_relpos + raise ValueError("n_points cannot be combined with get_edges=True.") + poly = polygon.Polygon(path3d, closed=curve.closed) + path3d = 
poly.resample(n_points=n_points, n_iter=3, inplace=False) + path3d = path3d.get_points() + + # Build return tuple based on options + if get_tria_idx: + tria_idx = curve.tria_idx + if get_edges: + return path3d, length, tria_idx, edges_vidxs, edges_relpos else: - return path3d, llength, tria_idx + return path3d, length, tria_idx else: - if n_points: - if get_edges: - raise ValueError("n_points cannot be combined with get_edges=True.") - poly = polygon.Polygon(path3d, closed=False) - path3d = poly.resample(n_points=n_points, n_iter=3, inplace=False) - path3d = path3d.get_points() if get_edges: - return path3d, llength, edges_vidxs, edges_relpos + return path3d, length, edges_vidxs, edges_relpos else: - return path3d, llength + return path3d, length + + diff --git a/lapy/utils/tests/test_tria_mesh.py b/lapy/utils/tests/test_tria_mesh.py index 3650947..820ab50 100644 --- a/lapy/utils/tests/test_tria_mesh.py +++ b/lapy/utils/tests/test_tria_mesh.py @@ -631,3 +631,475 @@ def test_2d_mesh_support(): assert v2d.shape == (4, 2), "Should return 2D vertices" np.testing.assert_array_almost_equal(v2d, vertices_2d) + +def test_critical_points_simple(): + """ + Test critical_points on a simple mesh with known extrema. 
+ """ + # Create a simple triangular mesh with 3 peaks and a valley + vertices = np.array([ + [0.0, 0.0, 0.5], # 0: center (middle height) + [1.0, 0.0, 1.0], # 1: peak + [0.0, 1.0, 0.0], # 2: valley + [-1.0, 0.0, 1.0], # 3: peak + ]) + triangles = np.array([ + [0, 1, 2], + [0, 2, 3], + [0, 3, 1], + ]) + mesh = TriaMesh(vertices, triangles) + + # Height function: z-coordinate + vfunc = vertices[:, 2] + + # Compute critical points + minima, maxima, saddles, saddle_orders = mesh.critical_points(vfunc) + + # Expected: vertex 2 is minimum (z=0), vertices 1 and 3 are maxima (z=1) + assert 2 in minima, f"Expected vertex 2 (valley) as minimum, got minima={minima}" + assert 1 in maxima or 3 in maxima, f"Expected vertices 1 or 3 (peaks) as maxima, got maxima={maxima}" + # Vertex 0 is at mid-height, surrounded by higher and lower neighbors - could be saddle or regular + + # Simpler test: just verify the function runs without error + assert len(minima) + len(maxima) + len(saddles) > 0, "Should find some critical points" + + +def test_critical_points_saddle(): + """ + Test critical_points on a mesh with a saddle point. 
+ """ + # Create a simple saddle mesh: 3x3 grid in xy-plane + # Height creates saddle at center + vertices = np.array([ + [-1, -1, 1], # 0: corner (high) + [0, -1, 0], # 1: edge midpoint (low) + [1, -1, 1], # 2: corner (high) + [-1, 0, 0], # 3: edge midpoint (low) + [0, 0, 0.5], # 4: center (saddle) + [1, 0, 0], # 5: edge midpoint (low) + [-1, 1, 1], # 6: corner (high) + [0, 1, 0], # 7: edge midpoint (low) + [1, 1, 1], # 8: corner (high) + ], dtype=float) + + # Create triangular mesh from grid + triangles = np.array([ + [0, 1, 4], [0, 4, 3], # bottom-left quad + [1, 2, 5], [1, 5, 4], # bottom-right quad + [3, 4, 7], [3, 7, 6], # top-left quad + [4, 5, 8], [4, 8, 7], # top-right quad + ]) + mesh = TriaMesh(vertices, triangles) + + # Use z-coordinate as function + vfunc = vertices[:, 2] + + # Compute critical points + minima, maxima, saddles, saddle_orders = mesh.critical_points(vfunc) + + # Center should be a saddle (neighbors alternate high-low) + assert 4 in saddles, f"Expected vertex 4 (center) to be a saddle, saddles={saddles}" + # Check saddle order for center vertex + saddle_idx = np.where(saddles == 4)[0] + if len(saddle_idx) > 0: + order = saddle_orders[saddle_idx[0]] + assert order >= 2, f"Expected saddle order >= 2, got {order}" + + +def test_critical_points_with_ties(): + """ + Test critical_points with equal function values (tie-breaker rule). 
+ """ + # Simple triangle mesh + vertices = np.array([ + [0.0, 0.0, 0.0], + [1.0, 0.0, 0.0], + [0.5, 1.0, 0.0], + ]) + triangles = np.array([[0, 1, 2]]) + mesh = TriaMesh(vertices, triangles) + + # All vertices have same value (ties everywhere) + vfunc = np.array([1.0, 1.0, 1.0]) + + # Should not crash, tie-breaker should resolve ambiguities + minima, maxima, saddles, saddle_orders = mesh.critical_points(vfunc) + + # With tie-breaker, vertex with highest ID is treated as larger + # So vertex 2 should be maximum, vertex 0 should be minimum + assert 0 in minima, "Expected vertex 0 to be minimum with tie-breaker" + assert 2 in maxima, "Expected vertex 2 to be maximum with tie-breaker" + + +def test_critical_points_boundary(): + """ + Test critical_points on a mesh with boundary (non-closed). + """ + # Single triangle (has boundary) + vertices = np.array([ + [0.0, 0.0, 0.0], + [1.0, 0.0, 1.0], + [0.0, 1.0, 0.0], + ]) + triangles = np.array([[0, 1, 2]]) + mesh = TriaMesh(vertices, triangles) + + # Height function + vfunc = vertices[:, 2] + + minima, maxima, saddles, saddle_orders = mesh.critical_points(vfunc) + + # Vertex 1 is highest (z=1), vertices 0 and 2 are lowest (z=0) + assert 1 in maxima, f"Expected vertex 1 as maximum, got maxima={maxima}" + assert len(minima) >= 1, f"Expected at least one minimum, got {len(minima)}" + + +def test_extract_level_paths_simple(): + """ + Test extract_level_paths on a simple mesh with single level curve. 
+ """ + # Create a simple mesh: unit square in xy-plane + vertices = np.array([ + [0.0, 0.0, 0.0], + [1.0, 0.0, 0.0], + [1.0, 1.0, 1.0], + [0.0, 1.0, 1.0], + ]) + triangles = np.array([ + [0, 1, 2], + [0, 2, 3], + ]) + mesh = TriaMesh(vertices, triangles) + + # Height function (z-coordinate) + vfunc = vertices[:, 2] + + # Extract level curve at z=0.5 + curves = mesh.extract_level_paths(vfunc, 0.5) + + # Should have at least one curve + assert len(curves) > 0, "Expected at least one level curve" + + # Check that returned objects are Polygon + from ...polygon import Polygon + for curve in curves: + assert isinstance(curve, Polygon), f"Expected Polygon, got {type(curve)}" + + # Check that polygons have required attributes + for curve in curves: + assert hasattr(curve, 'points'), "Polygon should have 'points' attribute" + assert hasattr(curve, 'closed'), "Polygon should have 'closed' attribute" + assert hasattr(curve, 'tria_idx'), "Polygon should have 'tria_idx' attribute" + assert hasattr(curve, 'edge_vidx'), "Polygon should have 'edge_vidx' attribute" + assert hasattr(curve, 'edge_bary'), "Polygon should have 'edge_bary' attribute" + + # Check that points are 3D + for curve in curves: + assert curve.points.shape[1] == 3, f"Expected 3D points, got shape {curve.points.shape}" + + +def test_extract_level_paths_closed_loop(): + """ + Test extract_level_paths on a mesh with closed loop level curve. 
+ """ + # Create a pyramid mesh + vertices = np.array([ + [0.0, 0.0, 0.0], # base + [1.0, 0.0, 0.0], # base + [1.0, 1.0, 0.0], # base + [0.0, 1.0, 0.0], # base + [0.5, 0.5, 1.0], # apex + ]) + triangles = np.array([ + [0, 1, 4], + [1, 2, 4], + [2, 3, 4], + [3, 0, 4], + [0, 2, 1], + [0, 3, 2], + ]) + mesh = TriaMesh(vertices, triangles) + + # Height function + vfunc = vertices[:, 2] + + # Extract level curve at mid-height (should create closed loop around pyramid) + curves = mesh.extract_level_paths(vfunc, 0.5) + + assert len(curves) > 0, "Expected at least one level curve" + + # At least one curve should be closed + has_closed = any(curve.closed for curve in curves) + # Note: May or may not be closed depending on mesh topology + + # All points should be approximately at z=0.5 + for curve in curves: + z_coords = curve.points[:, 2] + np.testing.assert_allclose(z_coords, 0.5, atol=1e-5, + err_msg="Level curve points should be at z=0.5") + + +def test_extract_level_paths_no_intersection(): + """ + Test extract_level_paths when level doesn't intersect mesh. + """ + # Simple triangle + vertices = np.array([ + [0.0, 0.0, 0.0], + [1.0, 0.0, 1.0], + [0.0, 1.0, 1.0], + ]) + triangles = np.array([[0, 1, 2]]) + mesh = TriaMesh(vertices, triangles) + + # Height function + vfunc = vertices[:, 2] + + # Extract level curve at z=10.0 (way above mesh) + curves = mesh.extract_level_paths(vfunc, 10.0) + + # Should return empty list + assert len(curves) == 0, f"Expected no curves, got {len(curves)}" + + +def test_extract_level_paths_metadata(): + """ + Test that extract_level_paths returns correct metadata. 
+ """ + # Create a simple mesh + vertices = np.array([ + [0.0, 0.0, 0.0], + [1.0, 0.0, 0.5], + [1.0, 1.0, 1.0], + [0.0, 1.0, 0.5], + ]) + triangles = np.array([ + [0, 1, 2], + [0, 2, 3], + ]) + mesh = TriaMesh(vertices, triangles) + + # Height function + vfunc = vertices[:, 2] + + # Extract level curve + curves = mesh.extract_level_paths(vfunc, 0.5) + + for curve in curves: + n_points = curve.points.shape[0] + + # Check tria_idx + assert hasattr(curve, 'tria_idx'), "Missing tria_idx" + # Number of segments (tria_idx) depends on whether curve is closed + if curve.closed: + expected_segments = n_points + else: + expected_segments = n_points - 1 + # Note: tria_idx may be shorter if duplicate points were removed + + # Check edge_vidx + assert hasattr(curve, 'edge_vidx'), "Missing edge_vidx" + assert curve.edge_vidx.shape == (n_points, 2), \ + f"edge_vidx should be (n_points, 2), got {curve.edge_vidx.shape}" + + # Check edge_bary + assert hasattr(curve, 'edge_bary'), "Missing edge_bary" + assert curve.edge_bary.shape == (n_points,), \ + f"edge_bary should be (n_points,), got {curve.edge_bary.shape}" + # Barycentric coordinates should be in [0, 1] + assert np.all(curve.edge_bary >= 0) and np.all(curve.edge_bary <= 1), \ + "edge_bary should be in [0, 1]" + + +def test_level_path(): + """ + Test level_path function for single path extraction with various options. 
+ """ + # Create a simple mesh with a clear level curve + vertices = np.array([ + [0.0, 0.0, 0.0], + [1.0, 0.0, 0.5], + [1.0, 1.0, 1.0], + [0.0, 1.0, 0.5], + ]) + triangles = np.array([ + [0, 1, 2], + [0, 2, 3], + ]) + mesh = TriaMesh(vertices, triangles) + vfunc = vertices[:, 2] + level = 0.5 + + # Test 1: Basic usage - get path and length + path, length = mesh.level_path(vfunc, level) + + assert path.ndim == 2, "Path should be 2D array" + assert path.shape[1] == 3, f"Path should have 3D points, got shape {path.shape}" + assert path.shape[0] >= 2, "Path should have at least 2 points" + assert length > 0, f"Path length should be positive, got {length}" + + # Check all points are approximately at the level + z_coords = path[:, 2] + np.testing.assert_allclose(z_coords, level, atol=1e-5, + err_msg=f"All path points should be at z={level}") + + # Test 2: Check if path is closed (first == last point) + is_closed = np.allclose(path[0], path[-1]) + if is_closed: + print(f" Path is closed (first point == last point)") + else: + print(f" Path is open (endpoints differ)") + + # Test 3: Get triangle indices + path_with_tria, length_with_tria, tria_idx = mesh.level_path( + vfunc, level, get_tria_idx=True + ) + + np.testing.assert_array_equal(path_with_tria, path, + err_msg="Path should be same with get_tria_idx=True") + assert length_with_tria == length, "Length should be same" + assert tria_idx.ndim == 1, "tria_idx should be 1D array" + # For n points, we have n-1 segments + expected_tria_len = path.shape[0] - 1 + assert tria_idx.shape[0] == expected_tria_len, \ + f"tria_idx should have {expected_tria_len} elements, got {tria_idx.shape[0]}" + # Triangle indices should be valid + assert np.all(tria_idx >= 0), "Triangle indices should be non-negative" + assert np.all(tria_idx < len(triangles)), \ + f"Triangle indices should be < {len(triangles)}" + + # Test 4: Get edge information + path_with_edges, length_with_edges, edges_vidxs, edges_relpos = mesh.level_path( + vfunc, level, 
get_edges=True + ) + + np.testing.assert_array_equal(path_with_edges, path, + err_msg="Path should be same with get_edges=True") + assert length_with_edges == length, "Length should be same" + assert edges_vidxs.shape == (path.shape[0], 2), \ + f"edges_vidxs should be (n_points, 2), got {edges_vidxs.shape}" + assert edges_relpos.shape == (path.shape[0],), \ + f"edges_relpos should be (n_points,), got {edges_relpos.shape}" + # Barycentric coordinates should be in [0, 1] + assert np.all(edges_relpos >= 0) and np.all(edges_relpos <= 1), \ + "Barycentric coordinates should be in [0, 1]" + + # Test 5: Get both triangle and edge information + result = mesh.level_path(vfunc, level, get_tria_idx=True, get_edges=True) + path_full, length_full, tria_idx_full, edges_vidxs_full, edges_relpos_full = result + + np.testing.assert_array_equal(path_full, path, + err_msg="Path should be consistent") + np.testing.assert_array_equal(tria_idx_full, tria_idx, + err_msg="tria_idx should be consistent") + np.testing.assert_array_equal(edges_vidxs_full, edges_vidxs, + err_msg="edges_vidxs should be consistent") + np.testing.assert_array_equal(edges_relpos_full, edges_relpos, + err_msg="edges_relpos should be consistent") + + # Test 6: Resampling to fixed number of points + n_resample = 50 + path_resampled, length_resampled = mesh.level_path( + vfunc, level, n_points=n_resample + ) + + assert path_resampled.shape[0] == n_resample, \ + f"Resampled path should have {n_resample} points, got {path_resampled.shape[0]}" + assert path_resampled.shape[1] == 3, "Resampled path should have 3D points" + # Length should be approximately the same + np.testing.assert_allclose(length_resampled, length, rtol=0.1, + err_msg="Resampled length should be close to original") + + # Test 7: Error case - resampling with get_tria_idx should raise ValueError + try: + mesh.level_path(vfunc, level, get_tria_idx=True, n_points=50) + assert False, "Should raise ValueError when combining n_points with get_tria_idx" + 
except ValueError as e: + assert "n_points cannot be combined with get_tria_idx" in str(e) + + # Test 8: Error case - resampling with get_edges should raise ValueError + try: + mesh.level_path(vfunc, level, get_edges=True, n_points=50) + assert False, "Should raise ValueError when combining n_points with get_edges" + except ValueError as e: + assert "n_points cannot be combined with get_edges" in str(e) + + +def test_level_path_closed_loop(): + """ + Test level_path on a mesh that produces a closed loop. + """ + # Create a pyramid mesh + vertices = np.array([ + [0.0, 0.0, 0.0], # base + [1.0, 0.0, 0.0], # base + [1.0, 1.0, 0.0], # base + [0.0, 1.0, 0.0], # base + [0.5, 0.5, 1.0], # apex + ]) + triangles = np.array([ + [0, 1, 4], + [1, 2, 4], + [2, 3, 4], + [3, 0, 4], + [0, 2, 1], + [0, 3, 2], + ]) + mesh = TriaMesh(vertices, triangles) + vfunc = vertices[:, 2] + + # Extract level at mid-height (should create closed loop around pyramid) + path, length = mesh.level_path(vfunc, 0.5) + + # For closed loops, first and last points should be identical + is_closed = np.allclose(path[0], path[-1]) + assert is_closed, "Closed loop should have first point equal to last point" + + # All points should be at z=0.5 + np.testing.assert_allclose(path[:, 2], 0.5, atol=1e-5, + err_msg="All points should be at z=0.5") + + # Length should be positive + assert length > 0, "Closed loop should have positive length" + + +def test_level_path_multiple_components_error(): + """ + Test that level_path raises ValueError when multiple components are found. 
+ """ + # Create a mesh that could have multiple level curves + # Two separate triangles at different locations + vertices = np.array([ + # First triangle + [0.0, 0.0, 0.0], + [1.0, 0.0, 1.0], + [0.5, 1.0, 0.5], + # Second triangle (disconnected) + [3.0, 0.0, 0.0], + [4.0, 0.0, 1.0], + [3.5, 1.0, 0.5], + ]) + triangles = np.array([ + [0, 1, 2], + [3, 4, 5], + ]) + mesh = TriaMesh(vertices, triangles) + vfunc = vertices[:, 2] + + # Try to extract level that intersects both triangles + # This should raise ValueError because level_path expects single component + try: + path, length = mesh.level_path(vfunc, 0.5) + # If we get here, either there's only 1 component or the test needs adjustment + # Check via extract_level_paths + curves = mesh.extract_level_paths(vfunc, 0.5) + if len(curves) > 1: + assert False, "Should have raised ValueError for multiple components" + # If only 1 component, that's fine - test is conditional + except ValueError as e: + # Expected error + assert "disconnected" in str(e).lower() or "multiple" in str(e).lower(), \ + f"Error message should mention multiple components: {e}" + + From ddbe24016e031303edc60191ee5da75c235506d0 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Wed, 21 Jan 2026 18:06:59 +0100 Subject: [PATCH 04/32] fix tests --- lapy/utils/tests/test_tria_mesh.py | 35 +++++++++--------------------- 1 file changed, 10 insertions(+), 25 deletions(-) diff --git a/lapy/utils/tests/test_tria_mesh.py b/lapy/utils/tests/test_tria_mesh.py index 820ab50..14b946d 100644 --- a/lapy/utils/tests/test_tria_mesh.py +++ b/lapy/utils/tests/test_tria_mesh.py @@ -827,10 +827,11 @@ def test_extract_level_paths_closed_loop(): # Extract level curve at mid-height (should create closed loop around pyramid) curves = mesh.extract_level_paths(vfunc, 0.5) - assert len(curves) > 0, "Expected at least one level curve" + assert len(curves) == 1, "Expected one level curve" + + # Curve should be closed + assert curves[0].closed, "Expected closed level curve" - # 
At least one curve should be closed - has_closed = any(curve.closed for curve in curves) # Note: May or may not be closed depending on mesh topology # All points should be approximately at z=0.5 @@ -892,11 +893,9 @@ def test_extract_level_paths_metadata(): # Check tria_idx assert hasattr(curve, 'tria_idx'), "Missing tria_idx" # Number of segments (tria_idx) depends on whether curve is closed - if curve.closed: - expected_segments = n_points - else: - expected_segments = n_points - 1 - # Note: tria_idx may be shorter if duplicate points were removed + expected_tria_len = n_points if curve.closed else n_points - 1 + assert curve.tria_idx.shape == (expected_tria_len,), \ + f"tria_idx should be ({expected_tria_len},), got {curve.tria_idx.shape}" # Check edge_vidx assert hasattr(curve, 'edge_vidx'), "Missing edge_vidx" @@ -947,9 +946,9 @@ def test_level_path(): # Test 2: Check if path is closed (first == last point) is_closed = np.allclose(path[0], path[-1]) if is_closed: - print(f" Path is closed (first point == last point)") + print(" Path is closed (first point == last point)") else: - print(f" Path is open (endpoints differ)") + print(" Path is open (endpoints differ)") # Test 3: Get triangle indices path_with_tria, length_with_tria, tria_idx = mesh.level_path( @@ -1011,20 +1010,6 @@ def test_level_path(): np.testing.assert_allclose(length_resampled, length, rtol=0.1, err_msg="Resampled length should be close to original") - # Test 7: Error case - resampling with get_tria_idx should raise ValueError - try: - mesh.level_path(vfunc, level, get_tria_idx=True, n_points=50) - assert False, "Should raise ValueError when combining n_points with get_tria_idx" - except ValueError as e: - assert "n_points cannot be combined with get_tria_idx" in str(e) - - # Test 8: Error case - resampling with get_edges should raise ValueError - try: - mesh.level_path(vfunc, level, get_edges=True, n_points=50) - assert False, "Should raise ValueError when combining n_points with get_edges" - 
except ValueError as e: - assert "n_points cannot be combined with get_edges" in str(e) - def test_level_path_closed_loop(): """ @@ -1095,7 +1080,7 @@ def test_level_path_multiple_components_error(): # Check via extract_level_paths curves = mesh.extract_level_paths(vfunc, 0.5) if len(curves) > 1: - assert False, "Should have raised ValueError for multiple components" + assert len(curves) == 1, "Should have raised ValueError for multiple components" # If only 1 component, that's fine - test is conditional except ValueError as e: # Expected error From 55ce122aad26d477ec1ca617b9f1c606f6869bf3 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Wed, 21 Jan 2026 18:24:25 +0100 Subject: [PATCH 05/32] fix spacing etc --- lapy/tria_mesh.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index 77baa2a..35bd152 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -1057,13 +1057,11 @@ def critical_points(self, vfunc: np.ndarray) -> tuple[np.ndarray, np.ndarray, np 2. Minima: all differences are positive (all neighbors higher) 3. Maxima: all differences are negative (all neighbors lower) 4. Saddles: count sign flips across opposite edges in triangles at vertex - - Regular point: 2 sign flips - - Simple saddle (order 2): 4 sign flips - - Higher-order saddle (order n): 2n sign flips, order = n_flips / 2 + - Regular point: 2 sign flips + - Simple saddle (order 2): 4 sign flips + - Higher-order saddle (order n): 2n sign flips, order = n_flips / 2 5. Tie-breaker: when two vertices have equal function values, the vertex - with the higher vertex ID is treated as slightly larger to remove ambiguity. - - The implementation is fully vectorized using sparse matrices and numpy operations. + with the higher vertex ID is treated as slightly larger to remove ambiguity. 
""" vfunc = np.asarray(vfunc) if len(vfunc) != self.v.shape[0]: @@ -1867,12 +1865,11 @@ def extract_level_paths( List of Polygon objects, one for each connected level curve component. Each polygon has the following additional attributes: - points : np.ndarray of shape (n_points, 3) - 3D coordinates on level curve - (access via polygon.points or polygon.get_points()) - closed : bool - whether the curve is closed - tria_idx : np.ndarray of shape (n_segments,) - triangle index for each segment - edge_vidx : np.ndarray of shape (n_points, 2) - mesh vertex indices for edge - edge_bary : np.ndarray of shape (n_points,) - barycentric coordinate [0,1] - along edge where level set intersects (0=first vertex, 1=second vertex) + along edge where level set intersects (0=first vertex, 1=second vertex) Raises ------ @@ -2014,7 +2011,7 @@ def level_path( For closed loops, the first and last points are identical (duplicated) so you can detect closure via ``np.allclose(path[0], path[-1])``. For open paths, the path has two distinct endpoints. - This function is kept mainly for backward compatability. + This function is kept mainly for backward compatibility. 
**For more advanced use cases, consider using extract_level_paths() directly:** From 56b31ef8992956b8e0aa8183c606f7fd91ab6d65 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Wed, 21 Jan 2026 18:25:21 +0100 Subject: [PATCH 06/32] Update lapy/tria_mesh.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- lapy/tria_mesh.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index 35bd152..4a42a0b 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -2015,7 +2015,7 @@ def level_path( **For more advanced use cases, consider using extract_level_paths() directly:** - - Multiple disconnected components: extract_level_pathss returns all components + - Multiple disconnected components: extract_level_paths returns all components - Custom resampling: Get Polygon objects and use Polygon.resample() directly - Metadata analysis: Access triangle indices and edge information per component - Closed loop detection: Polygon objects have a ``closed`` attribute From 67a28d079e7ea6cc5df002af1a03bfb1375b4118 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Wed, 21 Jan 2026 18:41:30 +0100 Subject: [PATCH 07/32] fix spacing etc --- lapy/tria_mesh.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index 4a42a0b..7fc4121 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -1053,15 +1053,18 @@ def critical_points(self, vfunc: np.ndarray) -> tuple[np.ndarray, np.ndarray, np Notes ----- The algorithm works by: + 1. For each vertex, compute difference: neighbor_value - vertex_value 2. Minima: all differences are positive (all neighbors higher) 3. Maxima: all differences are negative (all neighbors lower) 4. 
Saddles: count sign flips across opposite edges in triangles at vertex - - Regular point: 2 sign flips - - Simple saddle (order 2): 4 sign flips - - Higher-order saddle (order n): 2n sign flips, order = n_flips / 2 + + - Regular point: 2 sign flips + - Simple saddle (order 2): 4 sign flips + - Higher-order saddle (order n): 2n sign flips, order = n_flips / 2 + 5. Tie-breaker: when two vertices have equal function values, the vertex - with the higher vertex ID is treated as slightly larger to remove ambiguity. + with the higher vertex ID is treated as slightly larger to remove ambiguity. """ vfunc = np.asarray(vfunc) if len(vfunc) != self.v.shape[0]: @@ -1864,12 +1867,13 @@ def extract_level_paths( list[polygon.Polygon] List of Polygon objects, one for each connected level curve component. Each polygon has the following additional attributes: + - points : np.ndarray of shape (n_points, 3) - 3D coordinates on level curve - closed : bool - whether the curve is closed - tria_idx : np.ndarray of shape (n_segments,) - triangle index for each segment - edge_vidx : np.ndarray of shape (n_points, 2) - mesh vertex indices for edge - edge_bary : np.ndarray of shape (n_points,) - barycentric coordinate [0,1] - along edge where level set intersects (0=first vertex, 1=second vertex) + along edge where level set intersects (0=first vertex, 1=second vertex) Raises ------ From 21a96b4714a0c77fd1f304c34c8e2bf2bb01f7ad Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Wed, 21 Jan 2026 18:57:24 +0100 Subject: [PATCH 08/32] remove some tests and fix loop closing after resample --- lapy/tria_mesh.py | 2 ++ lapy/utils/tests/test_tria_mesh.py | 40 ------------------------------ 2 files changed, 2 insertions(+), 40 deletions(-) diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index 7fc4121..7f53674 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -2141,6 +2141,8 @@ def level_path( poly = polygon.Polygon(path3d, closed=curve.closed) path3d = poly.resample(n_points=n_points, 
n_iter=3, inplace=False) path3d = path3d.get_points() + if curve.closed: + path3d = np.vstack([path3d, path3d[0:1]]) # Build return tuple based on options if get_tria_idx: diff --git a/lapy/utils/tests/test_tria_mesh.py b/lapy/utils/tests/test_tria_mesh.py index 14b946d..6f9f954 100644 --- a/lapy/utils/tests/test_tria_mesh.py +++ b/lapy/utils/tests/test_tria_mesh.py @@ -1048,43 +1048,3 @@ def test_level_path_closed_loop(): # Length should be positive assert length > 0, "Closed loop should have positive length" - -def test_level_path_multiple_components_error(): - """ - Test that level_path raises ValueError when multiple components are found. - """ - # Create a mesh that could have multiple level curves - # Two separate triangles at different locations - vertices = np.array([ - # First triangle - [0.0, 0.0, 0.0], - [1.0, 0.0, 1.0], - [0.5, 1.0, 0.5], - # Second triangle (disconnected) - [3.0, 0.0, 0.0], - [4.0, 0.0, 1.0], - [3.5, 1.0, 0.5], - ]) - triangles = np.array([ - [0, 1, 2], - [3, 4, 5], - ]) - mesh = TriaMesh(vertices, triangles) - vfunc = vertices[:, 2] - - # Try to extract level that intersects both triangles - # This should raise ValueError because level_path expects single component - try: - path, length = mesh.level_path(vfunc, 0.5) - # If we get here, either there's only 1 component or the test needs adjustment - # Check via extract_level_paths - curves = mesh.extract_level_paths(vfunc, 0.5) - if len(curves) > 1: - assert len(curves) == 1, "Should have raised ValueError for multiple components" - # If only 1 component, that's fine - test is conditional - except ValueError as e: - # Expected error - assert "disconnected" in str(e).lower() or "multiple" in str(e).lower(), \ - f"Error message should mention multiple components: {e}" - - From 611e64deaa61768457c0a6e0546b242f3ad1390d Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Fri, 30 Jan 2026 19:12:19 +0100 Subject: [PATCH 09/32] flexible vox2surfRAS conversion in write_fssurf --- lapy/_tria_io.py 
| 40 ++++++++++++++++++++++++++++++++++------ lapy/tria_mesh.py | 30 ++++++++++++++++++++++-------- 2 files changed, 56 insertions(+), 14 deletions(-) diff --git a/lapy/_tria_io.py b/lapy/_tria_io.py index ee83624..d5195da 100644 --- a/lapy/_tria_io.py +++ b/lapy/_tria_io.py @@ -534,7 +534,12 @@ def write_vtk(tria: "TriaMesh", filename: str) -> None: f.close() -def write_fssurf(tria: "TriaMesh", filename: str, image: Optional[object] = None) -> None: +def write_fssurf( + tria: "TriaMesh", + filename: str, + image: Optional[object] = None, + coords_are_voxels: Optional[bool] = None, +) -> None: """Save Freesurfer Surface Geometry file (wrap Nibabel). Parameters @@ -544,13 +549,23 @@ def write_fssurf(tria: "TriaMesh", filename: str, image: Optional[object] = None filename : str Filename to save to. image : str, object, or None, default=None - Path to image, nibabel image object, or image header. If specified, the vertices - are assumed to be in voxel coordinates and are converted to surface RAS (tkr) - coordinates before saving. The expected order of coordinates is (x, y, z) - matching the image voxel indices in nibabel. + Path to image, nibabel image object, or image header. If specified, volume_info + will be extracted from the image header, and by default, vertices are assumed to + be in voxel coordinates and will be converted to surface RAS (tkr) before saving. + The expected order of coordinates is (x, y, z) matching the image voxel indices + in nibabel. + coords_are_voxels : bool or None, default=None + Specifies whether vertices are in voxel coordinates. If None (default), the + behavior is inferred: when image is provided, vertices are assumed to be in + voxel space and converted to surface RAS; when image is not provided, vertices + are assumed to already be in surface RAS. Set explicitly to True to force + conversion (requires image), or False to skip conversion even when image is + provided (only extracts volume_info). 
Raises ------ + ValueError + If coords_are_voxels is explicitly True but image is None. OSError If file is not writable. TypeError @@ -562,6 +577,17 @@ def write_fssurf(tria: "TriaMesh", filename: str, image: Optional[object] = None ``get_vox2ras_tkr()`` (e.g., ``MGHHeader``). For other header types (NIfTI1/2, Analyze/SPM, etc.), we attempt conversion via ``MGHHeader.from_header``. """ + # Infer coords_are_voxels if not explicitly set + if coords_are_voxels is None: + coords_are_voxels = image is not None + + # Validate parameters + if coords_are_voxels and image is None: + raise ValueError( + "coords_are_voxels=True requires an image to be provided for voxel-to-surface-RAS " + "coordinate conversion. Either provide an image or set coords_are_voxels=False." + ) + # open file try: from nibabel.freesurfer.io import write_geometry @@ -594,7 +620,9 @@ def write_fssurf(tria: "TriaMesh", filename: str, image: Optional[object] = None "via MGHHeader.from_header)." ) from e - v = apply_affine(header.get_vox2ras_tkr(), v) + # Convert from voxel to surface RAS coordinates if requested + if coords_are_voxels: + v = apply_affine(header.get_vox2ras_tkr(), v) # create volume_info from header affine = header.get_best_affine() diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index 7f53674..37d5ec2 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -201,7 +201,12 @@ def write_vtk(self, filename: str) -> None: """ io.write_vtk(self, filename) - def write_fssurf(self, filename: str, image: Optional[object] = None) -> None: + def write_fssurf( + self, + filename: str, + image: Optional[object] = None, + coords_are_voxels: Optional[bool] = None, + ) -> None: """Save as Freesurfer Surface Geometry file (wrap Nibabel). Parameters @@ -209,17 +214,26 @@ def write_fssurf(self, filename: str, image: Optional[object] = None) -> None: filename : str Filename to save to. image : str, object, None - Path to image, nibabel image object, or image header. 
If specified, the vertices - are assumed to be in voxel coordinates and are converted to surface RAS (tkr) - coordinates before saving. The expected order of coordinates is (x, y, z) - matching the image voxel indices in nibabel. + Path to image, nibabel image object, or image header. If specified, volume_info + will be extracted from the image header, and by default, vertices are assumed to + be in voxel coordinates and will be converted to surface RAS (tkr) before saving. + The expected order of coordinates is (x, y, z) matching the image voxel indices + in nibabel. + coords_are_voxels : bool or None, default=None + Specifies whether vertices are in voxel coordinates. If None (default), the + behavior is inferred: when image is provided, vertices are assumed to be in + voxel space and converted to surface RAS; when image is not provided, vertices + are assumed to already be in surface RAS. Set explicitly to True to force + conversion (requires image), or False to skip conversion even when image is + provided (only extracts volume_info). Raises ------ - IOError - If file cannot be written. ValueError + If coords_are_voxels is explicitly True but image is None. If image header cannot be processed. + IOError + If file cannot be written. Notes ----- @@ -227,7 +241,7 @@ def write_fssurf(self, filename: str, image: Optional[object] = None) -> None: ``get_vox2ras_tkr()`` (e.g., ``MGHHeader``). For other header types (NIfTI1/2, Analyze/SPM, etc.), we attempt conversion via ``MGHHeader.from_header``. """ - io.write_fssurf(self, filename, image=image) + io.write_fssurf(self, filename, image=image, coords_are_voxels=coords_are_voxels) def _construct_adj_sym(self): """Construct symmetric adjacency matrix (edge graph) of triangle mesh. 
From f92def1a307dc32a2b0e24fc7f80e2136e8b8a00 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Mon, 9 Feb 2026 17:48:55 +0100 Subject: [PATCH 10/32] improve doc for normalize_ev --- lapy/shapedna.py | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/lapy/shapedna.py b/lapy/shapedna.py index a6d321b..9aaac92 100644 --- a/lapy/shapedna.py +++ b/lapy/shapedna.py @@ -171,9 +171,9 @@ def normalize_ev( evals: np.ndarray, method: str = "geometry", ) -> np.ndarray: - """Normalize eigenvalues for a surface or a volume. + """Normalize eigenvalues for a 2D surface or a 3D solid. - Normalizes eigenvalues to account for different mesh sizes and dimensions, + Normalizes eigenvalues to unit surface area or unit volume, enabling meaningful comparison between different shapes. Parameters @@ -185,8 +185,8 @@ def normalize_ev( method : {'surface', 'volume', 'geometry'}, default='geometry' Normalization method: - - 'surface': Normalize by surface area (for 2D surfaces) - - 'volume': Normalize by enclosed volume (for 3D objects) + - 'surface': Normalize to unit surface area + - 'volume': Normalize to unit volume - 'geometry': Automatically choose surface for TriaMesh, volume for TetMesh Returns @@ -197,13 +197,18 @@ def normalize_ev( Raises ------ ValueError - If method is not one of 'surface', 'volume', or 'geometry'. - If geometry type is unsupported for the chosen normalization. - If the measure (area/volume) is not positive. + If the method is not one of 'surface', 'volume', or 'geometry'. + If the geometry type is unsupported for the chosen normalization. + If method=volume and the volume of a surface is not defined. + + Notes + ----- + For TriaMesh with 'volume' method, the mesh must be closed and oriented + to compute a valid enclosed volume. 
""" geom_type = type(geom).__name__ if method == "surface": - return evals * _surface_measure(geom) ** (2.0 / 2.0) + return evals * _surface_measure(geom) if method == "volume": if geom_type == "TriaMesh": @@ -214,7 +219,7 @@ def normalize_ev( if method == "geometry": if geom_type == "TriaMesh": - return evals * _surface_measure(geom) ** (2.0 / 2.0) + return evals * _surface_measure(geom) if geom_type == "TetMesh": return evals * _boundary_volume(geom) ** (2.0 / 3.0) raise ValueError("Unsupported geometry type for geometry normalization") From 29410deff729b0f5474b8a06c465ea152336e7e6 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Mon, 9 Feb 2026 17:49:50 +0100 Subject: [PATCH 11/32] improve input parameter check for smooth_taubin --- lapy/polygon.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/lapy/polygon.py b/lapy/polygon.py index 2185a5d..45f668a 100644 --- a/lapy/polygon.py +++ b/lapy/polygon.py @@ -442,12 +442,13 @@ def smooth_taubin( Polygon Smoothed polygon. Returns self if inplace=True, new instance otherwise. 
""" - if n <= 0: - raise ValueError("n must be a positive integer") + # Input validation to enforce documented parameter ranges + if not isinstance(n, int) or n <= 0: + raise ValueError(f"n must be a positive integer, got {n!r}") if lambda_ <= 0: - raise ValueError("lambda_ must be positive") + raise ValueError(f"lambda_ must be positive, got {lambda_!r}") if mu >= 0: - raise ValueError("mu must be negative") + raise ValueError(f"mu must be negative, got {mu!r}") mat = self._construct_smoothing_matrix() points_smooth = self.points.copy() From b12defc3b0a3baf7d2583a28f78bf34dd289a1bf Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Mon, 9 Feb 2026 17:51:24 +0100 Subject: [PATCH 12/32] fix tocsr bug, error message, and expose sigma --- lapy/solver.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/lapy/solver.py b/lapy/solver.py index 7febed1..0ab5ddc 100644 --- a/lapy/solver.py +++ b/lapy/solver.py @@ -665,7 +665,7 @@ def _fem_voxels( b = sparse.csc_matrix((local_b, (i, j)), dtype=dtype) return a, b - def eigs(self, k: int = 10) -> tuple[np.ndarray, np.ndarray]: + def eigs(self, k: int = 10, sigma: float = -0.01) -> tuple[np.ndarray, np.ndarray]: """Compute the linear finite-element method Laplace-Beltrami spectrum. Parameters @@ -674,6 +674,10 @@ def eigs(self, k: int = 10) -> tuple[np.ndarray, np.ndarray]: The number of eigenvalues and eigenvectors desired. ``k`` must be smaller than ``N`` (number of vertices). It is not possible to compute all eigenvectors of a matrix. + sigma : float, default=-0.01 + Shift value for the shift-invert mode. The solver finds eigenvalues + near sigma. Negative values work well for finding smallest eigenvalues. + Adjust if convergence issues occur (typically small negative). 
Returns ------- @@ -688,7 +692,6 @@ def eigs(self, k: int = 10) -> tuple[np.ndarray, np.ndarray]: """ from scipy.sparse.linalg import LinearOperator, eigsh - sigma = -0.01 if self.use_cholmod: logger.info("Solver: Cholesky decomposition from scikit-sparse cholmod ...") chol = self.sksparse.cholmod.cholesky(self.stiffness - sigma * self.mass) @@ -829,7 +832,7 @@ def poisson( ndat = ntup[1] if not (len(nidx) > 0 and len(nidx) == len(ndat)): raise ValueError( - "dtup should contain index and data arrays (same lengths > 0)" + "ntup should contain index and data arrays (same lengths > 0)" ) nvec = sparse.csc_matrix( (np.tile(ndat, n_rhs), @@ -855,7 +858,7 @@ def poisson( a = a[mask, :] a = a.tocsc() elif self.stiffness.getformat() == "csr": - a = self.stiffness[mask, :].tocrc() + a = self.stiffness[mask, :].tocsr() a = a[:, mask] else: raise ValueError("A matrix needs to be sparse CSC or CSR") From 33c523ae6499d783bfe4bdf9667b3229f4fca6ad Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 10 Feb 2026 14:24:43 +0100 Subject: [PATCH 13/32] add Polygon xref alias --- doc/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/conf.py b/doc/conf.py index a372b48..d424ae2 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -143,6 +143,7 @@ # LaPy "TetMesh": "lapy.TetMesh", "TriaMesh": "lapy.TriaMesh", + "Polygon": "lapy.Polygon", # Matplotlib "Axes": "matplotlib.axes.Axes", "Figure": "matplotlib.figure.Figure", From 59f961933533d13e5b7a901fd6472cf1248c194f Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 10 Feb 2026 14:25:23 +0100 Subject: [PATCH 14/32] cache pip and avoid upload of .doctrees --- .github/workflows/code-style.yml | 1 + .github/workflows/doc.yml | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/code-style.yml b/.github/workflows/code-style.yml index 70b3515..706ccad 100644 --- a/.github/workflows/code-style.yml +++ b/.github/workflows/code-style.yml @@ -19,6 +19,7 @@ jobs: uses: actions/setup-python@v6 with: 
python-version: '3.10' + cache: 'pip' # caching pip dependencies - name: Install dependencies run: | python -m pip install --progress-bar off --upgrade pip setuptools wheel diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index 2671a9b..8bd983b 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -24,6 +24,7 @@ jobs: uses: actions/setup-python@v6 with: python-version: '3.10' + cache: 'pip' # caching pip dependencies - name: Install package run: | python -m pip install --progress-bar off --upgrade pip setuptools wheel @@ -36,7 +37,7 @@ jobs: uses: actions/upload-artifact@v7 with: name: doc-dev - path: ./doc-build/dev + path: doc-build/dev !doc-build/dev/.doctrees deploy: if: github.event_name == 'push' From 847e774ede484e757e4e9c970c5f7a0bec464ce0 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 10 Feb 2026 14:40:40 +0100 Subject: [PATCH 15/32] conf.py: bump min version, cleaner pandoc setup --- .github/workflows/doc.yml | 4 ++++ doc/conf.py | 36 ++++++++---------------------------- pyproject.toml | 3 +-- 3 files changed, 13 insertions(+), 30 deletions(-) diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index 8bd983b..6426318 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -25,6 +25,10 @@ jobs: with: python-version: '3.10' cache: 'pip' # caching pip dependencies + - name: Install system dependencies + run: | + sudo apt-get update + sudo apt-get install -y pandoc - name: Install package run: | python -m pip install --progress-bar off --upgrade pip setuptools wheel diff --git a/doc/conf.py b/doc/conf.py index d424ae2..0ea79e5 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -27,7 +27,7 @@ # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration # If your documentation needs a minimal Sphinx version, state it here. 
-needs_sphinx = "5.0" +needs_sphinx = "7.0" # The document name of the “root” document, that is, the document that contains # the root toctree directive. @@ -87,7 +87,6 @@ "css/style.css", ] html_title = project -html_show_sphinx = False # Documentation to change footer icons: # https://pradyunsg.me/furo/customisation/footer/#changing-footer-icons @@ -110,10 +109,9 @@ autosummary_generate = True # -- autodoc ----------------------------------------------------------------- -autodoc_typehints = "none" +autodoc_typehints = "description" autodoc_member_order = "groupwise" autodoc_warningiserror = True -autoclass_content = "class" # -- intersphinx ------------------------------------------------------------- intersphinx_mapping = { @@ -254,27 +252,9 @@ def linkcode_resolve(domain: str, info: Dict[str, str]) -> Optional[str]: "within_subsection_order": FileNameSortKey, } -# -- make sure pandoc gets installed ----------------------------------------- -from inspect import getsourcefile -import os - -# Get path to directory containing this file, conf.py. -DOCS_DIRECTORY = os.path.dirname(os.path.abspath(getsourcefile(lambda: 0))) - -def ensure_pandoc_installed(_): - import pypandoc - - # Download pandoc if necessary. If pandoc is already installed and on - # the PATH, the installed version will be used. Otherwise, we will - # download a copy of pandoc into docs/bin/ and add that to our PATH. - pandoc_dir = os.path.join(DOCS_DIRECTORY, "bin") - # Add dir containing pandoc binary to the PATH environment variable - if pandoc_dir not in os.environ["PATH"].split(os.pathsep): - os.environ["PATH"] += os.pathsep + pandoc_dir - pypandoc.ensure_pandoc_installed( - targetfolder=pandoc_dir, - delete_installer=True, - ) - -def setup(app): - app.connect("builder-inited", ensure_pandoc_installed) +# -- Pandoc requirement ------------------------------------------------------ +# Note: Pandoc must be installed on the system for nbsphinx to convert notebooks. 
+# Install via system package manager (apt, brew, etc.) or conda/mamba. +# See: https://pandoc.org/installing.html +# +# For CI/CD, ensure pandoc is installed in the workflow (see .github/workflows/doc.yml) diff --git a/pyproject.toml b/pyproject.toml index a2061ff..22ff70f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,13 +63,12 @@ doc = [ 'matplotlib', 'memory-profiler', 'numpydoc', - 'sphinx!=7.2.*', + 'sphinx>=7.0,!=7.2.*', 'sphinxcontrib-bibtex', 'sphinx-copybutton', 'sphinx-design', 'sphinx-gallery', 'sphinx-issues', - 'pypandoc', 'nbsphinx', 'IPython', # For syntax highlighting in notebooks 'ipykernel', From 1a8bf48bbf15818ea0d6f163f0765b805940c008 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 10 Feb 2026 15:13:44 +0100 Subject: [PATCH 16/32] codespell setup in pyproject, bumping python versions --- .codespellignore | 2 -- .github/workflows/build.yml | 2 +- .github/workflows/code-style.yml | 9 ++------- .github/workflows/doc.yml | 4 ++-- .github/workflows/publish.yml | 4 ++-- .github/workflows/pytest.yml | 6 +----- pyproject.toml | 10 ++++++++-- 7 files changed, 16 insertions(+), 21 deletions(-) delete mode 100644 .codespellignore diff --git a/.codespellignore b/.codespellignore deleted file mode 100644 index 9dbaff1..0000000 --- a/.codespellignore +++ /dev/null @@ -1,2 +0,0 @@ -coo -daty diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 04e8df6..1c28552 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -15,7 +15,7 @@ jobs: fail-fast: false matrix: os: [ubuntu, macos, windows] - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12", "3.13"] name: ${{ matrix.os }} - py${{ matrix.python-version }} runs-on: ${{ matrix.os }}-latest defaults: diff --git a/.github/workflows/code-style.yml b/.github/workflows/code-style.yml index 706ccad..dfbb554 100644 --- a/.github/workflows/code-style.yml +++ b/.github/workflows/code-style.yml @@ -15,10 +15,10 @@ jobs: 
steps: - name: Checkout repository uses: actions/checkout@v6 - - name: Setup Python 3.10 + - name: Setup Python 3.13 uses: actions/setup-python@v6 with: - python-version: '3.10' + python-version: '3.13' cache: 'pip' # caching pip dependencies - name: Install dependencies run: | @@ -28,11 +28,6 @@ jobs: run: ruff check . - name: Run codespell uses: codespell-project/actions-codespell@master - with: - check_filenames: true - check_hidden: true - skip: './.git,./build,./.mypy_cache,./.pytest_cache' - ignore_words_file: ./.codespellignore - name: Run pydocstyle run: pydocstyle . - name: Run bibclean diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index 6426318..f3d039d 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -20,10 +20,10 @@ jobs: uses: actions/checkout@v6 with: path: ./main - - name: Setup Python 3.10 + - name: Setup Python 3.13 uses: actions/setup-python@v6 with: - python-version: '3.10' + python-version: '3.13' cache: 'pip' # caching pip dependencies - name: Install system dependencies run: | diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 2d4b109..42e42a2 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -11,10 +11,10 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v6 - - name: Setup Python 3.10 + - name: Setup Python 3.13 uses: actions/setup-python@v6 with: - python-version: '3.10' + python-version: '3.13' - name: Install dependencies run: | python -m pip install --progress-bar off --upgrade pip setuptools wheel diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 548c6f6..3c2d47f 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -19,11 +19,7 @@ jobs: fail-fast: false matrix: os: [ubuntu, macos, windows] - python-version: ["3.9", "3.10", "3.11", "3.12"] - # some tests fail (numerical issues) in older python on mac, so we ... 
- exclude: - - os: macos - python-version: '3.9' + python-version: ["3.10", "3.11", "3.12", "3.13"] name: ${{ matrix.os }} - py${{ matrix.python-version }} runs-on: ${{ matrix.os }}-latest defaults: diff --git a/pyproject.toml b/pyproject.toml index 22ff70f..b79d501 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ version = '1.6.0-dev' description = 'A package for differential geometry on meshes (Laplace, FEM)' readme = 'README.md' license = {file = 'LICENSE'} -requires-python = '>=3.9' +requires-python = '>=3.10' authors = [ {name = 'Martin Reuter', email = 'martin.reuter@dzne.de'}, ] @@ -34,10 +34,10 @@ classifiers = [ 'Operating System :: Unix', 'Operating System :: MacOS', 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', 'Programming Language :: Python :: 3.12', + 'Programming Language :: Python :: 3.13', 'Natural Language :: English', 'License :: OSI Approved :: MIT License', 'Intended Audience :: Science/Research', @@ -141,6 +141,12 @@ select = [ "__init__.py" = ["F401"] "examples/*" = ["E501"] # ignore too long lines in example ipynb +[tool.codespell] +ignore-words-list = 'coo,daty' +check-filenames = true +check-hidden = true +skip = './.git,./build,./.mypy_cache,./.pytest_cache' + [tool.pytest.ini_options] minversion = '6.0' addopts = '--durations 20 --junit-xml=junit-results.xml --verbose' From 2ccfd9d9e6f665c1134b9aa85113e087f51830d3 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 10 Feb 2026 15:28:43 +0100 Subject: [PATCH 17/32] fixing ruff errors for newer python version: optional, zip with strict ... 
--- lapy/_tria_io.py | 6 ++--- lapy/conformal.py | 4 ++-- lapy/plot.py | 51 +++++++++++++++++++++---------------------- lapy/polygon.py | 3 +-- lapy/shapedna.py | 4 ++-- lapy/solver.py | 7 +++--- lapy/tet_mesh.py | 4 ++-- lapy/tria_mesh.py | 23 ++++++++++--------- lapy/utils/_config.py | 5 +++-- 9 files changed, 52 insertions(+), 55 deletions(-) diff --git a/lapy/_tria_io.py b/lapy/_tria_io.py index d5195da..3e00c01 100644 --- a/lapy/_tria_io.py +++ b/lapy/_tria_io.py @@ -4,7 +4,7 @@ """ import logging -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING import numpy as np @@ -537,8 +537,8 @@ def write_vtk(tria: "TriaMesh", filename: str) -> None: def write_fssurf( tria: "TriaMesh", filename: str, - image: Optional[object] = None, - coords_are_voxels: Optional[bool] = None, + image: object | None = None, + coords_are_voxels: bool | None = None, ) -> None: """Save Freesurfer Surface Geometry file (wrap Nibabel). diff --git a/lapy/conformal.py b/lapy/conformal.py index 3c7a0e0..11b2a8a 100644 --- a/lapy/conformal.py +++ b/lapy/conformal.py @@ -18,7 +18,7 @@ """ import importlib import logging -from typing import Any, Union +from typing import Any import numpy as np from scipy import sparse @@ -535,7 +535,7 @@ def linear_beltrami_solver( def _sparse_symmetric_solve( A: csr_matrix, - b: Union[np.ndarray, csr_matrix], + b: np.ndarray | csr_matrix, use_cholmod: bool = False ) -> np.ndarray: """Solve a sparse symmetric linear system of equations Ax = b. diff --git a/lapy/plot.py b/lapy/plot.py index 434074d..12f0694 100644 --- a/lapy/plot.py +++ b/lapy/plot.py @@ -11,7 +11,6 @@ import re from bisect import bisect -from typing import Optional, Union import numpy as np import plotly @@ -21,7 +20,7 @@ from . import TetMesh, TriaMesh -def _get_color_levels() -> list[list[Union[float, str]]]: +def _get_color_levels() -> list[list[float | str]]: """Return a pre-set colorscale. 
Returns @@ -75,7 +74,7 @@ def _get_color_levels() -> list[list[Union[float, str]]]: return colorscale -def _get_colorscale(vmin: float, vmax: float) -> list[list[Union[float, str]]]: +def _get_colorscale(vmin: float, vmax: float) -> list[list[float | str]]: """Put together a colorscale map depending on the range of v-values. Parameters @@ -143,7 +142,7 @@ def _get_colorscale(vmin: float, vmax: float) -> list[list[Union[float, str]]]: return colorscale -def _get_colorval(t: float, colormap: list[list[Union[float, str]]]) -> str: +def _get_colorval(t: float, colormap: list[list[float | str]]) -> str: """Turn a scalar value into a color value. Parameters @@ -171,7 +170,7 @@ def _get_colorval(t: float, colormap: list[list[Union[float, str]]]) -> str: return colormap[-1][1] # ok here we need to interpolate # first find two colors before and after - columns = list(zip(*colormap)) + columns = list(zip(*colormap, strict=True)) pos = bisect(columns[0], t) # compute param between pos-1 and pos values if len(columns[0]) < pos + 1 or pos == 0: @@ -192,7 +191,7 @@ def _get_colorval(t: float, colormap: list[list[Union[float, str]]]) -> str: def _map_z2color( zval: float, - colormap: Union[LinearSegmentedColormap, list[list[Union[float, str]]]], + colormap: LinearSegmentedColormap | list[list[float | str]], zmin: float, zmax: float, ) -> str: @@ -242,21 +241,21 @@ def _map_z2color( def plot_tet_mesh( tetra: "TetMesh", - vfunc: Optional[np.ndarray] = None, + vfunc: np.ndarray | None = None, plot_edges: bool = False, plot_levels: bool = False, - tfunc: Optional[np.ndarray] = None, - cutting: Optional[Union[str, list[str]]] = None, + tfunc: np.ndarray | None = None, + cutting: str | list[str] | None = None, edge_color: str = "rgb(50,50,50)", html_output: bool = False, width: int = 800, height: int = 800, flatshading: bool = False, - xrange: Optional[Union[list[float], tuple[float, float]]] = None, - yrange: Optional[Union[list[float], tuple[float, float]]] = None, - zrange: 
Optional[Union[list[float], tuple[float, float]]] = None, + xrange: list[float] | tuple[float, float] | None = None, + yrange: list[float] | tuple[float, float] | None = None, + zrange: list[float] | tuple[float, float] | None = None, showcaxis: bool = False, - caxis: Optional[Union[list[float], tuple[float, float]]] = None, + caxis: list[float] | tuple[float, float] | None = None, ) -> None: """Plot tetra meshes. @@ -394,26 +393,26 @@ def plot_tet_mesh( def plot_tria_mesh( tria: "TriaMesh", - vfunc: Optional[np.ndarray] = None, - tfunc: Optional[np.ndarray] = None, - vcolor: Optional[list[str]] = None, - tcolor: Optional[list[str]] = None, + vfunc: np.ndarray | None = None, + tfunc: np.ndarray | None = None, + vcolor: list[str] | None = None, + tcolor: list[str] | None = None, showcaxis: bool = False, - caxis: Optional[Union[list[float], tuple[float, float]]] = None, - xrange: Optional[Union[list[float], tuple[float, float]]] = None, - yrange: Optional[Union[list[float], tuple[float, float]]] = None, - zrange: Optional[Union[list[float], tuple[float, float]]] = None, + caxis: list[float] | tuple[float, float] | None = None, + xrange: list[float] | tuple[float, float] | None = None, + yrange: list[float] | tuple[float, float] | None = None, + zrange: list[float] | tuple[float, float] | None = None, plot_edges: bool = False, plot_levels: bool = False, edge_color: str = "rgb(50,50,50)", tic_color: str = "rgb(50,200,10)", - background_color: Optional[str] = None, + background_color: str | None = None, flatshading: bool = False, width: int = 800, height: int = 800, - camera: Optional[dict[str, dict[str, float]]] = None, + camera: dict[str, dict[str, float]] | None = None, html_output: bool = False, - export_png: Optional[str] = None, + export_png: str | None = None, scale_png: float = 1.0, no_display: bool = False, ) -> None: @@ -496,8 +495,8 @@ def plot_tria_mesh( " but not both at the same time" ) - x, y, z = zip(*tria.v) - i, j, k = zip(*tria.t) + x, y, z = 
zip(*tria.v, strict=True) + i, j, k = zip(*tria.t, strict=True) vlines = [] if vfunc is None: diff --git a/lapy/polygon.py b/lapy/polygon.py index 45f668a..0e4e081 100644 --- a/lapy/polygon.py +++ b/lapy/polygon.py @@ -5,7 +5,6 @@ """ import logging import sys -from typing import Optional import numpy as np from scipy import sparse @@ -67,7 +66,7 @@ class Polygon: False """ - def __init__(self, points: np.ndarray, closed: Optional[bool] = None): + def __init__(self, points: np.ndarray, closed: bool | None = None): self.points = np.array(points) # Validate non-empty polygon diff --git a/lapy/shapedna.py b/lapy/shapedna.py index 9aaac92..dee85c4 100644 --- a/lapy/shapedna.py +++ b/lapy/shapedna.py @@ -9,7 +9,7 @@ and comparison. """ import logging -from typing import TYPE_CHECKING, Optional, Union +from typing import TYPE_CHECKING, Union import numpy as np import scipy.spatial.distance as di @@ -97,7 +97,7 @@ def compute_shapedna( geom: Union["TriaMesh", "TetMesh"], k: int = 50, lump: bool = False, - aniso: Optional[Union[float, tuple[float, float]]] = None, + aniso: float | tuple[float, float] | None = None, aniso_smooth: int = 10, use_cholmod: bool = False, ) -> dict: diff --git a/lapy/solver.py b/lapy/solver.py index 0ab5ddc..f4b02dc 100644 --- a/lapy/solver.py +++ b/lapy/solver.py @@ -1,7 +1,6 @@ import importlib import logging import sys -from typing import Optional, Union import numpy as np from scipy import sparse @@ -54,9 +53,9 @@ class Solver: def __init__( self, - geometry: Union[TriaMesh, TetMesh], + geometry: TriaMesh | TetMesh, lump: bool = False, - aniso: Optional[Union[float, tuple[float, float]]] = None, + aniso: float | tuple[float, float] | None = None, aniso_smooth: int = 10, use_cholmod: bool = False, dtype: np.dtype = np.float64, @@ -718,7 +717,7 @@ def eigs(self, k: int = 10, sigma: float = -0.01) -> tuple[np.ndarray, np.ndarra def poisson( self, - h: Union[float, np.ndarray] = 0.0, + h: float | np.ndarray = 0.0, dtup: tuple = (), ntup: tuple = 
(), ) -> np.ndarray: diff --git a/lapy/tet_mesh.py b/lapy/tet_mesh.py index 63dd187..18a6884 100644 --- a/lapy/tet_mesh.py +++ b/lapy/tet_mesh.py @@ -1,5 +1,5 @@ import logging -from typing import TYPE_CHECKING, Optional, Union +from typing import TYPE_CHECKING, Union import numpy as np from scipy import sparse @@ -195,7 +195,7 @@ def avg_edge_length(self) -> float: return edgelens.mean() def boundary_tria( - self, tetfunc: Optional[np.ndarray] = None + self, tetfunc: np.ndarray | None = None ) -> Union["TriaMesh", tuple["TriaMesh", np.ndarray]]: """Get boundary triangle mesh of tetrahedra. diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index 37d5ec2..95c6e2d 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -1,7 +1,6 @@ import logging import sys import warnings -from typing import Optional, Union import numpy as np from scipy import sparse @@ -204,8 +203,8 @@ def write_vtk(self, filename: str) -> None: def write_fssurf( self, filename: str, - image: Optional[object] = None, - coords_are_voxels: Optional[bool] = None, + image: object | None = None, + coords_are_voxels: bool | None = None, ) -> None: """Save as Freesurfer Surface Geometry file (wrap Nibabel). @@ -707,7 +706,7 @@ def connected_components(self) -> tuple[int, np.ndarray]: def keep_largest_connected_component_( self, clean: bool = True - ) -> tuple[Optional[np.ndarray], Optional[np.ndarray]]: + ) -> tuple[np.ndarray | None, np.ndarray | None]: """Keep only the largest connected component of the mesh. Modifies the mesh in-place. @@ -1504,10 +1503,10 @@ def smooth_vfunc(self, vfunc, n=1): def smooth_laplace( self, - vfunc: Optional[np.ndarray] = None, + vfunc: np.ndarray | None = None, n: int = 1, lambda_: float = 0.5, - mat: Optional[sparse.csc_matrix] = None, + mat: sparse.csc_matrix | None = None, ) -> np.ndarray: """Smooth the mesh or a vertex function using Laplace smoothing. 
@@ -1551,7 +1550,7 @@ def smooth_laplace( def smooth_taubin( self, - vfunc: Optional[np.ndarray] = None, + vfunc: np.ndarray | None = None, n: int = 1, lambda_: float = 0.5, mu: float = -0.53, @@ -1614,7 +1613,7 @@ def smooth_(self, n: int = 1) -> None: self.v = vfunc return - def level_length(self, vfunc: np.ndarray, level: Union[float, np.ndarray]) -> Union[float, np.ndarray]: + def level_length(self, vfunc: np.ndarray, level: float | np.ndarray) -> float | np.ndarray: """Compute the length of level sets. For a scalar function defined on mesh vertices, computes the total length @@ -1691,9 +1690,9 @@ def level_length(self, vfunc: np.ndarray, level: Union[float, np.ndarray]) -> Un @staticmethod def __reduce_edges_to_path( edges: np.ndarray, - start_idx: Optional[int] = None, + start_idx: int | None = None, get_edge_idx: bool = False, - ) -> Union[list, tuple[list, list]]: + ) -> list | tuple[list, list]: """Reduce undirected unsorted edges to ordered path(s). Converts unordered edge pairs into ordered paths by finding traversals. @@ -1963,7 +1962,7 @@ def extract_level_paths( # Build polygon objects for each connected component polygons = [] - for path_nodes, path_edge_idx in zip(paths, path_edge_idxs): + for path_nodes, path_edge_idx in zip(paths, path_edge_idxs, strict=True): # Get 3D coordinates for this path poly_v = p[path_nodes, :] @@ -2010,7 +2009,7 @@ def level_path( level: float, get_tria_idx: bool = False, get_edges: bool = False, - n_points: Optional[int] = None, + n_points: int | None = None, ) -> tuple[np.ndarray, ...]: """Extract levelset of vfunc at a specific level as a path of 3D points. 
diff --git a/lapy/utils/_config.py b/lapy/utils/_config.py index 44722fd..774d8b1 100644 --- a/lapy/utils/_config.py +++ b/lapy/utils/_config.py @@ -1,14 +1,15 @@ import platform import re import sys +from collections.abc import Callable from functools import partial from importlib.metadata import requires, version -from typing import IO, Callable, Optional +from typing import IO import psutil -def sys_info(fid: Optional[IO] = None, developer: bool = False): +def sys_info(fid: IO | None = None, developer: bool = False): """Print the system information for debugging. Parameters From 72dd1c70ad68f1952d75cbc2319e1ab160cb330c Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 10 Feb 2026 15:34:26 +0100 Subject: [PATCH 18/32] exclude thrid-party objects from API documentation --- doc/conf.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/doc/conf.py b/doc/conf.py index 0ea79e5..f18653b 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -183,6 +183,12 @@ r"\.__iter__", r"\.__div__", r"\.__neg__", + # Imported third-party objects (not part of LaPy API) + r"\.Any$", # typing.Any + r"\.csr_matrix$", # scipy.sparse.csr_matrix + r"\.minimize$", # scipy.optimize.minimize + r"\.bisect$", # bisect.bisect + r"\.LinearSegmentedColormap$", # matplotlib.colors.LinearSegmentedColormap } # -- sphinxcontrib-bibtex ---------------------------------------------------- From c9ad42a118c1d8ada7b382b6595d856d3a9f0875 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 10 Feb 2026 16:34:50 +0100 Subject: [PATCH 19/32] fix doc build upload exclude --- .github/workflows/doc.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index f3d039d..ae68f23 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -41,7 +41,9 @@ jobs: uses: actions/upload-artifact@v7 with: name: doc-dev - path: doc-build/dev !doc-build/dev/.doctrees + path: | + doc-build/dev + !doc-build/dev/.doctrees deploy: if: 
github.event_name == 'push' From 77ed9f99b143edb1ef8c606c5e059f52f1f16b8a Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Sun, 22 Feb 2026 11:08:10 +0100 Subject: [PATCH 20/32] add gifti io support via nibabel --- lapy/_tria_io.py | 140 +++++++++++++++++++++++++++++ lapy/tria_mesh.py | 74 ++++++++++++++- lapy/utils/tests/test_tria_mesh.py | 45 ++++++++++ 3 files changed, 258 insertions(+), 1 deletion(-) diff --git a/lapy/_tria_io.py b/lapy/_tria_io.py index 3e00c01..bc7f039 100644 --- a/lapy/_tria_io.py +++ b/lapy/_tria_io.py @@ -495,6 +495,146 @@ def read_gmsh(filename: str) -> tuple[np.ndarray, dict, dict, dict, dict]: return points, cells, point_data, cell_data, field_data +def read_gifti(filename: str) -> "TriaMesh": + """Load triangle mesh from GIFTI surface file. + + GIFTI (``.surf.gii``) is the standard surface format used by HCP, FSL, + FreeSurfer (since v6), and many other neuroimaging tools. The file must + contain at least one data array with intent ``NIFTI_INTENT_POINTSET`` + (coordinates) and one with intent ``NIFTI_INTENT_TRIANGLE`` (topology). + + Parameters + ---------- + filename : str + Path to a GIFTI surface file (e.g. ``lh.pial.surf.gii``). + + Returns + ------- + TriaMesh + Loaded triangle mesh. Vertex coordinates are returned in the + coordinate system stored in the file (usually surface RAS or MNI). + + Raises + ------ + ImportError + If ``nibabel`` is not installed. + OSError + If the file is not found or not readable. + ValueError + If the GIFTI file does not contain both a POINTSET and a TRIANGLE + data array. + + Notes + ----- + The GIFTI format supports multiple coordinate systems per file + (``GiftiCoordSystem``). This function uses whatever coordinate system + nibabel exposes by default, which corresponds to the *first* coordinate + system listed in the file (typically surface RAS for FreeSurfer output). 
+
+    Examples
+    --------
+    >>> from lapy import TriaMesh
+    >>> tria = TriaMesh.read_gifti("lh.pial.surf.gii")  # doctest: +SKIP
+    """
+    logger.debug("--> GIFTI format ... ")
+    try:
+        import nibabel as nib
+    except ImportError as e:
+        raise ImportError(
+            "nibabel is required to read GIFTI files. "
+            "Install it with: pip install nibabel"
+        ) from e
+    try:
+        img = nib.load(filename)
+    except OSError:
+        logger.error("[file not found or not readable]")
+        raise
+    if not isinstance(img, nib.gifti.GiftiImage):
+        raise ValueError(
+            f"Expected a GIFTI image, but got {type(img).__name__}. "
+            "Make sure the file is a valid GIFTI surface file (.surf.gii)."
+        )
+    try:
+        coords, trias = img.agg_data(("pointset", "triangle"))
+    except Exception as exc:
+        raise ValueError(
+            f"Could not extract POINTSET and TRIANGLE data arrays from {filename}. "
+            "Make sure the file is a valid GIFTI surface file."
+        ) from exc
+    if coords is None or trias is None:
+        raise ValueError(
+            f"{filename} does not contain both a POINTSET (coordinates) and a "
+            "TRIANGLE (topology) data array."
+        )
+    coords = np.array(coords, dtype=float)
+    trias = np.array(trias, dtype=np.int64)
+    logger.info(" --> DONE ( V: %d , T: %d )", coords.shape[0], trias.shape[0])
+    from . import TriaMesh
+
+    return TriaMesh(coords, trias)
+
+
+def write_gifti(tria: "TriaMesh", filename: str) -> None:
+    """Save triangle mesh to a GIFTI surface file.
+
+    Writes the mesh as a ``.surf.gii`` GIFTI file using nibabel. The
+    coordinate data array uses intent ``NIFTI_INTENT_POINTSET`` and the
+    topology array uses intent ``NIFTI_INTENT_TRIANGLE``.
+
+    Parameters
+    ----------
+    tria : TriaMesh
+        Triangle mesh to save.
+    filename : str
+        Output filename (conventionally ends with ``.surf.gii``).
+
+    Raises
+    ------
+    ImportError
+        If ``nibabel`` is not installed.
+    OSError
+        If the file cannot be written.
+ + Notes + ----- + Vertex coordinates are stored as ``float32`` and triangle indices as + ``int32``, matching the conventions of most neuroimaging software. + + Examples + -------- + >>> from lapy import TriaMesh + >>> tria = TriaMesh.read_off("my_mesh.off") # doctest: +SKIP + >>> tria.write_gifti("my_mesh.surf.gii") # doctest: +SKIP + """ + try: + import nibabel as nib + from nibabel.gifti import GiftiDataArray, GiftiImage + from nibabel.nifti1 import intent_codes + except ImportError as e: + raise ImportError( + "nibabel is required to write GIFTI files. " + "Install it with: pip install nibabel" + ) from e + try: + coords = tria.v.astype(np.float32) + trias = tria.t.astype(np.int32) + coord_array = GiftiDataArray( + coords, + intent=intent_codes.code["NIFTI_INTENT_POINTSET"], + datatype="NIFTI_TYPE_FLOAT32", + ) + tria_array = GiftiDataArray( + trias, + intent=intent_codes.code["NIFTI_INTENT_TRIANGLE"], + datatype="NIFTI_TYPE_INT32", + ) + img = GiftiImage(darrays=[coord_array, tria_array]) + nib.save(img, filename) + except OSError: + logger.error("[File %s not writable]", filename) + raise + + def write_vtk(tria: "TriaMesh", filename: str) -> None: """Save VTK file. diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index 95c6e2d..cd753cd 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -63,7 +63,7 @@ class TriaMesh: maintaining compatibility with 2D mesh data. Empty meshes are not allowed. Use class methods (read_fssurf, read_vtk, - read_off) to load mesh data from files. + read_off, read_gifti) to load mesh data from files. """ def __init__(self, v, t, fsinfo=None): @@ -185,6 +185,48 @@ def read_vtk(cls, filename): """ return io.read_vtk(filename) + @classmethod + def read_gifti(cls, filename: str) -> "TriaMesh": + """Load triangle mesh from a GIFTI surface file. + + GIFTI (``.surf.gii``) is the standard surface format used by HCP, + FSL, FreeSurfer (since v6), and many other neuroimaging tools. 
+ + Parameters + ---------- + filename : str + Path to a GIFTI surface file (e.g. ``lh.pial.surf.gii``). + + Returns + ------- + TriaMesh + Loaded triangle mesh. Vertex coordinates are in the coordinate + system stored in the file (usually surface RAS or MNI). + + Raises + ------ + ImportError + If ``nibabel`` is not installed. + OSError + If the file is not found or not readable. + ValueError + If the GIFTI file does not contain both a POINTSET and a TRIANGLE + data array. + + Notes + ----- + The GIFTI format supports multiple coordinate systems per file. + This function uses whatever coordinate system nibabel exposes by + default, which corresponds to the first coordinate system listed in + the file (typically surface RAS for FreeSurfer output). + + Examples + -------- + >>> from lapy import TriaMesh + >>> tria = TriaMesh.read_gifti("lh.pial.surf.gii") # doctest: +SKIP + """ + return io.read_gifti(filename) + def write_vtk(self, filename: str) -> None: """Save mesh as VTK file. @@ -200,6 +242,36 @@ def write_vtk(self, filename: str) -> None: """ io.write_vtk(self, filename) + def write_gifti(self, filename: str) -> None: + """Save mesh as a GIFTI surface file. + + Writes the mesh as a ``.surf.gii`` GIFTI file using nibabel. + + Parameters + ---------- + filename : str + Output filename (conventionally ends with ``.surf.gii``). + + Raises + ------ + ImportError + If ``nibabel`` is not installed. + OSError + If the file cannot be written. + + Notes + ----- + Vertex coordinates are stored as ``float32`` and triangle indices as + ``int32``, matching the conventions of most neuroimaging software. 
+ + Examples + -------- + >>> from lapy import TriaMesh + >>> tria = TriaMesh.read_off("my_mesh.off") # doctest: +SKIP + >>> tria.write_gifti("my_mesh.surf.gii") # doctest: +SKIP + """ + io.write_gifti(self, filename) + def write_fssurf( self, filename: str, diff --git a/lapy/utils/tests/test_tria_mesh.py b/lapy/utils/tests/test_tria_mesh.py index 6f9f954..80539fe 100644 --- a/lapy/utils/tests/test_tria_mesh.py +++ b/lapy/utils/tests/test_tria_mesh.py @@ -1048,3 +1048,48 @@ def test_level_path_closed_loop(): # Length should be positive assert length > 0, "Closed loop should have positive length" + +def test_gifti_io(tmp_path): + """ + Test reading and writing GIFTI surface meshes using TriaMesh.read_gifti and write_gifti. + """ + import numpy as np + from lapy.tria_mesh import TriaMesh + + # Create a simple mesh + vertices = np.array([ + [0.0, 0.0, 0.0], + [1.0, 0.0, 0.0], + [0.0, 1.0, 0.0], + [0.0, 0.0, 1.0], + ]) + triangles = np.array([ + [0, 1, 2], + [0, 1, 3], + [0, 2, 3], + [1, 2, 3], + ]) + mesh = TriaMesh(vertices, triangles) + + # Write to GIFTI + gifti_file = tmp_path / "test.surf.gii" + mesh.write_gifti(str(gifti_file)) + + # Read back + mesh2 = TriaMesh.read_gifti(str(gifti_file)) + + # Check vertices and triangles + np.testing.assert_allclose(mesh2.v, mesh.v, rtol=1e-6, atol=1e-8) + np.testing.assert_array_equal(mesh2.t, mesh.t) + assert mesh2.v.shape == mesh.v.shape + assert mesh2.t.shape == mesh.t.shape + + # Check that mesh2 is a TriaMesh + assert isinstance(mesh2, TriaMesh) + + # Check that mesh2 is not empty + assert mesh2.v.size > 0 + assert mesh2.t.size > 0 + + # Check that mesh2 is not 2D + assert not mesh2.is_2d() From 06d613e3f71c961885afba3592ac5c46cbc594bf Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Sun, 22 Feb 2026 11:13:56 +0100 Subject: [PATCH 21/32] fix ruff --- lapy/utils/tests/test_tria_mesh.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lapy/utils/tests/test_tria_mesh.py b/lapy/utils/tests/test_tria_mesh.py index 
80539fe..9a0e244 100644 --- a/lapy/utils/tests/test_tria_mesh.py +++ b/lapy/utils/tests/test_tria_mesh.py @@ -1054,6 +1054,7 @@ def test_gifti_io(tmp_path): Test reading and writing GIFTI surface meshes using TriaMesh.read_gifti and write_gifti. """ import numpy as np + from lapy.tria_mesh import TriaMesh # Create a simple mesh From 2b9fe22e8918d1cf2142c59bfa826b8cc38690e6 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Thu, 26 Feb 2026 11:18:06 +0100 Subject: [PATCH 22/32] update license syntax --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index b79d501..47a2f6a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,8 @@ name = 'lapy' version = '1.6.0-dev' description = 'A package for differential geometry on meshes (Laplace, FEM)' readme = 'README.md' -license = {file = 'LICENSE'} +license = 'MIT' +license-files = ['LICENSE'] requires-python = '>=3.10' authors = [ {name = 'Martin Reuter', email = 'martin.reuter@dzne.de'}, @@ -39,7 +40,6 @@ classifiers = [ 'Programming Language :: Python :: 3.12', 'Programming Language :: Python :: 3.13', 'Natural Language :: English', - 'License :: OSI Approved :: MIT License', 'Intended Audience :: Science/Research', ] dependencies = [ From 4291d465a94a8296799bb9413c0a700f4ac83b3c Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 3 Mar 2026 11:47:20 +0100 Subject: [PATCH 23/32] get all loops at intersections --- lapy/tria_mesh.py | 442 +++++++++++++++++++++++++++++++++++++--------- 1 file changed, 354 insertions(+), 88 deletions(-) diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index cd753cd..8b8b062 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -1928,144 +1928,118 @@ def __reduce_edges_to_path( else: return paths - def extract_level_paths( + def _extract_level_paths_core( self, vfunc: np.ndarray, level: float, + t_idx: np.ndarray, ) -> list[polygon.Polygon]: - """Extract level set paths as Polygon objects with 
triangle/edge metadata. + """Core level-path extraction on a pre-selected subset of triangles. - For a given scalar function on mesh vertices, extracts all paths where - the function equals the specified level. Returns polygons with embedded - metadata about which triangles and edges were intersected. - Handles single open paths, closed loops, and multiple disconnected components. + Internal helper used by extract_level_paths. Computes edge-edge intersections + with the level set for the given triangle indices and assembles them into + ordered Polygon fragments. Fragments may be open (their endpoints lie on + edges adjacent to masked-out special vertices). Parameters ---------- vfunc : np.ndarray - Scalar function values at vertices, shape (n_vertices,). Must be 1D. + Scalar function values at vertices, shape (n_vertices,). level : float - Level set value to extract. + Level set value. + t_idx : np.ndarray + Indices into self.t of the triangles to process. Must already exclude + triangles whose intersection count is 0 or 3. Returns ------- list[polygon.Polygon] - List of Polygon objects, one for each connected level curve component. - Each polygon has the following additional attributes: - - - points : np.ndarray of shape (n_points, 3) - 3D coordinates on level curve - - closed : bool - whether the curve is closed - - tria_idx : np.ndarray of shape (n_segments,) - triangle index for each segment - - edge_vidx : np.ndarray of shape (n_points, 2) - mesh vertex indices for edge - - edge_bary : np.ndarray of shape (n_points,) - barycentric coordinate [0,1] - along edge where level set intersects (0=first vertex, 1=second vertex) - - Raises - ------ - ValueError - If vfunc is not 1-dimensional. + Polygon fragments. Each has attributes: points, closed, tria_idx, + edge_vidx (shape (n_points, 2) with original mesh vertex indices), + edge_bary (barycentric coordinate along each edge). 
""" - if vfunc.ndim > 1: - raise ValueError(f"vfunc needs to be 1-dim, but is {vfunc.ndim}-dim!") - - # Get intersecting triangles - intersect = vfunc[self.t] > level - t_idx = np.where( - np.logical_or( - np.sum(intersect, axis=1) == 1, np.sum(intersect, axis=1) == 2 - ) - )[0] - if t_idx.size == 0: return [] # Reduce to triangles that intersect with level t_level = self.t[t_idx, :] - intersect = intersect[t_idx, :] + intersect = vfunc[t_level] > level - # Invert triangles with two true values so single vertex is true - intersect[np.sum(intersect, axis=1) > 1, :] = np.logical_not( - intersect[np.sum(intersect, axis=1) > 1, :] - ) + # Invert triangles with two true values so the single-vertex side is true + two_true = np.sum(intersect, axis=1) > 1 + intersect[two_true, :] = ~intersect[two_true, :] - # Get index within triangle with single vertex + # Get index within triangle with single vertex above threshold idx_single = np.argmax(intersect, axis=1) idx_o1 = (idx_single + 1) % 3 idx_o2 = (idx_single + 2) % 3 - # Get global indices - gidx0 = t_level[np.arange(t_level.shape[0]), idx_single] - gidx1 = t_level[np.arange(t_level.shape[0]), idx_o1] - gidx2 = t_level[np.arange(t_level.shape[0]), idx_o2] + # Get global vertex indices + rows = np.arange(t_level.shape[0]) + gidx0 = t_level[rows, idx_single] + gidx1 = t_level[rows, idx_o1] + gidx2 = t_level[rows, idx_o2] - # Sort edge indices (rows are triangles, cols are the two vertex ids) - # This creates unique edge identifiers - gg1 = np.sort( - np.concatenate((gidx0[:, np.newaxis], gidx1[:, np.newaxis]), axis=1) - ) - gg2 = np.sort( - np.concatenate((gidx0[:, np.newaxis], gidx2[:, np.newaxis]), axis=1) - ) + # Build sorted edge pairs (unique edge identifier) + gg1 = np.sort(np.stack((gidx0, gidx1), axis=1), axis=1) + gg2 = np.sort(np.stack((gidx0, gidx2), axis=1), axis=1) - # Concatenate all edges and get unique ones + # Unique intersection edges across all triangles gg = np.concatenate((gg1, gg2), axis=0) gg_unique = 
np.unique(gg, axis=0) - # Generate level set intersection points for unique edges - # Barycentric coordinate (0=first vertex, 1=second vertex) + # Barycentric coordinate along each unique edge (0=first vertex, 1=second vertex) xl = ((level - vfunc[gg_unique[:, 0]]) / (vfunc[gg_unique[:, 1]] - vfunc[gg_unique[:, 0]])) - # 3D points on unique edges + # 3D intersection points p = ((1 - xl)[:, np.newaxis] * self.v[gg_unique[:, 0]] + xl[:, np.newaxis] * self.v[gg_unique[:, 1]]) - # Fill sparse matrix with new point indices (+1 to distinguish from zero) + # Sparse lookup: edge (u,v) -> index in gg_unique (+1 to distinguish from 0) a_mat = sparse.csc_matrix( - (np.arange(gg_unique.shape[0]) + 1, (gg_unique[:, 0], gg_unique[:, 1])) + (np.arange(gg_unique.shape[0]) + 1, + (gg_unique[:, 0], gg_unique[:, 1])), + shape=(self.v.shape[0], self.v.shape[0]), ) - # For each triangle, create edge pair via lookup in matrix - edge_idxs = (np.concatenate((a_mat[gg1[:, 0], gg1[:, 1]], - a_mat[gg2[:, 0], gg2[:, 1]]), axis=0).T - 1) + # For each triangle, look up the two intersection-edge indices + edge_idxs = ( + np.concatenate( + (np.asarray(a_mat[gg1[:, 0], gg1[:, 1]]), + np.asarray(a_mat[gg2[:, 0], gg2[:, 1]])), + axis=0, + ).T - 1 + ) - # Reduce edges to paths (returns list of paths for multiple components) - paths, path_edge_idxs = self._TriaMesh__reduce_edges_to_path(edge_idxs, get_edge_idx=True) + # Reduce edges to ordered paths / loops + paths, path_edge_idxs = self._TriaMesh__reduce_edges_to_path( + edge_idxs, get_edge_idx=True + ) - # Build polygon objects for each connected component + # Assemble Polygon objects polygons = [] for path_nodes, path_edge_idx in zip(paths, path_edge_idxs, strict=True): - # Get 3D coordinates for this path poly_v = p[path_nodes, :] - # Remove duplicate vertices (when levelset hits a vertex almost perfectly) - dd = ((poly_v[0:-1, :] - poly_v[1:, :]) ** 2).sum(1) - dd = np.append(dd, 1) # Never delete last node - eps = 0.000001 - keep_ids = dd > eps 
- poly_v = poly_v[keep_ids, :] - - # Get triangle indices for each edge - poly_tria_idx = t_idx[path_edge_idx[keep_ids[:-1]]] + # Remove near-duplicate vertices (level set nearly hits a vertex) + dd = ((poly_v[:-1, :] - poly_v[1:, :]) ** 2).sum(1) + dd = np.append(dd, 1.0) # never drop last node + keep = dd > 1e-12 + poly_v = poly_v[keep, :] - # Get edge vertex indices - poly_edge_vidx = gg_unique[path_nodes[keep_ids], :] + poly_tria_idx = t_idx[path_edge_idx[keep[:-1]]] + poly_edge_vidx = gg_unique[path_nodes[keep], :] + poly_edge_bary = xl[path_nodes[keep]] - # Get barycentric coordinates - poly_edge_bary = xl[path_nodes[keep_ids]] - - # Create polygon with metadata - # Use closed=None to let Polygon auto-detect based on endpoints - # This will automatically remove duplicate endpoint if present - n_points_before = poly_v.shape[0] + n_before = poly_v.shape[0] poly = polygon.Polygon(poly_v, closed=None) - n_points_after = poly.points.shape[0] + n_after = poly.points.shape[0] - # If Polygon removed duplicate endpoint, adjust metadata - if n_points_after < n_points_before: - # Remove last entry from metadata to match polygon points - poly_edge_vidx = poly_edge_vidx[:n_points_after] - poly_edge_bary = poly_edge_bary[:n_points_after] + # If Polygon removed duplicate endpoint (closed loop), trim metadata + if n_after < n_before: + poly_edge_vidx = poly_edge_vidx[:n_after] + poly_edge_bary = poly_edge_bary[:n_after] poly.tria_idx = poly_tria_idx poly.edge_vidx = poly_edge_vidx @@ -2075,6 +2049,298 @@ def extract_level_paths( return polygons + def extract_level_paths( + self, + vfunc: np.ndarray, + level: float, + eps: float | None = None, + ) -> list[polygon.Polygon]: + """Extract level set paths as Polygon objects with triangle/edge metadata. + + For a given scalar function on mesh vertices, extracts all paths where + the function equals the specified level. Returns polygons with embedded + metadata about which triangles and edges were intersected. 
+ + Handles single open paths, closed loops, multiple disconnected components, + and level sets that pass exactly through mesh vertices (including saddle + vertices). When the level set hits a vertex exactly: + + - At a **regular vertex**: the two path fragments are reconnected through + the vertex into a single continuous path or loop. + - At a **saddle vertex**: the level set splits into separate curves (one per + lobe). Each curve is emitted as an independent Polygon. An info message + is logged when this happens. + - At a **boundary vertex**: the open path fragment is extended to include + the vertex as its endpoint. + + Parameters + ---------- + vfunc : np.ndarray + Scalar function values at vertices, shape (n_vertices,). Must be 1D. + level : float + Level set value to extract. + eps : float or None, default=None + Tolerance for detecting vertices exactly on the level set. + If None, defaults to ``max(1e-10, 1e-7 * (|level| + 1))``. + + Returns + ------- + list[polygon.Polygon] + List of Polygon objects, one for each connected level curve component. + Each polygon has the following additional attributes: + + - points : np.ndarray of shape (n_points, 3) - 3D coordinates on level curve + - closed : bool - whether the curve is closed + - tria_idx : np.ndarray of shape (n_segments,) - triangle index for each segment + - edge_vidx : np.ndarray of shape (n_points, 2) - mesh vertex indices for the + intersected edge (original mesh vertex indices) + - edge_bary : np.ndarray of shape (n_points,) - barycentric coordinate [0,1] + along each edge where the level set intersects + (0 = first vertex of edge, 1 = second vertex) + + Raises + ------ + ValueError + If vfunc is not 1-dimensional. + + Notes + ----- + When two or more vertices of the same triangle are exactly on the level set + (the level set runs along a mesh edge), a warning is issued and those triangles + are skipped. The returned curves may be incomplete in that case. 
+ + For saddle vertices at the same level value (saddle-to-saddle paths), an info + message is logged and the fragments are emitted separately with the shared + vertex appended to each endpoint. + """ + if vfunc.ndim > 1: + raise ValueError(f"vfunc needs to be 1-dim, but is {vfunc.ndim}-dim!") + + if eps is None: + eps = max(1e-10, 1e-7 * (abs(level) + 1.0)) + + # ------------------------------------------------------------------ + # Step 1: detect vertices exactly on the level set + # ------------------------------------------------------------------ + on_level = np.abs(vfunc - level) < eps + special_verts = np.where(on_level)[0] # vertex indices + + # ------------------------------------------------------------------ + # Step 2: guard against edge-on-level (2+ vertices in same triangle) + # ------------------------------------------------------------------ + on_level_per_tri = on_level[self.t].sum(axis=1) + if (on_level_per_tri >= 2).any(): + warnings.warn( + "Some triangles have 2 or more vertices exactly on the level set " + "(the level set runs along a mesh edge). 
Those triangles will be " + "skipped and the returned curves may be incomplete.", + stacklevel=2, + ) + + # ------------------------------------------------------------------ + # Step 3: select intersecting triangles, excluding any that touch a + # special vertex (they would produce degenerate intersections) + # ------------------------------------------------------------------ + touches_special = on_level[self.t].any(axis=1) # bool, shape (n_trias,) + + intersect = vfunc[self.t] > level + row_sum = intersect.sum(axis=1) + is_crossing = (row_sum == 1) | (row_sum == 2) + t_idx = np.where(is_crossing & ~touches_special)[0] + + # ------------------------------------------------------------------ + # Step 4: extract fragments from the masked triangle set + # ------------------------------------------------------------------ + fragments = self._extract_level_paths_core(vfunc, level, t_idx) + + if len(special_verts) == 0: + # Fast path: no special vertices, nothing to reconnect + return fragments + + # ------------------------------------------------------------------ + # Step 5: build a lookup from sorted edge pair -> special vertex. + # + # When we mask the triangles that touch special vertex sv, the fragment + # endpoints land on the *opposite* edges of those masked triangles — + # i.e., edges (a, b) where triangle (sv, a, b) was masked out. + # These edges do NOT contain sv as a vertex, so we cannot detect them + # by looking for sv in the edge endpoints. Instead we build a map: + # sorted (a, b) -> sv_idx + # for every masked triangle at sv. Then we match fragment endpoint + # edges against this map. 
+ # ------------------------------------------------------------------ + from collections import defaultdict + # opposite_edge_to_sv: tuple(a,b) (sorted) -> sv_idx + opposite_edge_to_sv: dict[tuple[int, int], int] = {} + for sv_idx in special_verts.tolist(): + # All triangles that contain sv AND were masked + sv_mask = touches_special & np.any(self.t == sv_idx, axis=1) + for tri in self.t[sv_mask]: + opp = sorted(int(v) for v in tri if v != sv_idx) + if len(opp) == 2: + key = (opp[0], opp[1]) + opposite_edge_to_sv[key] = sv_idx + + def _sv_of_endpoint(frag, end): + """Return the special-vertex index whose opposite edge matches this endpoint, or None.""" + ev = frag.edge_vidx[end] # shape (2,), already sorted (gg_unique is sorted) + key = (int(min(ev)), int(max(ev))) + return opposite_edge_to_sv.get(key, None) + + # Map: sv_idx -> list of (frag_idx, which_end) + # which_end is 0 (start of fragment) or -1 (end of fragment) + sv_to_ends: dict[int, list[tuple[int, int]]] = defaultdict(list) + + for fi, frag in enumerate(fragments): + for end in (0, -1): + sv = _sv_of_endpoint(frag, end) + if sv is not None: + sv_to_ends[sv].append((fi, end)) + + # ------------------------------------------------------------------ + # Helper: build Polygon from point array + metadata arrays + # ------------------------------------------------------------------ + def _make_poly(pts, tria_idx, edge_vidx, edge_bary, closed_hint=None): + n_before = pts.shape[0] + poly = polygon.Polygon(pts, closed=closed_hint) + n_after = poly.points.shape[0] + if n_after < n_before: + edge_vidx = edge_vidx[:n_after] + edge_bary = edge_bary[:n_after] + poly.tria_idx = tria_idx + poly.edge_vidx = edge_vidx + poly.edge_bary = edge_bary + return poly + + def _frag_arrays(frag): + """Return (pts, tria_idx, edge_vidx, edge_bary) all in forward order.""" + return frag.points, frag.tria_idx, frag.edge_vidx, frag.edge_bary + + def _frag_reversed(frag): + """Return fragment arrays in reversed order.""" + pts = 
frag.points[::-1] + tria_idx = frag.tria_idx[::-1] + edge_vidx = frag.edge_vidx[::-1] + edge_bary = frag.edge_bary[::-1] + return pts, tria_idx, edge_vidx, edge_bary + + def _sv_point_and_meta(sv_idx): + """1-point arrays representing the special vertex itself.""" + pt = self.v[sv_idx][np.newaxis, :] # (1,3) + ev = np.array([[sv_idx, sv_idx]]) # (1,2) sentinel + eb = np.array([0.0]) + return pt, ev, eb + + # ------------------------------------------------------------------ + # Step 6: process each special vertex + # + # For each special vertex sv we have a list of (frag_idx, which_end) + # pairs. We resolve them in priority order: + # 1. Self-loop: fragment has BOTH its ends touching sv → close it. + # 2. Regular connector: exactly two *different* fragments each have + # ONE end at sv → connect them through sv. + # 3. Saddle / boundary: remaining unmatched ends (count ≥ 4 or odd) + # → emit each fragment separately with sv inserted at its endpoint. + # ------------------------------------------------------------------ + result: list[polygon.Polygon] = [] + used: set[int] = set() + + for sv_idx, ends in sv_to_ends.items(): + sv_pt, sv_ev, sv_eb = _sv_point_and_meta(sv_idx) + + # --- Pass 1: close self-loops (fi appears twice in ends) ------- + from collections import Counter + fi_counts = Counter(fi for fi, _ in ends) + self_loop_fis = {fi for fi, cnt in fi_counts.items() if cnt == 2} + + for fi in sorted(self_loop_fis): + if fi in used: + continue + used.add(fi) + frag = fragments[fi] + pts_f, ti_f, ev_f, eb_f = _frag_arrays(frag) + # Both ends of this fragment touch sv. 
+ # Build: sv -> fragment_body -> sv (Polygon auto-detects closure) + pts = np.vstack([sv_pt, pts_f, sv_pt]) + ti = ti_f + ev = np.vstack([sv_ev, ev_f, sv_ev]) + eb = np.concatenate([sv_eb, eb_f, sv_eb]) + result.append(_make_poly(pts, ti, ev, eb, closed_hint=None)) + + # Remaining ends after removing self-loops + remaining = [(fi, end) for fi, end in ends if fi not in self_loop_fis and fi not in used] + + if len(remaining) == 0: + continue + + if len(remaining) == 2: + # --- Pass 2: connect two different fragments through sv ---- + (fi0, end0), (fi1, end1) = remaining + if fi0 in used or fi1 in used: + continue + used.add(fi0) + used.add(fi1) + + frag0 = fragments[fi0] + frag1 = fragments[fi1] + + # Orient so sv-end is at the TAIL of each fragment + if end0 == 0: + p0, t0, e0, b0 = _frag_reversed(frag0) + else: + p0, t0, e0, b0 = _frag_arrays(frag0) + + if end1 == -1: + p1, t1, e1, b1 = _frag_reversed(frag1) + else: + p1, t1, e1, b1 = _frag_arrays(frag1) + + # Concatenate: frag0 ... sv ... frag1 + pts = np.vstack([p0, sv_pt, p1]) + ti = np.concatenate([t0, t1]) + ev = np.vstack([e0, sv_ev, e1]) + eb = np.concatenate([b0, sv_eb, b1]) + result.append(_make_poly(pts, ti, ev, eb, closed_hint=None)) + + else: + # --- Pass 3: saddle, boundary, or boundary-saddle --------- + # remaining has 1 end (boundary vertex) or 4+ ends (saddle). + # In both cases: emit each fragment with sv at its endpoint. + n_remaining = len(remaining) + if n_remaining >= 4: + logger.info( + "Vertex %d is a saddle on the level set with %d branch ends " + "at level %s. 
Emitting %d separate curves.", + sv_idx, n_remaining, level, n_remaining, + ) + for fi, end in remaining: + if fi in used: + continue + used.add(fi) + frag = fragments[fi] + if end == 0: + pts_f, ti_f, ev_f, eb_f = _frag_arrays(frag) + pts = np.vstack([sv_pt, pts_f]) + ti = ti_f + ev = np.vstack([sv_ev, ev_f]) + eb = np.concatenate([sv_eb, eb_f]) + else: + pts_f, ti_f, ev_f, eb_f = _frag_arrays(frag) + pts = np.vstack([pts_f, sv_pt]) + ti = ti_f + ev = np.vstack([ev_f, sv_ev]) + eb = np.concatenate([eb_f, sv_eb]) + result.append(_make_poly(pts, ti, ev, eb, closed_hint=False)) + + # ------------------------------------------------------------------ + # Step 7: pass through fragments that don't touch any special vertex + # ------------------------------------------------------------------ + for fi, frag in enumerate(fragments): + if fi not in used: + result.append(frag) + + return result + def level_path( self, vfunc: np.ndarray, From a5b6a077a3121d49968dcf3eb2a1acbce767b66d Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 3 Mar 2026 15:04:17 +0100 Subject: [PATCH 24/32] fix test --- lapy/utils/tests/test_tria_mesh.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lapy/utils/tests/test_tria_mesh.py b/lapy/utils/tests/test_tria_mesh.py index 9a0e244..23b8952 100644 --- a/lapy/utils/tests/test_tria_mesh.py +++ b/lapy/utils/tests/test_tria_mesh.py @@ -915,12 +915,14 @@ def test_level_path(): """ Test level_path function for single path extraction with various options. """ - # Create a simple mesh with a clear level curve + # Create a simple mesh where level=0.5 crosses edge interiors cleanly. + # Vertices 0,1 have z=0 and vertices 2,3 have z=1, so the level set at + # z=0.5 intersects two edges (not any vertex) — one per triangle. 
vertices = np.array([ [0.0, 0.0, 0.0], - [1.0, 0.0, 0.5], + [1.0, 0.0, 0.0], [1.0, 1.0, 1.0], - [0.0, 1.0, 0.5], + [0.0, 1.0, 1.0], ]) triangles = np.array([ [0, 1, 2], From bdfbae2cd7e75d21b4f908a7c627c2645699b34d Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Sat, 7 Mar 2026 23:38:37 +0100 Subject: [PATCH 25/32] fix spelling --- lapy/tria_mesh.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index 8b8b062..cb54a92 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -1934,7 +1934,7 @@ def _extract_level_paths_core( level: float, t_idx: np.ndarray, ) -> list[polygon.Polygon]: - """Core level-path extraction on a pre-selected subset of triangles. + """Core level-path extraction on a preselected subset of triangles. Internal helper used by extract_level_paths. Computes edge-edge intersections with the level set for the given triangle indices and assembles them into From 1aa1c7d5f372a310975e97a53598c32f207bbf0e Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 10 Mar 2026 14:04:33 +0100 Subject: [PATCH 26/32] add support for level set hitting vertices --- lapy/polygon.py | 17 + lapy/tria_mesh.py | 839 ++++++++++++++++++++++------------------------ 2 files changed, 414 insertions(+), 442 deletions(-) diff --git a/lapy/polygon.py b/lapy/polygon.py index 0e4e081..f446479 100644 --- a/lapy/polygon.py +++ b/lapy/polygon.py @@ -137,6 +137,23 @@ def is_closed(self) -> bool: """ return self.closed + def close(self) -> "Polygon": + """Mark the polygon as closed in-place. + + If the last point duplicates the first it is removed; otherwise the + polygon is simply flagged as closed (the closing segment is implicit, + from the last point back to the first). + + Returns + ------- + Polygon + Self, for method chaining. 
+ """ + if len(self.points) > 1 and np.allclose(self.points[0], self.points[-1]): + self.points = self.points[:-1] + self.closed = True + return self + def n_points(self) -> int: """Get number of points in polygon. diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index cb54a92..eea1d33 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -10,6 +10,137 @@ logger = logging.getLogger(__name__) + +def _reduce_edges_to_path( + edges: np.ndarray, + start_idx: int | None = None, + get_edge_idx: bool = False, +) -> list | tuple[list, list]: + """Reduce undirected unsorted edges to ordered path(s). + + Converts unordered edge pairs into ordered paths by finding traversals. + Handles single open paths, closed loops, and multiple disconnected components. + Always returns lists to handle multiple components uniformly. + + Parameters + ---------- + edges : np.ndarray + Array of shape (n, 2) with pairs of positive integer node indices + representing undirected edges. Node indices can have gaps (i.e., not all + indices from 0 to max need to appear in the edges). Only nodes that + actually appear in edges are processed. + start_idx : int or None, default=None + Preferred start node. If None, selects an endpoint (degree 1) automatically, + or arbitrary node for closed loops. Must be a node that appears in edges. + get_edge_idx : bool, default=False + If True, also return edge indices for each path segment. + + Returns + ------- + paths : list[np.ndarray] + List of ordered paths, one per connected component. For closed loops, + first node is repeated at end. + edge_idxs : list[np.ndarray] + List of edge index arrays, one per component. Only returned if + get_edge_idx=True. + + Raises + ------ + ValueError + If start_idx is invalid. + If graph has degree > 2 (branching or self-intersections). 
+ """ + edges = np.array(edges) + if edges.shape[0] == 0: + return ([], []) if get_edge_idx else [] + + # Build adjacency matrix + i = np.column_stack((edges[:, 0], edges[:, 1])).reshape(-1) + j = np.column_stack((edges[:, 1], edges[:, 0])).reshape(-1) + dat = np.ones(i.shape) + n = edges.max() + 1 + adj_matrix = sparse.csr_matrix((dat, (i, j)), shape=(n, n)) + degrees = np.asarray(adj_matrix.sum(axis=1)).ravel() + + # Find connected components + n_comp, labels = sparse.csgraph.connected_components(adj_matrix, directed=False) + + # Build edge index lookup matrix + edge_dat = np.arange(edges.shape[0]) + 1 + edge_dat = np.column_stack((edge_dat, edge_dat)).reshape(-1) + eidx_matrix = sparse.csr_matrix((edge_dat, (i, j)), shape=(n, n)) + + paths = [] + edge_idxs = [] + + for comp_id in range(n_comp): + comp_nodes = np.where(labels == comp_id)[0] + if len(comp_nodes) == 0: + continue + + comp_degrees = degrees[comp_nodes] + + # Skip isolated nodes (degree 0) that exist only due to matrix sizing. + if np.all(comp_degrees == 0): + continue + + # Reject graphs with nodes of degree > 2 (branching / self-intersections) + max_degree = comp_degrees.max() + if max_degree > 2: + high_degree_nodes = comp_nodes[comp_degrees > 2] + raise ValueError( + f"Component {comp_id}: found {len(high_degree_nodes)} node(s) with " + f"degree > 2 (max degree: {max_degree}). " + f"Only simple paths and simple closed loops are supported." + ) + + # Determine if closed loop: all nodes have degree 2 + is_closed = np.all(comp_degrees == 2) + + # For closed loops: break one edge to convert to open path for traversal + if is_closed: + start = start_idx if (start_idx is not None and start_idx in comp_nodes) else comp_nodes[0] + neighbors = adj_matrix[start, :].nonzero()[1] + neighbors_in_comp = [nb for nb in neighbors if nb in comp_nodes] + if len(neighbors_in_comp) < 2: + raise ValueError( + f"Component {comp_id}: closed loop node {start} has fewer than 2 neighbours." 
+ ) + adj_matrix = adj_matrix.tolil() + break_neighbor = neighbors_in_comp[0] + adj_matrix[start, break_neighbor] = 0 + adj_matrix[break_neighbor, start] = 0 + adj_matrix = adj_matrix.tocsr() + degrees = np.asarray(adj_matrix.sum(axis=1)).ravel() + comp_degrees = degrees[comp_nodes] + + endpoints = comp_nodes[comp_degrees == 1] + if len(endpoints) != 2: + raise ValueError( + f"Component {comp_id}: expected 2 endpoints, found {len(endpoints)}." + ) + + if not is_closed: + start = start_idx if (start_idx is not None and start_idx in endpoints) else endpoints[0] + + dist = sparse.csgraph.shortest_path(adj_matrix, indices=start, directed=False) + if np.isinf(dist[comp_nodes]).any(): + raise ValueError(f"Component {comp_id} is not fully connected.") + + path = comp_nodes[dist[comp_nodes].argsort()] + if is_closed: + path = np.append(path, path[0]) + paths.append(path) + + if get_edge_idx: + ei = path[:-1] + ej = path[1:] + eidx = np.asarray(eidx_matrix[ei, ej] - 1).ravel() + edge_idxs.append(eidx) + + return (paths, edge_idxs) if get_edge_idx else paths + + class TriaMesh: """Class representing a triangle mesh. @@ -1685,6 +1816,10 @@ def smooth_(self, n: int = 1) -> None: self.v = vfunc return + # ------------------------------------------------------------------ + # Level-set methods + # ------------------------------------------------------------------ + def level_length(self, vfunc: np.ndarray, level: float | np.ndarray) -> float | np.ndarray: """Compute the length of level sets. @@ -1759,174 +1894,6 @@ def level_length(self, vfunc: np.ndarray, level: float | np.ndarray) -> float | else: raise ValueError("No lengths computed, should never get here.") - @staticmethod - def __reduce_edges_to_path( - edges: np.ndarray, - start_idx: int | None = None, - get_edge_idx: bool = False, - ) -> list | tuple[list, list]: - """Reduce undirected unsorted edges to ordered path(s). - - Converts unordered edge pairs into ordered paths by finding traversals. 
- Handles single open paths, closed loops, and multiple disconnected components. - Always returns lists to handle multiple components uniformly. - - Parameters - ---------- - edges : np.ndarray - Array of shape (n, 2) with pairs of positive integer node indices - representing undirected edges. Node indices can have gaps (i.e., not all - indices from 0 to max need to appear in the edges). Only nodes that - actually appear in edges are processed. - start_idx : int or None, default=None - Preferred start node. If None, selects an endpoint (degree 1) automatically, - or arbitrary node for closed loops. Must be a node that appears in edges. - get_edge_idx : bool, default=False - If True, also return edge indices for each path segment. - - Returns - ------- - paths : list[np.ndarray] - List of ordered paths, one per connected component. For closed loops, - first node is repeated at end. - edge_idxs : list[np.ndarray] - List of edge index arrays, one per component. Only returned if - get_edge_idx=True. - - Raises - ------ - ValueError - If start_idx is invalid. - If graph has degree > 2 (branching or self-intersections). 
- """ - edges = np.array(edges) - if edges.shape[0] == 0: - return ([], []) if get_edge_idx else [] - - # Build adjacency matrix - i = np.column_stack((edges[:, 0], edges[:, 1])).reshape(-1) - j = np.column_stack((edges[:, 1], edges[:, 0])).reshape(-1) - dat = np.ones(i.shape) - n = edges.max() + 1 - adj_matrix = sparse.csr_matrix((dat, (i, j)), shape=(n, n)) - degrees = np.asarray(adj_matrix.sum(axis=1)).ravel() - - # Find connected components - n_comp, labels = sparse.csgraph.connected_components(adj_matrix, directed=False) - - # Build edge index lookup matrix - edge_dat = np.arange(edges.shape[0]) + 1 - edge_dat = np.column_stack((edge_dat, edge_dat)).reshape(-1) - eidx_matrix = sparse.csr_matrix((edge_dat, (i, j)), shape=(n, n)) - - paths = [] - edge_idxs = [] - - for comp_id in range(n_comp): - comp_nodes = np.where(labels == comp_id)[0] - if len(comp_nodes) == 0: - continue - - comp_degrees = degrees[comp_nodes] - - # Skip isolated nodes (degree 0) that exist only due to matrix sizing. - # When edges use indices [0, 5, 10], we create a matrix of size 11x11. - # Indices [1,2,3,4,6,7,8,9] don't appear in any edges (have degree 0). - # connected_components treats each as a separate component, but they're - # not real nodes - just phantom entries from sizing matrix to max_index+1. - if np.all(comp_degrees == 0): - continue - - # SAFETY CHECK: Reject graphs with nodes of degree > 2 - # This single check catches all problematic cases: - # - Branching structures (Y-shape, star graph) - # - Self-intersections (figure-8, etc.) - # - Trees with multiple endpoints - # Valid graphs have only degree 1 (endpoints) and degree 2 (path nodes) - max_degree = comp_degrees.max() - if max_degree > 2: - high_degree_nodes = comp_nodes[comp_degrees > 2] - raise ValueError( - f"Component {comp_id}: found {len(high_degree_nodes)} node(s) with degree > 2 " - f"(max degree: {max_degree}). Degrees: {np.sort(comp_degrees)}. " - f"This indicates branching or self-intersecting structure. 
" - f"Only simple paths and simple closed loops are supported." - ) - - # Determine if closed loop: all nodes have degree 2 - is_closed = np.all(comp_degrees == 2) - - # For closed loops: break one edge to convert to open path - if is_closed: - # Pick start node - if start_idx is not None and start_idx in comp_nodes: - start = start_idx - else: - start = comp_nodes[0] - - # Find neighbors of start node to break one edge - neighbors = adj_matrix[start, :].nonzero()[1] - neighbors_in_comp = [n for n in neighbors if n in comp_nodes] - - if len(neighbors_in_comp) < 2: - raise ValueError(f"Component {comp_id}: Closed loop node {start} should have 2 neighbors") - - # Break the edge from start to first neighbor (temporarily) - adj_matrix = adj_matrix.tolil() - break_neighbor = neighbors_in_comp[0] - adj_matrix[start, break_neighbor] = 0 - adj_matrix[break_neighbor, start] = 0 - adj_matrix = adj_matrix.tocsr() - - # Update degrees after breaking edge - degrees = np.asarray(adj_matrix.sum(axis=1)).ravel() - comp_degrees = degrees[comp_nodes] - - # Now handle as open path (both originally open and converted closed loops) - endpoints = comp_nodes[comp_degrees == 1] - - if len(endpoints) != 2: - raise ValueError( - f"Component {comp_id}: Expected 2 endpoints after breaking loop, found {len(endpoints)}" - ) - - # Select start node - if is_closed: - # For closed loops, start is already selected above - pass - elif start_idx is not None and start_idx in endpoints: - start = start_idx - else: - # For originally open paths, pick first endpoint - start = endpoints[0] - - # Use shortest_path to order nodes by distance from start - dist = sparse.csgraph.shortest_path(adj_matrix, indices=start, directed=False) - - if np.isinf(dist[comp_nodes]).any(): - raise ValueError(f"Component {comp_id} is not fully connected.") - - # Sort nodes by distance from start - path = comp_nodes[dist[comp_nodes].argsort()] - - # For closed loops: append start again to close the loop - if is_closed: - path 
= np.append(path, path[0]) - - paths.append(path) - - # Get edge indices if requested - if get_edge_idx: - ei = path[:-1] - ej = path[1:] - eidx = np.asarray(eidx_matrix[ei, ej] - 1).ravel() - edge_idxs.append(eidx) - - # Always return lists - if get_edge_idx: - return paths, edge_idxs - else: - return paths def _extract_level_paths_core( self, @@ -2013,7 +1980,7 @@ def _extract_level_paths_core( ) # Reduce edges to ordered paths / loops - paths, path_edge_idxs = self._TriaMesh__reduce_edges_to_path( + paths, path_edge_idxs = _reduce_edges_to_path( edge_idxs, get_edge_idx=True ) @@ -2057,21 +2024,16 @@ def extract_level_paths( ) -> list[polygon.Polygon]: """Extract level set paths as Polygon objects with triangle/edge metadata. - For a given scalar function on mesh vertices, extracts all paths where - the function equals the specified level. Returns polygons with embedded - metadata about which triangles and edges were intersected. + Extracts all paths where ``vfunc`` equals ``level`` on the mesh surface. + Handles open paths, closed loops, multiple disconnected components, and + level sets that pass exactly through mesh vertices: - Handles single open paths, closed loops, multiple disconnected components, - and level sets that pass exactly through mesh vertices (including saddle - vertices). When the level set hits a vertex exactly: - - - At a **regular vertex**: the two path fragments are reconnected through + - **Regular vertex**: the two adjacent fragments are reconnected through the vertex into a single continuous path or loop. - - At a **saddle vertex**: the level set splits into separate curves (one per - lobe). Each curve is emitted as an independent Polygon. An info message - is logged when this happens. - - At a **boundary vertex**: the open path fragment is extended to include - the vertex as its endpoint. + - **Saddle vertex**: the level set splits; each branch is emitted as an + independent Polygon (potentially closed) and an info message is logged. 
+ - **Boundary vertex**: the open fragment is extended to include the vertex + as its endpoint. Parameters ---------- @@ -2086,17 +2048,17 @@ def extract_level_paths( Returns ------- list[polygon.Polygon] - List of Polygon objects, one for each connected level curve component. - Each polygon has the following additional attributes: - - - points : np.ndarray of shape (n_points, 3) - 3D coordinates on level curve - - closed : bool - whether the curve is closed - - tria_idx : np.ndarray of shape (n_segments,) - triangle index for each segment - - edge_vidx : np.ndarray of shape (n_points, 2) - mesh vertex indices for the - intersected edge (original mesh vertex indices) - - edge_bary : np.ndarray of shape (n_points,) - barycentric coordinate [0,1] - along each edge where the level set intersects - (0 = first vertex of edge, 1 = second vertex) + One Polygon per connected level curve component. Each polygon + carries extra attributes set after construction: + + - ``tria_idx`` : np.ndarray, shape (n_segments,) — triangle index + per segment; ``-1`` for segments adjacent to a special vertex. + - ``edge_vidx`` : np.ndarray, shape (n_points, 2) — mesh vertex + indices of the intersected edge per point. For special-vertex + points both indices are equal (degenerate edge). + - ``edge_bary`` : np.ndarray, shape (n_points,) — barycentric + coordinate in [0, 1] along each edge (0 = first vertex, 1 = + second vertex); 0.0 for special-vertex points. Raises ------ @@ -2105,13 +2067,9 @@ def extract_level_paths( Notes ----- - When two or more vertices of the same triangle are exactly on the level set - (the level set runs along a mesh edge), a warning is issued and those triangles - are skipped. The returned curves may be incomplete in that case. - - For saddle vertices at the same level value (saddle-to-saddle paths), an info - message is logged and the fragments are emitted separately with the shared - vertex appended to each endpoint. 
+ When two or more vertices of the same triangle lie exactly on the level + set (level set runs along a mesh edge), a warning is issued and those + triangles are skipped. The returned curves may be incomplete. """ if vfunc.ndim > 1: raise ValueError(f"vfunc needs to be 1-dim, but is {vfunc.ndim}-dim!") @@ -2158,49 +2116,121 @@ def extract_level_paths( return fragments # ------------------------------------------------------------------ - # Step 5: build a lookup from sorted edge pair -> special vertex. + # Step 5: for each fragment endpoint edge (i,j), check whether it + # borders a masked triangle whose third vertex is a special vertex, + # and if so prepend/append that sv point directly into the fragment. # - # When we mask the triangles that touch special vertex sv, the fragment - # endpoints land on the *opposite* edges of those masked triangles — - # i.e., edges (a, b) where triangle (sv, a, b) was masked out. - # These edges do NOT contain sv as a vertex, so we cannot detect them - # by looking for sv in the edge endpoints. Instead we build a map: - # sorted (a, b) -> sv_idx - # for every masked triangle at sv. Then we match fragment endpoint - # edges against this map. + # We search masked_trias (the small set of triangles touching any sv) + # for a row containing both i and j. If found, the third vertex is + # by construction a special vertex — no separate sv lookup needed. 
+ # + # A special-vertex point is encoded as: + # edge_vidx = [sv_idx, sv_idx] (degenerate edge) + # edge_bary = 0.0 + # tria_idx = -1 (sentinel, no triangle crossing) # ------------------------------------------------------------------ - from collections import defaultdict - # opposite_edge_to_sv: tuple(a,b) (sorted) -> sv_idx - opposite_edge_to_sv: dict[tuple[int, int], int] = {} - for sv_idx in special_verts.tolist(): - # All triangles that contain sv AND were masked - sv_mask = touches_special & np.any(self.t == sv_idx, axis=1) - for tri in self.t[sv_mask]: - opp = sorted(int(v) for v in tri if v != sv_idx) - if len(opp) == 2: - key = (opp[0], opp[1]) - opposite_edge_to_sv[key] = sv_idx - - def _sv_of_endpoint(frag, end): - """Return the special-vertex index whose opposite edge matches this endpoint, or None.""" - ev = frag.edge_vidx[end] # shape (2,), already sorted (gg_unique is sorted) - key = (int(min(ev)), int(max(ev))) - return opposite_edge_to_sv.get(key, None) - - # Map: sv_idx -> list of (frag_idx, which_end) - # which_end is 0 (start of fragment) or -1 (end of fragment) - sv_to_ends: dict[int, list[tuple[int, int]]] = defaultdict(list) - - for fi, frag in enumerate(fragments): - for end in (0, -1): - sv = _sv_of_endpoint(frag, end) - if sv is not None: - sv_to_ends[sv].append((fi, end)) + + masked_trias = self.t[touches_special] # (n_masked, 3) + + def _sv_of_edge(i: int, j: int) -> int | None: + """Return the special vertex opposite edge (i,j) in a masked triangle, or None.""" + rows = np.where( + np.any(masked_trias == i, axis=1) & np.any(masked_trias == j, axis=1) + )[0] + if rows.size == 0: + return None + tri = masked_trias[rows[0]] + return int(tri[(tri != i) & (tri != j)][0]) + + def _sv_arrays(sv_idx: int): + """Arrays for a single special-vertex point.""" + return ( + self.v[sv_idx][np.newaxis, :], # pts (1, 3) + np.array([-1], dtype=np.intp), # ti (1,) + np.array([[sv_idx, sv_idx]]), # ev (1, 2) + np.array([0.0]), # eb (1,) + ) + + # Extend 
each fragment by prepending/appending its adjacent sv point(s). + # Fragments that are already closed, or that don't touch any sv, are + # moved straight to result. Fragments that are modified (sv prepended + # or appended) are collected in sv_frags for step 6 reassembly. + result: list[polygon.Polygon] = [] + sv_frags: list[polygon.Polygon] = [] + for frag in fragments: + if frag.closed: + # Already a closed loop — no open endpoints to attach sv to. + result.append(frag) + continue + + ev = frag.edge_vidx + sv_start = _sv_of_edge(int(ev[0, 0]), int(ev[0, 1])) + sv_end = _sv_of_edge(int(ev[-1, 0]), int(ev[-1, 1])) + + if sv_start is not None and sv_start == sv_end: + # Both ends touch the same sv: closed loop. + # Prepend the sv point directly onto the fragment and close it. + sp, st, se, sb = _sv_arrays(sv_start) + frag.points = np.vstack([sp, frag.points]) + frag.tria_idx = np.concatenate([st, frag.tria_idx]) + frag.edge_vidx = np.vstack([se, frag.edge_vidx]) + frag.edge_bary = np.concatenate([sb, frag.edge_bary]) + frag.close() + result.append(frag) + continue + + if sv_start is None and sv_end is None: + # No sv at either end — fragment is complete as-is. + result.append(frag) + continue + + if sv_start is not None: + sp, st, se, sb = _sv_arrays(sv_start) + frag.points = np.vstack([sp, frag.points]) + frag.tria_idx = np.concatenate([st, frag.tria_idx]) + frag.edge_vidx = np.vstack([se, frag.edge_vidx]) + frag.edge_bary = np.concatenate([sb, frag.edge_bary]) + + if sv_end is not None: + sp, st, se, sb = _sv_arrays(sv_end) + frag.points = np.vstack([frag.points, sp]) + frag.tria_idx = np.concatenate([frag.tria_idx, st]) + frag.edge_vidx = np.vstack([frag.edge_vidx, se]) + frag.edge_bary = np.concatenate([frag.edge_bary, sb]) + + sv_frags.append(frag) # ------------------------------------------------------------------ - # Helper: build Polygon from point array + metadata arrays + # Step 6: assemble sv_frags (fragments with sv endpoints) into final + # polygons. 
+ # + # Repeatedly merge fragment pairs that share a degree-2 sv endpoint + # until no such sv remains. A merged fragment may expose new degree-2 + # endpoints, so this is done iteratively. + # Remaining fragments (degree-1 boundary or degree ≥ 3 saddle) are + # emitted as-is; saddle vertices are logged. # ------------------------------------------------------------------ + def _make_poly(pts, tria_idx, edge_vidx, edge_bary, closed_hint=None): + """Build a Polygon and attach level-set metadata. + + Parameters + ---------- + pts : np.ndarray, shape (n, 3) + 3D point coordinates. + tria_idx : np.ndarray, shape (n_segments,) + Triangle index per segment (-1 for sv segments). + edge_vidx : np.ndarray, shape (n, 2) + Mesh vertex indices of the intersected edge per point. + edge_bary : np.ndarray, shape (n,) + Barycentric coordinate along each edge. + closed_hint : bool or None + Passed to Polygon constructor. When True the caller guarantees + no duplicate endpoint, so no metadata trimming is needed. + When None, Polygon.__init__ may remove a duplicate last point + (closed loop detected by equal first/last point), in which case + edge_vidx and edge_bary are trimmed to match. 
+ """ n_before = pts.shape[0] poly = polygon.Polygon(pts, closed=closed_hint) n_after = poly.points.shape[0] @@ -2212,132 +2242,106 @@ def _make_poly(pts, tria_idx, edge_vidx, edge_bary, closed_hint=None): poly.edge_bary = edge_bary return poly - def _frag_arrays(frag): - """Return (pts, tria_idx, edge_vidx, edge_bary) all in forward order.""" - return frag.points, frag.tria_idx, frag.edge_vidx, frag.edge_bary - - def _frag_reversed(frag): - """Return fragment arrays in reversed order.""" - pts = frag.points[::-1] - tria_idx = frag.tria_idx[::-1] - edge_vidx = frag.edge_vidx[::-1] - edge_bary = frag.edge_bary[::-1] - return pts, tria_idx, edge_vidx, edge_bary - - def _sv_point_and_meta(sv_idx): - """1-point arrays representing the special vertex itself.""" - pt = self.v[sv_idx][np.newaxis, :] # (1,3) - ev = np.array([[sv_idx, sv_idx]]) # (1,2) sentinel - eb = np.array([0.0]) - return pt, ev, eb - - # ------------------------------------------------------------------ - # Step 6: process each special vertex - # - # For each special vertex sv we have a list of (frag_idx, which_end) - # pairs. We resolve them in priority order: - # 1. Self-loop: fragment has BOTH its ends touching sv → close it. - # 2. Regular connector: exactly two *different* fragments each have - # ONE end at sv → connect them through sv. - # 3. Saddle / boundary: remaining unmatched ends (count ≥ 4 or odd) - # → emit each fragment separately with sv inserted at its endpoint. 
- # ------------------------------------------------------------------ - result: list[polygon.Polygon] = [] - used: set[int] = set() - - for sv_idx, ends in sv_to_ends.items(): - sv_pt, sv_ev, sv_eb = _sv_point_and_meta(sv_idx) - - # --- Pass 1: close self-loops (fi appears twice in ends) ------- - from collections import Counter - fi_counts = Counter(fi for fi, _ in ends) - self_loop_fis = {fi for fi, cnt in fi_counts.items() if cnt == 2} - - for fi in sorted(self_loop_fis): - if fi in used: - continue - used.add(fi) - frag = fragments[fi] - pts_f, ti_f, ev_f, eb_f = _frag_arrays(frag) - # Both ends of this fragment touch sv. - # Build: sv -> fragment_body -> sv (Polygon auto-detects closure) - pts = np.vstack([sv_pt, pts_f, sv_pt]) - ti = ti_f - ev = np.vstack([sv_ev, ev_f, sv_ev]) - eb = np.concatenate([sv_eb, eb_f, sv_eb]) - result.append(_make_poly(pts, ti, ev, eb, closed_hint=None)) - - # Remaining ends after removing self-loops - remaining = [(fi, end) for fi, end in ends if fi not in self_loop_fis and fi not in used] - - if len(remaining) == 0: - continue - - if len(remaining) == 2: - # --- Pass 2: connect two different fragments through sv ---- - (fi0, end0), (fi1, end1) = remaining - if fi0 in used or fi1 in used: - continue - used.add(fi0) - used.add(fi1) - - frag0 = fragments[fi0] - frag1 = fragments[fi1] - - # Orient so sv-end is at the TAIL of each fragment - if end0 == 0: - p0, t0, e0, b0 = _frag_reversed(frag0) - else: - p0, t0, e0, b0 = _frag_arrays(frag0) - - if end1 == -1: - p1, t1, e1, b1 = _frag_reversed(frag1) - else: - p1, t1, e1, b1 = _frag_arrays(frag1) - - # Concatenate: frag0 ... sv ... frag1 - pts = np.vstack([p0, sv_pt, p1]) - ti = np.concatenate([t0, t1]) - ev = np.vstack([e0, sv_ev, e1]) - eb = np.concatenate([b0, sv_eb, b1]) - result.append(_make_poly(pts, ti, ev, eb, closed_hint=None)) - - else: - # --- Pass 3: saddle, boundary, or boundary-saddle --------- - # remaining has 1 end (boundary vertex) or 4+ ends (saddle). 
- # In both cases: emit each fragment with sv at its endpoint. - n_remaining = len(remaining) - if n_remaining >= 4: - logger.info( - "Vertex %d is a saddle on the level set with %d branch ends " - "at level %s. Emitting %d separate curves.", - sv_idx, n_remaining, level, n_remaining, - ) - for fi, end in remaining: - if fi in used: - continue - used.add(fi) - frag = fragments[fi] - if end == 0: - pts_f, ti_f, ev_f, eb_f = _frag_arrays(frag) - pts = np.vstack([sv_pt, pts_f]) - ti = ti_f - ev = np.vstack([sv_ev, ev_f]) - eb = np.concatenate([sv_eb, eb_f]) - else: - pts_f, ti_f, ev_f, eb_f = _frag_arrays(frag) - pts = np.vstack([pts_f, sv_pt]) - ti = ti_f - ev = np.vstack([ev_f, sv_ev]) - eb = np.concatenate([eb_f, sv_eb]) - result.append(_make_poly(pts, ti, ev, eb, closed_hint=False)) + def _is_sv_point(ev_row) -> bool: + """True if this endpoint encodes a special vertex (degenerate edge).""" + return int(ev_row[0]) == int(ev_row[1]) + + def _merge_frags(frag0, frag1, sv_idx: int) -> polygon.Polygon: + """Merge frag1 onto the tail of frag0 at their shared sv endpoint. + + The duplicate sv point at the junction is dropped. + Warns and returns frag0 unchanged if sv_idx is not an endpoint of + either fragment. + + Parameters + ---------- + frag0 : Polygon + First fragment; mutated in-place. + frag1 : Polygon + Second fragment; its sv endpoint is consumed in the merge. + sv_idx : int + The special vertex index shared between the two fragments. + + Returns + ------- + Polygon + frag0 after merging (same object, mutated in-place). 
+ """ + def _has_sv_at_start(frag): + return _is_sv_point(frag.edge_vidx[0]) and int(frag.edge_vidx[0, 0]) == sv_idx + def _has_sv_at_end(frag): + return _is_sv_point(frag.edge_vidx[-1]) and int(frag.edge_vidx[-1, 0]) == sv_idx + # Step 1: validate that sv_idx is an endpoint of both fragments + frag0_has_sv = _has_sv_at_start(frag0) or _has_sv_at_end(frag0) + frag1_has_sv = _has_sv_at_start(frag1) or _has_sv_at_end(frag1) + if not frag0_has_sv or not frag1_has_sv: + warnings.warn( + f"Cannot merge fragments at sv {sv_idx}: sv not found at an " + f"endpoint of {'frag0' if not frag0_has_sv else 'frag1'}.", + stacklevel=2, + ) + return frag0 + # Step 2: orient frag0 so sv_idx is at its TAIL + if _has_sv_at_start(frag0): + frag0.points = frag0.points[::-1] + frag0.tria_idx = frag0.tria_idx[::-1] + frag0.edge_vidx = frag0.edge_vidx[::-1] + frag0.edge_bary = frag0.edge_bary[::-1] + # Step 3: orient frag1 so sv_idx is at its HEAD + if _has_sv_at_end(frag1): + frag1.points = frag1.points[::-1] + frag1.tria_idx = frag1.tria_idx[::-1] + frag1.edge_vidx = frag1.edge_vidx[::-1] + frag1.edge_bary = frag1.edge_bary[::-1] + # Step 4: concatenate, dropping the duplicate leading sv point of frag1 + frag0.points = np.vstack([frag0.points, frag1.points[1:]]) + frag0.tria_idx = np.concatenate([frag0.tria_idx, frag1.tria_idx]) + frag0.edge_vidx = np.vstack([frag0.edge_vidx, frag1.edge_vidx[1:]]) + frag0.edge_bary = np.concatenate([frag0.edge_bary, frag1.edge_bary[1:]]) + return frag0 + + # Iteratively merge at degree-2 sv endpoints until none remain. 
+ from collections import defaultdict + active = list(sv_frags) # mutable working list of open fragments + while True: + # Build sv_idx -> list of active indices with that sv as an endpoint + sv_open = defaultdict(list) + for fi, frag in enumerate(active): + ev = frag.edge_vidx + for end in (0, -1): + if _is_sv_point(ev[end]): + sv_open[int(ev[end, 0])].append(fi) + + # Find the first degree-2 sv + merge_sv = next( + (sv for sv, fis in sv_open.items() + if len(dict.fromkeys(fis)) == 2), + None, + ) + if merge_sv is None: + break # no more degree-2 sv endpoints — done + + fi0, fi1 = list(dict.fromkeys(sv_open[merge_sv])) + merged_frag = _merge_frags(active[fi0], active[fi1], merge_sv) + + # Store merged result at the lower index, remove the higher index + lo, hi = (fi0, fi1) if fi0 < fi1 else (fi1, fi0) + active[lo] = merged_frag + active.pop(hi) + + # Log saddle vertices (degree ≥ 3 after all merges are exhausted) + for sv_idx, fis in sv_open.items(): + if len(dict.fromkeys(fis)) >= 3: + logger.info( + "Vertex %d is a saddle on the level set with %d open branch ends " + "at level %s. Emitting %d separate curves.", + sv_idx, len(dict.fromkeys(fis)), level, len(dict.fromkeys(fis)), + ) - # ------------------------------------------------------------------ - # Step 7: pass through fragments that don't touch any special vertex - # ------------------------------------------------------------------ - for fi, frag in enumerate(fragments): - if fi not in used: - result.append(frag) + # Emit all remaining active fragments (boundary ends and saddle branches) + for frag in active: + result.append(_make_poly(frag.points, frag.tria_idx, + frag.edge_vidx, frag.edge_bary)) return result @@ -2349,31 +2353,16 @@ def level_path( get_edges: bool = False, n_points: int | None = None, ) -> tuple[np.ndarray, ...]: - """Extract levelset of vfunc at a specific level as a path of 3D points. 
- - For a given real-valued scalar map on the surface mesh (vfunc), this - function computes the edges that intersect with a given level set (level). - It then finds the point on each mesh edge where the level set intersects. - The points are sorted and returned as an ordered array of 3D coordinates - together with the length of the level set path. - - This implementation uses extract_level_paths internally to compute the - level set, ensuring consistent handling of closed loops and metadata. - - .. note:: - - Works for level sets that represent a single path or closed loop. - For closed loops, the first and last points are identical (duplicated) - so you can detect closure via ``np.allclose(path[0], path[-1])``. - For open paths, the path has two distinct endpoints. - This function is kept mainly for backward compatibility. + """Extract a single level set path as an array of 3D points. - **For more advanced use cases, consider using extract_level_paths() directly:** + Thin wrapper around :meth:`extract_level_paths` for the common case of + a single connected level curve. Raises ``ValueError`` if the level set + has more (or fewer) than one connected component; use + :meth:`extract_level_paths` directly for those cases. - - Multiple disconnected components: extract_level_paths returns all components - - Custom resampling: Get Polygon objects and use Polygon.resample() directly - - Metadata analysis: Access triangle indices and edge information per component - - Closed loop detection: Polygon objects have a ``closed`` attribute + For closed loops the returned ``path`` has a duplicate last point equal + to the first, so closure can be detected via + ``np.allclose(path[0], path[-1])``. Parameters ---------- @@ -2382,77 +2371,43 @@ def level_path( level : float Level set value to extract. get_tria_idx : bool, default=False - If True, also return array of triangle indices for each path segment. 
- For closed loops with n points (including duplicate), returns n-1 triangle - indices. For open paths with n points, returns n-1 triangle indices. + If True, also return triangle indices for each path segment. get_edges : bool, default=False - If True, also return arrays of vertex indices (i,j) and relative - positions for each 3D point along the intersecting edge. + If True, also return mesh vertex indices and barycentric positions + for each point on the path. n_points : int or None, default=None - If specified, resample level set into n equidistant points. Cannot - be combined with get_tria_idx=True or get_edges=True. + If given, resample the path to this many equidistant points. + Cannot be combined with ``get_tria_idx=True`` or ``get_edges=True``. Returns ------- - path : np.ndarray - Array of shape (n, 3) containing 3D coordinates of vertices on the - level path. For closed loops, the last point duplicates the first point, - so ``np.allclose(path[0], path[-1])`` will be True. + path : np.ndarray, shape (n, 3) + 3D coordinates along the level curve. For closed loops the last + point duplicates the first. length : float - Total length of the level set path. For closed loops, includes the - closing segment from last to first point. - tria_idx : np.ndarray - Array of triangle indices for each segment on the path, shape (n-1,) - where n is the number of points (including duplicate for closed loops). - Only returned if get_tria_idx is True. - edges_vidxs : np.ndarray - Array of shape (n, 2) with vertex indices (i,j) for each 3D point, - defining the vertices of the original mesh edge intersecting the - level set at this point. Only returned if get_edges is True. - edges_relpos : np.ndarray - Array of floats defining the relative position for each 3D point - along the edges of the original mesh (defined by edges_vidxs). - Value 0 corresponds to first vertex, 1 to second vertex. - The 3D point is computed as: (1 - relpos) * v_i + relpos * v_j. 
- Only returned if get_edges is True. + Total arc length of the path. + tria_idx : np.ndarray, shape (n-1,) + Triangle index for each segment. Only returned if + ``get_tria_idx=True``. + edges_vidxs : np.ndarray, shape (n, 2) + Mesh vertex indices ``(i, j)`` of the intersected edge for each + point. Only returned if ``get_edges=True``. + edges_relpos : np.ndarray, shape (n,) + Barycentric position along each edge (0 = vertex i, 1 = vertex j). + The 3D point equals ``(1 - t) * v_i + t * v_j``. + Only returned if ``get_edges=True``. Raises ------ ValueError If vfunc is not 1-dimensional. - If multiple disconnected level paths are found (use extract_level_paths). - If n_points is combined with get_tria_idx=True. - If n_points is combined with get_edges=True. + If the level set has more or fewer than one connected component. + If ``n_points`` is combined with ``get_tria_idx=True`` or + ``get_edges=True``. See Also -------- - extract_level_paths : Extract multiple disconnected level paths as Polygon objects. - Recommended for advanced use cases with multiple components, custom resampling, - or when you need explicit closed/open information. 
- - Examples - -------- - >>> # Extract a simple level curve - >>> path, length = mesh.level_path(vfunc, 0.5) - >>> print(f"Path has {path.shape[0]} points, length {length:.2f}") - - >>> # Check if the path is closed - >>> is_closed = np.allclose(path[0], path[-1]) - >>> print(f"Path is closed: {is_closed}") - - >>> # Get triangle indices for each segment - >>> path, length, tria_idx = mesh.level_path(vfunc, 0.5, get_tria_idx=True) - - >>> # Get complete edge metadata - >>> path, length, edges, relpos = mesh.level_path(vfunc, 0.5, get_edges=True) - - >>> # Resample to fixed number of points - >>> path, length = mesh.level_path(vfunc, 0.5, n_points=100) - - >>> # For multiple components, use extract_level_paths instead - >>> curves = mesh.extract_level_paths(vfunc, 0.5) - >>> for i, curve in enumerate(curves): - >>> print(f"Component {i}: {curve.points.shape[0]} points, closed={curve.closed}") + extract_level_paths : Returns all connected components as Polygon objects. """ # Use extract_level_paths to get polygons curves = self.extract_level_paths(vfunc, level) From ea0f430aef7d3dfed73d09d6074e1f047d61ea06 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 10 Mar 2026 14:44:40 +0100 Subject: [PATCH 27/32] fix tria index for vertices on level set, add tests --- data/torus.off | 1730 ++++++++++++++++++++++++++++ lapy/tria_mesh.py | 54 +- lapy/utils/tests/test_tria_mesh.py | 207 +--- 3 files changed, 1819 insertions(+), 172 deletions(-) create mode 100644 data/torus.off diff --git a/data/torus.off b/data/torus.off new file mode 100644 index 0000000..0cdfba6 --- /dev/null +++ b/data/torus.off @@ -0,0 +1,1730 @@ +OFF +576 1152 0 +-1.000 0.000 0.000 +-0.991 -0.131 0.000 +-0.991 0.131 0.000 +-0.966 -0.259 0.000 +-0.966 0.259 0.000 +-0.958 0.000 -0.158 +-0.958 0.000 0.158 +-0.949 -0.125 -0.158 +-0.949 -0.125 0.158 +-0.949 0.125 -0.158 +-0.949 0.125 0.158 +-0.925 -0.248 -0.158 +-0.925 -0.248 0.158 +-0.925 0.248 -0.158 +-0.925 0.248 0.158 +-0.924 -0.383 0.000 +-0.924 
0.383 0.000 +-0.885 -0.366 -0.158 +-0.885 -0.366 0.158 +-0.885 0.366 -0.158 +-0.885 0.366 0.158 +-0.866 -0.500 0.000 +-0.866 0.500 0.000 +-0.842 0.000 -0.274 +-0.842 0.000 0.274 +-0.835 -0.110 -0.274 +-0.835 -0.110 0.274 +-0.835 0.110 -0.274 +-0.835 0.110 0.274 +-0.829 -0.479 -0.158 +-0.829 -0.479 0.158 +-0.829 0.479 -0.158 +-0.829 0.479 0.158 +-0.813 -0.218 -0.274 +-0.813 -0.218 0.274 +-0.813 0.218 -0.274 +-0.813 0.218 0.274 +-0.793 -0.609 0.000 +-0.793 0.609 0.000 +-0.778 -0.322 -0.274 +-0.778 -0.322 0.274 +-0.778 0.322 -0.274 +-0.778 0.322 0.274 +-0.760 -0.583 -0.158 +-0.760 -0.583 0.158 +-0.760 0.583 -0.158 +-0.760 0.583 0.158 +-0.729 -0.421 -0.274 +-0.729 -0.421 0.274 +-0.729 0.421 -0.274 +-0.729 0.421 0.274 +-0.707 -0.707 0.000 +-0.707 0.707 0.000 +-0.684 0.000 -0.316 +-0.684 0.000 0.316 +-0.678 -0.089 -0.316 +-0.678 -0.089 0.316 +-0.678 0.089 -0.316 +-0.678 0.089 0.316 +-0.677 -0.677 -0.158 +-0.677 -0.677 0.158 +-0.677 0.677 -0.158 +-0.677 0.677 0.158 +-0.668 -0.512 -0.274 +-0.668 -0.512 0.274 +-0.668 0.512 -0.274 +-0.668 0.512 0.274 +-0.660 -0.177 -0.316 +-0.660 -0.177 0.316 +-0.660 0.177 -0.316 +-0.660 0.177 0.316 +-0.632 -0.262 -0.316 +-0.632 -0.262 0.316 +-0.632 0.262 -0.316 +-0.632 0.262 0.316 +-0.609 -0.793 0.000 +-0.609 0.793 0.000 +-0.595 -0.595 -0.274 +-0.595 -0.595 0.274 +-0.595 0.595 -0.274 +-0.595 0.595 0.274 +-0.592 -0.342 -0.316 +-0.592 -0.342 0.316 +-0.592 0.342 -0.316 +-0.592 0.342 0.316 +-0.583 -0.760 -0.158 +-0.583 -0.760 0.158 +-0.583 0.760 -0.158 +-0.583 0.760 0.158 +-0.542 -0.416 -0.316 +-0.542 -0.416 0.316 +-0.542 0.416 -0.316 +-0.542 0.416 0.316 +-0.526 0.000 -0.274 +-0.526 0.000 0.274 +-0.521 -0.069 -0.274 +-0.521 -0.069 0.274 +-0.521 0.069 -0.274 +-0.521 0.069 0.274 +-0.512 -0.668 -0.274 +-0.512 -0.668 0.274 +-0.512 0.668 -0.274 +-0.512 0.668 0.274 +-0.508 -0.136 -0.274 +-0.508 -0.136 0.274 +-0.508 0.136 -0.274 +-0.508 0.136 0.274 +-0.500 -0.866 0.000 +-0.500 0.866 0.000 +-0.486 -0.201 -0.274 +-0.486 -0.201 0.274 +-0.486 0.201 -0.274 
+-0.486 0.201 0.274 +-0.483 -0.483 -0.316 +-0.483 -0.483 0.316 +-0.483 0.483 -0.316 +-0.483 0.483 0.316 +-0.479 -0.829 -0.158 +-0.479 -0.829 0.158 +-0.479 0.829 -0.158 +-0.479 0.829 0.158 +-0.455 -0.263 -0.274 +-0.455 -0.263 0.274 +-0.455 0.263 -0.274 +-0.455 0.263 0.274 +-0.421 -0.729 -0.274 +-0.421 -0.729 0.274 +-0.421 0.729 -0.274 +-0.421 0.729 0.274 +-0.417 -0.320 -0.274 +-0.417 -0.320 0.274 +-0.417 0.320 -0.274 +-0.417 0.320 0.274 +-0.416 -0.542 -0.316 +-0.416 -0.542 0.316 +-0.416 0.542 -0.316 +-0.416 0.542 0.316 +-0.410 0.000 -0.158 +-0.410 0.000 0.158 +-0.406 -0.053 -0.158 +-0.406 -0.053 0.158 +-0.406 0.053 -0.158 +-0.406 0.053 0.158 +-0.396 -0.106 -0.158 +-0.396 -0.106 0.158 +-0.396 0.106 -0.158 +-0.396 0.106 0.158 +-0.383 -0.924 0.000 +-0.383 0.924 0.000 +-0.379 -0.157 -0.158 +-0.379 -0.157 0.158 +-0.379 0.157 -0.158 +-0.379 0.157 0.158 +-0.372 -0.372 -0.274 +-0.372 -0.372 0.274 +-0.372 0.372 -0.274 +-0.372 0.372 0.274 +-0.367 0.000 0.000 +-0.366 -0.885 -0.158 +-0.366 -0.885 0.158 +-0.366 0.885 -0.158 +-0.366 0.885 0.158 +-0.364 -0.048 0.000 +-0.364 0.048 0.000 +-0.355 -0.205 -0.158 +-0.355 -0.205 0.158 +-0.355 0.205 -0.158 +-0.355 0.205 0.158 +-0.355 -0.095 0.000 +-0.355 0.095 0.000 +-0.342 -0.592 -0.316 +-0.342 -0.592 0.316 +-0.342 0.592 -0.316 +-0.342 0.592 0.316 +-0.339 -0.141 0.000 +-0.339 0.141 0.000 +-0.325 -0.249 -0.158 +-0.325 -0.249 0.158 +-0.325 0.249 -0.158 +-0.325 0.249 0.158 +-0.322 -0.778 -0.274 +-0.322 -0.778 0.274 +-0.322 0.778 -0.274 +-0.322 0.778 0.274 +-0.320 -0.417 -0.274 +-0.320 -0.417 0.274 +-0.320 0.417 -0.274 +-0.320 0.417 0.274 +-0.318 -0.184 0.000 +-0.318 0.184 0.000 +-0.291 -0.224 0.000 +-0.291 0.224 0.000 +-0.290 -0.290 -0.158 +-0.290 -0.290 0.158 +-0.290 0.290 -0.158 +-0.290 0.290 0.158 +-0.263 -0.455 -0.274 +-0.263 -0.455 0.274 +-0.263 0.455 -0.274 +-0.263 0.455 0.274 +-0.262 -0.632 -0.316 +-0.262 -0.632 0.316 +-0.262 0.632 -0.316 +-0.262 0.632 0.316 +-0.260 -0.260 0.000 +-0.260 0.260 0.000 +-0.259 -0.966 0.000 +-0.259 0.966 
0.000 +-0.249 -0.325 -0.158 +-0.249 -0.325 0.158 +-0.249 0.325 -0.158 +-0.249 0.325 0.158 +-0.248 -0.925 -0.158 +-0.248 -0.925 0.158 +-0.248 0.925 -0.158 +-0.248 0.925 0.158 +-0.224 -0.291 0.000 +-0.224 0.291 0.000 +-0.218 -0.813 -0.274 +-0.218 -0.813 0.274 +-0.218 0.813 -0.274 +-0.218 0.813 0.274 +-0.205 -0.355 -0.158 +-0.205 -0.355 0.158 +-0.205 0.355 -0.158 +-0.205 0.355 0.158 +-0.201 -0.486 -0.274 +-0.201 -0.486 0.274 +-0.201 0.486 -0.274 +-0.201 0.486 0.274 +-0.184 -0.318 0.000 +-0.184 0.318 0.000 +-0.177 -0.660 -0.316 +-0.177 -0.660 0.316 +-0.177 0.660 -0.316 +-0.177 0.660 0.316 +-0.157 -0.379 -0.158 +-0.157 -0.379 0.158 +-0.157 0.379 -0.158 +-0.157 0.379 0.158 +-0.141 -0.339 0.000 +-0.141 0.339 0.000 +-0.136 -0.508 -0.274 +-0.136 -0.508 0.274 +-0.136 0.508 -0.274 +-0.136 0.508 0.274 +-0.131 -0.991 0.000 +-0.131 0.991 0.000 +-0.125 -0.949 -0.158 +-0.125 -0.949 0.158 +-0.125 0.949 -0.158 +-0.125 0.949 0.158 +-0.110 -0.835 -0.274 +-0.110 -0.835 0.274 +-0.110 0.835 -0.274 +-0.110 0.835 0.274 +-0.106 -0.396 -0.158 +-0.106 -0.396 0.158 +-0.106 0.396 -0.158 +-0.106 0.396 0.158 +-0.095 -0.355 0.000 +-0.095 0.355 0.000 +-0.089 -0.678 -0.316 +-0.089 -0.678 0.316 +-0.089 0.678 -0.316 +-0.089 0.678 0.316 +-0.069 -0.521 -0.274 +-0.069 -0.521 0.274 +-0.069 0.521 -0.274 +-0.069 0.521 0.274 +-0.053 -0.406 -0.158 +-0.053 -0.406 0.158 +-0.053 0.406 -0.158 +-0.053 0.406 0.158 +-0.048 -0.364 0.000 +-0.048 0.364 0.000 +0.000 -0.958 -0.158 +0.000 -0.958 0.158 +0.000 -0.526 -0.274 +0.000 -0.526 0.274 +0.000 -0.410 -0.158 +0.000 -0.410 0.158 +0.000 0.410 -0.158 +0.000 0.410 0.158 +0.000 0.526 -0.274 +0.000 0.526 0.274 +0.000 0.958 -0.158 +0.000 0.958 0.158 +0.000 -0.367 0.000 +0.000 0.367 0.000 +0.000 -0.684 -0.316 +0.000 -0.684 0.316 +0.000 0.684 -0.316 +0.000 0.684 0.316 +0.000 -1.000 0.000 +0.000 -0.842 -0.274 +0.000 -0.842 0.274 +0.000 0.842 -0.274 +0.000 0.842 0.274 +0.000 1.000 0.000 +0.048 -0.364 0.000 +0.048 0.364 0.000 +0.053 -0.406 -0.158 +0.053 -0.406 0.158 +0.053 0.406 
-0.158 +0.053 0.406 0.158 +0.069 -0.521 -0.274 +0.069 -0.521 0.274 +0.069 0.521 -0.274 +0.069 0.521 0.274 +0.089 -0.678 -0.316 +0.089 -0.678 0.316 +0.089 0.678 -0.316 +0.089 0.678 0.316 +0.095 -0.355 0.000 +0.095 0.355 0.000 +0.106 -0.396 -0.158 +0.106 -0.396 0.158 +0.106 0.396 -0.158 +0.106 0.396 0.158 +0.110 -0.835 -0.274 +0.110 -0.835 0.274 +0.110 0.835 -0.274 +0.110 0.835 0.274 +0.125 -0.949 -0.158 +0.125 -0.949 0.158 +0.125 0.949 -0.158 +0.125 0.949 0.158 +0.131 -0.991 0.000 +0.131 0.991 0.000 +0.136 -0.508 -0.274 +0.136 -0.508 0.274 +0.136 0.508 -0.274 +0.136 0.508 0.274 +0.141 -0.339 0.000 +0.141 0.339 0.000 +0.157 -0.379 -0.158 +0.157 -0.379 0.158 +0.157 0.379 -0.158 +0.157 0.379 0.158 +0.177 -0.660 -0.316 +0.177 -0.660 0.316 +0.177 0.660 -0.316 +0.177 0.660 0.316 +0.184 -0.318 0.000 +0.184 0.318 0.000 +0.201 -0.486 -0.274 +0.201 -0.486 0.274 +0.201 0.486 -0.274 +0.201 0.486 0.274 +0.205 -0.355 -0.158 +0.205 -0.355 0.158 +0.205 0.355 -0.158 +0.205 0.355 0.158 +0.218 -0.813 -0.274 +0.218 -0.813 0.274 +0.218 0.813 -0.274 +0.218 0.813 0.274 +0.224 -0.291 0.000 +0.224 0.291 0.000 +0.248 -0.925 -0.158 +0.248 -0.925 0.158 +0.248 0.925 -0.158 +0.248 0.925 0.158 +0.249 -0.325 -0.158 +0.249 -0.325 0.158 +0.249 0.325 -0.158 +0.249 0.325 0.158 +0.259 -0.966 0.000 +0.259 0.966 0.000 +0.260 -0.260 0.000 +0.260 0.260 0.000 +0.262 -0.632 -0.316 +0.262 -0.632 0.316 +0.262 0.632 -0.316 +0.262 0.632 0.316 +0.263 -0.455 -0.274 +0.263 -0.455 0.274 +0.263 0.455 -0.274 +0.263 0.455 0.274 +0.290 -0.290 -0.158 +0.290 -0.290 0.158 +0.290 0.290 -0.158 +0.290 0.290 0.158 +0.291 -0.224 0.000 +0.291 0.224 0.000 +0.318 -0.184 0.000 +0.318 0.184 0.000 +0.320 -0.417 -0.274 +0.320 -0.417 0.274 +0.320 0.417 -0.274 +0.320 0.417 0.274 +0.322 -0.778 -0.274 +0.322 -0.778 0.274 +0.322 0.778 -0.274 +0.322 0.778 0.274 +0.325 -0.249 -0.158 +0.325 -0.249 0.158 +0.325 0.249 -0.158 +0.325 0.249 0.158 +0.339 -0.141 0.000 +0.339 0.141 0.000 +0.342 -0.592 -0.316 +0.342 -0.592 0.316 +0.342 0.592 -0.316 
+0.342 0.592 0.316 +0.355 -0.095 0.000 +0.355 0.095 0.000 +0.355 -0.205 -0.158 +0.355 -0.205 0.158 +0.355 0.205 -0.158 +0.355 0.205 0.158 +0.364 -0.048 0.000 +0.364 0.048 0.000 +0.366 -0.885 -0.158 +0.366 -0.885 0.158 +0.366 0.885 -0.158 +0.366 0.885 0.158 +0.367 0.000 0.000 +0.372 -0.372 -0.274 +0.372 -0.372 0.274 +0.372 0.372 -0.274 +0.372 0.372 0.274 +0.379 -0.157 -0.158 +0.379 -0.157 0.158 +0.379 0.157 -0.158 +0.379 0.157 0.158 +0.383 -0.924 0.000 +0.383 0.924 0.000 +0.396 0.106 -0.158 +0.396 0.106 0.158 +0.396 -0.106 -0.158 +0.396 -0.106 0.158 +0.406 -0.053 -0.158 +0.406 -0.053 0.158 +0.406 0.053 -0.158 +0.406 0.053 0.158 +0.410 0.000 -0.158 +0.410 0.000 0.158 +0.416 -0.542 -0.316 +0.416 -0.542 0.316 +0.416 0.542 -0.316 +0.416 0.542 0.316 +0.417 -0.320 -0.274 +0.417 -0.320 0.274 +0.417 0.320 -0.274 +0.417 0.320 0.274 +0.421 -0.729 -0.274 +0.421 -0.729 0.274 +0.421 0.729 -0.274 +0.421 0.729 0.274 +0.455 -0.263 -0.274 +0.455 -0.263 0.274 +0.455 0.263 -0.274 +0.455 0.263 0.274 +0.479 -0.829 -0.158 +0.479 -0.829 0.158 +0.479 0.829 -0.158 +0.479 0.829 0.158 +0.483 -0.483 -0.316 +0.483 -0.483 0.316 +0.483 0.483 -0.316 +0.483 0.483 0.316 +0.486 -0.201 -0.274 +0.486 -0.201 0.274 +0.486 0.201 -0.274 +0.486 0.201 0.274 +0.500 -0.866 0.000 +0.500 0.866 0.000 +0.508 0.136 -0.274 +0.508 0.136 0.274 +0.508 -0.136 -0.274 +0.508 -0.136 0.274 +0.512 -0.668 -0.274 +0.512 -0.668 0.274 +0.512 0.668 -0.274 +0.512 0.668 0.274 +0.521 -0.069 -0.274 +0.521 -0.069 0.274 +0.521 0.069 -0.274 +0.521 0.069 0.274 +0.526 0.000 -0.274 +0.526 0.000 0.274 +0.542 -0.416 -0.316 +0.542 -0.416 0.316 +0.542 0.416 -0.316 +0.542 0.416 0.316 +0.583 -0.760 -0.158 +0.583 -0.760 0.158 +0.583 0.760 -0.158 +0.583 0.760 0.158 +0.592 -0.342 -0.316 +0.592 -0.342 0.316 +0.592 0.342 -0.316 +0.592 0.342 0.316 +0.595 -0.595 -0.274 +0.595 -0.595 0.274 +0.595 0.595 -0.274 +0.595 0.595 0.274 +0.609 -0.793 0.000 +0.609 0.793 0.000 +0.632 -0.262 -0.316 +0.632 -0.262 0.316 +0.632 0.262 -0.316 +0.632 0.262 0.316 +0.660 
-0.177 -0.316 +0.660 -0.177 0.316 +0.660 0.177 -0.316 +0.660 0.177 0.316 +0.668 -0.512 -0.274 +0.668 -0.512 0.274 +0.668 0.512 -0.274 +0.668 0.512 0.274 +0.677 -0.677 -0.158 +0.677 -0.677 0.158 +0.677 0.677 -0.158 +0.677 0.677 0.158 +0.678 -0.089 -0.316 +0.678 -0.089 0.316 +0.678 0.089 -0.316 +0.678 0.089 0.316 +0.684 0.000 -0.316 +0.684 0.000 0.316 +0.707 -0.707 0.000 +0.707 0.707 0.000 +0.729 -0.421 -0.274 +0.729 -0.421 0.274 +0.729 0.421 -0.274 +0.729 0.421 0.274 +0.760 -0.583 -0.158 +0.760 -0.583 0.158 +0.760 0.583 -0.158 +0.760 0.583 0.158 +0.778 -0.322 -0.274 +0.778 -0.322 0.274 +0.778 0.322 -0.274 +0.778 0.322 0.274 +0.793 -0.609 0.000 +0.793 0.609 0.000 +0.813 0.218 -0.274 +0.813 0.218 0.274 +0.813 -0.218 -0.274 +0.813 -0.218 0.274 +0.829 -0.479 -0.158 +0.829 -0.479 0.158 +0.829 0.479 -0.158 +0.829 0.479 0.158 +0.835 -0.110 -0.274 +0.835 -0.110 0.274 +0.835 0.110 -0.274 +0.835 0.110 0.274 +0.842 0.000 -0.274 +0.842 0.000 0.274 +0.866 -0.500 0.000 +0.866 0.500 0.000 +0.885 -0.366 -0.158 +0.885 -0.366 0.158 +0.885 0.366 -0.158 +0.885 0.366 0.158 +0.924 -0.383 0.000 +0.924 0.383 0.000 +0.925 0.248 -0.158 +0.925 0.248 0.158 +0.925 -0.248 -0.158 +0.925 -0.248 0.158 +0.949 -0.125 -0.158 +0.949 -0.125 0.158 +0.949 0.125 -0.158 +0.949 0.125 0.158 +0.958 0.000 -0.158 +0.958 0.000 0.158 +0.966 -0.259 0.000 +0.966 0.259 0.000 +0.991 -0.131 0.000 +0.991 0.131 0.000 +1.000 0.000 0.000 +3 565 569 575 +3 573 565 575 +3 547 569 565 +3 569 547 551 +3 517 551 547 +3 521 551 517 +3 521 517 477 +3 481 521 477 +3 481 477 433 +3 433 437 481 +3 412 437 433 +3 418 437 412 +3 412 434 418 +3 438 418 434 +3 434 478 438 +3 482 438 478 +3 518 482 478 +3 518 522 482 +3 518 548 522 +3 522 548 552 +3 566 552 548 +3 552 566 570 +3 570 566 573 +3 575 570 573 +3 571 563 573 +3 573 563 565 +3 541 565 563 +3 547 565 541 +3 505 547 541 +3 547 505 517 +3 471 517 505 +3 471 477 517 +3 477 471 431 +3 477 431 433 +3 431 406 433 +3 406 412 433 +3 406 432 412 +3 434 412 432 +3 434 432 472 +3 434 472 
478 +3 478 472 506 +3 518 478 506 +3 506 542 518 +3 518 542 548 +3 548 542 564 +3 566 548 564 +3 571 566 564 +3 573 566 571 +3 559 555 571 +3 563 571 555 +3 555 533 563 +3 533 541 563 +3 501 541 533 +3 541 501 505 +3 463 505 501 +3 463 471 505 +3 423 471 463 +3 423 431 471 +3 423 400 431 +3 406 431 400 +3 400 424 406 +3 432 406 424 +3 464 432 424 +3 472 432 464 +3 502 472 464 +3 502 506 472 +3 502 534 506 +3 506 534 542 +3 542 534 556 +3 542 556 564 +3 556 559 564 +3 559 571 564 +3 543 559 553 +3 543 555 559 +3 543 525 555 +3 555 525 533 +3 533 525 491 +3 491 501 533 +3 451 501 491 +3 463 501 451 +3 451 408 463 +3 463 408 423 +3 423 408 386 +3 400 423 386 +3 409 400 386 +3 424 400 409 +3 424 409 452 +3 452 464 424 +3 452 492 464 +3 502 464 492 +3 526 502 492 +3 534 502 526 +3 526 544 534 +3 534 544 556 +3 544 553 556 +3 553 559 556 +3 553 537 529 +3 543 553 529 +3 509 543 529 +3 509 525 543 +3 509 483 525 +3 483 491 525 +3 483 443 491 +3 443 451 491 +3 443 396 451 +3 408 451 396 +3 396 384 408 +3 408 384 386 +3 397 386 384 +3 409 386 397 +3 409 397 444 +3 452 409 444 +3 484 452 444 +3 452 484 492 +3 510 492 484 +3 492 510 526 +3 526 510 530 +3 530 544 526 +3 544 530 537 +3 537 553 544 +3 513 537 523 +3 529 537 513 +3 495 529 513 +3 529 495 509 +3 495 459 509 +3 509 459 483 +3 483 459 419 +3 443 483 419 +3 380 443 419 +3 443 380 396 +3 370 396 380 +3 396 370 384 +3 384 370 381 +3 384 381 397 +3 381 420 397 +3 420 444 397 +3 460 444 420 +3 460 484 444 +3 496 484 460 +3 484 496 510 +3 496 514 510 +3 514 530 510 +3 530 514 523 +3 530 523 537 +3 499 487 523 +3 513 523 487 +3 513 487 473 +3 473 495 513 +3 473 439 495 +3 439 459 495 +3 439 388 459 +3 388 419 459 +3 388 364 419 +3 364 380 419 +3 358 380 364 +3 380 358 370 +3 370 358 365 +3 370 365 381 +3 389 381 365 +3 420 381 389 +3 389 440 420 +3 420 440 460 +3 474 460 440 +3 460 474 496 +3 488 496 474 +3 514 496 488 +3 514 488 499 +3 499 523 514 +3 455 499 467 +3 455 487 499 +3 487 455 447 +3 447 473 487 +3 447 402 473 
+3 439 473 402 +3 402 376 439 +3 439 376 388 +3 376 350 388 +3 350 364 388 +3 350 344 364 +3 364 344 358 +3 344 351 358 +3 351 365 358 +3 377 365 351 +3 389 365 377 +3 403 389 377 +3 389 403 440 +3 440 403 448 +3 440 448 474 +3 474 448 456 +3 456 488 474 +3 456 467 488 +3 488 467 499 +3 414 467 427 +3 467 414 455 +3 414 392 455 +3 392 447 455 +3 372 447 392 +3 372 402 447 +3 346 402 372 +3 376 402 346 +3 336 376 346 +3 350 376 336 +3 334 350 336 +3 344 350 334 +3 337 344 334 +3 337 351 344 +3 337 347 351 +3 377 351 347 +3 377 347 373 +3 403 377 373 +3 403 373 393 +3 448 403 393 +3 393 415 448 +3 448 415 456 +3 415 427 456 +3 427 467 456 +3 427 368 360 +3 360 414 427 +3 354 414 360 +3 392 414 354 +3 392 354 340 +3 392 340 372 +3 340 330 372 +3 330 346 372 +3 330 316 346 +3 316 336 346 +3 316 314 336 +3 336 314 334 +3 317 334 314 +3 317 337 334 +3 331 337 317 +3 347 337 331 +3 347 331 341 +3 373 347 341 +3 355 373 341 +3 393 373 355 +3 361 393 355 +3 393 361 415 +3 415 361 368 +3 415 368 427 +3 368 328 324 +3 368 324 360 +3 360 324 320 +3 360 320 354 +3 310 354 320 +3 310 340 354 +3 310 306 340 +3 340 306 330 +3 306 302 330 +3 302 316 330 +3 316 302 300 +3 316 300 314 +3 314 300 303 +3 303 317 314 +3 307 317 303 +3 307 331 317 +3 307 311 331 +3 311 341 331 +3 341 311 321 +3 341 321 355 +3 321 325 355 +3 325 361 355 +3 361 325 328 +3 368 361 328 +3 294 276 328 +3 328 276 324 +3 295 324 276 +3 324 295 320 +3 290 320 295 +3 320 290 310 +3 310 290 278 +3 278 306 310 +3 306 278 280 +3 302 306 280 +3 288 302 280 +3 302 288 300 +3 281 300 288 +3 303 300 281 +3 303 281 279 +3 279 307 303 +3 307 279 291 +3 307 291 311 +3 291 296 311 +3 311 296 321 +3 277 321 296 +3 325 321 277 +3 325 277 294 +3 325 294 328 +3 248 294 246 +3 294 248 276 +3 248 252 276 +3 252 295 276 +3 252 262 295 +3 262 290 295 +3 266 290 262 +3 290 266 278 +3 266 270 278 +3 278 270 280 +3 280 270 274 +3 274 288 280 +3 271 288 274 +3 281 288 271 +3 281 271 267 +3 279 281 267 +3 263 279 267 +3 263 291 279 +3 
291 263 253 +3 291 253 296 +3 296 253 249 +3 249 277 296 +3 246 277 249 +3 294 277 246 +3 246 206 212 +3 212 248 246 +3 248 212 218 +3 248 218 252 +3 218 232 252 +3 262 252 232 +3 232 242 262 +3 266 262 242 +3 266 242 256 +3 256 270 266 +3 260 270 256 +3 274 270 260 +3 260 257 274 +3 257 271 274 +3 271 257 243 +3 243 267 271 +3 267 243 233 +3 267 233 263 +3 219 263 233 +3 263 219 253 +3 253 219 213 +3 249 253 213 +3 206 249 213 +3 246 249 206 +3 147 158 206 +3 158 212 206 +3 212 158 180 +3 218 212 180 +3 200 218 180 +3 200 232 218 +3 226 232 200 +3 232 226 242 +3 226 236 242 +3 236 256 242 +3 236 240 256 +3 260 256 240 +3 260 240 237 +3 237 257 260 +3 227 257 237 +3 227 243 257 +3 243 227 201 +3 243 201 233 +3 201 181 233 +3 219 233 181 +3 219 181 159 +3 213 219 159 +3 213 159 147 +3 206 213 147 +3 107 117 147 +3 158 147 117 +3 158 117 125 +3 180 158 125 +3 180 125 170 +3 180 170 200 +3 196 200 170 +3 196 226 200 +3 226 196 222 +3 236 226 222 +3 230 236 222 +3 236 230 240 +3 223 240 230 +3 240 223 237 +3 223 197 237 +3 197 227 237 +3 197 171 227 +3 201 227 171 +3 126 201 171 +3 126 181 201 +3 181 126 118 +3 159 181 118 +3 118 107 159 +3 107 147 159 +3 107 75 85 +3 85 117 107 +3 117 85 99 +3 125 117 99 +3 99 133 125 +3 133 170 125 +3 133 184 170 +3 184 196 170 +3 208 196 184 +3 196 208 222 +3 208 216 222 +3 230 222 216 +3 230 216 209 +3 209 223 230 +3 223 209 185 +3 185 197 223 +3 185 134 197 +3 171 197 134 +3 171 134 100 +3 126 171 100 +3 86 126 100 +3 126 86 118 +3 75 118 86 +3 75 107 118 +3 51 59 75 +3 59 85 75 +3 85 59 77 +3 77 99 85 +3 99 77 113 +3 113 133 99 +3 133 113 153 +3 184 133 153 +3 153 192 184 +3 192 208 184 +3 208 192 204 +3 208 204 216 +3 204 193 216 +3 209 216 193 +3 193 154 209 +3 209 154 185 +3 114 185 154 +3 114 134 185 +3 114 78 134 +3 100 134 78 +3 100 78 60 +3 86 100 60 +3 86 60 51 +3 75 86 51 +3 37 43 51 +3 43 59 51 +3 63 59 43 +3 63 77 59 +3 89 77 63 +3 113 77 89 +3 113 89 129 +3 113 129 153 +3 129 176 153 +3 176 192 153 +3 192 176 190 +3 
192 190 204 +3 177 204 190 +3 204 177 193 +3 193 177 130 +3 154 193 130 +3 90 154 130 +3 90 114 154 +3 90 64 114 +3 78 114 64 +3 44 78 64 +3 44 60 78 +3 37 60 44 +3 60 37 51 +3 37 21 29 +3 37 29 43 +3 29 47 43 +3 47 63 43 +3 47 81 63 +3 81 89 63 +3 81 121 89 +3 89 121 129 +3 129 121 164 +3 176 129 164 +3 176 164 188 +3 176 188 190 +3 190 188 165 +3 177 190 165 +3 165 122 177 +3 130 177 122 +3 82 130 122 +3 130 82 90 +3 48 90 82 +3 64 90 48 +3 48 30 64 +3 30 44 64 +3 21 44 30 +3 21 37 44 +3 21 15 17 +3 29 21 17 +3 29 17 39 +3 47 29 39 +3 71 47 39 +3 71 81 47 +3 109 81 71 +3 81 109 121 +3 109 149 121 +3 121 149 164 +3 149 174 164 +3 188 164 174 +3 174 150 188 +3 188 150 165 +3 165 150 110 +3 165 110 122 +3 122 110 72 +3 122 72 82 +3 72 40 82 +3 82 40 48 +3 48 40 18 +3 48 18 30 +3 18 15 30 +3 21 30 15 +3 15 3 11 +3 15 11 17 +3 33 17 11 +3 33 39 17 +3 39 33 67 +3 67 71 39 +3 67 103 71 +3 109 71 103 +3 103 143 109 +3 149 109 143 +3 143 168 149 +3 168 174 149 +3 168 144 174 +3 144 150 174 +3 104 150 144 +3 150 104 110 +3 68 110 104 +3 68 72 110 +3 72 68 34 +3 40 72 34 +3 40 34 12 +3 12 18 40 +3 12 3 18 +3 3 15 18 +3 1 7 3 +3 11 3 7 +3 25 11 7 +3 25 33 11 +3 33 25 55 +3 55 67 33 +3 95 67 55 +3 95 103 67 +3 139 103 95 +3 139 143 103 +3 162 143 139 +3 162 168 143 +3 162 140 168 +3 140 144 168 +3 144 140 96 +3 96 104 144 +3 96 56 104 +3 56 68 104 +3 26 68 56 +3 68 26 34 +3 8 34 26 +3 12 34 8 +3 8 1 12 +3 12 1 3 +3 0 5 1 +3 5 7 1 +3 5 23 7 +3 7 23 25 +3 23 53 25 +3 55 25 53 +3 53 93 55 +3 95 55 93 +3 137 95 93 +3 137 139 95 +3 157 139 137 +3 157 162 139 +3 157 138 162 +3 138 140 162 +3 140 138 94 +3 94 96 140 +3 54 96 94 +3 96 54 56 +3 54 24 56 +3 56 24 26 +3 26 24 6 +3 6 8 26 +3 8 6 0 +3 8 0 1 +3 2 9 0 +3 0 9 5 +3 9 27 5 +3 27 23 5 +3 23 27 57 +3 53 23 57 +3 53 57 97 +3 97 93 53 +3 141 93 97 +3 137 93 141 +3 141 163 137 +3 163 157 137 +3 163 142 157 +3 138 157 142 +3 142 98 138 +3 94 138 98 +3 94 98 58 +3 54 94 58 +3 54 58 28 +3 24 54 28 +3 10 24 28 +3 6 24 10 +3 2 6 10 +3 6 
2 0 +3 13 2 4 +3 9 2 13 +3 13 35 9 +3 35 27 9 +3 35 69 27 +3 27 69 57 +3 57 69 105 +3 57 105 97 +3 105 145 97 +3 141 97 145 +3 145 169 141 +3 141 169 163 +3 169 146 163 +3 163 146 142 +3 146 106 142 +3 142 106 98 +3 98 106 70 +3 70 58 98 +3 58 70 36 +3 36 28 58 +3 14 28 36 +3 28 14 10 +3 10 14 4 +3 10 4 2 +3 19 4 16 +3 19 13 4 +3 41 13 19 +3 13 41 35 +3 73 35 41 +3 35 73 69 +3 73 111 69 +3 111 105 69 +3 151 105 111 +3 105 151 145 +3 145 151 175 +3 169 145 175 +3 169 175 152 +3 152 146 169 +3 152 112 146 +3 106 146 112 +3 112 74 106 +3 70 106 74 +3 74 42 70 +3 70 42 36 +3 42 20 36 +3 20 14 36 +3 16 14 20 +3 14 16 4 +3 22 31 16 +3 19 16 31 +3 49 19 31 +3 19 49 41 +3 41 49 83 +3 73 41 83 +3 123 73 83 +3 73 123 111 +3 123 166 111 +3 111 166 151 +3 151 166 189 +3 189 175 151 +3 175 189 167 +3 152 175 167 +3 152 167 124 +3 124 112 152 +3 84 112 124 +3 74 112 84 +3 50 74 84 +3 42 74 50 +3 50 32 42 +3 32 20 42 +3 32 22 20 +3 20 22 16 +3 45 22 38 +3 45 31 22 +3 65 31 45 +3 49 31 65 +3 91 49 65 +3 91 83 49 +3 131 83 91 +3 123 83 131 +3 131 178 123 +3 178 166 123 +3 178 191 166 +3 189 166 191 +3 189 191 179 +3 167 189 179 +3 132 167 179 +3 167 132 124 +3 92 124 132 +3 84 124 92 +3 92 66 84 +3 50 84 66 +3 66 46 50 +3 32 50 46 +3 32 46 38 +3 38 22 32 +3 52 61 38 +3 38 61 45 +3 45 61 79 +3 79 65 45 +3 115 65 79 +3 65 115 91 +3 91 115 155 +3 155 131 91 +3 131 155 194 +3 131 194 178 +3 178 194 205 +3 178 205 191 +3 195 191 205 +3 191 195 179 +3 195 156 179 +3 179 156 132 +3 156 116 132 +3 92 132 116 +3 116 80 92 +3 66 92 80 +3 62 66 80 +3 66 62 46 +3 46 62 52 +3 46 52 38 +3 52 76 87 +3 61 52 87 +3 87 101 61 +3 101 79 61 +3 101 135 79 +3 135 115 79 +3 135 186 115 +3 186 155 115 +3 210 155 186 +3 155 210 194 +3 210 217 194 +3 194 217 205 +3 217 211 205 +3 195 205 211 +3 195 211 187 +3 156 195 187 +3 187 136 156 +3 156 136 116 +3 136 102 116 +3 80 116 102 +3 102 88 80 +3 62 80 88 +3 76 62 88 +3 76 52 62 +3 108 119 76 +3 76 119 87 +3 127 87 119 +3 127 101 87 +3 101 127 172 +3 172 135 
101 +3 172 198 135 +3 198 186 135 +3 186 198 224 +3 210 186 224 +3 231 210 224 +3 210 231 217 +3 231 225 217 +3 217 225 211 +3 211 225 199 +3 187 211 199 +3 187 199 173 +3 187 173 136 +3 173 128 136 +3 102 136 128 +3 120 102 128 +3 88 102 120 +3 108 88 120 +3 88 108 76 +3 108 148 160 +3 108 160 119 +3 182 119 160 +3 127 119 182 +3 127 182 202 +3 172 127 202 +3 202 228 172 +3 198 172 228 +3 238 198 228 +3 238 224 198 +3 224 238 241 +3 231 224 241 +3 241 239 231 +3 231 239 225 +3 225 239 229 +3 229 199 225 +3 203 199 229 +3 199 203 173 +3 173 203 183 +3 128 173 183 +3 161 128 183 +3 161 120 128 +3 120 161 148 +3 120 148 108 +3 207 214 148 +3 160 148 214 +3 220 160 214 +3 220 182 160 +3 220 234 182 +3 182 234 202 +3 202 234 244 +3 202 244 228 +3 228 244 258 +3 258 238 228 +3 261 238 258 +3 241 238 261 +3 261 259 241 +3 239 241 259 +3 245 239 259 +3 245 229 239 +3 235 229 245 +3 203 229 235 +3 203 235 221 +3 183 203 221 +3 221 215 183 +3 215 161 183 +3 215 207 161 +3 148 161 207 +3 207 247 250 +3 250 214 207 +3 254 214 250 +3 220 214 254 +3 220 254 264 +3 220 264 234 +3 234 264 268 +3 234 268 244 +3 272 244 268 +3 272 258 244 +3 272 275 258 +3 261 258 275 +3 275 273 261 +3 273 259 261 +3 259 273 269 +3 269 245 259 +3 265 245 269 +3 265 235 245 +3 265 255 235 +3 221 235 255 +3 255 251 221 +3 251 215 221 +3 215 251 247 +3 207 215 247 +3 247 299 286 +3 247 286 250 +3 286 297 250 +3 297 254 250 +3 254 297 292 +3 292 264 254 +3 264 292 284 +3 284 268 264 +3 284 282 268 +3 268 282 272 +3 282 289 272 +3 275 272 289 +3 289 283 275 +3 283 273 275 +3 273 283 285 +3 269 273 285 +3 269 285 293 +3 293 265 269 +3 293 298 265 +3 255 265 298 +3 298 287 255 +3 251 255 287 +3 251 287 299 +3 247 251 299 +3 329 326 299 +3 286 299 326 +3 326 322 286 +3 297 286 322 +3 312 297 322 +3 312 292 297 +3 308 292 312 +3 308 284 292 +3 284 308 304 +3 304 282 284 +3 304 301 282 +3 282 301 289 +3 305 289 301 +3 283 289 305 +3 309 283 305 +3 283 309 285 +3 285 309 313 +3 293 285 313 +3 323 293 313 +3 
323 298 293 +3 323 327 298 +3 287 298 327 +3 287 327 329 +3 329 299 287 +3 329 369 362 +3 362 326 329 +3 326 362 356 +3 322 326 356 +3 342 322 356 +3 312 322 342 +3 342 332 312 +3 312 332 308 +3 318 308 332 +3 318 304 308 +3 304 318 315 +3 304 315 301 +3 301 315 319 +3 319 305 301 +3 305 319 333 +3 309 305 333 +3 343 309 333 +3 313 309 343 +3 357 313 343 +3 357 323 313 +3 363 323 357 +3 363 327 323 +3 363 369 327 +3 369 329 327 +3 416 369 428 +3 362 369 416 +3 394 362 416 +3 362 394 356 +3 374 356 394 +3 342 356 374 +3 342 374 348 +3 342 348 332 +3 348 338 332 +3 332 338 318 +3 335 318 338 +3 335 315 318 +3 315 335 339 +3 319 315 339 +3 349 319 339 +3 349 333 319 +3 375 333 349 +3 333 375 343 +3 395 343 375 +3 357 343 395 +3 357 395 417 +3 357 417 363 +3 428 363 417 +3 369 363 428 +3 428 468 457 +3 457 416 428 +3 449 416 457 +3 416 449 394 +3 449 404 394 +3 394 404 374 +3 404 378 374 +3 374 378 348 +3 348 378 352 +3 352 338 348 +3 352 345 338 +3 345 335 338 +3 345 353 335 +3 353 339 335 +3 339 353 379 +3 379 349 339 +3 405 349 379 +3 349 405 375 +3 405 450 375 +3 375 450 395 +3 458 395 450 +3 395 458 417 +3 458 468 417 +3 417 468 428 +3 468 500 489 +3 468 489 457 +3 489 475 457 +3 475 449 457 +3 475 441 449 +3 449 441 404 +3 390 404 441 +3 390 378 404 +3 390 366 378 +3 366 352 378 +3 352 366 359 +3 352 359 345 +3 367 345 359 +3 345 367 353 +3 353 367 391 +3 391 379 353 +3 379 391 442 +3 379 442 405 +3 476 405 442 +3 405 476 450 +3 490 450 476 +3 458 450 490 +3 500 458 490 +3 458 500 468 +3 515 500 524 +3 500 515 489 +3 497 489 515 +3 497 475 489 +3 475 497 461 +3 441 475 461 +3 421 441 461 +3 441 421 390 +3 390 421 382 +3 390 382 366 +3 371 366 382 +3 366 371 359 +3 383 359 371 +3 359 383 367 +3 383 422 367 +3 367 422 391 +3 422 462 391 +3 442 391 462 +3 498 442 462 +3 476 442 498 +3 476 498 516 +3 490 476 516 +3 516 524 490 +3 490 524 500 +3 538 531 524 +3 524 531 515 +3 515 531 511 +3 497 515 511 +3 497 511 485 +3 485 461 497 +3 445 461 485 +3 421 461 445 +3 398 
421 445 +3 421 398 382 +3 382 398 385 +3 385 371 382 +3 371 385 399 +3 399 383 371 +3 399 446 383 +3 422 383 446 +3 446 486 422 +3 486 462 422 +3 462 486 512 +3 512 498 462 +3 532 498 512 +3 498 532 516 +3 538 516 532 +3 538 524 516 +3 554 545 538 +3 545 531 538 +3 531 545 527 +3 511 531 527 +3 527 493 511 +3 493 485 511 +3 453 485 493 +3 445 485 453 +3 453 410 445 +3 398 445 410 +3 387 398 410 +3 398 387 385 +3 387 411 385 +3 411 399 385 +3 399 411 454 +3 399 454 446 +3 454 494 446 +3 494 486 446 +3 528 486 494 +3 486 528 512 +3 546 512 528 +3 532 512 546 +3 532 546 554 +3 538 532 554 +3 557 554 560 +3 545 554 557 +3 535 545 557 +3 535 527 545 +3 535 503 527 +3 493 527 503 +3 493 503 465 +3 453 493 465 +3 465 425 453 +3 453 425 410 +3 425 401 410 +3 410 401 387 +3 426 387 401 +3 426 411 387 +3 426 466 411 +3 466 454 411 +3 466 504 454 +3 454 504 494 +3 536 494 504 +3 536 528 494 +3 536 558 528 +3 558 546 528 +3 560 546 558 +3 554 546 560 +3 572 561 560 +3 557 560 561 +3 561 539 557 +3 557 539 535 +3 535 539 507 +3 503 535 507 +3 507 469 503 +3 465 503 469 +3 429 465 469 +3 429 425 465 +3 407 425 429 +3 425 407 401 +3 407 430 401 +3 401 430 426 +3 426 430 470 +3 470 466 426 +3 470 508 466 +3 504 466 508 +3 504 508 540 +3 504 540 536 +3 562 536 540 +3 536 562 558 +3 562 572 558 +3 572 560 558 +3 572 574 567 +3 567 561 572 +3 561 567 549 +3 539 561 549 +3 549 519 539 +3 519 507 539 +3 479 507 519 +3 479 469 507 +3 469 479 435 +3 435 429 469 +3 429 435 413 +3 429 413 407 +3 413 436 407 +3 407 436 430 +3 436 480 430 +3 430 480 470 +3 470 480 520 +3 520 508 470 +3 508 520 550 +3 550 540 508 +3 550 568 540 +3 562 540 568 +3 568 574 562 +3 562 574 572 +3 575 567 574 +3 567 575 569 +3 551 567 569 +3 549 567 551 +3 551 521 549 +3 519 549 521 +3 481 519 521 +3 481 479 519 +3 481 437 479 +3 479 437 435 +3 437 418 435 +3 435 418 413 +3 418 438 413 +3 413 438 436 +3 438 482 436 +3 480 436 482 +3 480 482 522 +3 480 522 520 +3 552 520 522 +3 550 520 552 +3 552 570 550 +3 568 550 
570 +3 575 568 570 +3 568 575 574 diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index eea1d33..0881939 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -2123,32 +2123,37 @@ def extract_level_paths( # We search masked_trias (the small set of triangles touching any sv) # for a row containing both i and j. If found, the third vertex is # by construction a special vertex — no separate sv lookup needed. + # The triangle index is also returned so it can be stored in tria_idx + # (the segment from the edge point to/from the sv passes through that + # masked triangle). # # A special-vertex point is encoded as: # edge_vidx = [sv_idx, sv_idx] (degenerate edge) # edge_bary = 0.0 - # tria_idx = -1 (sentinel, no triangle crossing) # ------------------------------------------------------------------ masked_trias = self.t[touches_special] # (n_masked, 3) + touches_special_idx = np.where(touches_special)[0] # actual tria indices - def _sv_of_edge(i: int, j: int) -> int | None: - """Return the special vertex opposite edge (i,j) in a masked triangle, or None.""" + def _sv_of_edge(i: int, j: int) -> tuple[int, int] | None: + """Return (sv_idx, tria_idx) for the masked triangle opposite edge (i,j), or None.""" rows = np.where( np.any(masked_trias == i, axis=1) & np.any(masked_trias == j, axis=1) )[0] if rows.size == 0: return None - tri = masked_trias[rows[0]] - return int(tri[(tri != i) & (tri != j)][0]) + row = rows[0] + tri = masked_trias[row] + sv_idx = int(tri[(tri != i) & (tri != j)][0]) + return sv_idx, int(touches_special_idx[row]) - def _sv_arrays(sv_idx: int): + def _sv_arrays(sv_idx: int, tria_idx: int): """Arrays for a single special-vertex point.""" return ( - self.v[sv_idx][np.newaxis, :], # pts (1, 3) - np.array([-1], dtype=np.intp), # ti (1,) - np.array([[sv_idx, sv_idx]]), # ev (1, 2) - np.array([0.0]), # eb (1,) + self.v[sv_idx][np.newaxis, :], # pts (1, 3) + np.array([tria_idx], dtype=np.intp), # ti (1,) + np.array([[sv_idx, sv_idx]]), # ev (1, 2) + 
np.array([0.0]), # eb (1,) ) # Extend each fragment by prepending/appending its adjacent sv point(s). @@ -2164,35 +2169,42 @@ def _sv_arrays(sv_idx: int): continue ev = frag.edge_vidx - sv_start = _sv_of_edge(int(ev[0, 0]), int(ev[0, 1])) - sv_end = _sv_of_edge(int(ev[-1, 0]), int(ev[-1, 1])) + res_start = _sv_of_edge(int(ev[0, 0]), int(ev[0, 1])) + res_end = _sv_of_edge(int(ev[-1, 0]), int(ev[-1, 1])) - if sv_start is not None and sv_start == sv_end: + if res_start is not None and res_end is not None and res_start[0] == res_end[0]: # Both ends touch the same sv: closed loop. - # Prepend the sv point directly onto the fragment and close it. - sp, st, se, sb = _sv_arrays(sv_start) + # Prepend sv with the start-edge's triangle. + # The closing segment (last point → sv) passes through the + # end-edge's triangle, so append that triangle index too. + sv_idx, ti_start = res_start + _, ti_end = res_end + sp, st, se, sb = _sv_arrays(sv_idx, ti_start) frag.points = np.vstack([sp, frag.points]) - frag.tria_idx = np.concatenate([st, frag.tria_idx]) + frag.tria_idx = np.concatenate([st, frag.tria_idx, + np.array([ti_end], dtype=np.intp)]) frag.edge_vidx = np.vstack([se, frag.edge_vidx]) frag.edge_bary = np.concatenate([sb, frag.edge_bary]) frag.close() result.append(frag) continue - if sv_start is None and sv_end is None: + if res_start is None and res_end is None: # No sv at either end — fragment is complete as-is. 
result.append(frag) continue - if sv_start is not None: - sp, st, se, sb = _sv_arrays(sv_start) + if res_start is not None: + sv_idx, ti = res_start + sp, st, se, sb = _sv_arrays(sv_idx, ti) frag.points = np.vstack([sp, frag.points]) frag.tria_idx = np.concatenate([st, frag.tria_idx]) frag.edge_vidx = np.vstack([se, frag.edge_vidx]) frag.edge_bary = np.concatenate([sb, frag.edge_bary]) - if sv_end is not None: - sp, st, se, sb = _sv_arrays(sv_end) + if res_end is not None: + sv_idx, ti = res_end + sp, st, se, sb = _sv_arrays(sv_idx, ti) frag.points = np.vstack([frag.points, sp]) frag.tria_idx = np.concatenate([frag.tria_idx, st]) frag.edge_vidx = np.vstack([frag.edge_vidx, se]) diff --git a/lapy/utils/tests/test_tria_mesh.py b/lapy/utils/tests/test_tria_mesh.py index 23b8952..538dc75 100644 --- a/lapy/utils/tests/test_tria_mesh.py +++ b/lapy/utils/tests/test_tria_mesh.py @@ -755,160 +755,65 @@ def test_critical_points_boundary(): assert len(minima) >= 1, f"Expected at least one minimum, got {len(minima)}" -def test_extract_level_paths_simple(): - """ - Test extract_level_paths on a simple mesh with single level curve. 
- """ - # Create a simple mesh: unit square in xy-plane - vertices = np.array([ - [0.0, 0.0, 0.0], - [1.0, 0.0, 0.0], - [1.0, 1.0, 1.0], - [0.0, 1.0, 1.0], - ]) - triangles = np.array([ - [0, 1, 2], - [0, 2, 3], - ]) - mesh = TriaMesh(vertices, triangles) - - # Height function (z-coordinate) - vfunc = vertices[:, 2] - - # Extract level curve at z=0.5 - curves = mesh.extract_level_paths(vfunc, 0.5) - - # Should have at least one curve - assert len(curves) > 0, "Expected at least one level curve" - - # Check that returned objects are Polygon - from ...polygon import Polygon - for curve in curves: - assert isinstance(curve, Polygon), f"Expected Polygon, got {type(curve)}" - - # Check that polygons have required attributes - for curve in curves: - assert hasattr(curve, 'points'), "Polygon should have 'points' attribute" - assert hasattr(curve, 'closed'), "Polygon should have 'closed' attribute" - assert hasattr(curve, 'tria_idx'), "Polygon should have 'tria_idx' attribute" - assert hasattr(curve, 'edge_vidx'), "Polygon should have 'edge_vidx' attribute" - assert hasattr(curve, 'edge_bary'), "Polygon should have 'edge_bary' attribute" - - # Check that points are 3D - for curve in curves: - assert curve.points.shape[1] == 3, f"Expected 3D points, got shape {curve.points.shape}" - +def test_extract_level_paths_torus(): + """Test extract_level_paths on torus.off covering normal, saddle and empty cases. -def test_extract_level_paths_closed_loop(): + The x-coordinate of torus.off has exactly two order-2 saddles at x ≈ ±0.36. + Between the saddles a generic level crosses the torus in two separate closed + loops. At the saddle level the loop splits and each component must start or + end exactly at the saddle vertex. """ - Test extract_level_paths on a mesh with closed loop level curve. 
- """ - # Create a pyramid mesh - vertices = np.array([ - [0.0, 0.0, 0.0], # base - [1.0, 0.0, 0.0], # base - [1.0, 1.0, 0.0], # base - [0.0, 1.0, 0.0], # base - [0.5, 0.5, 1.0], # apex - ]) - triangles = np.array([ - [0, 1, 4], - [1, 2, 4], - [2, 3, 4], - [3, 0, 4], - [0, 2, 1], - [0, 3, 2], - ]) - mesh = TriaMesh(vertices, triangles) - - # Height function - vfunc = vertices[:, 2] - - # Extract level curve at mid-height (should create closed loop around pyramid) - curves = mesh.extract_level_paths(vfunc, 0.5) - - assert len(curves) == 1, "Expected one level curve" - - # Curve should be closed - assert curves[0].closed, "Expected closed level curve" - - # Note: May or may not be closed depending on mesh topology - - # All points should be approximately at z=0.5 - for curve in curves: - z_coords = curve.points[:, 2] - np.testing.assert_allclose(z_coords, 0.5, atol=1e-5, - err_msg="Level curve points should be at z=0.5") - - -def test_extract_level_paths_no_intersection(): - """ - Test extract_level_paths when level doesn't intersect mesh. - """ - # Simple triangle - vertices = np.array([ - [0.0, 0.0, 0.0], - [1.0, 0.0, 1.0], - [0.0, 1.0, 1.0], - ]) - triangles = np.array([[0, 1, 2]]) - mesh = TriaMesh(vertices, triangles) - - # Height function - vfunc = vertices[:, 2] - - # Extract level curve at z=10.0 (way above mesh) - curves = mesh.extract_level_paths(vfunc, 10.0) - - # Should return empty list - assert len(curves) == 0, f"Expected no curves, got {len(curves)}" - - -def test_extract_level_paths_metadata(): - """ - Test that extract_level_paths returns correct metadata. 
- """ - # Create a simple mesh - vertices = np.array([ - [0.0, 0.0, 0.0], - [1.0, 0.0, 0.5], - [1.0, 1.0, 1.0], - [0.0, 1.0, 0.5], - ]) - triangles = np.array([ - [0, 1, 2], - [0, 2, 3], - ]) - mesh = TriaMesh(vertices, triangles) - - # Height function - vfunc = vertices[:, 2] + import os + mesh = TriaMesh.read_off( + os.path.join(os.path.dirname(__file__), "../../../data/torus.off") + ) + vfunc = mesh.v[:, 0].copy() - # Extract level curve - curves = mesh.extract_level_paths(vfunc, 0.5) - - for curve in curves: - n_points = curve.points.shape[0] - - # Check tria_idx - assert hasattr(curve, 'tria_idx'), "Missing tria_idx" - # Number of segments (tria_idx) depends on whether curve is closed - expected_tria_len = n_points if curve.closed else n_points - 1 - assert curve.tria_idx.shape == (expected_tria_len,), \ - f"tria_idx should be ({expected_tria_len},), got {curve.tria_idx.shape}" - - # Check edge_vidx - assert hasattr(curve, 'edge_vidx'), "Missing edge_vidx" - assert curve.edge_vidx.shape == (n_points, 2), \ - f"edge_vidx should be (n_points, 2), got {curve.edge_vidx.shape}" - - # Check edge_bary - assert hasattr(curve, 'edge_bary'), "Missing edge_bary" - assert curve.edge_bary.shape == (n_points,), \ - f"edge_bary should be (n_points,), got {curve.edge_bary.shape}" - # Barycentric coordinates should be in [0, 1] - assert np.all(curve.edge_bary >= 0) and np.all(curve.edge_bary <= 1), \ - "edge_bary should be in [0, 1]" + # --- helper: check metadata consistency for a single curve --------------- + def _check_curve(c): + n = c.points.shape[0] + n_segs = n if c.closed else n - 1 + assert c.points.shape == (n, 3) + assert c.tria_idx.shape == (n_segs,), ( + f"tria_idx shape {c.tria_idx.shape} != ({n_segs},)" + ) + assert c.edge_vidx.shape == (n, 2) + assert c.edge_bary.shape == (n,) + assert np.all(c.edge_bary >= 0) and np.all(c.edge_bary <= 1) + + # --- 1. 
normal level: two closed loops, no special vertices -------------- + # level 0.2 lies strictly between the two saddles at ±0.36 + curves = mesh.extract_level_paths(vfunc, 0.2) + assert len(curves) == 2, f"Expected 2 closed loops at normal level, got {len(curves)}" + for c in curves: + assert c.closed, "Expected closed loop at normal level" + _check_curve(c) + + # --- 2. saddle level: two curves each anchored at the saddle vertex ------ + minima, maxima, saddles, orders = mesh.critical_points(vfunc) + assert len(saddles) >= 1, "Expected at least one saddle on torus x-function" + saddle_v = int(saddles[orders == 2][0]) + saddle_level = float(vfunc[saddle_v]) + saddle_coord = mesh.v[saddle_v] + + curves_saddle = mesh.extract_level_paths(vfunc, saddle_level) + assert len(curves_saddle) >= 2, ( + f"Expected >=2 curves at saddle level, got {len(curves_saddle)}" + ) + for c in curves_saddle: + _check_curve(c) + # each curve must start or end at the saddle vertex + dist = min( + np.linalg.norm(c.points[0] - saddle_coord), + np.linalg.norm(c.points[-1] - saddle_coord), + ) + assert dist < 1e-6, ( + f"Curve does not touch saddle vertex (min dist = {dist:.2e})" + ) + + # --- 3. 
level out of range: empty result --------------------------------- + curves_empty = mesh.extract_level_paths(vfunc, vfunc.max() + 1.0) + assert len(curves_empty) == 0, "Expected no curves above mesh range" def test_level_path(): From 89578bb03ea63eb0b5f86607cad07cc7dcca6ed5 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 10 Mar 2026 14:46:50 +0100 Subject: [PATCH 28/32] fix code style docstring imperative --- lapy/tria_mesh.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index 0881939..c3415b9 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -2255,7 +2255,7 @@ def _make_poly(pts, tria_idx, edge_vidx, edge_bary, closed_hint=None): return poly def _is_sv_point(ev_row) -> bool: - """True if this endpoint encodes a special vertex (degenerate edge).""" + """Check if this endpoint encodes a special vertex (degenerate edge).""" return int(ev_row[0]) == int(ev_row[1]) def _merge_frags(frag0, frag1, sv_idx: int) -> polygon.Polygon: From 6e89f4d063c1346e3af24f1de15ca619381f6cf0 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 10 Mar 2026 21:41:09 +0100 Subject: [PATCH 29/32] update doc strings --- lapy/tria_mesh.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index c3415b9..061fc72 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -1265,6 +1265,8 @@ def critical_points(self, vfunc: np.ndarray) -> tuple[np.ndarray, np.ndarray, np saddle_orders : np.ndarray Array of saddle orders for each saddle vertex (same length as saddles), shape (n_saddles,). Order 2 = simple saddle (4 sign flips), order 3+ = higher-order saddles. + Computed as ``ceil(n_flips / 2)``; for boundary saddles with an odd + flip count the ceiling accounts for the implicit flip across the boundary. 
Notes ----- @@ -1340,7 +1342,7 @@ def critical_points(self, vfunc: np.ndarray) -> tuple[np.ndarray, np.ndarray, np # CLASSIFY SADDLES # Saddles have 4+ flips (regular points have 2, minima/maxima have 0) # Boundary vertices can have 3 flips (or more) to be a saddle, assuming an additional flip outside - # Order = n_flips / 2 + # Order = ceil(n_flips / 2) is_saddle = sign_flips >= 3 saddles = np.where(is_saddle)[0] saddle_orders = (sign_flips[saddles] + 1) // 2 @@ -2052,7 +2054,8 @@ def extract_level_paths( carries extra attributes set after construction: - ``tria_idx`` : np.ndarray, shape (n_segments,) — triangle index - per segment; ``-1`` for segments adjacent to a special vertex. + per segment. For closed curves n_segments == n_points; for open + curves n_segments == n_points - 1. - ``edge_vidx`` : np.ndarray, shape (n_points, 2) — mesh vertex indices of the intersected edge per point. For special-vertex points both indices are equal (degenerate edge). From e60fafa18d259553582d497560608e1a07ad8b38 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 10 Mar 2026 21:45:38 +0100 Subject: [PATCH 30/32] clearer polygon closure treatment --- lapy/polygon.py | 29 ++++++++++++++++++----------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/lapy/polygon.py b/lapy/polygon.py index f446479..fecd229 100644 --- a/lapy/polygon.py +++ b/lapy/polygon.py @@ -27,9 +27,11 @@ class Polygon: marked as closed automatically. closed : bool or None, default=None - If None (default): Auto-detect by checking if first and last points - are identical. If they are, removes duplicate endpoint and sets closed=True. - - If True: Treats the path as a closed polygon. If the last point duplicates - the first, it will be removed. + are **exactly** equal (``np.array_equal``). Only an exact duplicate + is removed; nearly-equal but geometrically distinct endpoints are left + untouched and the polygon is treated as open. + - If True: Treats the path as a closed polygon. 
If the last point is + nearly equal to the first (``np.allclose``), it is removed. - If False: Treats the path as an open polyline regardless of endpoints. Attributes @@ -100,16 +102,19 @@ def __init__(self, points: np.ndarray, closed: bool | None = None): # Auto-detect closed polygons or handle explicit closed parameter if closed is None: - # Auto-detect: check if first and last points are identical - if len(self.points) > 1 and np.allclose(self.points[0], self.points[-1]): - # Remove duplicate endpoint + # Auto-detect: strip duplicate endpoint only when first and last + # points are exactly identical. np.allclose is intentionally + # avoided here — nearly-equal but distinct endpoints should not + # be silently removed when the caller has not requested closure. + if len(self.points) > 1 and np.array_equal(self.points[0], self.points[-1]): self.points = self.points[:-1] self.closed = True else: - # Not closed (endpoints differ) self.closed = False elif closed: - # Explicitly closed: remove duplicate endpoint if present + # Explicitly closed: remove duplicate endpoint if present. + # np.allclose is used here because the caller has explicitly + # requested closure, so stripping a near-duplicate is intentional. if len(self.points) > 1 and np.allclose(self.points[0], self.points[-1]): self.points = self.points[:-1] self.closed = True @@ -140,9 +145,11 @@ def is_closed(self) -> bool: def close(self) -> "Polygon": """Mark the polygon as closed in-place. - If the last point duplicates the first it is removed; otherwise the - polygon is simply flagged as closed (the closing segment is implicit, - from the last point back to the first). + If the last point is nearly equal to the first (``np.allclose``), it is + removed; otherwise the polygon is simply flagged as closed (the closing + segment from the last point back to the first is implicit). 
Using + ``np.allclose`` here is intentional: the caller has explicitly requested + closure, so stripping a near-duplicate endpoint is the expected behaviour. Returns ------- From f74ed95958e2d4c8fe4fb6f777dac022c53af9d2 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 10 Mar 2026 21:51:12 +0100 Subject: [PATCH 31/32] clearer start_idx checking in reduce_edges_to_path --- lapy/tria_mesh.py | 37 ++++++++++++++++++++++++++++++++----- 1 file changed, 32 insertions(+), 5 deletions(-) diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index 061fc72..63beacb 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -30,8 +30,14 @@ def _reduce_edges_to_path( indices from 0 to max need to appear in the edges). Only nodes that actually appear in edges are processed. start_idx : int or None, default=None - Preferred start node. If None, selects an endpoint (degree 1) automatically, - or arbitrary node for closed loops. Must be a node that appears in edges. + Preferred start node. If None, an endpoint (degree 1) is selected + automatically for open paths, or an arbitrary node for closed loops. + If provided, must be a node that actually appears in ``edges``; raises + ``ValueError`` otherwise. For open paths, if ``start_idx`` belongs to + the component it must also be one of its two endpoints, or ``ValueError`` + is raised. For multiple paths, if ``start_idx`` is not in the current + component it is silently ignored and the default is used (this is normal + behaviour for multi-component graphs). get_edge_idx : bool, default=False If True, also return edge indices for each path segment. @@ -47,7 +53,8 @@ def _reduce_edges_to_path( Raises ------ ValueError - If start_idx is invalid. + If ``start_idx`` is not a real node in the edge graph. + If ``start_idx`` is in an open-path component but is not an endpoint. If graph has degree > 2 (branching or self-intersections). 
""" edges = np.array(edges) @@ -62,6 +69,15 @@ def _reduce_edges_to_path( adj_matrix = sparse.csr_matrix((dat, (i, j)), shape=(n, n)) degrees = np.asarray(adj_matrix.sum(axis=1)).ravel() + # Validate start_idx: must be a real node (degree > 0) if provided. + if start_idx is not None: + real_nodes = np.where(degrees > 0)[0] + if start_idx not in real_nodes: + raise ValueError( + f"start_idx {start_idx} is not a node in the edge graph " + f"(real nodes range: {real_nodes.min()}–{real_nodes.max()})." + ) + # Find connected components n_comp, labels = sparse.csgraph.connected_components(adj_matrix, directed=False) @@ -99,7 +115,10 @@ def _reduce_edges_to_path( # For closed loops: break one edge to convert to open path for traversal if is_closed: - start = start_idx if (start_idx is not None and start_idx in comp_nodes) else comp_nodes[0] + # Use start_idx if it belongs to this component, else fall back to first node. + start = comp_nodes[0] + if start_idx is not None and start_idx in comp_nodes: + start = start_idx neighbors = adj_matrix[start, :].nonzero()[1] neighbors_in_comp = [nb for nb in neighbors if nb in comp_nodes] if len(neighbors_in_comp) < 2: @@ -121,7 +140,15 @@ def _reduce_edges_to_path( ) if not is_closed: - start = start_idx if (start_idx is not None and start_idx in endpoints) else endpoints[0] + if start_idx is not None and start_idx in comp_nodes: + if start_idx not in endpoints: + raise ValueError( + f"start_idx {start_idx} is in component {comp_id} but is not " + f"an endpoint (endpoints: {endpoints.tolist()})." 
+ ) + start = start_idx + else: + start = endpoints[0] dist = sparse.csgraph.shortest_path(adj_matrix, indices=start, directed=False) if np.isinf(dist[comp_nodes]).any(): From 4c450ee0a3f4593883442c18196219fb1e264151 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 10 Mar 2026 21:59:25 +0100 Subject: [PATCH 32/32] remap to dense nodes in reduce_edges_to_path --- lapy/tria_mesh.py | 65 +++++++++++++++++++++++++++-------------------- 1 file changed, 38 insertions(+), 27 deletions(-) diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index 63beacb..4ff7e86 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -25,10 +25,10 @@ def _reduce_edges_to_path( Parameters ---------- edges : np.ndarray - Array of shape (n, 2) with pairs of positive integer node indices - representing undirected edges. Node indices can have gaps (i.e., not all - indices from 0 to max need to appear in the edges). Only nodes that - actually appear in edges are processed. + Array of shape (n, 2) with pairs of non-negative integer node indices + representing undirected edges. Node indices may have arbitrary gaps — + they are remapped to a dense 0..N-1 range internally, so the adjacency + matrix size is always proportional to the number of unique nodes. start_idx : int or None, default=None Preferred start node. If None, an endpoint (degree 1) is selected automatically for open paths, or an arbitrary node for closed loops. @@ -61,22 +61,31 @@ def _reduce_edges_to_path( if edges.shape[0] == 0: return ([], []) if get_edge_idx else [] - # Build adjacency matrix - i = np.column_stack((edges[:, 0], edges[:, 1])).reshape(-1) - j = np.column_stack((edges[:, 1], edges[:, 0])).reshape(-1) - dat = np.ones(i.shape) - n = edges.max() + 1 - adj_matrix = sparse.csr_matrix((dat, (i, j)), shape=(n, n)) - degrees = np.asarray(adj_matrix.sum(axis=1)).ravel() + # Remap node ids to a dense 0..N-1 range so the adjacency matrix is + # proportional to the number of unique nodes, not the maximum id. 
+ # This avoids allocating a huge matrix when node ids have large gaps. + unique_nodes, inverse = np.unique(edges, return_inverse=True) + dense_edges = inverse.reshape(edges.shape) # same shape as edges, dense ids - # Validate start_idx: must be a real node (degree > 0) if provided. + # Validate start_idx against original node ids before remapping. if start_idx is not None: - real_nodes = np.where(degrees > 0)[0] - if start_idx not in real_nodes: + if start_idx not in unique_nodes: raise ValueError( f"start_idx {start_idx} is not a node in the edge graph " - f"(real nodes range: {real_nodes.min()}–{real_nodes.max()})." + f"(known nodes: {unique_nodes.tolist()})." ) + # Map start_idx to its dense counterpart + dense_start = int(np.searchsorted(unique_nodes, start_idx)) + else: + dense_start = None + + # Build adjacency matrix on dense ids + i = np.column_stack((dense_edges[:, 0], dense_edges[:, 1])).reshape(-1) + j = np.column_stack((dense_edges[:, 1], dense_edges[:, 0])).reshape(-1) + dat = np.ones(i.shape) + n = len(unique_nodes) + adj_matrix = sparse.csr_matrix((dat, (i, j)), shape=(n, n)) + degrees = np.asarray(adj_matrix.sum(axis=1)).ravel() # Find connected components n_comp, labels = sparse.csgraph.connected_components(adj_matrix, directed=False) @@ -115,10 +124,10 @@ def _reduce_edges_to_path( # For closed loops: break one edge to convert to open path for traversal if is_closed: - # Use start_idx if it belongs to this component, else fall back to first node. + # Use dense_start if it belongs to this component, else fall back to first node. 
start = comp_nodes[0] - if start_idx is not None and start_idx in comp_nodes: - start = start_idx + if dense_start is not None and dense_start in comp_nodes: + start = dense_start neighbors = adj_matrix[start, :].nonzero()[1] neighbors_in_comp = [nb for nb in neighbors if nb in comp_nodes] if len(neighbors_in_comp) < 2: @@ -140,13 +149,13 @@ def _reduce_edges_to_path( ) if not is_closed: - if start_idx is not None and start_idx in comp_nodes: - if start_idx not in endpoints: + if dense_start is not None and dense_start in comp_nodes: + if dense_start not in endpoints: raise ValueError( f"start_idx {start_idx} is in component {comp_id} but is not " - f"an endpoint (endpoints: {endpoints.tolist()})." + f"an endpoint (endpoints: {unique_nodes[endpoints].tolist()})." ) - start = start_idx + start = dense_start else: start = endpoints[0] @@ -154,14 +163,16 @@ def _reduce_edges_to_path( if np.isinf(dist[comp_nodes]).any(): raise ValueError(f"Component {comp_id} is not fully connected.") - path = comp_nodes[dist[comp_nodes].argsort()] + dense_path = comp_nodes[dist[comp_nodes].argsort()] if is_closed: - path = np.append(path, path[0]) - paths.append(path) + dense_path = np.append(dense_path, dense_path[0]) + + # Map dense ids back to original node ids + paths.append(unique_nodes[dense_path]) if get_edge_idx: - ei = path[:-1] - ej = path[1:] + ei = dense_path[:-1] + ej = dense_path[1:] eidx = np.asarray(eidx_matrix[ei, ej] - 1).ravel() edge_idxs.append(eidx)