From 996db274c3a1fa38ac8bb3aa6ed5375376e66f29 Mon Sep 17 00:00:00 2001 From: Jelle Feringa Date: Thu, 11 Dec 2025 20:37:35 +0100 Subject: [PATCH 1/4] Update Jelle Feringa's affiliation link --- docs/index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.md b/docs/index.md index 049105cc..d0861d93 100644 --- a/docs/index.md +++ b/docs/index.md @@ -19,7 +19,7 @@ COMPAS Slicer was developed at [ETH Zurich](https://ethz.ch/) by: - **[Andrei Jipa](https://github.com/stratocaster)** - [Gramazio Kohler Research](https://gramaziokohler.arch.ethz.ch/) -- **[Jelle Feringa](https://github.com/jf---)** - [Gramazio Kohler Research](https://terrestrial.construction) +- **[Jelle Feringa](https://github.com/jf---)** - [Terrestrial](https://terrestrial.construction) The package emerged from research on non-planar 3D printing and robotic fabrication at the Institute of Technology in Architecture. From 8c1027f34c51afe69e2d409f1996a9a353e3e04f Mon Sep 17 00:00:00 2001 From: Jelle Feringa Date: Thu, 11 Dec 2025 20:39:07 +0100 Subject: [PATCH 2/4] Delete .gitmodules --- .gitmodules | 1 - 1 file changed, 1 deletion(-) delete mode 100644 .gitmodules diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index 8b137891..00000000 --- a/.gitmodules +++ /dev/null @@ -1 +0,0 @@ - From 1bfab2008ec61e2cc2e01d8a72bbbbdad619c099 Mon Sep 17 00:00:00 2001 From: Jelle Feringa Date: Wed, 17 Dec 2025 20:44:17 +0100 Subject: [PATCH 3/4] refactor: use compas_cgal.isolines for contour extraction --- docs/concepts/slicing-algorithms.md | 24 +- docs/examples/05_scalar_field.md | 6 +- .../2_curved_slicing/ex2_curved_slicing.py | 13 +- src/compas_slicer/config.py | 33 --- src/compas_slicer/data/defaults.toml | 2 - .../interpolation_slicing_preprocessor.py | 16 +- .../preprocessing_utils/compound_target.py | 37 +-- .../preprocessing_utils/geodesics.py | 225 +----------------- .../slicers/slice_utilities/__init__.py | 1 + .../slice_utilities/geodesic_contours.py | 63 +++++ .../slice_utilities/scalar_field_contours.py | 129 ++++------ 11 files changed, 143 insertions(+), 406 deletions(-) create mode 100644 src/compas_slicer/slicers/slice_utilities/geodesic_contours.py diff --git a/docs/concepts/slicing-algorithms.md b/docs/concepts/slicing-algorithms.md index 92b82f4d..bf4ccd6c 100644 --- a/docs/concepts/slicing-algorithms.md +++ b/docs/concepts/slicing-algorithms.md @@ -185,29 +185,21 @@ To build connected contours: ## Contour Assembly -All slicers eventually produce contours via the `ScalarFieldContours` class: - -### From Crossings to Paths +All slicers produce contours via `ScalarFieldContours`, which uses CGAL's isoline extraction: ```mermaid flowchart LR - A[Edge crossings] --> B[Face traversal] - B --> C[Connected polylines] + A[Scalar field on vertices] --> B[CGAL isolines] + B --> C[Sorted polylines] C --> D[Path objects] ``` -1. **Build crossing map**: Dictionary of edge → crossing point -2. **Traverse faces**: Walk around faces connecting crossings -3. **Handle branches**: Multiple paths per layer for complex geometry -4. 
**Create Paths**: Wrap polylines in Path objects with metadata - -### Handling Complex Topology - -The algorithm handles: +The CGAL backend (`compas_cgal.isolines`) handles: -- **Multiple contours per layer**: Holes, disconnected regions -- **Open contours**: When path hits mesh boundary -- **Branching**: When contours merge or split +- **Edge crossing detection**: Finding zero-crossings on mesh edges +- **Polyline assembly**: Connecting crossings into coherent curves +- **Multiple contours**: Holes, disconnected regions, branching +- **Open/closed detection**: Identifying boundary-hitting paths ## Performance Considerations diff --git a/docs/examples/05_scalar_field.md b/docs/examples/05_scalar_field.md index 3036b4ab..421f05a0 100644 --- a/docs/examples/05_scalar_field.md +++ b/docs/examples/05_scalar_field.md @@ -193,9 +193,9 @@ Creates concentric circular layers (spiral vase mode). ### Geodesic Field ```python -# Using igl for geodesic distance from boundary vertices -import igl -distances = igl.exact_geodesic(V, F, boundary_vertices) +# Using CGAL for geodesic distance from boundary vertices +from compas_cgal.geodesics import heat_geodesic_distances +distances = heat_geodesic_distances((V, F), boundary_vertices) ``` Creates layers that follow surface curvature. diff --git a/examples/2_curved_slicing/ex2_curved_slicing.py b/examples/2_curved_slicing/ex2_curved_slicing.py index 30f83fef..7f937960 100644 --- a/examples/2_curved_slicing/ex2_curved_slicing.py +++ b/examples/2_curved_slicing/ex2_curved_slicing.py @@ -1,11 +1,12 @@ import time from pathlib import Path +import numpy as np from compas.datastructures import Mesh import compas_slicer.utilities as utils from compas_slicer.config import InterpolationConfig -from compas_slicer.post_processing import seams_smooth, simplify_paths_rdp +from compas_slicer.post_processing import seams_smooth from compas_slicer.pre_processing import InterpolationSlicingPreprocessor, create_mesh_boundary_attributes from compas_slicer.print_organization import ( InterpolationPrintOrganizer, @@ -28,9 +29,12 @@ def main(visualize: bool = False): # Load initial_mesh mesh = Mesh.from_obj(DATA_PATH / 'mesh.obj') - # Load targets (boundaries) - low_boundary_vs = utils.load_from_json(DATA_PATH, 'boundaryLOW.json') - high_boundary_vs = utils.load_from_json(DATA_PATH, 'boundaryHIGH.json') + # Identify boundaries from mesh topology + boundaries = [list(loop) for loop in mesh.vertices_on_boundaries()] + avg_zs = [np.mean([mesh.vertex_coordinates(v)[2] for v in loop]) for loop in boundaries] + low_idx = int(np.argmin(avg_zs)) + low_boundary_vs = boundaries.pop(low_idx) + high_boundary_vs = [v for loop in boundaries for v in loop] # flatten remaining create_mesh_boundary_attributes(mesh, low_boundary_vs, high_boundary_vs) avg_layer_height = 2.0 @@ -48,7 +52,6 @@ def main(visualize: bool = False): slicer = InterpolationSlicer(mesh, preprocessor, config) slicer.slice_model() - simplify_paths_rdp(slicer, threshold=0.25) seams_smooth(slicer, smooth_distance=3) slicer.printout_info() utils.save_to_json(slicer.to_data(), OUTPUT_PATH, 'curved_slicer.json') diff --git a/src/compas_slicer/config.py b/src/compas_slicer/config.py index 7123e963..0a55194f 100644 --- a/src/compas_slicer/config.py +++ b/src/compas_slicer/config.py @@ -25,7 +25,6 @@ "GcodeConfig", "PrintConfig", "OutputConfig", - "GeodesicsMethod", "UnionMethod", "load_defaults", ] @@ -50,15 +49,6 @@ def load_defaults() -> dict[str, Any]: _DEFAULTS = load_defaults() -class GeodesicsMethod(str, Enum): - """Method for 
computing geodesic distances.""" - - EXACT_IGL = "exact_igl" - HEAT_IGL = "heat_igl" - HEAT_CGAL = "heat_cgal" - HEAT = "heat" - - class UnionMethod(str, Enum): """Method for combining target boundaries.""" @@ -162,10 +152,6 @@ class InterpolationConfig(Data): Maximum layer height. vertical_layers_max_centroid_dist : float Maximum distance for grouping paths into vertical layers. - target_low_geodesics_method : GeodesicsMethod - Method for computing geodesics to low boundary. - target_high_geodesics_method : GeodesicsMethod - Method for computing geodesics to high boundary. target_high_union_method : UnionMethod Method for combining high target boundaries. target_high_union_params : list[float] @@ -181,12 +167,6 @@ class InterpolationConfig(Data): vertical_layers_max_centroid_dist: float = field( default_factory=lambda: _interpolation_defaults().get("vertical_layers_max_centroid_dist", 25.0) ) - target_low_geodesics_method: GeodesicsMethod = field( - default_factory=lambda: GeodesicsMethod(_interpolation_defaults().get("target_low_geodesics_method", "heat_igl")) - ) - target_high_geodesics_method: GeodesicsMethod = field( - default_factory=lambda: GeodesicsMethod(_interpolation_defaults().get("target_high_geodesics_method", "heat_igl")) - ) target_high_union_method: UnionMethod = field( default_factory=lambda: UnionMethod(_interpolation_defaults().get("target_high_union_method", "min")) ) @@ -199,11 +179,6 @@ class InterpolationConfig(Data): def __post_init__(self) -> None: super().__init__() - # Convert string enums if needed - if isinstance(self.target_low_geodesics_method, str): - self.target_low_geodesics_method = GeodesicsMethod(self.target_low_geodesics_method) - if isinstance(self.target_high_geodesics_method, str): - self.target_high_geodesics_method = GeodesicsMethod(self.target_high_geodesics_method) if isinstance(self.target_high_union_method, str): self.target_high_union_method = UnionMethod(self.target_high_union_method) @@ -214,8 +189,6 @@ def __data__(self) -> dict[str, Any]: "min_layer_height": self.min_layer_height, "max_layer_height": self.max_layer_height, "vertical_layers_max_centroid_dist": self.vertical_layers_max_centroid_dist, - "target_low_geodesics_method": self.target_low_geodesics_method.value, - "target_high_geodesics_method": self.target_high_geodesics_method.value, "target_high_union_method": self.target_high_union_method.value, "target_high_union_params": self.target_high_union_params, "uneven_upper_targets_offset": self.uneven_upper_targets_offset, @@ -231,12 +204,6 @@ def __from_data__(cls, data: dict[str, Any]) -> InterpolationConfig: vertical_layers_max_centroid_dist=data.get( "vertical_layers_max_centroid_dist", d.get("vertical_layers_max_centroid_dist", 25.0) ), - target_low_geodesics_method=data.get( - "target_low_geodesics_method", d.get("target_low_geodesics_method", "heat_igl") - ), - target_high_geodesics_method=data.get( - "target_high_geodesics_method", d.get("target_high_geodesics_method", "heat_igl") - ), target_high_union_method=data.get("target_high_union_method", d.get("target_high_union_method", "min")), target_high_union_params=data.get("target_high_union_params", d.get("target_high_union_params", [])), uneven_upper_targets_offset=data.get( diff --git a/src/compas_slicer/data/defaults.toml b/src/compas_slicer/data/defaults.toml index e7c1d443..454e57a0 100644 --- a/src/compas_slicer/data/defaults.toml +++ b/src/compas_slicer/data/defaults.toml @@ -11,8 +11,6 @@ avg_layer_height = 5.0 min_layer_height = 0.5 max_layer_height = 10.0 
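+# maximum centroid distance used when grouping paths into one vertical layer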
vertical_layers_max_centroid_dist = 25.0 -target_low_geodesics_method = "heat_cgal" -target_high_geodesics_method = "heat_cgal" target_high_union_method = "min" target_high_union_params = [] uneven_upper_targets_offset = 0.0 diff --git a/src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py b/src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py index eb64d7ef..cef4f299 100644 --- a/src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py +++ b/src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py @@ -63,23 +63,15 @@ def create_compound_targets(self) -> None: """Create target_LOW and target_HIGH and compute geodesic distances.""" # --- low target - geodesics_method = self.config.target_low_geodesics_method.value - method = 'min' # no other union methods currently supported for lower target - params: list[float] = [] - self.target_LOW = CompoundTarget(self.mesh, 'boundary', 1, self.DATA_PATH, - union_method=method, - union_params=params, - geodesics_method=geodesics_method) + self.target_LOW = CompoundTarget(self.mesh, 'boundary', 1, self.DATA_PATH) # --- high target - geodesics_method = self.config.target_high_geodesics_method.value method = self.config.target_high_union_method.value params = self.config.target_high_union_params logger.info(f"Creating target with union type: {method} and params: {params}") - self.target_HIGH = CompoundTarget(self.mesh, 'boundary', 2, self.DATA_PATH, - union_method=method, - union_params=params, - geodesics_method=geodesics_method) + self.target_HIGH = CompoundTarget( + self.mesh, 'boundary', 2, self.DATA_PATH, union_method=method, union_params=params + ) # --- uneven boundaries of high target self.target_HIGH.offset = self.config.uneven_upper_targets_offset diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py b/src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py index 74c14fdc..7c5e073e 100644 --- a/src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py +++ b/src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py @@ -11,14 +11,8 @@ from numpy.typing import NDArray import compas_slicer.utilities as utils -from compas_slicer.pre_processing.preprocessing_utils.geodesics import ( - get_cgal_HEAT_geodesic_distances, - get_custom_HEAT_geodesic_distances, - get_igl_EXACT_geodesic_distances, - get_igl_HEAT_geodesic_distances, -) - -GeodesicsMethod = Literal['exact_igl', 'heat_igl', 'heat_cgal', 'heat'] +from compas_slicer.pre_processing.preprocessing_utils.geodesics import get_heat_geodesic_distances + UnionMethod = Literal['min', 'smooth', 'chamfer', 'stairs'] @@ -57,11 +51,6 @@ class CompoundTarget: DATA_PATH: str has_blend_union: bool blend_radius : float - geodesics_method: str - 'heat_cgal' CGAL heat geodesic distances (recommended) - 'heat' custom heat geodesic distances - anisotropic_scaling: bool - This is not yet implemented """ def __init__( @@ -72,8 +61,6 @@ def __init__( DATA_PATH: str, union_method: UnionMethod = 'min', union_params: list[Any] | None = None, - geodesics_method: GeodesicsMethod = 'heat_cgal', - anisotropic_scaling: bool = False, ) -> None: if union_params is None: @@ -89,9 +76,6 @@ def __init__( self.union_method = union_method self.union_params = union_params - self.geodesics_method = geodesics_method - self.anisotropic_scaling = anisotropic_scaling # Anisotropic scaling not yet implemented - self.offset = 0 self.VN = len(list(self.mesh.vertices())) @@ -145,20 +129,9 @@ def 
compute_geodesic_distances(self) -> None: Computes the geodesic distances from each of the target's neighborhoods to all the mesh vertices. Fills in the distances attributes. """ - if self.geodesics_method == 'exact_igl': - distances_lists = [get_igl_EXACT_geodesic_distances(self.mesh, vstarts) for vstarts in - self.clustered_vkeys] - elif self.geodesics_method == 'heat_igl': - distances_lists = [get_igl_HEAT_geodesic_distances(self.mesh, vstarts) for vstarts in - self.clustered_vkeys] - elif self.geodesics_method == 'heat_cgal': - distances_lists = [get_cgal_HEAT_geodesic_distances(self.mesh, vstarts) for vstarts in - self.clustered_vkeys] - elif self.geodesics_method == 'heat': - distances_lists = [get_custom_HEAT_geodesic_distances(self.mesh, vstarts, str(self.OUTPUT_PATH)) for vstarts in - self.clustered_vkeys] - else: - raise ValueError('Unknown geodesics method : ' + self.geodesics_method) + distances_lists = [ + get_heat_geodesic_distances(self.mesh, vstarts) for vstarts in self.clustered_vkeys + ] distances_lists = [list(dl) for dl in distances_lists] # number_of_boundaries x #V self.update_distances_lists(distances_lists) diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py b/src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py index fa736d40..1c3d8dec 100644 --- a/src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py +++ b/src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py @@ -1,38 +1,24 @@ from __future__ import annotations -import math from typing import TYPE_CHECKING -import numpy as np -import scipy -from loguru import logger from numpy.typing import NDArray -import compas_slicer.utilities as utils -from compas_slicer.pre_processing.preprocessing_utils.gradient import ( - get_face_gradient_from_scalar_field, - get_scalar_field_from_gradient, - normalize_gradient, -) - if TYPE_CHECKING: + import numpy as np from compas.datastructures import Mesh -__all__ = ['get_heat_geodesic_distances', - 'get_custom_HEAT_geodesic_distances', - 'GeodesicsCache'] +__all__ = ['get_heat_geodesic_distances'] -# CGAL heat method solver cache (for precomputation reuse) _cgal_solver_cache: dict[int, object] = {} def get_heat_geodesic_distances( mesh: Mesh, vertices_start: list[int] ) -> NDArray[np.floating]: - """ - Calculate geodesic distances using CGAL heat method. + """Calculate geodesic distances using CGAL heat method. Uses compas_cgal's HeatGeodesicSolver which provides CGAL's Heat_method_3 implementation with intrinsic Delaunay triangulation. @@ -47,209 +33,16 @@ def get_heat_geodesic_distances( Returns ------- NDArray - Minimum distance from any source to each vertex. + Geodesic distance from sources to each vertex. 
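+
+    Examples
+    --------
+    A minimal sketch, assuming a triangulated compas ``Mesh`` and an
+    installed ``compas_cgal``; the input file name is illustrative:
+
+    >>> from compas.datastructures import Mesh  # doctest: +SKIP
+    >>> mesh = Mesh.from_obj('mesh.obj')  # doctest: +SKIP
+    >>> distances = get_heat_geodesic_distances(mesh, [0])  # doctest: +SKIP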
+ """ from compas_cgal.geodesics import HeatGeodesicSolver - # Check if we have a cached solver for this mesh mesh_hash = hash((len(list(mesh.vertices())), len(list(mesh.faces())))) if mesh_hash not in _cgal_solver_cache: - _cgal_solver_cache.clear() # Clear old solvers - _cgal_solver_cache[mesh_hash] = HeatGeodesicSolver(mesh) + _cgal_solver_cache.clear() + V, F = mesh.to_vertices_and_faces() + _cgal_solver_cache[mesh_hash] = HeatGeodesicSolver((V, F)) solver = _cgal_solver_cache[mesh_hash] - - # Compute distances for each source and take minimum - all_distances = [] - for source in vertices_start: - distances = solver.solve([source]) - all_distances.append(distances) - - return np.min(np.array(all_distances), axis=0) - - -# Backwards compatibility aliases -get_cgal_HEAT_geodesic_distances = get_heat_geodesic_distances -get_igl_HEAT_geodesic_distances = get_heat_geodesic_distances -get_igl_EXACT_geodesic_distances = get_heat_geodesic_distances - - -class GeodesicsCache: - """Cache for geodesic distances to avoid redundant computations. - - Note: This class is kept for backwards compatibility but now uses CGAL. - The CGAL solver has its own internal caching via _cgal_solver_cache. - """ - - def __init__(self) -> None: - self._cache: dict[tuple[int, str], NDArray[np.floating]] = {} - self._mesh_hash: int | None = None - - def clear(self) -> None: - """Clear the cache.""" - self._cache.clear() - self._mesh_hash = None - - def get_distances( - self, mesh: Mesh, sources: list[int], method: str = 'heat' - ) -> NDArray[np.floating]: - """Get geodesic distances from sources, using cache when possible. - - Parameters - ---------- - mesh : Mesh - The mesh to compute distances on. - sources : list[int] - Source vertex indices. - method : str - Geodesic method (ignored, always uses CGAL heat method). - - Returns - ------- - NDArray - Minimum distance from any source to each vertex. - """ - return get_heat_geodesic_distances(mesh, sources) - - -def get_custom_HEAT_geodesic_distances( - mesh: Mesh, - vi_sources: list[int], - OUTPUT_PATH: str, - v_equalize: list[int] | None = None, -) -> NDArray[np.floating]: - """Calculate geodesic distances using the custom heat method. - - This is a pure Python implementation of the heat method (Crane et al., 2013). - For production use, prefer CGAL's implementation via get_heat_geodesic_distances() - which uses intrinsic Delaunay triangulation for better accuracy. - - Parameters - ---------- - mesh : Mesh - A compas mesh (must be triangulated). - vi_sources : list[int] - Source vertex indices. - OUTPUT_PATH : str - Path to save intermediate results. - v_equalize : list[int] | None - Vertices to equalize (for saddle point handling). - - Returns - ------- - NDArray - Geodesic distance from sources to each vertex. - """ - geodesics_solver = GeodesicsSolver(mesh, OUTPUT_PATH) - u = geodesics_solver.diffuse_heat(vi_sources, v_equalize) - geodesic_dist = geodesics_solver.get_geodesic_distances(u, vi_sources, v_equalize) - return geodesic_dist - - -###################################### -# --- GeodesicsSolver - -# Heat diffusion parameters for custom solver -HEAT_DIFFUSION_ITERATIONS = 250 -DELTA = 0.1 # Time step for backward Euler - - -class GeodesicsSolver: - """ - Computes custom geodesic distances. 
Starts from implementation of the method presented in the paper - 'Geodesics in Heat' (Crane, 2013) - - Attributes - ---------- - mesh: :class: compas.datastructures.Mesh - OUTPUT_PATH: str - """ - - def __init__(self, mesh: Mesh, OUTPUT_PATH: str) -> None: - logger.info('GeodesicsSolver') - self.mesh = mesh - self.OUTPUT_PATH = OUTPUT_PATH - - self.use_forwards_euler = True - - # Compute matrices using NumPy implementations - self.cotans = utils.get_mesh_cotans(mesh) - self.L = utils.get_mesh_cotmatrix(mesh, fix_boundaries=False) - self.M = utils.get_mesh_massmatrix(mesh) - - def diffuse_heat( - self, - vi_sources: list[int], - v_equalize: list[int] | None = None, - ) -> NDArray[np.floating]: - """ - Heat diffusion using iterative backward Euler. - - This is a custom Python implementation of the heat method. For production use, - prefer CGAL's heat method (geodesics_method='heat_cgal') which uses intrinsic - Delaunay triangulation for better accuracy. - - Parameters - ---------- - vi_sources : list[int] - The vertex indices of the heat sources. - v_equalize : list[int] | None - Vertex indices whose values should be equalized (for handling saddle points). - - Returns - ------- - NDArray - Heat distribution u, with sources at 0 and increasing away from them. - """ - if not v_equalize: - v_equalize = [] - - # First assign starting values (0 everywhere, 1 on the sources) - u0 = np.zeros(len(list(self.mesh.vertices()))) - u0[vi_sources] = 1.0 - u = u0 - - # Pre-factor the matrix ONCE outside the loop (major speedup) - # Using backward Euler: (M - δL)u' = M·u - S = self.M - DELTA * self.L - solver = scipy.sparse.linalg.factorized(S) - - for _i in range(HEAT_DIFFUSION_ITERATIONS): - b = self.M * u - u_prime = solver(b) - - if len(v_equalize) > 0: - u_prime[v_equalize] = np.min(u_prime[v_equalize]) - - u = u_prime - u[vi_sources] = 1.0 # enforce Dirichlet boundary: sources remain fixed - - # reverse values (to make sources at 0, increasing outward) - u = ([np.max(u)] * len(u)) - u - - utils.save_to_json([float(value) for value in u], self.OUTPUT_PATH, 'diffused_heat.json') - return u - - def get_geodesic_distances( - self, u: NDArray[np.floating], vi_sources: list[int], v_equalize: list[int] | None = None - ) -> NDArray[np.floating]: - """ - Finds geodesic distances from heat distribution u. 
I - - Parameters - ---------- - u: np.array, dimensions: V x 1 (one scalar value per vertex) - vi_sources: list, int, the vertex indices of the sources - v_equalize: list, int, the vertex indices whose value should be equalized - """ - X = get_face_gradient_from_scalar_field(self.mesh, u) - X = normalize_gradient(X) - geodesic_dist = get_scalar_field_from_gradient(self.mesh, X, self.L, self.cotans) - if math.isnan(geodesic_dist[0]): - raise RuntimeError("get_scalar_field_from_gradient returned NaN - check mesh quality.") - geodesic_dist[vi_sources] = 0 # coerce boundary vertices to be on 0 (fixes small boundary imprecision) - return geodesic_dist - - -if __name__ == "__main__": - pass + return solver.solve(vertices_start) diff --git a/src/compas_slicer/slicers/slice_utilities/__init__.py b/src/compas_slicer/slicers/slice_utilities/__init__.py index e84292c0..0acaf622 100644 --- a/src/compas_slicer/slicers/slice_utilities/__init__.py +++ b/src/compas_slicer/slicers/slice_utilities/__init__.py @@ -1,4 +1,5 @@ from .contours_base import * # noqa: F401 F403 +from .geodesic_contours import * # noqa: F401 F403 from .graph_connectivity import * # noqa: F401 F403 from .scalar_field_contours import * # noqa: F401 F403 from .uv_contours import * # noqa: F401 F403 diff --git a/src/compas_slicer/slicers/slice_utilities/geodesic_contours.py b/src/compas_slicer/slicers/slice_utilities/geodesic_contours.py new file mode 100644 index 00000000..0b7bf37f --- /dev/null +++ b/src/compas_slicer/slicers/slice_utilities/geodesic_contours.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +import numpy as np +from compas.geometry import Point, Polyline + +from compas_slicer.geometry import Path, VerticalLayersManager + +if TYPE_CHECKING: + from compas.datastructures import Mesh + + +__all__ = ["GeodesicContours"] + + +class GeodesicContours: + """Extract geodesic isolines using compas_cgal. + + Parameters + ---------- + mesh : Mesh + Triangular mesh. + sources : list[int] + Source vertex indices. + isovalues : list[float] + Isovalue thresholds for isoline extraction. + + """ + + def __init__(self, mesh: Mesh, sources: list[int], isovalues: list[float]) -> None: + self.mesh = mesh + self.sources = sources + self.isovalues = isovalues + self.polylines: list[Polyline] = [] + self._closed_flags: list[bool] = [] + + def compute(self) -> None: + """Compute geodesic isolines from sources at specified isovalues.""" + from compas_cgal.geodesics import geodesic_isolines + + V, F = self.mesh.to_vertices_and_faces() + results = geodesic_isolines((V, F), self.sources, self.isovalues) + + for pts in results: + points = [Point(*p) for p in pts.tolist()] + self.polylines.append(Polyline(points)) + is_closed = bool(np.linalg.norm(pts[0] - pts[-1]) < 1e-6) + self._closed_flags.append(is_closed) + + def add_to_vertical_layers_manager(self, manager: VerticalLayersManager) -> None: + """Add computed isolines to a VerticalLayersManager. + + Parameters + ---------- + manager : VerticalLayersManager + The manager to add paths to. 
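+
+        Examples
+        --------
+        A minimal sketch; ``mesh`` and ``manager`` are assumed to already
+        exist (the ``VerticalLayersManager`` constructor is not shown here):
+
+        >>> contours = GeodesicContours(mesh, sources=[0], isovalues=[10.0, 20.0])  # doctest: +SKIP
+        >>> contours.compute()  # doctest: +SKIP
+        >>> contours.add_to_vertical_layers_manager(manager)  # doctest: +SKIP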
+ + """ + for polyline, is_closed in zip(self.polylines, self._closed_flags): + if len(polyline.points) > 3: + path = Path(polyline.points, is_closed=is_closed) + manager.add(path) diff --git a/src/compas_slicer/slicers/slice_utilities/scalar_field_contours.py b/src/compas_slicer/slicers/slice_utilities/scalar_field_contours.py index 1124f3fe..35519e57 100644 --- a/src/compas_slicer/slicers/slice_utilities/scalar_field_contours.py +++ b/src/compas_slicer/slicers/slice_utilities/scalar_field_contours.py @@ -3,9 +3,9 @@ from typing import TYPE_CHECKING import numpy as np -from compas.geometry import Point, Vector, add_vectors, scale_vector +from compas.geometry import Point, Polyline -from compas_slicer.slicers.slice_utilities import ContoursBase +from compas_slicer.geometry import Path, VerticalLayersManager if TYPE_CHECKING: from compas.datastructures import Mesh @@ -13,95 +13,50 @@ __all__ = ['ScalarFieldContours'] -class ScalarFieldContours(ContoursBase): - """ - Finds the iso-contours of the function f(x) = vertex_data['scalar_field'] - on the mesh. +class ScalarFieldContours: + """Finds iso-contours of vertex scalar field using CGAL backend. + + Extracts zero-level isolines from the 'scalar_field' vertex attribute. - Attributes + Parameters ---------- - mesh: :class: 'compas.datastructures.Mesh' + mesh : Mesh + Triangular mesh with 'scalar_field' vertex attribute. + """ + def __init__(self, mesh: Mesh) -> None: - ContoursBase.__init__(self, mesh) # initialize from parent class + self.mesh = mesh + self.polylines: list[Polyline] = [] + self._closed_flags: list[bool] = [] + + def compute(self) -> None: + """Extract zero-level isolines from scalar field.""" + from compas_cgal.isolines import isolines + + results = isolines(self.mesh, 'scalar_field', isovalues=[0.0]) + + for pts in results: + points = [Point(*p) for p in pts.tolist()] + self.polylines.append(Polyline(points)) + is_closed = bool(np.linalg.norm(pts[0] - pts[-1]) < 1e-6) + self._closed_flags.append(is_closed) + + def add_to_vertical_layers_manager(self, manager: VerticalLayersManager) -> None: + """Add isolines to a VerticalLayersManager. - def find_intersections(self) -> None: - """Vectorized intersection finding for scalar field contours. + Parameters + ---------- + manager : VerticalLayersManager + The manager to add paths to. - Overrides parent method for ~10x speedup on large meshes. 
""" - # Get all edges as numpy array - edges = np.array(list(self.mesh.edges())) - n_edges = len(edges) - - if n_edges == 0: - return - - # Get scalar field values for all vertices - scalar_field = np.array([ - self.mesh.vertex[v]['scalar_field'] - for v in range(len(list(self.mesh.vertices()))) - ]) - - # Get scalar values at edge endpoints - d1 = scalar_field[edges[:, 0]] - d2 = scalar_field[edges[:, 1]] - - # Vectorized intersection test: sign change across edge - intersected = (d1 * d2) <= 0 # different signs or zero - - # Get vertex coordinates - vertices = np.array([self.mesh.vertex_coordinates(v) for v in self.mesh.vertices()]) - - # Compute zero crossings for intersected edges - intersected_edges = edges[intersected] - d1_int = d1[intersected] - d2_int = d2[intersected] - - # Interpolation parameter (avoid division by zero) - abs_d1 = np.abs(d1_int) - abs_d2 = np.abs(d2_int) - denom = abs_d1 + abs_d2 - valid = denom > 0 - - # Compute intersection points - v1 = vertices[intersected_edges[:, 0]] - v2 = vertices[intersected_edges[:, 1]] - - # Linear interpolation: pt = v1 + t * (v2 - v1) where t = |d1| / (|d1| + |d2|) - t = np.zeros(len(intersected_edges)) - t[valid] = abs_d1[valid] / denom[valid] - pts = v1 + t[:, np.newaxis] * (v2 - v1) - - # Store results - for edge, pt, is_valid in zip(intersected_edges, pts, valid): - if is_valid: - edge_tuple = (int(edge[0]), int(edge[1])) - rev_edge = (int(edge[1]), int(edge[0])) - if edge_tuple not in self.intersection_data and rev_edge not in self.intersection_data: - self.intersection_data[edge_tuple] = Point(pt[0], pt[1], pt[2]) - - # Build edge to index mapping - for i, e in enumerate(self.intersection_data): - self.edge_to_index[e] = i - - def edge_is_intersected(self, u: int, v: int) -> bool: - """ Returns True if the edge u,v has a zero-crossing, False otherwise. """ - d1 = self.mesh.vertex[u]['scalar_field'] - d2 = self.mesh.vertex[v]['scalar_field'] - return not (d1 > 0 and d2 > 0 or d1 < 0 and d2 < 0) - - def find_zero_crossing_data(self, u: int, v: int) -> list[float] | None: - """ Finds the position of the zero-crossing on the edge u,v. 
""" - dist_a, dist_b = self.mesh.vertex[u]['scalar_field'], self.mesh.vertex[v]['scalar_field'] - if abs(dist_a) + abs(dist_b) > 0: - v_coords_a, v_coords_b = self.mesh.vertex_coordinates(u), self.mesh.vertex_coordinates(v) - vec = Vector.from_start_end(v_coords_a, v_coords_b) - vec = scale_vector(vec, abs(dist_a) / (abs(dist_a) + abs(dist_b))) - pt: list[float] = add_vectors(v_coords_a, vec) - return pt - return None - - -if __name__ == "__main__": - pass + for polyline, is_closed in zip(self.polylines, self._closed_flags): + if len(polyline.points) > 3: + path = Path(polyline.points, is_closed=is_closed) + manager.add(path) + + @property + def is_valid(self) -> bool: + """Check if any valid paths were found.""" + return any(len(pl.points) > 3 for pl in self.polylines) From 49e67539bbf9bd4cca2bf0f27cb44d1e8f74e5ef Mon Sep 17 00:00:00 2001 From: Jelle Feringa Date: Wed, 17 Dec 2025 20:54:26 +0100 Subject: [PATCH 4/4] style: ruff format --- src/compas_slicer/__main__.py | 8 +- src/compas_slicer/_numpy_ops.py | 10 +- src/compas_slicer/geometry/__init__.py | 2 +- src/compas_slicer/geometry/path.py | 5 +- src/compas_slicer/post_processing/__init__.py | 2 +- .../post_processing/generate_brim.py | 18 +- .../post_processing/generate_raft.py | 50 +++--- .../infill/medial_axis_infill.py | 5 +- .../reorder_vertical_layers.py | 10 +- .../post_processing/seams_align.py | 7 +- .../post_processing/seams_smooth.py | 4 +- .../post_processing/simplify_paths_rdp.py | 7 +- .../sort_into_vertical_layers.py | 2 +- .../sort_paths_minimum_travel_time.py | 4 +- .../post_processing/spiralize_contours.py | 4 +- .../unify_paths_orientation.py | 8 +- .../post_processing/zig_zag_open_paths.py | 4 +- .../pre_processing/gradient_evaluation.py | 25 +-- .../interpolation_slicing_preprocessor.py | 78 ++++----- .../pre_processing/positioning.py | 17 +- .../assign_vertex_distance.py | 73 ++++---- .../preprocessing_utils/compound_target.py | 106 ++++++------ .../preprocessing_utils/geodesics.py | 6 +- .../preprocessing_utils/gradient.py | 38 ++--- .../mesh_attributes_handling.py | 88 +++++----- .../preprocessing_utils/region_split.py | 91 +++++----- .../topological_sorting.py | 132 ++++++++------- .../print_organization/__init__.py | 2 +- .../base_print_organizer.py | 13 +- .../base_boundary.py | 27 +-- .../interpolation_print_organizer.py | 36 ++-- .../planar_print_organizer.py | 10 +- .../blend_radius.py | 16 +- .../data_smoothing.py | 20 +-- .../extruder_toggle.py | 9 +- .../print_organization_utilities/gcode.py | 12 +- .../linear_velocity.py | 24 +-- .../safety_printpoints.py | 5 +- .../print_organization_utilities/wait_time.py | 22 +-- .../scalar_field_print_organizer.py | 25 ++- src/compas_slicer/slicers/__init__.py | 3 +- src/compas_slicer/slicers/base_slicer.py | 6 +- .../slicers/interpolation_slicer.py | 13 +- src/compas_slicer/slicers/planar_slicer.py | 10 +- .../planar_slicing/planar_slicing_cgal.py | 8 +- .../slicers/scalar_field_slicer.py | 10 +- .../slicers/slice_utilities/contours_base.py | 22 +-- .../slice_utilities/graph_connectivity.py | 17 +- .../slice_utilities/scalar_field_contours.py | 4 +- .../slicers/slice_utilities/uv_contours.py | 14 +- src/compas_slicer/slicers/uv_slicer.py | 12 +- src/compas_slicer/utilities/__init__.py | 2 +- .../utilities/attributes_transfer.py | 20 +-- .../utilities/terminal_command.py | 7 +- src/compas_slicer/utilities/utils.py | 109 ++++++------ .../visualization/visualization.py | 3 +- src/compas_slicer_ghpython/install.py | 4 +- src/compas_slicer_ghpython/visualization.py | 
157 +++++++++++------- tests/test_examples.py | 16 +- tests/test_performance.py | 8 +- ...ar_print_organization_horizontal_layers.py | 89 ++++++---- tests/test_planar_slicing.py | 15 +- 62 files changed, 814 insertions(+), 760 deletions(-) diff --git a/src/compas_slicer/__main__.py b/src/compas_slicer/__main__.py index 2bc52cb7..8160d02f 100644 --- a/src/compas_slicer/__main__.py +++ b/src/compas_slicer/__main__.py @@ -3,7 +3,7 @@ import compas_slicer -if __name__ == '__main__': - logger.info(f'COMPAS: {compas.__version__}') - logger.info(f'COMPAS Slicer: {compas_slicer.__version__}') - logger.info('Awesome! Your installation worked! :)') +if __name__ == "__main__": + logger.info(f"COMPAS: {compas.__version__}") + logger.info(f"COMPAS Slicer: {compas_slicer.__version__}") + logger.info("Awesome! Your installation worked! :)") diff --git a/src/compas_slicer/_numpy_ops.py b/src/compas_slicer/_numpy_ops.py index 579ef362..dea47a9e 100644 --- a/src/compas_slicer/_numpy_ops.py +++ b/src/compas_slicer/_numpy_ops.py @@ -143,9 +143,7 @@ def face_gradient_from_scalar_field( cross2 = np.cross(v1 - v0, face_normals) # (F, 3) # Compute gradient - grad = ( - (u1 - u0)[:, np.newaxis] * cross1 + (u2 - u0)[:, np.newaxis] * cross2 - ) / (2 * face_areas[:, np.newaxis]) + grad = ((u1 - u0)[:, np.newaxis] * cross1 + (u2 - u0)[:, np.newaxis] * cross2) / (2 * face_areas[:, np.newaxis]) return grad @@ -187,9 +185,9 @@ def per_vertex_divergence( e2 = v0 - v1 # edge opposite to v2 # Compute dot products with gradient - dot0 = np.einsum('ij,ij->i', X, e0) # (F,) - dot1 = np.einsum('ij,ij->i', X, e1) # (F,) - dot2 = np.einsum('ij,ij->i', X, e2) # (F,) + dot0 = np.einsum("ij,ij->i", X, e0) # (F,) + dot1 = np.einsum("ij,ij->i", X, e1) # (F,) + dot2 = np.einsum("ij,ij->i", X, e2) # (F,) # Cotangent contributions (cotans[f, i] is cotan of angle at vertex i) # For vertex i: contrib = cotan[k] * dot(X, e_i) + cotan[j] * dot(X, -e_k) diff --git a/src/compas_slicer/geometry/__init__.py b/src/compas_slicer/geometry/__init__.py index 964b0153..42601ab9 100644 --- a/src/compas_slicer/geometry/__init__.py +++ b/src/compas_slicer/geometry/__init__.py @@ -5,4 +5,4 @@ from .print_point import * # noqa: F401 E402 F403 from .printpoints_collection import * # noqa: F401 E402 F403 -__all__ = [name for name in dir() if not name.startswith('_')] +__all__ = [name for name in dir() if not name.startswith("_")] diff --git a/src/compas_slicer/geometry/path.py b/src/compas_slicer/geometry/path.py index c665ee3a..72c58065 100644 --- a/src/compas_slicer/geometry/path.py +++ b/src/compas_slicer/geometry/path.py @@ -49,10 +49,7 @@ def __from_data__(cls, data: dict[str, Any]) -> Path: points_data = data["points"] # Handle both list format and legacy dict format if isinstance(points_data, dict): - pts = [ - Point.__from_data__(points_data[key]) - for key in sorted(points_data.keys(), key=lambda x: int(x)) - ] + pts = [Point.__from_data__(points_data[key]) for key in sorted(points_data.keys(), key=lambda x: int(x))] else: pts = [Point.__from_data__(p) for p in points_data] return cls(points=pts, is_closed=data["is_closed"]) diff --git a/src/compas_slicer/post_processing/__init__.py b/src/compas_slicer/post_processing/__init__.py index ff6fc16a..4787c481 100644 --- a/src/compas_slicer/post_processing/__init__.py +++ b/src/compas_slicer/post_processing/__init__.py @@ -21,4 +21,4 @@ from .unify_paths_orientation import * # noqa: F401 E402 F403 from .zig_zag_open_paths import * # noqa: F401 E402 F403 -__all__ = [name for name in dir() if not 
name.startswith('_')] +__all__ = [name for name in dir() if not name.startswith("_")] diff --git a/src/compas_slicer/post_processing/generate_brim.py b/src/compas_slicer/post_processing/generate_brim.py index b3bed697..45876238 100644 --- a/src/compas_slicer/post_processing/generate_brim.py +++ b/src/compas_slicer/post_processing/generate_brim.py @@ -14,6 +14,7 @@ try: from compas_cgal.straight_skeleton_2 import offset_polygon as _cgal_offset from compas_cgal.straight_skeleton_2 import offset_polygon_with_holes as _cgal_offset_with_holes + _USE_CGAL = True except ImportError: _cgal_offset = None @@ -23,7 +24,7 @@ from compas_slicer.slicers import BaseSlicer -__all__ = ['generate_brim', 'offset_polygon', 'offset_polygon_with_holes'] +__all__ = ["generate_brim", "offset_polygon", "offset_polygon_with_holes"] def _offset_polygon_cgal(points: list[Point], offset: float, z: float) -> list[Point]: @@ -83,16 +84,12 @@ def _offset_polygon_pyclipper(points: list[Point], offset: float, z: float) -> l import pyclipper from pyclipper import scale_from_clipper, scale_to_clipper - SCALING_FACTOR = 2 ** 32 + SCALING_FACTOR = 2**32 xy_coords = [[p[0], p[1]] for p in points] pco = pyclipper.PyclipperOffset() - pco.AddPath( - scale_to_clipper(xy_coords, SCALING_FACTOR), - pyclipper.JT_MITER, - pyclipper.ET_CLOSEDPOLYGON - ) + pco.AddPath(scale_to_clipper(xy_coords, SCALING_FACTOR), pyclipper.JT_MITER, pyclipper.ET_CLOSEDPOLYGON) result = scale_from_clipper(pco.Execute(offset * SCALING_FACTOR), SCALING_FACTOR) @@ -132,10 +129,7 @@ def offset_polygon(points: list[Point], offset: float, z: float) -> list[Point]: def offset_polygon_with_holes( - outer: list[Point], - holes: list[list[Point]], - offset: float, - z: float + outer: list[Point], holes: list[list[Point]], offset: float, z: float ) -> list[tuple[list[Point], list[list[Point]]]]: """Offset a polygon with holes using CGAL straight skeleton. @@ -224,7 +218,7 @@ def generate_brim(slicer: BaseSlicer, layer_width: float, number_of_brim_offsets has_vertical_layers = False if len(paths_to_offset) == 0: - raise ValueError('Brim generator did not find any path on the base. Please check the paths of your slicer.') + raise ValueError("Brim generator did not find any path on the base. Please check the paths of your slicer.") # (2) --- create new empty brim_layer brim_layer = Layer(paths=[]) diff --git a/src/compas_slicer/post_processing/generate_raft.py b/src/compas_slicer/post_processing/generate_raft.py index 82f5ecd1..da4bb9f7 100644 --- a/src/compas_slicer/post_processing/generate_raft.py +++ b/src/compas_slicer/post_processing/generate_raft.py @@ -6,15 +6,12 @@ import compas_slicer from compas_slicer.geometry import Layer, Path -__all__ = ['generate_raft'] +__all__ = ["generate_raft"] -def generate_raft(slicer, - raft_offset=10, - distance_between_paths=10, - direction="xy_diagonal", - raft_layers=1, - raft_layer_height=None): +def generate_raft( + slicer, raft_offset=10, distance_between_paths=10, direction="xy_diagonal", raft_layers=1, raft_layer_height=None +): """Creates a raft. 
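+    The raft is a set of parallel straight paths printed underneath the model
+    to improve bed adhesion; ``direction`` selects paths along the y-axis, the
+    x-axis, or a 45-degree diagonal ("xy_diagonal").
+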
Parameters @@ -76,7 +73,7 @@ def generate_raft(slicer, # create starting line for diagonal direction if direction == "xy_diagonal": - c = math.sqrt(2*(distance_between_paths**2)) + c = math.sqrt(2 * (distance_between_paths**2)) pt1 = Point(raft_start_pt[0] + c, raft_start_pt[1], raft_start_pt[2]) pt2 = Point(pt1[0] - y_range, pt1[1] + y_range, pt1[2]) @@ -86,10 +83,9 @@ def generate_raft(slicer, for i, layer in enumerate(slicer.layers): for j, path in enumerate(layer.paths): for k, pt in enumerate(path.points): - slicer.layers[i].paths[j].points[k] = Point(pt[0], pt[1], pt[2] + (raft_layers)*raft_layer_height) + slicer.layers[i].paths[j].points[k] = Point(pt[0], pt[1], pt[2] + (raft_layers) * raft_layer_height) for i in range(raft_layers): - iter = 0 raft_points = [] @@ -99,8 +95,16 @@ def generate_raft(slicer, # VERTICAL RAFT # =============== if direction == "y_axis": - raft_pt1 = Point(raft_start_pt[0] + iter*distance_between_paths, raft_start_pt[1], raft_start_pt[2] + i*raft_layer_height) - raft_pt2 = Point(raft_start_pt[0] + iter*distance_between_paths, raft_start_pt[1] + y_range, raft_start_pt[2] + i*raft_layer_height) + raft_pt1 = Point( + raft_start_pt[0] + iter * distance_between_paths, + raft_start_pt[1], + raft_start_pt[2] + i * raft_layer_height, + ) + raft_pt2 = Point( + raft_start_pt[0] + iter * distance_between_paths, + raft_start_pt[1] + y_range, + raft_start_pt[2] + i * raft_layer_height, + ) if raft_pt2[0] > bb_max_x_right or raft_pt1[0] > bb_max_x_right: break @@ -109,8 +113,16 @@ def generate_raft(slicer, # HORIZONTAL RAFT # =============== elif direction == "x_axis": - raft_pt1 = Point(raft_start_pt[0], raft_start_pt[1] + iter*distance_between_paths, raft_start_pt[2] + i*raft_layer_height) - raft_pt2 = Point(raft_start_pt[0] + x_range, raft_start_pt[1] + iter*distance_between_paths, raft_start_pt[2] + i*raft_layer_height) + raft_pt1 = Point( + raft_start_pt[0], + raft_start_pt[1] + iter * distance_between_paths, + raft_start_pt[2] + i * raft_layer_height, + ) + raft_pt2 = Point( + raft_start_pt[0] + x_range, + raft_start_pt[1] + iter * distance_between_paths, + raft_start_pt[2] + i * raft_layer_height, + ) if raft_pt2[1] > bb_max_y_top or raft_pt1[1] > bb_max_y_top: break @@ -120,21 +132,21 @@ def generate_raft(slicer, # =============== elif direction == "xy_diagonal": # create offset of the initial diagonal line - offset_l = offset_line(line, iter*distance_between_paths, Vector(0, 0, -1)) + offset_l = offset_line(line, iter * distance_between_paths, Vector(0, 0, -1)) # get intersections for the initial diagonal line with the left and bottom of the bb int_left = intersection_line_line(offset_l, [bb_xy_offset[0], bb_xy_offset[3]]) int_bottom = intersection_line_line(offset_l, [bb_xy_offset[0], bb_xy_offset[1]]) # get the points at the intersections - raft_pt1 = Point(int_left[0][0], int_left[0][1], int_left[0][2] + i*raft_layer_height) - raft_pt2 = Point(int_bottom[0][0], int_bottom[0][1], int_bottom[0][2] + i*raft_layer_height) + raft_pt1 = Point(int_left[0][0], int_left[0][1], int_left[0][2] + i * raft_layer_height) + raft_pt2 = Point(int_bottom[0][0], int_bottom[0][1], int_bottom[0][2] + i * raft_layer_height) # if the intersection goes beyond the height of the left side of the bounding box: if int_left[0][1] > bb_max_y_top: # create intersection with the top side int_top = intersection_line_line(offset_l, [bb_xy_offset[3], bb_xy_offset[2]]) - raft_pt1 = Point(int_top[0][0], int_top[0][1], int_top[0][2] + i*raft_layer_height) + raft_pt1 = Point(int_top[0][0], 
int_top[0][1], int_top[0][2] + i * raft_layer_height) # if intersection goes beyond the length of the top side, break if raft_pt1[0] > bb_max_x_right: @@ -144,7 +156,7 @@ def generate_raft(slicer, if int_bottom[0][0] > bb_max_x_right: # create intersection with the right side int_right = intersection_line_line(offset_l, [bb_xy_offset[1], bb_xy_offset[2]]) - raft_pt2 = Point(int_right[0][0], int_right[0][1], int_right[0][2] + i*raft_layer_height) + raft_pt2 = Point(int_right[0][0], int_right[0][1], int_right[0][2] + i * raft_layer_height) # if intersection goes beyond the height of the right side, break if raft_pt2[1] > bb_xy_offset[2][1]: diff --git a/src/compas_slicer/post_processing/infill/medial_axis_infill.py b/src/compas_slicer/post_processing/infill/medial_axis_infill.py index f42d72de..c58a7ebb 100644 --- a/src/compas_slicer/post_processing/infill/medial_axis_infill.py +++ b/src/compas_slicer/post_processing/infill/medial_axis_infill.py @@ -1,4 +1,5 @@ """Medial axis based infill generation using CGAL straight skeleton.""" + from __future__ import annotations from typing import TYPE_CHECKING @@ -64,9 +65,7 @@ def generate_medial_axis_infill( continue # Extract skeleton edges as paths - skeleton_paths = _skeleton_to_paths( - graph, z_height, min_length, include_bisectors - ) + skeleton_paths = _skeleton_to_paths(graph, z_height, min_length, include_bisectors) infill_paths.extend(skeleton_paths) # Add infill paths to layer diff --git a/src/compas_slicer/post_processing/reorder_vertical_layers.py b/src/compas_slicer/post_processing/reorder_vertical_layers.py index dee90811..79befa65 100644 --- a/src/compas_slicer/post_processing/reorder_vertical_layers.py +++ b/src/compas_slicer/post_processing/reorder_vertical_layers.py @@ -10,7 +10,7 @@ from compas_slicer.slicers import BaseSlicer -__all__ = ['reorder_vertical_layers'] +__all__ = ["reorder_vertical_layers"] AlignWith = Literal["x_axis", "y_axis"] @@ -29,9 +29,9 @@ def reorder_vertical_layers(slicer: BaseSlicer, align_with: AlignWith | Point) - """ if align_with == "x_axis": - align_pt = Point(2 ** 32, 0, 0) + align_pt = Point(2**32, 0, 0) elif align_with == "y_axis": - align_pt = Point(0, 2 ** 32, 0) + align_pt = Point(0, 2**32, 0) elif isinstance(align_with, Point): align_pt = align_with else: @@ -56,7 +56,9 @@ def reorder_vertical_layers(slicer: BaseSlicer, align_with: AlignWith | Point) - distances = [] for vert_layer in grouped_layers: # recreate head_centroid_pt as compas.Point - head_centroid_pt = Point(vert_layer.head_centroid[0], vert_layer.head_centroid[1], vert_layer.head_centroid[2]) + head_centroid_pt = Point( + vert_layer.head_centroid[0], vert_layer.head_centroid[1], vert_layer.head_centroid[2] + ) # measure distance distances.append(distance_point_point(head_centroid_pt, align_pt)) diff --git a/src/compas_slicer/post_processing/seams_align.py b/src/compas_slicer/post_processing/seams_align.py index aaba1e93..683966ab 100644 --- a/src/compas_slicer/post_processing/seams_align.py +++ b/src/compas_slicer/post_processing/seams_align.py @@ -10,7 +10,7 @@ from compas_slicer.slicers import BaseSlicer -__all__ = ['seams_align'] +__all__ = ["seams_align"] AlignWith = Literal["next_path", "origin", "x_axis", "y_axis"] @@ -36,7 +36,6 @@ def seams_align(slicer: BaseSlicer, align_with: AlignWith | Point = "next_path") for i, layer in enumerate(slicer.layers): for j, path in enumerate(layer.paths): - if align_with == "next_path": pt_to_align_with = None # make sure aligning point is cleared @@ -67,9 +66,9 @@ def seams_align(slicer: 
BaseSlicer, align_with: AlignWith | Point = "next_path") elif align_with == "origin": pt_to_align_with = Point(0, 0, 0) elif align_with == "x_axis": - pt_to_align_with = Point(2 ** 32, 0, 0) + pt_to_align_with = Point(2**32, 0, 0) elif align_with == "y_axis": - pt_to_align_with = Point(0, 2 ** 32, 0) + pt_to_align_with = Point(0, 2**32, 0) elif isinstance(align_with, Point): pt_to_align_with = align_with else: diff --git a/src/compas_slicer/post_processing/seams_smooth.py b/src/compas_slicer/post_processing/seams_smooth.py index a91f7589..42aaab1b 100644 --- a/src/compas_slicer/post_processing/seams_smooth.py +++ b/src/compas_slicer/post_processing/seams_smooth.py @@ -11,7 +11,7 @@ from compas_slicer.slicers import BaseSlicer -__all__ = ['seams_smooth'] +__all__ = ["seams_smooth"] def seams_smooth(slicer: BaseSlicer, smooth_distance: float) -> None: @@ -34,7 +34,7 @@ def seams_smooth(slicer: BaseSlicer, smooth_distance: float) -> None: if path.is_closed: # only for closed paths pt0 = path.points[0] # only points in the first half of a path should be evaluated - half_of_path = path.points[:int(len(path.points)/2)] + half_of_path = path.points[: int(len(path.points) / 2)] for point in half_of_path: if distance_point_point(pt0, point) < smooth_distance: # remove points if within smooth_distance diff --git a/src/compas_slicer/post_processing/simplify_paths_rdp.py b/src/compas_slicer/post_processing/simplify_paths_rdp.py index 4be08262..87dc7a95 100644 --- a/src/compas_slicer/post_processing/simplify_paths_rdp.py +++ b/src/compas_slicer/post_processing/simplify_paths_rdp.py @@ -11,12 +11,13 @@ from compas_slicer.slicers import BaseSlicer -__all__ = ['simplify_paths_rdp'] +__all__ = ["simplify_paths_rdp"] # Check for CGAL availability at module load _USE_CGAL = False try: from compas_cgal.polylines import simplify_polylines as _cgal_simplify + _USE_CGAL = True except ImportError: _cgal_simplify = None @@ -63,7 +64,7 @@ def _simplify_paths_cgal(slicer: BaseSlicer, threshold: float) -> None: path.points = [Point(pt[0], pt[1], pt[2]) for pt in pts_simplified] remaining_pts_num += len(path.points) - logger.info(f'{remaining_pts_num} points remaining after simplification') + logger.info(f"{remaining_pts_num} points remaining after simplification") def _simplify_paths_python(slicer: BaseSlicer, threshold: float) -> None: @@ -80,7 +81,7 @@ def _simplify_paths_python(slicer: BaseSlicer, threshold: float) -> None: path.points = [Point(pt[0], pt[1], pt[2]) for pt in pts_rdp] remaining_pts_num += len(path.points) - logger.info(f'{remaining_pts_num} points remaining after simplification') + logger.info(f"{remaining_pts_num} points remaining after simplification") if __name__ == "__main__": diff --git a/src/compas_slicer/post_processing/sort_into_vertical_layers.py b/src/compas_slicer/post_processing/sort_into_vertical_layers.py index d7029710..78b4f3f4 100644 --- a/src/compas_slicer/post_processing/sort_into_vertical_layers.py +++ b/src/compas_slicer/post_processing/sort_into_vertical_layers.py @@ -10,7 +10,7 @@ from compas_slicer.slicers import BaseSlicer -__all__ = ['sort_into_vertical_layers'] +__all__ = ["sort_into_vertical_layers"] def sort_into_vertical_layers( diff --git a/src/compas_slicer/post_processing/sort_paths_minimum_travel_time.py b/src/compas_slicer/post_processing/sort_paths_minimum_travel_time.py index 4104caae..d71b399d 100644 --- a/src/compas_slicer/post_processing/sort_paths_minimum_travel_time.py +++ b/src/compas_slicer/post_processing/sort_paths_minimum_travel_time.py @@ -11,7 +11,7 
@@ from compas_slicer.slicers import BaseSlicer -__all__ = ['sort_paths_minimum_travel_time'] +__all__ = ["sort_paths_minimum_travel_time"] def sort_paths_minimum_travel_time(slicer: BaseSlicer) -> None: @@ -24,7 +24,7 @@ def sort_paths_minimum_travel_time(slicer: BaseSlicer) -> None: """ logger.info("Sorting contours to minimize travel time") - ref_point = Point(2 ** 32, 0, 0) # set the reference point to the X-axis + ref_point = Point(2**32, 0, 0) # set the reference point to the X-axis for i, layer in enumerate(slicer.layers): sorted_paths = [] diff --git a/src/compas_slicer/post_processing/spiralize_contours.py b/src/compas_slicer/post_processing/spiralize_contours.py index 124025ef..de691705 100644 --- a/src/compas_slicer/post_processing/spiralize_contours.py +++ b/src/compas_slicer/post_processing/spiralize_contours.py @@ -12,7 +12,7 @@ from compas_slicer.slicers import PlanarSlicer -__all__ = ['spiralize_contours'] +__all__ = ["spiralize_contours"] def spiralize_contours(slicer: PlanarSlicer) -> None: @@ -24,7 +24,7 @@ def spiralize_contours(slicer: PlanarSlicer) -> None: slicer: :class: 'compas_slicer.slicers.PlanarSlicer' An instance of the compas_slicer.slicers.PlanarSlicer class. """ - logger.info('Spiralizing contours') + logger.info("Spiralizing contours") if not isinstance(slicer, compas_slicer.slicers.PlanarSlicer): logger.warning("spiralize_contours() contours only works for PlanarSlicer. Skipping function.") diff --git a/src/compas_slicer/post_processing/unify_paths_orientation.py b/src/compas_slicer/post_processing/unify_paths_orientation.py index 54b6d860..e9f9b8cf 100644 --- a/src/compas_slicer/post_processing/unify_paths_orientation.py +++ b/src/compas_slicer/post_processing/unify_paths_orientation.py @@ -9,7 +9,7 @@ from compas_slicer.slicers import BaseSlicer -__all__ = ['unify_paths_orientation'] +__all__ = ["unify_paths_orientation"] def unify_paths_orientation(slicer: BaseSlicer) -> None: @@ -26,7 +26,7 @@ def unify_paths_orientation(slicer: BaseSlicer) -> None: for j, path in enumerate(layer.paths): reference_points = None # find reference points for each path, if possible if j > 0: - reference_points = layer.paths[j-1].points + reference_points = layer.paths[j - 1].points elif i > 0 and j == 0: reference_points = slicer.layers[i - 1].paths[0].points @@ -34,9 +34,7 @@ def unify_paths_orientation(slicer: BaseSlicer) -> None: path.points = match_paths_orientations(path.points, reference_points, path.is_closed) -def match_paths_orientations( - pts: list[Point], reference_points: list[Point], is_closed: bool -) -> list[Point]: +def match_paths_orientations(pts: list[Point], reference_points: list[Point], is_closed: bool) -> list[Point]: """Check if new curve has same direction as prev curve, otherwise reverse. Parameters diff --git a/src/compas_slicer/post_processing/zig_zag_open_paths.py b/src/compas_slicer/post_processing/zig_zag_open_paths.py index e8fef19b..e8f49fc9 100644 --- a/src/compas_slicer/post_processing/zig_zag_open_paths.py +++ b/src/compas_slicer/post_processing/zig_zag_open_paths.py @@ -6,11 +6,11 @@ from compas_slicer.slicers import BaseSlicer -__all__ = ['zig_zag_open_paths'] +__all__ = ["zig_zag_open_paths"] def zig_zag_open_paths(slicer: BaseSlicer) -> None: - """ Reverses half of the open paths of the slicer, so that they can be printed in a zig zag motion. 
""" + """Reverses half of the open paths of the slicer, so that they can be printed in a zig zag motion.""" reverse = False for layer in slicer.layers: for _i, path in enumerate(layer.paths): diff --git a/src/compas_slicer/pre_processing/gradient_evaluation.py b/src/compas_slicer/pre_processing/gradient_evaluation.py index 44e231d1..00452cce 100644 --- a/src/compas_slicer/pre_processing/gradient_evaluation.py +++ b/src/compas_slicer/pre_processing/gradient_evaluation.py @@ -17,7 +17,7 @@ from compas.datastructures import Mesh -__all__ = ['GradientEvaluation'] +__all__ = ["GradientEvaluation"] class GradientEvaluation: @@ -31,12 +31,13 @@ class GradientEvaluation: DATA_PATH: str, path to the data folder """ + def __init__(self, mesh: Mesh, DATA_PATH: str | FilePath) -> None: for v_key, data in mesh.vertices(data=True): - if 'scalar_field' not in data: + if "scalar_field" not in data: raise ValueError(f"Vertex {v_key} does not have the attribute 'scalar_field'") - logger.info('Gradient evaluation') + logger.info("Gradient evaluation") self.mesh = mesh self.DATA_PATH = DATA_PATH self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH) @@ -51,35 +52,35 @@ def __init__(self, mesh: Mesh, DATA_PATH: str | FilePath) -> None: self.vertex_gradient_norm: list[float] = [] # list (#V x 1) def compute_gradient(self) -> None: - """ Computes the gradient on the faces and the vertices. """ - u_v = [self.mesh.vertex[vkey]['scalar_field'] for vkey in self.mesh.vertices()] + """Computes the gradient on the faces and the vertices.""" + u_v = [self.mesh.vertex[vkey]["scalar_field"] for vkey in self.mesh.vertices()] self.face_gradient = get_face_gradient_from_scalar_field(self.mesh, u_v) self.vertex_gradient = get_vertex_gradient_from_face_gradient(self.mesh, self.face_gradient) def compute_gradient_norm(self) -> None: - """ Computes the norm of the gradient. """ - logger.info('Computing norm of gradient') + """Computes the norm of the gradient.""" + logger.info("Computing norm of gradient") f_g = np.array([self.face_gradient[i] for i, fkey in enumerate(self.mesh.faces())]) v_g = np.array([self.vertex_gradient[i] for i, vkey in enumerate(self.mesh.vertices())]) self.face_gradient_norm = list(np.linalg.norm(f_g, axis=1)) self.vertex_gradient_norm = list(np.linalg.norm(v_g, axis=1)) def find_critical_points(self) -> None: - """ Finds minima, maxima and saddle points of the scalar function on the mesh. """ + """Finds minima, maxima and saddle points of the scalar function on the mesh.""" for vkey, data in self.mesh.vertices(data=True): - current_v = data['scalar_field'] + current_v = data["scalar_field"] neighbors = self.mesh.vertex_neighbors(vkey, ordered=True) values = [] if len(neighbors) > 0: neighbors.append(neighbors[0]) for n in neighbors: - v = self.mesh.vertex_attributes(n)['scalar_field'] + v = self.mesh.vertex_attributes(n)["scalar_field"] if abs(v - current_v) > 0.0: values.append(current_v - v) sgc = count_sign_changes(values) if sgc == 0: # extreme point - if current_v > self.mesh.vertex_attributes(neighbors[0])['scalar_field']: + if current_v > self.mesh.vertex_attributes(neighbors[0])["scalar_field"]: self.maxima.append(vkey) else: self.minima.append(vkey) @@ -94,7 +95,7 @@ def find_critical_points(self) -> None: def count_sign_changes(values: list[float]) -> int: - """ Returns the number of sign changes in a list of values. 
""" + """Returns the number of sign changes in a list of values.""" count = 0 prev_v: float = 0.0 for i, v in enumerate(values): diff --git a/src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py b/src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py index cef4f299..00bdc128 100644 --- a/src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py +++ b/src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py @@ -23,7 +23,7 @@ from compas_slicer.pre_processing.preprocessing_utils.topological_sorting import MeshDirectedGraph -__all__ = ['InterpolationSlicingPreprocessor'] +__all__ = ["InterpolationSlicingPreprocessor"] class InterpolationSlicingPreprocessor: @@ -40,9 +40,7 @@ class InterpolationSlicingPreprocessor: """ - def __init__( - self, mesh: Mesh, config: InterpolationConfig | None = None, DATA_PATH: str | Path = "." - ) -> None: + def __init__(self, mesh: Mesh, config: InterpolationConfig | None = None, DATA_PATH: str | Path = ".") -> None: self.mesh = mesh self.config = config if config else InterpolationConfig() self.DATA_PATH = DATA_PATH @@ -63,14 +61,14 @@ def create_compound_targets(self) -> None: """Create target_LOW and target_HIGH and compute geodesic distances.""" # --- low target - self.target_LOW = CompoundTarget(self.mesh, 'boundary', 1, self.DATA_PATH) + self.target_LOW = CompoundTarget(self.mesh, "boundary", 1, self.DATA_PATH) # --- high target method = self.config.target_high_union_method.value params = self.config.target_high_union_params logger.info(f"Creating target with union type: {method} and params: {params}") self.target_HIGH = CompoundTarget( - self.mesh, 'boundary', 2, self.DATA_PATH, union_method=method, union_params=params + self.mesh, "boundary", 2, self.DATA_PATH, union_method=method, union_params=params ) # --- uneven boundaries of high target @@ -105,8 +103,8 @@ def create_gradient_evaluation( target_1: CompoundTarget, target_2: CompoundTarget | None = None, save_output: bool = True, - norm_filename: str = 'gradient_norm.json', - g_filename: str = 'gradient.json', + norm_filename: str = "gradient_norm.json", + g_filename: str = "gradient.json", ) -> GradientEvaluation: """ Creates a compas_slicer.pre_processing.GradientEvaluation that is stored in self.g_evaluation @@ -116,8 +114,9 @@ def create_gradient_evaluation( raise RuntimeError("Targets not initialized. 
Call create_compound_targets() first.") if self.target_LOW.VN != target_1.VN: raise ValueError("Preprocessor does not match targets: vertex count mismatch.") - assign_interpolation_distance_to_mesh_vertices(self.mesh, weight=0.5, - target_LOW=self.target_LOW, target_HIGH=self.target_HIGH) + assign_interpolation_distance_to_mesh_vertices( + self.mesh, weight=0.5, target_LOW=self.target_LOW, target_HIGH=self.target_HIGH + ) g_evaluation = GradientEvaluation(self.mesh, self.DATA_PATH) g_evaluation.compute_gradient() g_evaluation.compute_gradient_norm() @@ -129,10 +128,8 @@ def create_gradient_evaluation( return g_evaluation - def find_critical_points( - self, g_evaluation: GradientEvaluation, output_filenames: tuple[str, str, str] - ) -> None: - """ Computes and saves to json the critical points of the df on the mesh (minima, maxima, saddles)""" + def find_critical_points(self, g_evaluation: GradientEvaluation, output_filenames: tuple[str, str, str]) -> None: + """Computes and saves to json the critical points of the df on the mesh (minima, maxima, saddles)""" g_evaluation.find_critical_points() # save results to json utils.save_to_json(g_evaluation.minima, self.OUTPUT_PATH, output_filenames[0]) @@ -165,39 +162,42 @@ def region_split( logger.info("--- Mesh region splitting") if cut_mesh: # (1) - self.mesh.update_default_vertex_attributes({'cut': 0}) + self.mesh.update_default_vertex_attributes({"cut": 0}) mesh_splitter = rs.MeshSplitter(self.mesh, self.target_LOW, self.target_HIGH, self.DATA_PATH) mesh_splitter.run() self.mesh = mesh_splitter.mesh - logger.info('Completed Region splitting') + logger.info("Completed Region splitting") logger.info(f"Region split cut indices: {mesh_splitter.cut_indices}") # save results to json output_path = Path(self.OUTPUT_PATH) - self.mesh.to_obj(str(output_path / 'mesh_with_cuts.obj')) - self.mesh.to_json(str(output_path / 'mesh_with_cuts.json')) + self.mesh.to_obj(str(output_path / "mesh_with_cuts.obj")) + self.mesh.to_json(str(output_path / "mesh_with_cuts.json")) logger.info(f"Saving to Obj and Json: {output_path / 'mesh_with_cuts.json'}") if separate_neighborhoods: # (2) logger.info("--- Separating mesh disconnected components") - self.mesh = Mesh.from_json(str(Path(self.OUTPUT_PATH) / 'mesh_with_cuts.json')) + self.mesh = Mesh.from_json(str(Path(self.OUTPUT_PATH) / "mesh_with_cuts.json")) region_split_cut_indices = get_existing_cut_indices(self.mesh) # save results to json - utils.save_to_json(get_vertices_that_belong_to_cuts(self.mesh, region_split_cut_indices), - self.OUTPUT_PATH, "vertices_on_cuts.json") + utils.save_to_json( + get_vertices_that_belong_to_cuts(self.mesh, region_split_cut_indices), + self.OUTPUT_PATH, + "vertices_on_cuts.json", + ) - self.split_meshes = rs.separate_disconnected_components(self.mesh, attr='cut', - values=region_split_cut_indices, - OUTPUT_PATH=self.OUTPUT_PATH) - logger.info(f'Created {len(self.split_meshes)} split meshes.') + self.split_meshes = rs.separate_disconnected_components( + self.mesh, attr="cut", values=region_split_cut_indices, OUTPUT_PATH=self.OUTPUT_PATH + ) + logger.info(f"Created {len(self.split_meshes)} split meshes.") if topological_sorting: # (3) logger.info("--- Topological sort of meshes directed graph to determine print order") graph = topo_sort.MeshDirectedGraph(self.split_meshes, self.DATA_PATH) all_orders = graph.get_all_topological_orders() selected_order = all_orders[0] - logger.info(f'selected_order: {selected_order}') # TODO: improve the way an order is selected + 
logger.info(f"selected_order: {selected_order}") # TODO: improve the way an order is selected self.cleanup_mesh_attributes_based_on_selected_order(selected_order, graph) # reorder split_meshes based on selected order @@ -208,9 +208,9 @@ def region_split( logger.info("--- Saving resulting split meshes") output_path = Path(self.OUTPUT_PATH) for i, m in enumerate(self.split_meshes): - m.to_obj(str(output_path / f'split_mesh_{i}.obj')) - m.to_json(str(output_path / f'split_mesh_{i}.json')) - logger.info(f'Saving to Obj and Json: {output_path / "split_mesh_%.obj"}') + m.to_obj(str(output_path / f"split_mesh_{i}.obj")) + m.to_json(str(output_path / f"split_mesh_{i}.json")) + logger.info(f"Saving to Obj and Json: {output_path / 'split_mesh_%.obj'}") logger.info(f"Saved {len(self.split_meshes)} split_meshes") def cleanup_mesh_attributes_based_on_selected_order( @@ -233,18 +233,20 @@ def cleanup_mesh_attributes_based_on_selected_order( for child_node in graph.adj_list[index]: child_mesh = self.split_meshes[child_node] edge = graph.G.edges[index, child_node] - common_cuts = edge['cut'] + common_cuts = edge["cut"] for cut_id in common_cuts: - replace_mesh_vertex_attribute(mesh, 'cut', cut_id, 'boundary', 2) - replace_mesh_vertex_attribute(child_mesh, 'cut', cut_id, 'boundary', 1) + replace_mesh_vertex_attribute(mesh, "cut", cut_id, "boundary", 2) + replace_mesh_vertex_attribute(child_mesh, "cut", cut_id, "boundary", 1) # save results to json - pts_boundary_LOW = utils.get_mesh_vertex_coords_with_attribute(mesh, 'boundary', 1) - pts_boundary_HIGH = utils.get_mesh_vertex_coords_with_attribute(mesh, 'boundary', 2) - utils.save_to_json(utils.point_list_to_dict(pts_boundary_LOW), self.OUTPUT_PATH, - f'pts_boundary_LOW_{index}.json') - utils.save_to_json(utils.point_list_to_dict(pts_boundary_HIGH), self.OUTPUT_PATH, - f'pts_boundary_HIGH_{index}.json') + pts_boundary_LOW = utils.get_mesh_vertex_coords_with_attribute(mesh, "boundary", 1) + pts_boundary_HIGH = utils.get_mesh_vertex_coords_with_attribute(mesh, "boundary", 2) + utils.save_to_json( + utils.point_list_to_dict(pts_boundary_LOW), self.OUTPUT_PATH, f"pts_boundary_LOW_{index}.json" + ) + utils.save_to_json( + utils.point_list_to_dict(pts_boundary_HIGH), self.OUTPUT_PATH, f"pts_boundary_HIGH_{index}.json" + ) if __name__ == "__main__": diff --git a/src/compas_slicer/pre_processing/positioning.py b/src/compas_slicer/pre_processing/positioning.py index ef1e4912..f6ec6217 100644 --- a/src/compas_slicer/pre_processing/positioning.py +++ b/src/compas_slicer/pre_processing/positioning.py @@ -9,9 +9,7 @@ from compas.datastructures import Mesh -__all__ = ['move_mesh_to_point', - 'get_mid_pt_base', - 'remesh_mesh'] +__all__ = ["move_mesh_to_point", "get_mid_pt_base", "remesh_mesh"] def move_mesh_to_point(mesh: Mesh, target_point: Point) -> Mesh: @@ -53,7 +51,7 @@ def get_mid_pt_base(mesh: Mesh) -> Point: """ # get center bottom point of mesh model - vertices = list(mesh.vertices_attributes('xyz')) + vertices = list(mesh.vertices_attributes("xyz")) bbox = bounding_box(vertices) corner_pts = [bbox[0], bbox[2]] @@ -66,12 +64,7 @@ def get_mid_pt_base(mesh: Mesh) -> Point: return mesh_mid_pt -def remesh_mesh( - mesh: Mesh, - target_edge_length: float, - number_of_iterations: int = 10, - do_project: bool = True -) -> Mesh: +def remesh_mesh(mesh: Mesh, target_edge_length: float, number_of_iterations: int = 10, do_project: bool = True) -> Mesh: """Remesh a triangle mesh to achieve uniform edge lengths. 
Uses CGAL's isotropic remeshing to improve mesh quality for slicing. @@ -108,9 +101,7 @@ def remesh_mesh( try: from compas_cgal.meshing import trimesh_remesh except ImportError as e: - raise ImportError( - "remesh_mesh requires compas_cgal. Install with: pip install compas_cgal" - ) from e + raise ImportError("remesh_mesh requires compas_cgal. Install with: pip install compas_cgal") from e from compas.datastructures import Mesh as CompasMesh diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/assign_vertex_distance.py b/src/compas_slicer/pre_processing/preprocessing_utils/assign_vertex_distance.py index ebd1014e..314b4b9e 100644 --- a/src/compas_slicer/pre_processing/preprocessing_utils/assign_vertex_distance.py +++ b/src/compas_slicer/pre_processing/preprocessing_utils/assign_vertex_distance.py @@ -17,8 +17,7 @@ from compas_slicer.pre_processing.preprocessing_utils.compound_target import CompoundTarget -__all__ = ['assign_interpolation_distance_to_mesh_vertices', - 'assign_interpolation_distance_to_mesh_vertex'] +__all__ = ["assign_interpolation_distance_to_mesh_vertices", "assign_interpolation_distance_to_mesh_vertex"] def assign_interpolation_distance_to_mesh_vertices( @@ -40,7 +39,7 @@ def assign_interpolation_distance_to_mesh_vertices( # Vectorized computation for all vertices at once distances = _compute_all_distances_vectorized(weight, target_LOW, target_HIGH) for vkey, d in zip(mesh.vertices(), distances): - mesh.vertex[vkey]['scalar_field'] = float(d) + mesh.vertex[vkey]["scalar_field"] = float(d) def _compute_all_distances_vectorized( @@ -53,7 +52,7 @@ def _compute_all_distances_vectorized( offset = weight * target_LOW.get_max_dist() return target_LOW.get_all_distances() - offset else: - raise ValueError('You need to provide at least one target') + raise ValueError("You need to provide at least one target") def _get_weighted_distances_vectorized( @@ -67,33 +66,40 @@ def _get_weighted_distances_vectorized( ds_high = target_HIGH.get_all_distances_array() if target_HIGH.number_of_boundaries > 1: - weights = np.array([ - remap_unbound(weight, 0, wmax, 0, 1) - for wmax in target_HIGH.weight_max_per_cluster - ]) # (n_boundaries,) + weights = np.array( + [remap_unbound(weight, 0, wmax, 0, 1) for wmax in target_HIGH.weight_max_per_cluster] + ) # (n_boundaries,) else: weights = np.array([weight]) # Broadcast: (n_boundaries, n_vertices) distances = (weights[:, None] - 1) * d_low + weights[:, None] * ds_high - if target_HIGH.union_method == 'min': + if target_HIGH.union_method == "min": return np.min(distances, axis=0) - elif target_HIGH.union_method == 'smooth': - return np.array([ - blend_union_list(distances[:, i].tolist(), target_HIGH.union_params[0]) - for i in range(distances.shape[1]) - ]) - elif target_HIGH.union_method == 'chamfer': - return np.array([ - chamfer_union_list(distances[:, i].tolist(), target_HIGH.union_params[0]) - for i in range(distances.shape[1]) - ]) - elif target_HIGH.union_method == 'stairs': - return np.array([ - stairs_union_list(distances[:, i].tolist(), target_HIGH.union_params[0], target_HIGH.union_params[1]) - for i in range(distances.shape[1]) - ]) + elif target_HIGH.union_method == "smooth": + return np.array( + [ + blend_union_list(distances[:, i].tolist(), target_HIGH.union_params[0]) + for i in range(distances.shape[1]) + ] + ) + elif target_HIGH.union_method == "chamfer": + return np.array( + [ + chamfer_union_list(distances[:, i].tolist(), target_HIGH.union_params[0]) + for i in range(distances.shape[1]) + ] + ) + elif 
target_HIGH.union_method == "stairs": + return np.array( + [ + stairs_union_list( + distances[:, i].tolist(), target_HIGH.union_params[0], target_HIGH.union_params[1] + ) + for i in range(distances.shape[1]) + ] + ) else: d_high = target_HIGH.get_all_distances() return d_low * (1 - weight) - d_high * weight @@ -122,13 +128,11 @@ def assign_interpolation_distance_to_mesh_vertex( offset = weight * target_LOW.get_max_dist() d = target_LOW.get_distance(vkey) - offset else: - raise ValueError('You need to provide at least one target') + raise ValueError("You need to provide at least one target") return d -def get_weighted_distance( - vkey: int, weight: float, target_LOW: CompoundTarget, target_HIGH: CompoundTarget -) -> float: +def get_weighted_distance(vkey: int, weight: float, target_LOW: CompoundTarget, target_HIGH: CompoundTarget) -> float: """ Computes the weighted get_distance for a single vertex with vkey. @@ -149,8 +153,9 @@ def get_weighted_distance( ds_high = target_HIGH.get_all_distances_for_vkey(vkey) # list of floats (# number_of_boundaries) if target_HIGH.number_of_boundaries > 1: - weights_remapped = [remap_unbound(weight, 0, weight_max, 0, 1) - for weight_max in target_HIGH.weight_max_per_cluster] + weights_remapped = [ + remap_unbound(weight, 0, weight_max, 0, 1) for weight_max in target_HIGH.weight_max_per_cluster + ] weights = weights_remapped else: weights = [weight] @@ -158,16 +163,16 @@ def get_weighted_distance( distances = [(weight - 1) * d_low + weight * d_high for d_high, weight in zip(ds_high, weights)] # return the distance based on the union method of the high target - if target_HIGH.union_method == 'min': + if target_HIGH.union_method == "min": # --- simple union return np.min(distances) - elif target_HIGH.union_method == 'smooth': + elif target_HIGH.union_method == "smooth": # --- blend (smooth) union return blend_union_list(values=distances, r=target_HIGH.union_params[0]) - elif target_HIGH.union_method == 'chamfer': + elif target_HIGH.union_method == "chamfer": # --- blend (smooth) union return chamfer_union_list(values=distances, r=target_HIGH.union_params[0]) - elif target_HIGH.union_method == 'stairs': + elif target_HIGH.union_method == "stairs": # --- stairs union return stairs_union_list(values=distances, r=target_HIGH.union_params[0], n=target_HIGH.union_params[1]) diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py b/src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py index 7c5e073e..454bd179 100644 --- a/src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py +++ b/src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py @@ -13,7 +13,7 @@ import compas_slicer.utilities as utils from compas_slicer.pre_processing.preprocessing_utils.geodesics import get_heat_geodesic_distances -UnionMethod = Literal['min', 'smooth', 'chamfer', 'stairs'] +UnionMethod = Literal["min", "smooth", "chamfer", "stairs"] def _create_graph_from_mesh_vkeys(mesh: Mesh, v_keys: list[int]) -> nx.Graph: @@ -27,10 +27,8 @@ def _create_graph_from_mesh_vkeys(mesh: Mesh, v_keys: list[int]) -> nx.Graph: G.add_edge(v, other_v) return G -__all__ = ['CompoundTarget', - 'blend_union_list', - 'stairs_union_list', - 'chamfer_union_list'] + +__all__ = ["CompoundTarget", "blend_union_list", "stairs_union_list", "chamfer_union_list"] class CompoundTarget: @@ -59,14 +57,13 @@ def __init__( v_attr: str, value: int, DATA_PATH: str, - union_method: UnionMethod = 'min', + union_method: UnionMethod = "min", union_params: 
list[Any] | None = None, ) -> None: - if union_params is None: union_params = [] - logger.info(f'Creating target with attribute : {v_attr}={value}') - logger.info(f'union_method: {union_method}, union_params: {union_params}') + logger.info(f"Creating target with attribute : {v_attr}={value}") + logger.info(f"union_method: {union_method}, union_params: {union_params}") self.mesh = mesh self.v_attr = v_attr self.value = value @@ -104,12 +101,12 @@ def find_targets_connected_components(self) -> None: Each target can have an arbitrary number of neighborhoods/clusters. Fills in the attributes: self.all_target_vkeys, self.clustered_vkeys, self.number_of_boundaries """ - self.all_target_vkeys = [vkey for vkey, data in self.mesh.vertices(data=True) if - data[self.v_attr] == self.value] + self.all_target_vkeys = [ + vkey for vkey, data in self.mesh.vertices(data=True) if data[self.v_attr] == self.value + ] if len(self.all_target_vkeys) == 0: raise ValueError( - f"No vertices in mesh with attribute '{self.v_attr}'={self.value}. " - "Check your target creation." + f"No vertices in mesh with attribute '{self.v_attr}'={self.value}. Check your target creation." ) G = _create_graph_from_mesh_vkeys(self.mesh, self.all_target_vkeys) if len(list(G.nodes())) != len(self.all_target_vkeys): @@ -129,9 +126,7 @@ def compute_geodesic_distances(self) -> None: Computes the geodesic distances from each of the target's neighborhoods to all the mesh vertices. Fills in the distances attributes. """ - distances_lists = [ - get_heat_geodesic_distances(self.mesh, vstarts) for vstarts in self.clustered_vkeys - ] + distances_lists = [get_heat_geodesic_distances(self.mesh, vstarts) for vstarts in self.clustered_vkeys] distances_lists = [list(dl) for dl in distances_lists] # number_of_boundaries x #V self.update_distances_lists(distances_lists) @@ -151,7 +146,7 @@ def update_distances_lists(self, distances_lists: list[list[float]]) -> None: # --- Uneven weights @property def has_uneven_weights(self) -> bool: - """ Returns True if the target has uneven_weights calculated, False otherwise. """ + """Returns True if the target has uneven_weights calculated, False otherwise.""" return len(self.weight_max_per_cluster) > 0 def compute_uneven_boundaries_weight_max(self, other_target: CompoundTarget) -> None: @@ -167,13 +162,13 @@ def compute_uneven_boundaries_weight_max(self, other_target: CompoundTarget) -> ds_avg_HIGH[i] = d + self.offset self.weight_max_per_cluster = [d / max_param for d in ds_avg_HIGH] - logger.info(f'weight_max_per_cluster: {self.weight_max_per_cluster}') + logger.info(f"weight_max_per_cluster: {self.weight_max_per_cluster}") else: logger.info("Did not compute_norm_of_gradient uneven boundaries, target consists of single component") # --- Relation to other target def get_boundaries_rel_dist_from_other_target( - self, other_target: CompoundTarget, avg_type: Literal['mean', 'median'] = 'median' + self, other_target: CompoundTarget, avg_type: Literal["mean", "median"] = "median" ) -> list[float]: """ Returns a list, one relative distance value per connected boundary neighborhood. 
@@ -182,7 +177,7 @@ def get_boundaries_rel_dist_from_other_target( distances = [] for vi_starts in self.clustered_vkeys: ds = [other_target.get_distance(vi) for vi in vi_starts] - if avg_type == 'mean': + if avg_type == "mean": distances.append(statistics.mean(ds)) else: # 'median' distances.append(statistics.median(ds)) @@ -201,11 +196,11 @@ def get_avg_distances_from_other_target(self, other_target: CompoundTarget) -> f # --- get all distances def get_all_clusters_distances_dict(self) -> dict[int, list[float]]: - """ Returns dict. keys: index of connected target neighborhood, value: list, distances (one per vertex). """ + """Returns dict. keys: index of connected target neighborhood, value: list, distances (one per vertex).""" return {i: self._distances_lists[i] for i in range(self.number_of_boundaries)} def get_max_dist(self) -> float | None: - """ Returns the maximum distance that the target has on a mesh vertex. """ + """Returns the maximum distance that the target has on a mesh vertex.""" return self._max_dist ############################# @@ -213,23 +208,23 @@ def get_max_dist(self) -> float | None: def get_all_distances(self) -> np.ndarray: """Return distances for all vertices as 1D array, applying union method.""" - if self.union_method == 'min': + if self.union_method == "min": return np.min(self._np_distances_lists_flipped, axis=1) - elif self.union_method == 'smooth': - return np.array([ - blend_union_list(row.tolist(), self.union_params[0]) - for row in self._np_distances_lists_flipped - ]) - elif self.union_method == 'chamfer': - return np.array([ - chamfer_union_list(row.tolist(), self.union_params[0]) - for row in self._np_distances_lists_flipped - ]) - elif self.union_method == 'stairs': - return np.array([ - stairs_union_list(row.tolist(), self.union_params[0], self.union_params[1]) - for row in self._np_distances_lists_flipped - ]) + elif self.union_method == "smooth": + return np.array( + [blend_union_list(row.tolist(), self.union_params[0]) for row in self._np_distances_lists_flipped] + ) + elif self.union_method == "chamfer": + return np.array( + [chamfer_union_list(row.tolist(), self.union_params[0]) for row in self._np_distances_lists_flipped] + ) + elif self.union_method == "stairs": + return np.array( + [ + stairs_union_list(row.tolist(), self.union_params[0], self.union_params[1]) + for row in self._np_distances_lists_flipped + ] + ) else: raise ValueError(f"Unknown union method: {self.union_method}") @@ -241,24 +236,25 @@ def get_all_distances_array(self) -> np.ndarray: # --- per vkey distances def get_all_distances_for_vkey(self, i: int) -> list[float]: - """ Returns distances from each cluster separately for vertex i. Smooth union doesn't play here any role. """ + """Returns distances from each cluster separately for vertex i. Smooth union doesn't play here any role.""" return [self._distances_lists[list_index][i] for list_index in range(self.number_of_boundaries)] def get_distance(self, i: int) -> float: - """ Return get_distance for vertex with vkey i. 
""" - if self.union_method == 'min': + """Return get_distance for vertex with vkey i.""" + if self.union_method == "min": # --- simple union return float(np.min(self._np_distances_lists_flipped[i])) - elif self.union_method == 'smooth': + elif self.union_method == "smooth": # --- blend (smooth) union return blend_union_list(values=self._np_distances_lists_flipped[i], r=self.union_params[0]) - elif self.union_method == 'chamfer': + elif self.union_method == "chamfer": # --- blend (smooth) union return chamfer_union_list(values=self._np_distances_lists_flipped[i], r=self.union_params[0]) - elif self.union_method == 'stairs': + elif self.union_method == "stairs": # --- stairs union - return stairs_union_list(values=self._np_distances_lists_flipped[i], r=self.union_params[0], - n=self.union_params[1]) + return stairs_union_list( + values=self._np_distances_lists_flipped[i], r=self.union_params[0], n=self.union_params[1] + ) else: raise ValueError("Unknown Union method : ", self.union_method) @@ -266,11 +262,11 @@ def get_distance(self, i: int) -> float: # --- scalar field smoothing def laplacian_smoothing(self, iterations: int, strength: float) -> None: - """ Smooth the distances on the mesh, using iterative laplacian smoothing. """ + """Smooth the distances on the mesh, using iterative laplacian smoothing.""" L = utils.get_mesh_cotmatrix_igl(self.mesh, fix_boundaries=True) new_distances_lists = [] - logger.info('Laplacian smoothing of all distances') + logger.info("Laplacian smoothing of all distances") for _i, a in enumerate(self._distances_lists): a = np.array(a) # a: numpy array containing the attribute to be smoothed for _ in range(iterations): # iterative smoothing @@ -294,7 +290,7 @@ def save_distances(self, name: str) -> None: # ------ assign new Mesh def assign_new_mesh(self, mesh: Mesh) -> None: - """ When the base mesh changes, a new mesh needs to be assigned. """ + """When the base mesh changes, a new mesh needs to be assigned.""" mesh.to_json(self.OUTPUT_PATH + "/temp.obj") mesh = Mesh.from_json(self.OUTPUT_PATH + "/temp.obj") self.mesh = mesh @@ -304,8 +300,9 @@ def assign_new_mesh(self, mesh: Mesh) -> None: #################### # unions on lists + def blend_union_list(values: NDArray[np.floating] | list[float], r: float) -> float: - """ Returns a smooth union of all the elements in the list, with blend radius blend_radius. """ + """Returns a smooth union of all the elements in the list, with blend radius blend_radius.""" d_result: float = 9999999.0 # very big number for d in values: d_result = blend_union(d_result, float(d), r) @@ -313,7 +310,7 @@ def blend_union_list(values: NDArray[np.floating] | list[float], r: float) -> fl def stairs_union_list(values: NDArray[np.floating] | list[float], r: float, n: int) -> float: - """ Returns a stairs union of all the elements in the list, with blend radius r and number of peaks n-1.""" + """Returns a stairs union of all the elements in the list, with blend radius r and number of peaks n-1.""" d_result: float = 9999999.0 # very big number for _i, d in enumerate(values): d_result = stairs_union(d_result, float(d), r, n) @@ -330,19 +327,20 @@ def chamfer_union_list(values: NDArray[np.floating] | list[float], r: float) -> #################### # unions on pairs + def blend_union(da: float, db: float, r: float) -> float: - """ Returns a smooth union of the two elements da, db with blend radius blend_radius. 
""" + """Returns a smooth union of the two elements da, db with blend radius blend_radius.""" e = max(r - abs(da - db), 0) return min(da, db) - e * e * 0.25 / r def chamfer_union(a: float, b: float, r: float) -> float: - """ Returns a chamfer union of the two elements da, db with radius r. """ + """Returns a chamfer union of the two elements da, db with radius r.""" return min(min(a, b), (a - r + b) * math.sqrt(0.5)) def stairs_union(a: float, b: float, r: float, n: int) -> float: - """ Returns a stairs union of the two elements da, db with radius r. """ + """Returns a stairs union of the two elements da, db with radius r.""" s = r / n u = b - r return min(min(a, b), 0.5 * (u + a + abs((u - a + s) % (2 * s) - s))) diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py b/src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py index 1c3d8dec..aacf5f6d 100644 --- a/src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py +++ b/src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py @@ -9,15 +9,13 @@ from compas.datastructures import Mesh -__all__ = ['get_heat_geodesic_distances'] +__all__ = ["get_heat_geodesic_distances"] _cgal_solver_cache: dict[int, object] = {} -def get_heat_geodesic_distances( - mesh: Mesh, vertices_start: list[int] -) -> NDArray[np.floating]: +def get_heat_geodesic_distances(mesh: Mesh, vertices_start: list[int]) -> NDArray[np.floating]: """Calculate geodesic distances using CGAL heat method. Uses compas_cgal's HeatGeodesicSolver which provides CGAL's Heat_method_3 diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/gradient.py b/src/compas_slicer/pre_processing/preprocessing_utils/gradient.py index 5fad68ea..69b52f8e 100644 --- a/src/compas_slicer/pre_processing/preprocessing_utils/gradient.py +++ b/src/compas_slicer/pre_processing/preprocessing_utils/gradient.py @@ -15,12 +15,14 @@ from compas_slicer._numpy_ops import per_vertex_divergence as _divergence_vectorized from compas_slicer._numpy_ops import vertex_gradient_from_face_gradient as _vertex_gradient_vectorized -__all__ = ['get_vertex_gradient_from_face_gradient', - 'get_edge_gradient_from_vertex_gradient', - 'get_face_gradient_from_scalar_field', - 'normalize_gradient', - 'get_per_vertex_divergence', - 'get_scalar_field_from_gradient'] +__all__ = [ + "get_vertex_gradient_from_face_gradient", + "get_edge_gradient_from_vertex_gradient", + "get_face_gradient_from_scalar_field", + "normalize_gradient", + "get_per_vertex_divergence", + "get_scalar_field_from_gradient", +] def _mesh_to_arrays(mesh: Mesh) -> tuple[NDArray[np.floating], NDArray[np.intp]]: @@ -30,9 +32,7 @@ def _mesh_to_arrays(mesh: Mesh) -> tuple[NDArray[np.floating], NDArray[np.intp]] return V, F -def get_vertex_gradient_from_face_gradient( - mesh: Mesh, face_gradient: NDArray[np.floating] -) -> NDArray[np.floating]: +def get_vertex_gradient_from_face_gradient(mesh: Mesh, face_gradient: NDArray[np.floating]) -> NDArray[np.floating]: """ Finds vertex gradient given an already calculated per face gradient. @@ -45,15 +45,13 @@ def get_vertex_gradient_from_face_gradient( ---------- np.array (dimensions : #V x 3) one gradient vector per vertex. 
""" - logger.info('Computing per vertex gradient') + logger.info("Computing per vertex gradient") V, F = _mesh_to_arrays(mesh) face_areas = np.array([mesh.face_area(f) for f in mesh.faces()], dtype=np.float64) return _vertex_gradient_vectorized(V, F, face_gradient, face_areas) -def get_edge_gradient_from_vertex_gradient( - mesh: Mesh, vertex_gradient: NDArray[np.floating] -) -> NDArray[np.floating]: +def get_edge_gradient_from_vertex_gradient(mesh: Mesh, vertex_gradient: NDArray[np.floating]) -> NDArray[np.floating]: """ Finds edge gradient given an already calculated per vertex gradient. @@ -70,9 +68,7 @@ def get_edge_gradient_from_vertex_gradient( return _edge_gradient_vectorized(edges, vertex_gradient) -def get_face_gradient_from_scalar_field( - mesh: Mesh, u: NDArray[np.floating] -) -> NDArray[np.floating]: +def get_face_gradient_from_scalar_field(mesh: Mesh, u: NDArray[np.floating]) -> NDArray[np.floating]: """ Finds face gradient from scalar field u. Scalar field u is given per vertex. @@ -86,7 +82,7 @@ def get_face_gradient_from_scalar_field( ---------- np.array (dimensions : #F x 3) one gradient vector per face. """ - logger.info('Computing per face gradient') + logger.info("Computing per face gradient") V, F = _mesh_to_arrays(mesh) scalar_field = np.asarray(u, dtype=np.float64) face_normals = np.array([mesh.face_normal(f) for f in mesh.faces()], dtype=np.float64) @@ -97,7 +93,7 @@ def get_face_gradient_from_scalar_field( def get_face_edge_vectors( mesh: Mesh, fkey: int ) -> tuple[NDArray[np.floating], NDArray[np.floating], NDArray[np.floating]]: - """ Returns the edge vectors of the face with fkey. """ + """Returns the edge vectors of the face with fkey.""" e0, e1, e2 = mesh.face_halfedges(fkey) edge_0 = np.array(mesh.vertex_coordinates(e0[0])) - np.array(mesh.vertex_coordinates(e0[1])) edge_1 = np.array(mesh.vertex_coordinates(e1[0])) - np.array(mesh.vertex_coordinates(e1[1])) @@ -127,7 +123,7 @@ def get_per_vertex_divergence( def normalize_gradient(X: NDArray[np.floating]) -> NDArray[np.floating]: - """ Returns normalized gradient X. """ + """Returns normalized gradient X.""" norm = np.linalg.norm(X, axis=1)[..., np.newaxis] return X / norm # normalize @@ -157,9 +153,9 @@ def get_scalar_field_from_gradient( """ div_X = get_per_vertex_divergence(mesh, X, cotans) u = scipy.sparse.linalg.spsolve(C, div_X) - logger.info(f'Solved Δ(u) = div(X). Linear system error |Δ(u) - div(X)| = {np.linalg.norm(C * u - div_X):.6e}') + logger.info(f"Solved Δ(u) = div(X). 
Linear system error |Δ(u) - div(X)| = {np.linalg.norm(C * u - div_X):.6e}") u = u - np.amin(u) # make start value equal 0 - u = 2*u + u = 2 * u return u diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py b/src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py index 695917b7..ba55d515 100644 --- a/src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py +++ b/src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py @@ -11,33 +11,34 @@ if TYPE_CHECKING: from compas.datastructures import Mesh -__all__ = ['create_mesh_boundary_attributes', - 'get_existing_cut_indices', - 'get_existing_boundary_indices', - 'get_vertices_that_belong_to_cuts', - 'save_vertex_attributes', - 'restore_mesh_attributes', - 'replace_mesh_vertex_attribute'] - - -def create_mesh_boundary_attributes( - mesh: Mesh, low_boundary_vs: list[int], high_boundary_vs: list[int] -) -> None: +__all__ = [ + "create_mesh_boundary_attributes", + "get_existing_cut_indices", + "get_existing_boundary_indices", + "get_vertices_that_belong_to_cuts", + "save_vertex_attributes", + "restore_mesh_attributes", + "replace_mesh_vertex_attribute", +] + + +def create_mesh_boundary_attributes(mesh: Mesh, low_boundary_vs: list[int], high_boundary_vs: list[int]) -> None: """ Creates a default vertex attribute data['boundary']=0. Then it gives the value 1 to the vertices that belong to the lower boundary, and the value 2 to the vertices that belong to the higher boundary. """ - mesh.update_default_vertex_attributes({'boundary': 0}) + mesh.update_default_vertex_attributes({"boundary": 0}) for vkey, data in mesh.vertices(data=True): if vkey in low_boundary_vs: - data['boundary'] = 1 + data["boundary"] = 1 elif vkey in high_boundary_vs: - data['boundary'] = 2 + data["boundary"] = 2 ############################################### # --- Mesh existing attributes on vertices + def get_existing_cut_indices(mesh: Mesh) -> list[int]: """ Returns @@ -47,8 +48,8 @@ def get_existing_cut_indices(mesh: Mesh) -> list[int]: """ cut_indices = [] for _vkey, data in mesh.vertices(data=True): - if data['cut'] > 0 and data['cut'] not in cut_indices: - cut_indices.append(data['cut']) + if data["cut"] > 0 and data["cut"] not in cut_indices: + cut_indices.append(data["cut"]) cut_indices = sorted(cut_indices) return cut_indices @@ -62,15 +63,13 @@ def get_existing_boundary_indices(mesh: Mesh) -> list[int]: """ indices = [] for _vkey, data in mesh.vertices(data=True): - if data['boundary'] > 0 and data['boundary'] not in indices: - indices.append(data['boundary']) + if data["boundary"] > 0 and data["boundary"] not in indices: + indices.append(data["boundary"]) boundary_indices = sorted(indices) return boundary_indices -def get_vertices_that_belong_to_cuts( - mesh: Mesh, cut_indices: list[int] -) -> dict[int, dict[int, list[float]]]: +def get_vertices_that_belong_to_cuts(mesh: Mesh, cut_indices: list[int]) -> dict[int, dict[int, list[float]]]: """ Returns ---------- @@ -80,8 +79,8 @@ def get_vertices_that_belong_to_cuts( cuts_dict: dict[int, list[list[float]]] = {i: [] for i in cut_indices} for vkey, data in mesh.vertices(data=True): - if data['cut'] > 0: - cut_index = data['cut'] + if data["cut"] > 0: + cut_index = data["cut"] cuts_dict[cut_index].append(mesh.vertex_coordinates(vkey)) result: dict[int, dict[int, list[float]]] = {} @@ -94,36 +93,37 @@ def get_vertices_that_belong_to_cuts( ############################################### # --- Save and restore 
attributes + def save_vertex_attributes(mesh: Mesh) -> dict[str, Any]: """ Saves the boundary and cut attributes that are on the mesh on a dictionary. """ - v_attributes_dict: dict[str, Any] = {'boundary_1': [], 'boundary_2': [], 'cut': {}} + v_attributes_dict: dict[str, Any] = {"boundary_1": [], "boundary_2": [], "cut": {}} cut_indices = [] for _vkey, data in mesh.vertices(data=True): - cut_index = data['cut'] + cut_index = data["cut"] if cut_index not in cut_indices: cut_indices.append(cut_index) cut_indices = sorted(cut_indices) for cut_index in cut_indices: - v_attributes_dict['cut'][cut_index] = [] + v_attributes_dict["cut"][cut_index] = [] for vkey, data in mesh.vertices(data=True): - if data['boundary'] == 1: + if data["boundary"] == 1: v_coords = mesh.vertex_coordinates(vkey) pt = Point(x=v_coords[0], y=v_coords[1], z=v_coords[2]) - v_attributes_dict['boundary_1'].append(pt) - elif data['boundary'] == 2: + v_attributes_dict["boundary_1"].append(pt) + elif data["boundary"] == 2: v_coords = mesh.vertex_coordinates(vkey) pt = Point(x=v_coords[0], y=v_coords[1], z=v_coords[2]) - v_attributes_dict['boundary_2'].append(pt) - if data['cut'] > 0: - cut_index = data['cut'] + v_attributes_dict["boundary_2"].append(pt) + if data["cut"] > 0: + cut_index = data["cut"] v_coords = mesh.vertex_coordinates(vkey) pt = Point(x=v_coords[0], y=v_coords[1], z=v_coords[2]) - v_attributes_dict['cut'][cut_index].append(pt) + v_attributes_dict["cut"][cut_index].append(pt) return v_attributes_dict @@ -131,8 +131,8 @@ def restore_mesh_attributes(mesh: Mesh, v_attributes_dict: dict[str, Any]) -> No """ Restores the cut and boundary attributes on the mesh vertices from the dictionary of the previously saved attributes """ - mesh.update_default_vertex_attributes({'boundary': 0}) - mesh.update_default_vertex_attributes({'cut': 0}) + mesh.update_default_vertex_attributes({"boundary": 0}) + mesh.update_default_vertex_attributes({"cut": 0}) D_THRESHOLD = 0.01 @@ -146,23 +146,21 @@ def _restore_attribute_batch(pts_list, attr_name, attr_value): """Restore attribute for a batch of points using KDTree.""" if not pts_list: return - query_pts = np.array([[p.x, p.y, p.z] if hasattr(p, 'x') else p for p in pts_list], dtype=np.float64) + query_pts = np.array([[p.x, p.y, p.z] if hasattr(p, "x") else p for p in pts_list], dtype=np.float64) distances, indices = tree.query(query_pts) for dist, idx in zip(distances, indices): - if dist ** 2 < D_THRESHOLD: + if dist**2 < D_THRESHOLD: c_vkey = indices_to_vkeys[idx] mesh.vertex_attribute(c_vkey, attr_name, value=attr_value) - _restore_attribute_batch(v_attributes_dict['boundary_1'], 'boundary', 1) - _restore_attribute_batch(v_attributes_dict['boundary_2'], 'boundary', 2) + _restore_attribute_batch(v_attributes_dict["boundary_1"], "boundary", 1) + _restore_attribute_batch(v_attributes_dict["boundary_2"], "boundary", 2) - for cut_index in v_attributes_dict['cut']: - _restore_attribute_batch(v_attributes_dict['cut'][cut_index], 'cut', int(cut_index)) + for cut_index in v_attributes_dict["cut"]: + _restore_attribute_batch(v_attributes_dict["cut"][cut_index], "cut", int(cut_index)) -def replace_mesh_vertex_attribute( - mesh: Mesh, old_attr: str, old_val: int, new_attr: str, new_val: int -) -> None: +def replace_mesh_vertex_attribute(mesh: Mesh, old_attr: str, old_val: int, new_attr: str, new_val: int) -> None: """ Replaces one vertex attribute with a new one. For all the vertices where data[old_attr]=old_val, then the old_val is replaced with 0, and data[new_attr]=new_val. 
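[Reviewer note: the batched restore above replaces a per-point linear scan with a single scipy.spatial.cKDTree query over all saved attribute locations. Below is a minimal standalone sketch of that nearest-neighbour snapping; the snap_attribute helper, toy coordinates and threshold are illustrative assumptions, not part of compas_slicer.]

# Illustrative sketch only -- mirrors the nearest-neighbour snapping used by
# restore_mesh_attributes; snap_attribute and the toy data are assumptions.
import numpy as np
from scipy.spatial import cKDTree

def snap_attribute(new_coords, saved_pts, attr_value, d_threshold=0.01):
    """Map saved attribute locations onto the nearest vertex of the rebuilt
    mesh, keeping a match only if the squared distance is below the threshold."""
    tree = cKDTree(np.asarray(new_coords, dtype=np.float64))
    distances, indices = tree.query(np.asarray(saved_pts, dtype=np.float64))
    return {int(i): attr_value for d, i in zip(distances, indices) if d**2 < d_threshold}

new_coords = [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]]  # welded mesh vertices
saved_pts = [[1.0, 0.0, 0.001], [5.0, 5.0, 5.0]]  # second point has no nearby vertex
print(snap_attribute(new_coords, saved_pts, attr_value=1))  # -> {1: 1}

[As in the hunk above, matches are accepted on squared distance against D_THRESHOLD, so the welded mesh only inherits an attribute when a vertex survives welding essentially in place.]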
diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/region_split.py b/src/compas_slicer/pre_processing/preprocessing_utils/region_split.py index 67f563f9..e6ef2762 100644 --- a/src/compas_slicer/pre_processing/preprocessing_utils/region_split.py +++ b/src/compas_slicer/pre_processing/preprocessing_utils/region_split.py @@ -17,7 +17,7 @@ save_vertex_attributes, ) -__all__ = ['MeshSplitter'] +__all__ = ["MeshSplitter"] # --- Parameters T_SEARCH_RESOLUTION = 60000 @@ -55,8 +55,9 @@ def __init__(self, mesh, target_LOW, target_HIGH, DATA_PATH): self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH) self.target_LOW, self.target_HIGH = target_LOW, target_HIGH - assign_interpolation_distance_to_mesh_vertices(self.mesh, weight=0.5, target_LOW=self.target_LOW, - target_HIGH=self.target_HIGH) + assign_interpolation_distance_to_mesh_vertices( + self.mesh, weight=0.5, target_LOW=self.target_LOW, target_HIGH=self.target_HIGH + ) # Late import to avoid circular dependency from compas_slicer.pre_processing.gradient_evaluation import GradientEvaluation @@ -90,28 +91,30 @@ def run(self): logger.info(f"{len(split_params)} Split params. First rough estimation : {split_params}") # split mesh at params - logger.info('Splitting mesh at split params') + logger.info("Splitting mesh at split params") current_cut_index = 1 for i, param_first_estimation in enumerate(split_params): - logger.info(f'cut_index : {current_cut_index}, param_first_estimation : {param_first_estimation:.6f}') + logger.info(f"cut_index : {current_cut_index}, param_first_estimation : {param_first_estimation:.6f}") # --- (1) More exact estimation of intersecting weight. Recompute gradient evaluation. # Find exact saddle point and the weight that intersects it. - assign_interpolation_distance_to_mesh_vertices(self.mesh, weight=param_first_estimation, - target_LOW=self.target_LOW, target_HIGH=self.target_HIGH) + assign_interpolation_distance_to_mesh_vertices( + self.mesh, weight=param_first_estimation, target_LOW=self.target_LOW, target_HIGH=self.target_HIGH + ) # Late import to avoid circular dependency from compas_slicer.pre_processing.gradient_evaluation import GradientEvaluation g_evaluation = GradientEvaluation(self.mesh, self.DATA_PATH) g_evaluation.find_critical_points() - saddles_ds_tupples = [(vkey, abs(g_evaluation.mesh.vertex_attribute(vkey, 'scalar_field'))) for vkey in - g_evaluation.saddles] + saddles_ds_tupples = [ + (vkey, abs(g_evaluation.mesh.vertex_attribute(vkey, "scalar_field"))) for vkey in g_evaluation.saddles + ] saddles_ds_tupples = sorted(saddles_ds_tupples, key=lambda saddle_tupple: saddle_tupple[1]) vkey = saddles_ds_tupples[0][0] t = self.identify_positions_to_split([vkey])[0] - logger.info(f'vkey_exact : {vkey} , t_exact : {t:.6f}') + logger.info(f"vkey_exact : {vkey} , t_exact : {t:.6f}") # --- (2) find zero-crossing points assign_interpolation_distance_to_mesh_vertices(self.mesh, t, self.target_LOW, self.target_HIGH) @@ -126,12 +129,14 @@ def run(self): cleanup_unrelated_isocontour_neighborhoods(zero_contours, keys_of_clusters_to_keep) if zero_contours: # if there are remaining zero-crossing neighborhoods - zero_contours = smoothen_cut(zero_contours, self.mesh, saddle_vkeys=[vkey], iterations=15, - strength=0.2) # smoothen the cut close to the saddle point. + zero_contours = smoothen_cut( + zero_contours, self.mesh, saddle_vkeys=[vkey], iterations=15, strength=0.2 + ) # smoothen the cut close to the saddle point. 
# save to json intermediary results - zero_contours.save_point_clusters_as_polylines_to_json(self.OUTPUT_PATH, - f'point_clusters_polylines_{int(i)}.json') + zero_contours.save_point_clusters_as_polylines_to_json( + self.OUTPUT_PATH, f"point_clusters_polylines_{int(i)}.json" + ) # --- (4) Create cut logger.info("Creating cut on mesh") @@ -140,17 +145,17 @@ def run(self): current_cut_index += 1 # --- (5) Weld mesh and restore attributes - logger.info('Cleaning up the mesh. Welding and restoring attributes') + logger.info("Cleaning up the mesh. Welding and restoring attributes") v_attributes_dict = save_vertex_attributes(self.mesh) self.mesh = weld_mesh(self.mesh, self.OUTPUT_PATH) restore_mesh_attributes(self.mesh, v_attributes_dict) # --- (6) Update targets if i < len(split_params) - 1: # does not need to happen at the end - logger.info('Updating targets, recomputing geodesic distances') + logger.info("Updating targets, recomputing geodesic distances") self.update_targets() - self.mesh.to_obj(str(Path(self.OUTPUT_PATH) / 'most_recent_cut_mesh.obj')) + self.mesh.to_obj(str(Path(self.OUTPUT_PATH) / "most_recent_cut_mesh.obj")) def update_targets(self): """ @@ -181,7 +186,7 @@ def split_intersected_faces(self, zero_contours, cut_index): # add first vertex p = pts[0] - v0 = self.mesh.add_vertex(x=p[0], y=p[1], z=p[2], attr_dict={'cut': 1}) + v0 = self.mesh.add_vertex(x=p[0], y=p[1], z=p[2], attr_dict={"cut": 1}) for i, edge in enumerate(edges): next_edge = edges[(i + 1) % len(edges)] @@ -195,7 +200,7 @@ def split_intersected_faces(self, zero_contours, cut_index): v_other_a = list(set(edge).difference([vkey_common]))[0] v_other_b = list(set(next_edge).difference([vkey_common]))[0] - v_new = self.mesh.add_vertex(x=p[0], y=p[1], z=p[2], attr_dict={'cut': cut_index}) + v_new = self.mesh.add_vertex(x=p[0], y=p[1], z=p[2], attr_dict={"cut": cut_index}) # remove and add faces if fkey_common in list(self.mesh.faces()): @@ -204,16 +209,16 @@ def split_intersected_faces(self, zero_contours, cut_index): self.mesh.add_face([v_new, v_other_a, v0]) self.mesh.add_face([v_other_b, v_other_a, v_new]) else: - logger.warning('Did not need to modify faces.') + logger.warning("Did not need to modify faces.") v0 = v_new self.mesh.cull_vertices() # remove all unused vertices try: self.mesh.unify_cycles() except AssertionError: - logger.warning('Could NOT unify cycles') + logger.warning("Could NOT unify cycles") if not self.mesh.is_valid(): - logger.warning('Attention! Mesh is NOT valid!') + logger.warning("Attention! 
Mesh is NOT valid!") def identify_positions_to_split(self, saddles): """ @@ -251,18 +256,21 @@ def find_weight_intersecting_vkey(self, vkey, threshold, resolution): # TODO: save next d to avoid re-evaluating for i, weight in enumerate(weight_list[:-1]): current_d = assign_interpolation_distance_to_mesh_vertex(vkey, weight, self.target_LOW, self.target_HIGH) - next_d = assign_interpolation_distance_to_mesh_vertex(vkey, weight_list[i + 1], self.target_LOW, self.target_HIGH) + next_d = assign_interpolation_distance_to_mesh_vertex( + vkey, weight_list[i + 1], self.target_LOW, self.target_HIGH + ) if abs(current_d) < abs(next_d) and current_d < threshold: return weight - raise ValueError(f'Could NOT find param for saddle vkey {vkey}!') + raise ValueError(f"Could NOT find param for saddle vkey {vkey}!") ############################################### # --- helpers ############################################### + def get_weights_list(n, start=0.03, end=1.0): - """ Returns a numpy array with n numbers from start to end. """ + """Returns a numpy array with n numbers from start to end.""" return list(np.arange(start=start, stop=end, step=(end - start) / n)) @@ -388,14 +396,10 @@ def _trimesh_face_components( return np.arange(n_faces, dtype=np.int32) data = np.ones(len(row), dtype=np.int32) - adjacency = scipy.sparse.csr_matrix( - (data, (row, col)), shape=(n_faces, n_faces) - ) + adjacency = scipy.sparse.csr_matrix((data, (row, col)), shape=(n_faces, n_faces)) # Find connected components - n_components, labels = scipy.sparse.csgraph.connected_components( - adjacency, directed=False - ) + n_components, labels = scipy.sparse.csgraph.connected_components(adjacency, directed=False) return labels @@ -403,6 +407,7 @@ def _trimesh_face_components( ############################################### # --- Separate disconnected components + def separate_disconnected_components(mesh, attr, values, OUTPUT_PATH): """ Given a mesh with cuts that have already been created, it separates the disconnected @@ -450,14 +455,14 @@ def separate_disconnected_components(mesh, attr, values, OUTPUT_PATH): f_dict[i] = [] for f_index, face in enumerate(f_cut): component = connected_components[f_index] - f_dict[component].append(face.tolist() if hasattr(face, 'tolist') else list(face)) + f_dict[component].append(face.tolist() if hasattr(face, "tolist") else list(face)) cut_meshes = [] for component in f_dict: cut_mesh = Mesh.from_vertices_and_faces(v_cut.tolist(), f_dict[component]) cut_mesh.cull_vertices() if len(list(cut_mesh.faces())) > 2: - temp_path = Path(OUTPUT_PATH) / 'temp.obj' + temp_path = Path(OUTPUT_PATH) / "temp.obj" cut_mesh.to_obj(str(temp_path)) cut_mesh = Mesh.from_obj(str(temp_path)) # get rid of too many empty keys cut_meshes.append(cut_mesh) @@ -472,8 +477,8 @@ def separate_disconnected_components(mesh, attr, values, OUTPUT_PATH): # --- saddle points merging -def smoothen_cut(zero_contours, mesh, saddle_vkeys, iterations, strength, distance_threshold=20.0*20.0): - """ Iterative smoothing of the cut around the saddle point. 
""" +def smoothen_cut(zero_contours, mesh, saddle_vkeys, iterations, strength, distance_threshold=20.0 * 20.0): + """Iterative smoothing of the cut around the saddle point.""" for _ in range(iterations): saddles = [mesh.vertex_coordinates(key) for key in saddle_vkeys] count = 0 @@ -518,9 +523,10 @@ def merge_clusters_saddle_point(zero_contours, saddle_vkeys): for i, e in enumerate(edges): for saddle_vkey in saddle_vkeys: if saddle_vkey in e: - zero_contours.sorted_point_clusters[cluster_key][i] = \ - zero_contours.mesh.vertex_coordinates(saddle_vkey) # merge point with saddle point - logger.debug(f'Found edge to merge: {e}') + zero_contours.sorted_point_clusters[cluster_key][i] = zero_contours.mesh.vertex_coordinates( + saddle_vkey + ) # merge point with saddle point + logger.debug(f"Found edge to merge: {e}") if cluster_key not in keys_of_clusters_to_keep: keys_of_clusters_to_keep.append(cluster_key) @@ -540,7 +546,7 @@ def cleanup_unrelated_isocontour_neighborhoods(zero_contours, keys_of_clusters_t logger.error("No common vertex found! Skipping this split_param") return None else: - logger.info(f'keys_of_clusters_to_keep: {keys_of_clusters_to_keep}') + logger.info(f"keys_of_clusters_to_keep: {keys_of_clusters_to_keep}") # empty all other clusters that are not in the matching_pair sorted_point_clusters_clean = copy.deepcopy(zero_contours.sorted_point_clusters) sorted_edge_clusters_clean = copy.deepcopy(zero_contours.sorted_edge_clusters) @@ -556,15 +562,16 @@ def cleanup_unrelated_isocontour_neighborhoods(zero_contours, keys_of_clusters_t ######################################################## # --- Mesh welding and sanitizing -def weld_mesh(mesh, OUTPUT_PATH, precision='2f'): - """ Welds mesh and check that the result is valid. """ + +def weld_mesh(mesh, OUTPUT_PATH, precision="2f"): + """Welds mesh and check that the result is valid.""" for f_key in mesh.faces(): if len(mesh.face_vertices(f_key)) < 3: mesh.delete_face(f_key) welded_mesh = mesh.weld(precision=precision) - temp_path = Path(OUTPUT_PATH) / 'temp.obj' + temp_path = Path(OUTPUT_PATH) / "temp.obj" welded_mesh.to_obj(str(temp_path)) # make sure there's no empty f_keys welded_mesh = Mesh.from_obj(str(temp_path)) # TODO: find a better way to do this diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py b/src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py index 05ce6f96..05f3605a 100644 --- a/src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py +++ b/src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py @@ -18,13 +18,13 @@ from compas_slicer.geometry import VerticalLayer -__all__ = ['MeshDirectedGraph', - 'SegmentsDirectedGraph'] +__all__ = ["MeshDirectedGraph", "SegmentsDirectedGraph"] ################################# # DirectedGraph + class DirectedGraph: """ Base class for topological sorting of prints that consist of several parts that lie on each other. @@ -34,29 +34,33 @@ class DirectedGraph: """ def __init__(self) -> None: - logger.info('Topological sorting') + logger.info("Topological sorting") self.G: nx.DiGraph = nx.DiGraph() self.create_graph_nodes() self.root_indices = self.find_roots() - logger.info(f'Graph roots: {self.root_indices}') + logger.info(f"Graph roots: {self.root_indices}") if len(self.root_indices) == 0: raise ValueError("No root nodes were found. 
At least one root node is needed.") self.end_indices = self.find_ends() - logger.info(f'Graph ends: {self.end_indices}') + logger.info(f"Graph ends: {self.end_indices}") if len(self.end_indices) == 0: raise ValueError("No end nodes were found. At least one end node is needed.") self.create_directed_graph_edges(copy.deepcopy(self.root_indices)) - logger.info(f'Nodes: {list(self.G.nodes(data=True))}') - logger.info(f'Edges: {list(self.G.edges(data=True))}') + logger.info(f"Nodes: {list(self.G.nodes(data=True))}") + logger.info(f"Edges: {list(self.G.edges(data=True))}") self.N: int = len(list(self.G.nodes())) - self.adj_list: list[list[int]] = self.get_adjacency_list() # Nested list where adj_list[i] is a list of all the neighbors + self.adj_list: list[list[int]] = ( + self.get_adjacency_list() + ) # Nested list where adj_list[i] is a list of all the neighbors # of the i-th component self.check_that_all_nodes_found_their_connectivity() - logger.info(f'Adjacency list: {self.adj_list}') + logger.info(f"Adjacency list: {self.adj_list}") - self.in_degree: list[int] = self.get_in_degree() # Nested list where adj_list[i] is a list of all the edges pointing + self.in_degree: list[int] = ( + self.get_in_degree() + ) # List where in_degree[i] counts the edges pointing # to the i-th node. self.all_orders: list[list[int]] = [] @@ -66,27 +70,27 @@ def __repr__(self) -> str: # ------------------------------------ Methods to be implemented by inheriting classes @abstractmethod def find_roots(self) -> list[int]: - """ Roots are vertical_layers_print_data that lie on the build platform. Like that they can be print first. """ + """Roots are vertical_layers_print_data that lie on the build platform, so they can be printed first.""" pass @abstractmethod def find_ends(self) -> list[int]: - """ Ends are vertical_layers_print_data that belong to exclusively one segment. Like that they can be print last. """ + """Ends are vertical_layers_print_data that belong exclusively to one segment, so they can be printed last.""" pass @abstractmethod def create_graph_nodes(self) -> None: - """ Add the nodes to the graph with their attributes. """ + """Add the nodes to the graph with their attributes.""" pass @abstractmethod def get_children_of_node(self, root: int) -> tuple[list[int], list[Any]]: - """ Find all the vertical_layers_print_data that lie on the current root segment. """ + """Find all the vertical_layers_print_data that lie on the current root segment.""" pass # ------------------------------------ Creation of graph connectivity between different nodes def create_directed_graph_edges(self, root_indices: list[int]) -> None: - """ Create the connectivity of the directed graph using breadth-first search graph traversal. """ + """Create the connectivity of the directed graph using breadth-first search graph traversal.""" passed_nodes = [] queue = root_indices @@ -100,13 +104,13 @@ def create_directed_graph_edges(self, root_indices: list[int]) -> None: self.G.add_edge(current_node, child_key, cut=common_cuts) for child_key in children: if child_key in passed_nodes: - raise ValueError('Error: cyclic directed graph detected.') + raise ValueError("Error: cyclic directed graph detected.") for child_key in children: if child_key not in queue: queue.append(child_key) def check_that_all_nodes_found_their_connectivity(self) -> None: - """ Assert that there is no island, i.e. 
no node or groups of nodes that are not connected to the base.""" good_nodes = list(self.root_indices) for children_list in self.adj_list: for child in children_list: @@ -114,12 +118,12 @@ def check_that_all_nodes_found_their_connectivity(self) -> None: good_nodes.append(child) if len(good_nodes) != self.N: raise ValueError( - f'Floating vertical layers detected: {len(good_nodes)} connected nodes vs {self.N} total. ' - 'Check graph creation process.' + f"Floating vertical layers detected: {len(good_nodes)} connected nodes vs {self.N} total. " + "Check graph creation process." ) def sort_queue_with_end_targets_last(self, queue: list[int]) -> list[int]: - """ Sorts the queue so that the vertical_layers_print_data that have an end target are always at the end. """ + """Sorts the queue so that the vertical_layers_print_data that have an end target are always at the end.""" queue_copy = copy.deepcopy(queue) for index in queue: if index in self.end_indices: @@ -129,15 +133,17 @@ def sort_queue_with_end_targets_last(self, queue: list[int]) -> list[int]: # ------------------------------------ Find all topological orders def get_adjacency_list(self) -> list[list[int]]: - """ Returns adjacency list. Nested list where adj_list[i] is a list of all the neighbors of the ith component""" - adj_list: list[list[int]] = [[] for _ in range(self.N)] # adjacency list , size = len(Nodes), stores nodes' neighbors + """Returns adjacency list. Nested list where adj_list[i] is a list of all the neighbors of the ith component""" + adj_list: list[list[int]] = [ + [] for _ in range(self.N) + ] # adjacency list , size = len(Nodes), stores nodes' neighbors for i, adjacent_to_node in self.G.adjacency(): for key in adjacent_to_node: adj_list[i].append(key) return adj_list def get_in_degree(self) -> list[int]: - """ Returns in_degree list. Nested list where adj_list[i] is a list of all the edges pointing to the node.""" + """Returns in_degree list. Nested list where adj_list[i] is a list of all the edges pointing to the node.""" in_degree = [0] * self.N # in_degree, size = len(Nodes) , stores in-degree of a node for key_degree_tuple in self.G.in_degree: key = key_degree_tuple[0] @@ -156,7 +162,7 @@ def get_all_topological_orders(self) -> list[list[int]]: discovered = [False] * self.N path: list[int] = [] # list to store the topological order self.get_orders(path, discovered) - logger.info(f'Found {len(self.all_orders)} possible orders') + logger.info(f"Found {len(self.all_orders)} possible orders") return self.all_orders def get_orders(self, path: list[int], discovered: list[bool]) -> None: @@ -167,7 +173,6 @@ def get_orders(self, path: list[int], discovered: list[bool]) -> None: for v in range(self.N): # for every node # proceed only if in-degree of current node is 0 and current node is not processed yet if self.in_degree[v] == 0 and not discovered[v]: - # for every adjacent vertex u of v, reduce in-degree of u by 1 for u in self.adj_list[v]: self.in_degree[u] = self.in_degree[u] - 1 @@ -192,7 +197,7 @@ def get_orders(self, path: list[int], discovered: list[bool]) -> None: self.all_orders.append(copy.deepcopy(path)) def get_parents_of_node(self, node_index: int) -> list[int]: - """ Returns the parents of node with i = node_index. 
""" + """Returns the parents of node with i = node_index.""" return [j for j, adj in enumerate(self.adj_list) if node_index in adj] @@ -201,8 +206,8 @@ def get_parents_of_node(self, node_index: int) -> list[int]: class MeshDirectedGraph(DirectedGraph): - """ The MeshDirectedGraph is used for topological sorting of multiple meshes that have been - generated as a result of region split over the saddle points of the mesh scalar function """ + """The MeshDirectedGraph is used for topological sorting of multiple meshes that have been + generated as a result of region split over the saddle points of the mesh scalar function""" def __init__(self, all_meshes: list[Mesh], DATA_PATH: str) -> None: self.all_meshes = all_meshes @@ -211,28 +216,27 @@ def __init__(self, all_meshes: list[Mesh], DATA_PATH: str) -> None: DirectedGraph.__init__(self) def find_roots(self) -> list[int]: - """ Roots are vertical_layers_print_data that lie on the build platform. Like that they can be print first. """ + """Roots are vertical_layers_print_data that lie on the build platform. Like that they can be print first.""" roots: list[int] = [] for i, mesh in enumerate(self.all_meshes): for _vkey, data in mesh.vertices(data=True): - if i not in roots and data['boundary'] == 1: + if i not in roots and data["boundary"] == 1: roots.append(i) return roots def find_ends(self) -> list[int]: - """ Ends are vertical_layers_print_data that belong to exclusively one segment. Like that they can be print last. """ + """Ends are vertical_layers_print_data that belong to exclusively one segment. Like that they can be print last.""" ends: list[int] = [] for i, mesh in enumerate(self.all_meshes): for _vkey, data in mesh.vertices(data=True): - if i not in ends and data['boundary'] == 2: + if i not in ends and data["boundary"] == 2: ends.append(i) return ends def create_graph_nodes(self) -> None: - """ Add each of the split meshes to the graph as nodes. Cuts and boundaries are stored as attributes. """ + """Add each of the split meshes to the graph as nodes. Cuts and boundaries are stored as attributes.""" for i, m in enumerate(self.all_meshes): - self.G.add_node(i, cuts=get_existing_cut_indices(m), - boundaries=get_existing_boundary_indices(m)) + self.G.add_node(i, cuts=get_existing_cut_indices(m), boundaries=get_existing_boundary_indices(m)) def get_children_of_node(self, root: int) -> tuple[list[int], list[list[int]]]: """ @@ -251,16 +255,20 @@ def get_children_of_node(self, root: int) -> tuple[list[int], list[list[int]]]: parent_data = self.G.nodes(data=True)[root] for key, data in self.G.nodes(data=True): - common_cuts = list(set(parent_data['cuts']).intersection(data['cuts'])) - - if key != root and len(common_cuts) > 0 \ - and (key, root) not in self.G.edges() \ - and (root, key) not in self.G.edges() and is_true_mesh_adjacency(self.all_meshes, key, root): + common_cuts = list(set(parent_data["cuts"]).intersection(data["cuts"])) + + if ( + key != root + and len(common_cuts) > 0 + and (key, root) not in self.G.edges() + and (root, key) not in self.G.edges() + and is_true_mesh_adjacency(self.all_meshes, key, root) + ): if len(common_cuts) != 1: # if all cuts worked, this should be 1. But life is not perfect. logger.error( - f'More than one common cuts between two pieces in the following split meshes. ' - f'Root : {root}, child : {key} . Common cuts : {common_cuts}' - 'Probably some cut did not separate components' + f"More than one common cuts between two pieces in the following split meshes. " + f"Root : {root}, child : {key} . 
Common cuts : {common_cuts}" + "Probably some cut did not separate components" ) children.append(key) cut_ids.append(common_cuts) @@ -272,12 +280,11 @@ def get_children_of_node(self, root: int) -> tuple[list[int], list[list[int]]]: ################################# # --- Segments DirectedGraph + class SegmentsDirectedGraph(DirectedGraph): - """ The SegmentsDirectedGraph is used for topological sorting of multiple vertical_layers_print_data in one mesh""" + """The SegmentsDirectedGraph is used for topological sorting of multiple vertical_layers_print_data in one mesh""" - def __init__( - self, mesh: Mesh, segments: list[VerticalLayer], max_d_threshold: float, DATA_PATH: str - ) -> None: + def __init__(self, mesh: Mesh, segments: list[VerticalLayer], max_d_threshold: float, DATA_PATH: str) -> None: self.mesh = mesh self.segments = segments self.max_d_threshold = max_d_threshold @@ -286,8 +293,8 @@ def __init__( DirectedGraph.__init__(self) def find_roots(self) -> list[int]: - """ Roots are vertical_layers_print_data that lie on the build platform. Like that they can be print first. """ - boundary_pts = utils.get_mesh_vertex_coords_with_attribute(self.mesh, 'boundary', 1) + """Roots are vertical_layers_print_data that lie on the build platform. Like that they can be print first.""" + boundary_pts = utils.get_mesh_vertex_coords_with_attribute(self.mesh, "boundary", 1) root_segments: list[int] = [] for i, segment in enumerate(self.segments): first_curve_pts = segment.paths[0].points @@ -296,8 +303,8 @@ def find_roots(self) -> list[int]: return root_segments def find_ends(self) -> list[int]: - """ Ends are vertical_layers_print_data that belong to exclusively one segment. Like that they can be print last. """ - boundary_pts = utils.get_mesh_vertex_coords_with_attribute(self.mesh, 'boundary', 2) + """Ends are vertical_layers_print_data that belong to exclusively one segment. Like that they can be print last.""" + boundary_pts = utils.get_mesh_vertex_coords_with_attribute(self.mesh, "boundary", 2) end_segments: list[int] = [] for i, segment in enumerate(self.segments): last_curve_pts = segment.paths[-1].points @@ -306,12 +313,12 @@ def find_ends(self) -> list[int]: return end_segments def create_graph_nodes(self) -> None: - """ Add each segment to to the graph as a node. """ + """Add each segment to to the graph as a node.""" for i, _segment in enumerate(self.segments): self.G.add_node(i) def get_children_of_node(self, root: int) -> tuple[list[int], list[None]]: - """ Find all the nodes that lie on the current root. """ + """Find all the nodes that lie on the current root.""" children: list[int] = [] root_segment = self.segments[root] root_last_crv_pts = root_segment.paths[-1].points @@ -330,6 +337,7 @@ def get_children_of_node(self, root: int) -> tuple[list[int], list[None]]: ################################# # --- helpers + def are_neighboring_point_clouds(pts1: list[Point], pts2: list[Point], threshold: float) -> bool: """ Returns True if 3 or more points of the point clouds are closer than the threshold. False otherwise. 
@@ -361,10 +369,16 @@ def is_true_mesh_adjacency(all_meshes: list[Mesh], key1: int, key2: int) -> bool """ mesh1 = all_meshes[key1] mesh2 = all_meshes[key2] - pts_mesh2 = [mesh2.vertex_coordinates(vkey) for vkey, data in mesh2.vertices(data=True) - if (data['cut'] > 0 or data['boundary'] > 0)] - pts_mesh1 = [mesh1.vertex_coordinates(vkey) for vkey, data in mesh1.vertices(data=True) - if (data['cut'] > 0 or data['boundary'] > 0)] + pts_mesh2 = [ + mesh2.vertex_coordinates(vkey) + for vkey, data in mesh2.vertices(data=True) + if (data["cut"] > 0 or data["boundary"] > 0) + ] + pts_mesh1 = [ + mesh1.vertex_coordinates(vkey) + for vkey, data in mesh1.vertices(data=True) + if (data["cut"] > 0 or data["boundary"] > 0) + ] if len(pts_mesh1) == 0 or len(pts_mesh2) == 0: return False # Vectorized: compute min distance from each pt in mesh1 to pts_mesh2 @@ -372,8 +386,8 @@ def is_true_mesh_adjacency(all_meshes: list[Mesh], key1: int, key2: int) -> bool arr2 = np.asarray(pts_mesh2, dtype=np.float64) distances = min_distances_to_set(arr1, arr2) # Count points with essentially zero distance (shared vertices) - return np.sum(distances ** 2 < 0.00001) >= 3 + return np.sum(distances**2 < 0.00001) >= 3 -if __name__ == '__main__': +if __name__ == "__main__": pass diff --git a/src/compas_slicer/print_organization/__init__.py b/src/compas_slicer/print_organization/__init__.py index 3959c6a0..ae457512 100644 --- a/src/compas_slicer/print_organization/__init__.py +++ b/src/compas_slicer/print_organization/__init__.py @@ -7,4 +7,4 @@ from .print_organization_utilities import * # noqa: F401 E402 F403 from .scalar_field_print_organizer import * # noqa: F401 E402 F403 -__all__ = [name for name in dir() if not name.startswith('_')] +__all__ = [name for name in dir() if not name.startswith("_")] diff --git a/src/compas_slicer/print_organization/base_print_organizer.py b/src/compas_slicer/print_organization/base_print_organizer.py index efe23f25..87c42d66 100644 --- a/src/compas_slicer/print_organization/base_print_organizer.py +++ b/src/compas_slicer/print_organization/base_print_organizer.py @@ -128,9 +128,7 @@ def number_of_paths_on_layer(self, layer_index: int) -> int: """Number of paths within a layer.""" return len(self.printpoints[layer_index]) - def remove_duplicate_points_in_path( - self, layer_idx: int, path_idx: int, tolerance: float = 0.0001 - ) -> None: + def remove_duplicate_points_in_path(self, layer_idx: int, path_idx: int, tolerance: float = 0.0001) -> None: """Remove subsequent points within a threshold distance. Parameters @@ -161,9 +159,7 @@ def remove_duplicate_points_in_path( for ppt in duplicate_ppts: path.printpoints.remove(ppt) - def get_printpoint_neighboring_items( - self, layer_idx: int, path_idx: int, i: int - ) -> list[PrintPoint | None]: + def get_printpoint_neighboring_items(self, layer_idx: int, path_idx: int, i: int) -> list[PrintPoint | None]: """Get neighboring printpoints. 
Parameters @@ -188,10 +184,7 @@ def get_printpoint_neighboring_items( def printout_info(self) -> None: """Print information about the PrintOrganizer.""" - ppts_attributes = { - key: str(type(val)) - for key, val in self.printpoints[0][0][0].attributes.items() - } + ppts_attributes = {key: str(type(val)) for key, val in self.printpoints[0][0][0].attributes.items()} logger.info("---- PrintOrganizer Info ----") logger.info(f"Number of layers: {self.number_of_layers}") diff --git a/src/compas_slicer/print_organization/curved_print_organization/base_boundary.py b/src/compas_slicer/print_organization/curved_print_organization/base_boundary.py index a4516af0..522ae19b 100644 --- a/src/compas_slicer/print_organization/curved_print_organization/base_boundary.py +++ b/src/compas_slicer/print_organization/curved_print_organization/base_boundary.py @@ -8,7 +8,7 @@ import compas_slicer.utilities as utils from compas_slicer.geometry import PrintPoint -__all__ = ['BaseBoundary'] +__all__ = ["BaseBoundary"] class BaseBoundary: @@ -24,9 +24,7 @@ class BaseBoundary: override_vector : """ - def __init__( - self, mesh: Mesh, points: list[Point], override_vector: Vector | None = None - ) -> None: + def __init__(self, mesh: Mesh, points: list[Point], override_vector: Vector | None = None) -> None: self.mesh = mesh self.points = points self.override_vector = override_vector @@ -38,9 +36,14 @@ def __init__( else: self.up_vectors = self.get_up_vectors() - self.printpoints = [PrintPoint(pt=pt, # Create fake print points - layer_height=1.0, - mesh_normal=self.normals[i]) for i, pt in enumerate(self.points)] + self.printpoints = [ + PrintPoint( + pt=pt, # Create fake print points + layer_height=1.0, + mesh_normal=self.normals[i], + ) + for i, pt in enumerate(self.points) + ] for i, pp in enumerate(self.printpoints): pp.up_vector = self.up_vectors[i] @@ -49,7 +52,7 @@ def __repr__(self) -> str: return f"" def get_up_vectors(self) -> list[Vector]: - """ Finds the up_vectors of each point of the boundary. A smoothing step is also included. """ + """Finds the up_vectors of each point of the boundary. A smoothing step is also included.""" up_vectors = [] for i, p in enumerate(self.points): v1 = Vector.from_start_end(p, self.points[(i + 1) % len(self.points)]) @@ -62,9 +65,11 @@ def get_up_vectors(self) -> list[Vector]: return up_vectors def to_data(self) -> dict[str, Any]: - """ Returns a dictionary with the data of the class. 
""" - return {"points": utils.point_list_to_dict(self.points), - "up_vectors": utils.point_list_to_dict(self.up_vectors)} + """Returns a dictionary with the data of the class.""" + return { + "points": utils.point_list_to_dict(self.points), + "up_vectors": utils.point_list_to_dict(self.up_vectors), + } if __name__ == "__main__": diff --git a/src/compas_slicer/print_organization/interpolation_print_organizer.py b/src/compas_slicer/print_organization/interpolation_print_organizer.py index 05d520d0..271ad39c 100644 --- a/src/compas_slicer/print_organization/interpolation_print_organizer.py +++ b/src/compas_slicer/print_organization/interpolation_print_organizer.py @@ -31,6 +31,7 @@ _USE_CGAL = False try: from compas_cgal.polylines import closest_points_on_polyline as _cgal_closest + _USE_CGAL = True except ImportError: _cgal_closest = None @@ -65,7 +66,7 @@ def _batch_closest_points_on_polyline( return np.array(closest), np.array(distances) -__all__ = ['InterpolationPrintOrganizer'] +__all__ = ["InterpolationPrintOrganizer"] class InterpolationPrintOrganizer(BasePrintOrganizer): @@ -99,7 +100,7 @@ def __init__( from compas_slicer.slicers import InterpolationSlicer if not isinstance(slicer, InterpolationSlicer): - raise TypeError('Please provide an InterpolationSlicer') + raise TypeError("Please provide an InterpolationSlicer") BasePrintOrganizer.__init__(self, slicer) self.DATA_PATH = DATA_PATH self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH) @@ -118,7 +119,7 @@ def __init__( raise ValueError("Only one brim horizontal layer is currently supported.") if not self.horizontal_layers[0].is_brim: raise ValueError("Only one brim horizontal layer is currently supported.") - logger.info('Slicer has one horizontal brim layer.') + logger.info("Slicer has one horizontal brim layer.") # topological sorting of vertical layers depending on their connectivity self.topo_sort_graph: topo_sort.SegmentsDirectedGraph | None = None @@ -146,13 +147,14 @@ def topological_sorting(self) -> None: """ avg_layer_height = self.config.avg_layer_height - self.topo_sort_graph = topo_sort.SegmentsDirectedGraph(self.slicer.mesh, self.vertical_layers, - 4 * avg_layer_height, DATA_PATH=self.DATA_PATH) + self.topo_sort_graph = topo_sort.SegmentsDirectedGraph( + self.slicer.mesh, self.vertical_layers, 4 * avg_layer_height, DATA_PATH=self.DATA_PATH + ) def create_base_boundaries(self) -> list[BaseBoundary]: """Create one BaseBoundary per vertical_layer.""" bs: list[BaseBoundary] = [] - root_vs = utils.get_mesh_vertex_coords_with_attribute(self.slicer.mesh, 'boundary', 1) + root_vs = utils.get_mesh_vertex_coords_with_attribute(self.slicer.mesh, "boundary", 1) root_boundary = BaseBoundary(self.slicer.mesh, [Point(*v) for v in root_vs]) if len(self.vertical_layers) > 1 and self.topo_sort_graph is not None: @@ -172,7 +174,7 @@ def create_base_boundaries(self) -> list[BaseBoundary]: # save intermediary outputs b_data = {i: b.to_data() for i, b in enumerate(bs)} - utils.save_to_json(b_data, self.OUTPUT_PATH, 'boundaries.json') + utils.save_to_json(b_data, self.OUTPUT_PATH, "boundaries.json") return bs @@ -190,11 +192,16 @@ def create_printpoints(self) -> None: print_layer = PrintLayer() paths = self.horizontal_layers[0].paths for _j, path in enumerate(paths): - print_path = PrintPath(printpoints=[ - PrintPoint(pt=point, layer_height=self.config.avg_layer_height, - mesh_normal=utils.get_normal_of_path_on_xy_plane(k, point, path, self.slicer.mesh)) - for k, point in enumerate(path.points) - ]) + print_path = PrintPath( + 
printpoints=[ + PrintPoint( + pt=point, + layer_height=self.config.avg_layer_height, + mesh_normal=utils.get_normal_of_path_on_xy_plane(k, point, path, self.slicer.mesh), + ) + for k, point in enumerate(path.points) + ] + ) print_layer.paths.append(print_path) self.printpoints.layers.append(print_layer) current_layer_index += 1 @@ -204,7 +211,6 @@ def create_printpoints(self) -> None: # (2) --- Select order of vertical layers if len(self.vertical_layers) > 1: # then you need to select one topological order - if not self.topo_sort_graph: logger.error("no topology graph found, cannnot set the order of vertical layers") self.selected_order = [0] @@ -239,9 +245,7 @@ def get_layer_ppts(self, layer: VerticalLayer, base_boundary: BaseBoundary) -> P print_layer = PrintLayer() for _i, path in enumerate(layer.paths): # Batch query: find closest points for all points in this path at once - closest_pts, distances = _batch_closest_points_on_polyline( - path.points, support_polyline_pts - ) + closest_pts, distances = _batch_closest_points_on_polyline(path.points, support_polyline_pts) print_path = PrintPath() for k, p in enumerate(path.points): diff --git a/src/compas_slicer/print_organization/planar_print_organizer.py b/src/compas_slicer/print_organization/planar_print_organizer.py index c1739f2c..d59dc69c 100644 --- a/src/compas_slicer/print_organization/planar_print_organizer.py +++ b/src/compas_slicer/print_organization/planar_print_organizer.py @@ -14,7 +14,7 @@ from compas_slicer.slicers import PlanarSlicer -__all__ = ['PlanarPrintOrganizer'] +__all__ = ["PlanarPrintOrganizer"] class PlanarPrintOrganizer(BasePrintOrganizer): @@ -33,7 +33,7 @@ def __init__(self, slicer: PlanarSlicer) -> None: from compas_slicer.slicers import PlanarSlicer if not isinstance(slicer, PlanarSlicer): - raise TypeError('Please provide a PlanarSlicer') + raise TypeError("Please provide a PlanarSlicer") BasePrintOrganizer.__init__(self, slicer) def __repr__(self) -> str: @@ -50,11 +50,10 @@ def create_printpoints(self, generate_mesh_normals: bool = True) -> None: """ count = 0 - logger.info('Creating print points ...') + logger.info("Creating print points ...") with progressbar.ProgressBar(max_value=self.slicer.number_of_points) as bar: - if generate_mesh_normals: - logger.info('Generating mesh normals ...') + logger.info("Generating mesh normals ...") # fast method for getting the closest mesh normals to all the printpoints all_pts = [pt for layer in self.slicer.layers for path in layer.paths for pt in path.points] closest_fks, projected_pts = utils.pull_pts_to_mesh_faces(self.slicer.mesh, all_pts) @@ -67,7 +66,6 @@ def create_printpoints(self, generate_mesh_normals: bool = True) -> None: print_path = PrintPath() for k, point in enumerate(path.points): - n = normals[count] if generate_mesh_normals else Vector(0, 1, 0) layer_h = self.slicer.layer_height if self.slicer.layer_height else 2.0 printpoint = PrintPoint(pt=point, layer_height=layer_h, mesh_normal=n) diff --git a/src/compas_slicer/print_organization/print_organization_utilities/blend_radius.py b/src/compas_slicer/print_organization/print_organization_utilities/blend_radius.py index 4264275c..dacf8402 100644 --- a/src/compas_slicer/print_organization/print_organization_utilities/blend_radius.py +++ b/src/compas_slicer/print_organization/print_organization_utilities/blend_radius.py @@ -9,12 +9,10 @@ from compas_slicer.print_organization import BasePrintOrganizer -__all__ = ['set_blend_radius'] +__all__ = ["set_blend_radius"] -def set_blend_radius( - print_organizer: 
BasePrintOrganizer, d_fillet: float = 10.0, buffer: float = 0.3 -) -> None: +def set_blend_radius(print_organizer: BasePrintOrganizer, d_fillet: float = 10.0, buffer: float = 0.3) -> None: """Sets the blend radius (filleting) for the robotic motion. Parameters @@ -35,20 +33,22 @@ def set_blend_radius( neighboring_items = print_organizer.get_printpoint_neighboring_items(i, j, k) if not printpoint.wait_time: - # if the extruder_toggle changes, it must be a new path and therefore the blend radius should be 0 if extruder_state != printpoint.extruder_toggle: extruder_state = printpoint.extruder_toggle radius = 0.0 else: - radius = d_fillet if neighboring_items[0]: - radius = min(radius, norm_vector(Vector.from_start_end(neighboring_items[0].pt, printpoint.pt)) * buffer) + radius = min( + radius, norm_vector(Vector.from_start_end(neighboring_items[0].pt, printpoint.pt)) * buffer + ) if neighboring_items[1]: - radius = min(radius, norm_vector(Vector.from_start_end(neighboring_items[1].pt, printpoint.pt)) * buffer) + radius = min( + radius, norm_vector(Vector.from_start_end(neighboring_items[1].pt, printpoint.pt)) * buffer + ) radius = round(radius, 5) diff --git a/src/compas_slicer/print_organization/print_organization_utilities/data_smoothing.py b/src/compas_slicer/print_organization/print_organization_utilities/data_smoothing.py index 63002685..fa6c06e8 100644 --- a/src/compas_slicer/print_organization/print_organization_utilities/data_smoothing.py +++ b/src/compas_slicer/print_organization/print_organization_utilities/data_smoothing.py @@ -10,9 +10,7 @@ from compas_slicer.print_organization import BasePrintOrganizer -__all__ = ['smooth_printpoint_attribute', - 'smooth_printpoints_up_vectors', - 'smooth_printpoints_layer_heights'] +__all__ = ["smooth_printpoint_attribute", "smooth_printpoints_up_vectors", "smooth_printpoints_layer_heights"] def smooth_printpoint_attribute( @@ -47,7 +45,7 @@ def smooth_printpoint_attribute( # first smoothen the values for ppt in print_organizer.printpoints_iterator(): if get_attr_value(ppt) is None: - raise ValueError('The attribute you are trying to smooth has not been assigned a value') + raise ValueError("The attribute you are trying to smooth has not been assigned a value") attrs = np.array([get_attr_value(ppt) for ppt in print_organizer.printpoints_iterator()]) @@ -62,13 +60,11 @@ def smooth_printpoint_attribute( for i, ppt in enumerate(print_organizer.printpoints_iterator()): val = attrs[i] # Convert back from numpy type if needed - set_attr_value(ppt, val.tolist() if hasattr(val, 'tolist') else float(val)) + set_attr_value(ppt, val.tolist() if hasattr(val, "tolist") else float(val)) -def smooth_printpoints_layer_heights( - print_organizer: BasePrintOrganizer, iterations: int, strength: float -) -> None: - """ This function is an example for how the 'smooth_printpoint_attribute' function can be used. 
""" +def smooth_printpoints_layer_heights(print_organizer: BasePrintOrganizer, iterations: int, strength: float) -> None: + """This function is an example for how the 'smooth_printpoint_attribute' function can be used.""" def get_ppt_layer_height(printpoint): return printpoint.layer_height # get value @@ -79,10 +75,8 @@ def set_ppt_layer_height(printpoint, v): smooth_printpoint_attribute(print_organizer, iterations, strength, get_ppt_layer_height, set_ppt_layer_height) -def smooth_printpoints_up_vectors( - print_organizer: BasePrintOrganizer, iterations: int, strength: float -) -> None: - """ This function is an example for how the 'smooth_printpoint_attribute' function can be used. """ +def smooth_printpoints_up_vectors(print_organizer: BasePrintOrganizer, iterations: int, strength: float) -> None: + """This function is an example for how the 'smooth_printpoint_attribute' function can be used.""" def get_ppt_up_vec(printpoint): return printpoint.up_vector # get value diff --git a/src/compas_slicer/print_organization/print_organization_utilities/extruder_toggle.py b/src/compas_slicer/print_organization/print_organization_utilities/extruder_toggle.py index 3ad8a4cd..a9c4da2e 100644 --- a/src/compas_slicer/print_organization/print_organization_utilities/extruder_toggle.py +++ b/src/compas_slicer/print_organization/print_organization_utilities/extruder_toggle.py @@ -11,9 +11,7 @@ from compas_slicer.slicers import BaseSlicer -__all__ = ['set_extruder_toggle', - 'override_extruder_toggle', - 'check_assigned_extruder_toggle'] +__all__ = ["set_extruder_toggle", "override_extruder_toggle", "check_assigned_extruder_toggle"] def set_extruder_toggle(print_organizer: BasePrintOrganizer, slicer: BaseSlicer) -> None: @@ -52,7 +50,7 @@ def set_extruder_toggle(print_organizer: BasePrintOrganizer, slicer: BaseSlicer) interrupt_path = True # the last path of a vertical layer should be interrupted - if i < len(slicer.layers)-1 and not slicer.layers[i+1].paths[0].is_closed: + if i < len(slicer.layers) - 1 and not slicer.layers[i + 1].paths[0].is_closed: interrupt_path = True # --- create extruder toggles @@ -62,7 +60,6 @@ def set_extruder_toggle(print_organizer: BasePrintOrganizer, slicer: BaseSlicer) logger.exception(f"no path found for layer {i}") else: for k, printpoint in enumerate(path_printpoints): - if interrupt_path: if k == len(path_printpoints) - 1: printpoint.extruder_toggle = False @@ -95,7 +92,7 @@ def override_extruder_toggle(print_organizer: BasePrintOrganizer, override_value def check_assigned_extruder_toggle(print_organizer: BasePrintOrganizer) -> bool: - """ Checks that all the printpoints have an assigned extruder toggle. 
""" + """Checks that all the printpoints have an assigned extruder toggle.""" all_toggles_assigned = True for printpoint in print_organizer.printpoints_iterator(): if printpoint.extruder_toggle is None: diff --git a/src/compas_slicer/print_organization/print_organization_utilities/gcode.py b/src/compas_slicer/print_organization/print_organization_utilities/gcode.py index 6789e1aa..fab19df4 100644 --- a/src/compas_slicer/print_organization/print_organization_utilities/gcode.py +++ b/src/compas_slicer/print_organization/print_organization_utilities/gcode.py @@ -163,9 +163,7 @@ def _write_purge_line(gb: GcodeBuilder, config: GcodeConfig) -> None: gb.cmd(f"G1 X{PURGE_START_X} Y{PURGE_START_Y}", "move to purge start") # Calculate extrusion for purge lines - e_purge = _calc_extrusion( - PURGE_LENGTH, PURGE_HEIGHT, config.layer_width, config.filament_diameter - ) + e_purge = _calc_extrusion(PURGE_LENGTH, PURGE_HEIGHT, config.layer_width, config.filament_diameter) # First purge line gb.cmd(f"G1 Y{PURGE_START_Y + PURGE_LENGTH} E{e_purge:.3f}", "purge line 1") @@ -182,9 +180,7 @@ def _write_purge_line(gb: GcodeBuilder, config: GcodeConfig) -> None: gb.blank() -def _write_toolpath( - gb: GcodeBuilder, print_organizer: BasePrintOrganizer, config: GcodeConfig -) -> float: +def _write_toolpath(gb: GcodeBuilder, print_organizer: BasePrintOrganizer, config: GcodeConfig) -> float: """Write the main toolpath G-code. Returns the final Z height for use in footer. @@ -294,9 +290,7 @@ def _write_footer(gb: GcodeBuilder, config: GcodeConfig, final_z: float) -> None # ============================================================================= -def create_gcode_text( - print_organizer: BasePrintOrganizer, config: GcodeConfig | None = None -) -> str: +def create_gcode_text(print_organizer: BasePrintOrganizer, config: GcodeConfig | None = None) -> str: """Create G-code text from organized print points. Parameters diff --git a/src/compas_slicer/print_organization/print_organization_utilities/linear_velocity.py b/src/compas_slicer/print_organization/print_organization_utilities/linear_velocity.py index 8cc868dd..deb9966b 100644 --- a/src/compas_slicer/print_organization/print_organization_utilities/linear_velocity.py +++ b/src/compas_slicer/print_organization/print_organization_utilities/linear_velocity.py @@ -12,10 +12,12 @@ from compas_slicer.print_organization import BasePrintOrganizer -__all__ = ['set_linear_velocity_constant', - 'set_linear_velocity_per_layer', - 'set_linear_velocity_by_range', - 'set_linear_velocity_by_overhang'] +__all__ = [ + "set_linear_velocity_constant", + "set_linear_velocity_per_layer", + "set_linear_velocity_by_range", + "set_linear_velocity_by_overhang", +] def set_linear_velocity_constant(print_organizer: BasePrintOrganizer, v: float = 25.0) -> None: @@ -32,9 +34,7 @@ def set_linear_velocity_constant(print_organizer: BasePrintOrganizer, v: float = printpoint.velocity = v -def set_linear_velocity_per_layer( - print_organizer: BasePrintOrganizer, per_layer_velocities: list[float] -) -> None: +def set_linear_velocity_per_layer(print_organizer: BasePrintOrganizer, per_layer_velocities: list[float]) -> None: """Sets the linear velocity parameter of the printpoints depending on the selected type. 
Parameters @@ -47,8 +47,8 @@ def set_linear_velocity_per_layer( logger.info("Setting per-layer linear velocity") if len(per_layer_velocities) != print_organizer.number_of_layers: raise ValueError( - f'Wrong number of velocity values: got {len(per_layer_velocities)}, ' - f'need {print_organizer.number_of_layers} (one per layer)' + f"Wrong number of velocity values: got {len(per_layer_velocities)}, " + f"need {print_organizer.number_of_layers} (one per layer)" ) for printpoint, i, _j, _k in print_organizer.printpoints_indices_iterator(): printpoint.velocity = per_layer_velocities[i] @@ -80,7 +80,7 @@ def set_linear_velocity_by_range( for printpoint in print_organizer.printpoints_iterator(): param = param_func(printpoint) if param is None: - raise ValueError('The param_func does not return any value for calculating the velocity range.') + raise ValueError("The param_func does not return any value for calculating the velocity range.") if bound_remapping: v = remap(param, parameter_range[0], parameter_range[1], velocity_range[0], velocity_range[1]) else: @@ -109,7 +109,9 @@ def set_linear_velocity_by_overhang( bound_remapping: bool """ - def param_func(ppt): return dot_vectors(ppt.mesh_normal, Vector(0.0, 0.0, 1.0)) + def param_func(ppt): + return dot_vectors(ppt.mesh_normal, Vector(0.0, 0.0, 1.0)) + # returns values from 0.0 (no overhang) to 1.0 (horizontal overhang) set_linear_velocity_by_range(print_organizer, param_func, overhang_range, velocity_range, bound_remapping) diff --git a/src/compas_slicer/print_organization/print_organization_utilities/safety_printpoints.py b/src/compas_slicer/print_organization/print_organization_utilities/safety_printpoints.py index 5523a6c9..c414e81f 100644 --- a/src/compas_slicer/print_organization/print_organization_utilities/safety_printpoints.py +++ b/src/compas_slicer/print_organization/print_organization_utilities/safety_printpoints.py @@ -14,7 +14,7 @@ from compas_slicer.print_organization import BasePrintOrganizer -__all__ = ['add_safety_printpoints'] +__all__ = ["add_safety_printpoints"] def add_safety_printpoints(print_organizer: BasePrintOrganizer, z_hop: float = 10.0) -> None: @@ -28,7 +28,7 @@ def add_safety_printpoints(print_organizer: BasePrintOrganizer, z_hop: float = 1 Vertical distance (in millimeters) of the safety point above the PrintPoint. 
""" if not check_assigned_extruder_toggle(print_organizer): - raise ValueError('You need to set the extruder toggles first, before you can create safety points') + raise ValueError("You need to set the extruder toggles first, before you can create safety points") logger.info(f"Generating safety print points with height {z_hop} mm") from compas_slicer.geometry import PrintPointsCollection @@ -47,7 +47,6 @@ def add_safety_printpoints(print_organizer: BasePrintOrganizer, z_hop: float = 1 # add safety printpoints if there is an interruption if printpoint.extruder_toggle is False: - # safety ppt after current printpoint new_path.printpoints.append(create_safety_printpoint(printpoint, z_hop, False)) diff --git a/src/compas_slicer/print_organization/print_organization_utilities/wait_time.py b/src/compas_slicer/print_organization/print_organization_utilities/wait_time.py index a160da5c..3bf725c0 100644 --- a/src/compas_slicer/print_organization/print_organization_utilities/wait_time.py +++ b/src/compas_slicer/print_organization/print_organization_utilities/wait_time.py @@ -12,15 +12,13 @@ from compas_slicer.print_organization import BasePrintOrganizer -__all__ = ['set_wait_time_on_sharp_corners', - 'set_wait_time_based_on_extruder_toggle', - 'override_wait_time'] +__all__ = ["set_wait_time_on_sharp_corners", "set_wait_time_based_on_extruder_toggle", "override_wait_time"] WaitType = Literal[ - 'wait_before_extrusion', - 'wait_after_extrusion', - 'wait_before_and_after_extrusion', - 'wait_at_sharp_corners', + "wait_before_extrusion", + "wait_after_extrusion", + "wait_before_and_after_extrusion", + "wait_at_sharp_corners", ] @@ -53,7 +51,7 @@ def set_wait_time_on_sharp_corners( printpoint.wait_time = wait_time printpoint.blend_radius = 0.0 # 0.0 blend radius for points where the robot will wait number_of_wait_points += 1 - logger.info(f'Added wait times for {number_of_wait_points} points') + logger.info(f"Added wait times for {number_of_wait_points} points") def set_wait_time_based_on_extruder_toggle( @@ -77,7 +75,9 @@ def set_wait_time_based_on_extruder_toggle( for printpoint in print_organizer.printpoints_iterator(): if printpoint.extruder_toggle is None: - raise ValueError('You need to set the extruder toggles first, before you can automatically set the wait time') + raise ValueError( + "You need to set the extruder toggles first, before you can automatically set the wait time" + ) logger.info("Setting wait time") @@ -107,9 +107,9 @@ def set_wait_time_based_on_extruder_toggle( next_ppt.blend_radius = 0.0 number_of_wait_points += 1 else: - logger.error(f'Unknown wait type: {wait_type}') + logger.error(f"Unknown wait type: {wait_type}") - logger.info(f'Added wait times for {number_of_wait_points} points') + logger.info(f"Added wait times for {number_of_wait_points} points") def override_wait_time(print_organizer: BasePrintOrganizer, override_value: float) -> None: diff --git a/src/compas_slicer/print_organization/scalar_field_print_organizer.py b/src/compas_slicer/print_organization/scalar_field_print_organizer.py index 3af4bfa4..ee2162aa 100644 --- a/src/compas_slicer/print_organization/scalar_field_print_organizer.py +++ b/src/compas_slicer/print_organization/scalar_field_print_organizer.py @@ -18,7 +18,7 @@ from compas_slicer.slicers import ScalarFieldSlicer -__all__ = ['ScalarFieldPrintOrganizer'] +__all__ = ["ScalarFieldPrintOrganizer"] class ScalarFieldPrintOrganizer(BasePrintOrganizer): @@ -52,7 +52,7 @@ def __init__( from compas_slicer.slicers import ScalarFieldSlicer if not 
isinstance(slicer, ScalarFieldSlicer): - raise TypeError('Please provide a ScalarFieldSlicer') + raise TypeError("Please provide a ScalarFieldSlicer") BasePrintOrganizer.__init__(self, slicer) self.DATA_PATH = DATA_PATH self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH) @@ -71,7 +71,7 @@ def __init__( raise ValueError("Only one brim horizontal layer is currently supported.") if not self.horizontal_layers[0].is_brim: raise ValueError("Only one brim horizontal layer is currently supported.") - logger.info('Slicer has one horizontal brim layer.') + logger.info("Slicer has one horizontal brim layer.") self.g_evaluation: GradientEvaluation = self.add_gradient_to_vertices() @@ -81,9 +81,8 @@ def __repr__(self) -> str: def create_printpoints(self) -> None: """Create the print points of the fabrication process.""" count = 0 - logger.info('Creating print points ...') + logger.info("Creating print points ...") with progressbar.ProgressBar(max_value=self.slicer.number_of_points) as bar: - for _i, layer in enumerate(self.slicer.layers): print_layer = PrintLayer() @@ -111,8 +110,8 @@ def create_printpoints(self) -> None: for layer in self.printpoints: for path in layer: for pp in path: - grad_norm = pp.attributes['gradient_norm'] - grad = pp.attributes['gradient'] + grad_norm = pp.attributes["gradient_norm"] + grad = pp.attributes["gradient"] pp.distance_to_support = grad_norm pp.layer_height = grad_norm pp.up_vector = Vector(*normalize_vector(grad)) @@ -123,14 +122,14 @@ def add_gradient_to_vertices(self) -> GradientEvaluation: g_evaluation.compute_gradient() g_evaluation.compute_gradient_norm() - utils.save_to_json(g_evaluation.vertex_gradient_norm, self.OUTPUT_PATH, 'gradient_norm.json') - utils.save_to_json(utils.point_list_to_dict(g_evaluation.vertex_gradient), self.OUTPUT_PATH, 'gradient.json') + utils.save_to_json(g_evaluation.vertex_gradient_norm, self.OUTPUT_PATH, "gradient_norm.json") + utils.save_to_json(utils.point_list_to_dict(g_evaluation.vertex_gradient), self.OUTPUT_PATH, "gradient.json") - self.slicer.mesh.update_default_vertex_attributes({'gradient': 0.0}) - self.slicer.mesh.update_default_vertex_attributes({'gradient_norm': 0.0}) + self.slicer.mesh.update_default_vertex_attributes({"gradient": 0.0}) + self.slicer.mesh.update_default_vertex_attributes({"gradient_norm": 0.0}) for i, (_v_key, data) in enumerate(self.slicer.mesh.vertices(data=True)): - data['gradient'] = g_evaluation.vertex_gradient[i] - data['gradient_norm'] = g_evaluation.vertex_gradient_norm[i] + data["gradient"] = g_evaluation.vertex_gradient[i] + data["gradient_norm"] = g_evaluation.vertex_gradient_norm[i] return g_evaluation diff --git a/src/compas_slicer/slicers/__init__.py b/src/compas_slicer/slicers/__init__.py index 9a9d2362..04c47d65 100644 --- a/src/compas_slicer/slicers/__init__.py +++ b/src/compas_slicer/slicers/__init__.py @@ -1,6 +1,5 @@ """Mesh slicing algorithms.""" - from .base_slicer import * # noqa: F401 F403 from .interpolation_slicer import * # noqa: F401 E402 F403 from .planar_slicer import * # noqa: F401 E402 F403 @@ -8,4 +7,4 @@ from .scalar_field_slicer import * # noqa: F401 E402 F403 from .uv_slicer import * # noqa: F401 E402 F403 -__all__ = [name for name in dir() if not name.startswith('_')] +__all__ = [name for name in dir() if not name.startswith("_")] diff --git a/src/compas_slicer/slicers/base_slicer.py b/src/compas_slicer/slicers/base_slicer.py index e4e9030f..74f5d82a 100644 --- a/src/compas_slicer/slicers/base_slicer.py +++ b/src/compas_slicer/slicers/base_slicer.py @@ -44,7 
+44,9 @@ def __init__(self, mesh: Mesh) -> None: utils.check_triangular_mesh(mesh) self.mesh = mesh - logger.info(f"Input Mesh with: {len(list(self.mesh.vertices()))} vertices, {len(list(self.mesh.faces()))} faces") + logger.info( + f"Input Mesh with: {len(list(self.mesh.vertices()))} vertices, {len(list(self.mesh.faces()))} faces" + ) self.layer_height: float | None = None self.layers: list[Layer] = [] @@ -143,7 +145,7 @@ def find_vertical_layers_with_first_path_on_base(self) -> tuple[list[Path], list Paths on base and their vertical layer indices. """ - vertices = list(self.mesh.vertices_attributes('xyz')) + vertices = list(self.mesh.vertices_attributes("xyz")) bbox = bounding_box(vertices) z_min = min(p[2] for p in bbox) paths_on_base = [] diff --git a/src/compas_slicer/slicers/interpolation_slicer.py b/src/compas_slicer/slicers/interpolation_slicer.py index 4ca2e036..41c9fe15 100644 --- a/src/compas_slicer/slicers/interpolation_slicer.py +++ b/src/compas_slicer/slicers/interpolation_slicer.py @@ -20,7 +20,7 @@ from compas_slicer.pre_processing import InterpolationSlicingPreprocessor -__all__ = ['InterpolationSlicer'] +__all__ = ["InterpolationSlicer"] class InterpolationSlicer(BaseSlicer): @@ -47,7 +47,7 @@ def __init__( preprocessor: InterpolationSlicingPreprocessor | None = None, config: InterpolationConfig | None = None, ) -> None: - logger.info('InterpolationSlicer') + logger.info("InterpolationSlicer") BaseSlicer.__init__(self, mesh) # make sure the mesh of the preprocessor and the mesh of the slicer match @@ -64,20 +64,21 @@ def __init__( def generate_paths(self) -> None: """Generate curved paths.""" if not self.preprocessor: - raise ValueError('You need to provide a pre-processor in order to generate paths.') + raise ValueError("You need to provide a pre-processor in order to generate paths.") avg_layer_height = self.config.avg_layer_height n = find_no_of_isocurves(self.preprocessor.target_LOW, self.preprocessor.target_HIGH, avg_layer_height) params_list = get_interpolation_parameters_list(n) - logger.info(f'{n} paths will be generated') + logger.info(f"{n} paths will be generated") vertical_layers_manager = VerticalLayersManager(avg_layer_height) # create paths + layers with progressbar.ProgressBar(max_value=len(params_list)) as bar: for i, param in enumerate(params_list): - assign_interpolation_distance_to_mesh_vertices(self.mesh, param, self.preprocessor.target_LOW, - self.preprocessor.target_HIGH) + assign_interpolation_distance_to_mesh_vertices( + self.mesh, param, self.preprocessor.target_LOW, self.preprocessor.target_HIGH + ) contours = ScalarFieldContours(self.mesh) contours.compute() contours.add_to_vertical_layers_manager(vertical_layers_manager) diff --git a/src/compas_slicer/slicers/planar_slicer.py b/src/compas_slicer/slicers/planar_slicer.py index b6888b03..a493bf4b 100644 --- a/src/compas_slicer/slicers/planar_slicer.py +++ b/src/compas_slicer/slicers/planar_slicer.py @@ -7,7 +7,7 @@ from compas_slicer.slicers.base_slicer import BaseSlicer from compas_slicer.slicers.planar_slicing import create_planar_paths -__all__ = ['PlanarSlicer'] +__all__ = ["PlanarSlicer"] class PlanarSlicer(BaseSlicer): @@ -31,7 +31,7 @@ def __init__( layer_height: float = 2.0, slice_height_range: tuple[float, float] | None = None, ) -> None: - logger.info('PlanarSlicer') + logger.info("PlanarSlicer") BaseSlicer.__init__(self, mesh) self.layer_height = layer_height @@ -42,12 +42,14 @@ def __repr__(self) -> str: def generate_paths(self) -> None: """Generate the planar slicing paths.""" - z = 
[self.mesh.vertex_attribute(key, 'z') for key in self.mesh.vertices()] + z = [self.mesh.vertex_attribute(key, "z") for key in self.mesh.vertices()] min_z, max_z = min(z), max(z) if self.slice_height_range: if min_z <= self.slice_height_range[0] <= max_z and min_z <= self.slice_height_range[1] <= max_z: - logger.info(f"Slicing mesh in range from Z = {self.slice_height_range[0]} to Z = {self.slice_height_range[1]}.") + logger.info( + f"Slicing mesh in range from Z = {self.slice_height_range[0]} to Z = {self.slice_height_range[1]}." + ) max_z = min_z + self.slice_height_range[1] min_z = min_z + self.slice_height_range[0] else: diff --git a/src/compas_slicer/slicers/planar_slicing/planar_slicing_cgal.py b/src/compas_slicer/slicers/planar_slicing/planar_slicing_cgal.py index 31667c29..c6a5ca70 100644 --- a/src/compas_slicer/slicers/planar_slicing/planar_slicing_cgal.py +++ b/src/compas_slicer/slicers/planar_slicing/planar_slicing_cgal.py @@ -13,7 +13,7 @@ from compas.datastructures import Mesh -__all__ = ['create_planar_paths'] +__all__ = ["create_planar_paths"] def create_planar_paths(mesh: Mesh, planes: list[Plane]) -> list[Layer]: @@ -70,10 +70,8 @@ def create_planar_paths(mesh: Mesh, planes: list[Plane]) -> list[Layer]: return layers -def get_grouped_list( - item_list: list[Any], key_function: Callable[[Any], Any] -) -> list[list[Any]]: - """ Groups layers horizontally. """ +def get_grouped_list(item_list: list[Any], key_function: Callable[[Any], Any]) -> list[list[Any]]: + """Groups layers horizontally.""" # first sort, because grouping only groups consecutively matching items sorted_list = sorted(item_list, key=key_function) # group items, using the provided key function diff --git a/src/compas_slicer/slicers/scalar_field_slicer.py b/src/compas_slicer/slicers/scalar_field_slicer.py index d366ad92..1e02ea58 100644 --- a/src/compas_slicer/slicers/scalar_field_slicer.py +++ b/src/compas_slicer/slicers/scalar_field_slicer.py @@ -17,7 +17,7 @@ from compas.datastructures import Mesh -__all__ = ['ScalarFieldSlicer'] +__all__ = ["ScalarFieldSlicer"] class ScalarFieldSlicer(BaseSlicer): @@ -45,14 +45,14 @@ def __init__( no_of_isocurves: int, config: InterpolationConfig | None = None, ) -> None: - logger.info('ScalarFieldSlicer') + logger.info("ScalarFieldSlicer") BaseSlicer.__init__(self, mesh) self.no_of_isocurves = no_of_isocurves self.scalar_field: list[float] = list(np.array(scalar_field) - np.min(np.array(scalar_field))) self.config = config if config else InterpolationConfig() - mesh.update_default_vertex_attributes({'scalar_field': 0}) + mesh.update_default_vertex_attributes({"scalar_field": 0}) def generate_paths(self) -> None: """Generate isocontours.""" @@ -67,9 +67,9 @@ def generate_paths(self) -> None: for i in range(0, self.no_of_isocurves + 1): for vkey, data in self.mesh.vertices(data=True): if i == 0: - data['scalar_field'] = self.scalar_field[vkey] - 0.05 * step # things can be tricky in the edge + data["scalar_field"] = self.scalar_field[vkey] - 0.05 * step # things can be tricky in the edge else: - data['scalar_field'] = self.scalar_field[vkey] - i * step + data["scalar_field"] = self.scalar_field[vkey] - i * step contours = ScalarFieldContours(self.mesh) contours.compute() diff --git a/src/compas_slicer/slicers/slice_utilities/contours_base.py b/src/compas_slicer/slicers/slice_utilities/contours_base.py index 9b2faccc..0f604c94 100644 --- a/src/compas_slicer/slicers/slice_utilities/contours_base.py +++ b/src/compas_slicer/slicers/slice_utilities/contours_base.py @@ -62,7 +62,7 
@@ def compute(self) -> None: nodeDict = dict(G.nodes(data=True)) for key in sorted_indices_dict: sorted_indices = sorted_indices_dict[key] - self.sorted_edge_clusters[key] = [nodeDict[node_index]['mesh_edge'] for node_index in sorted_indices] + self.sorted_edge_clusters[key] = [nodeDict[node_index]["mesh_edge"] for node_index in sorted_indices] self.sorted_point_clusters[key] = [self.intersection_data[e] for e in self.sorted_edge_clusters[key]] self.label_closed_paths() @@ -78,22 +78,20 @@ def find_intersections(self) -> None: """ Fills in the dict self.intersection_data: key=(ui,vi) : [xi,yi,zi], - dict self.edge_to_index: key=(u1,v1) : point_index. """ + dict self.edge_to_index: key=(u1,v1) : point_index.""" for edge in list(self.mesh.edges()): if self.edge_is_intersected(edge[0], edge[1]): point = self.find_zero_crossing_data(edge[0], edge[1]) if point and edge not in self.intersection_data and tuple(reversed(edge)) not in self.intersection_data: - # create [edge - point] dictionary - self.intersection_data[edge] = {} - self.intersection_data[edge] = Point(point[0], point[1], point[2]) + # create [edge - point] dictionary + self.intersection_data[edge] = {} + self.intersection_data[edge] = Point(point[0], point[1], point[2]) # create [edge - point] dictionary for i, e in enumerate(self.intersection_data): self.edge_to_index[e] = i - def save_point_clusters_as_polylines_to_json( - self, DATA_PATH: str | FilePath, name: str - ) -> None: + def save_point_clusters_as_polylines_to_json(self, DATA_PATH: str | FilePath, name: str) -> None: all_points: dict[str, Any] = {} for i, key in enumerate(self.sorted_point_clusters): all_points[str(i)] = utils.point_list_to_dict(self.sorted_point_clusters[key]) @@ -102,19 +100,17 @@ def save_point_clusters_as_polylines_to_json( # --- Abstract methods @abstractmethod def edge_is_intersected(self, u: int, v: int) -> bool: - """ Returns True if the edge u,v has a zero-crossing, False otherwise. """ + """Returns True if the edge u,v has a zero-crossing, False otherwise.""" # to be implemented by the inheriting classes pass @abstractmethod def find_zero_crossing_data(self, u: int, v: int) -> list[float] | None: - """ Finds the position of the zero-crossing on the edge u,v. 
""" + """Finds the position of the zero-crossing on the edge u,v.""" # to be implemented by the inheriting classes pass - def add_to_vertical_layers_manager( - self, vertical_layers_manager: VerticalLayersManager - ) -> None: + def add_to_vertical_layers_manager(self, vertical_layers_manager: VerticalLayersManager) -> None: for key in self.sorted_point_clusters: pts = self.sorted_point_clusters[key] if len(pts) > 3: # discard curves that are too small diff --git a/src/compas_slicer/slicers/slice_utilities/graph_connectivity.py b/src/compas_slicer/slicers/slice_utilities/graph_connectivity.py index 93695062..265cc9ef 100644 --- a/src/compas_slicer/slicers/slice_utilities/graph_connectivity.py +++ b/src/compas_slicer/slicers/slice_utilities/graph_connectivity.py @@ -8,9 +8,7 @@ from compas.datastructures import Mesh from compas.geometry import Point -__all__ = ['create_graph_from_mesh_edges', - 'sort_graph_connected_components', - 'create_graph_from_mesh_vkeys'] +__all__ = ["create_graph_from_mesh_edges", "sort_graph_connected_components", "create_graph_from_mesh_vkeys"] def create_graph_from_mesh_edges( @@ -43,7 +41,7 @@ def create_graph_from_mesh_edges( G.add_node(i, mesh_edge=edge) # node, attribute for node_index, data in G.nodes(data=True): - mesh_edge = data['mesh_edge'] + mesh_edge = data["mesh_edge"] # find current neighboring edges that are also intersected current_edge_connections = [] @@ -51,14 +49,14 @@ def create_graph_from_mesh_edges( if f is not None: face_edges = mesh.face_halfedges(f) for e in face_edges: - if (e != mesh_edge and tuple(reversed(e)) != mesh_edge) \ - and (e in intersection_data or tuple(reversed(e)) in intersection_data): + if (e != mesh_edge and tuple(reversed(e)) != mesh_edge) and ( + e in intersection_data or tuple(reversed(e)) in intersection_data + ): current_edge_connections.append(e) for e in current_edge_connections: # find other_node_index - other_node_index = edge_to_index[e] if e in edge_to_index \ - else edge_to_index[tuple(reversed(e))] + other_node_index = edge_to_index[e] if e in edge_to_index else edge_to_index[tuple(reversed(e))] # add edges to the graph (only if the edge doesn't exist already) if not G.has_edge(node_index, other_node_index) and not G.has_edge(other_node_index, node_index): G.add_edge(node_index, other_node_index) @@ -116,7 +114,6 @@ def sort_graph_connected_components(G: nx.Graph) -> dict[int, list[int]]: current_index = 0 for _j, cp in enumerate(nx.connected_components(G)): - if len(cp) > 1: # we need at least 2 elements to have an edge sorted_node_indices = [] @@ -140,7 +137,7 @@ def sort_graph_connected_components(G: nx.Graph) -> dict[int, list[int]]: sorted_node_indices.append(node_index_2) if len(sorted_node_indices) != len(cp): - raise RuntimeError(f'Node sorting error: {len(sorted_node_indices)} sorted != {len(cp)} in component') + raise RuntimeError(f"Node sorting error: {len(sorted_node_indices)} sorted != {len(cp)} in component") sorted_indices_dict[current_index] = sorted_node_indices current_index += 1 diff --git a/src/compas_slicer/slicers/slice_utilities/scalar_field_contours.py b/src/compas_slicer/slicers/slice_utilities/scalar_field_contours.py index 35519e57..490c0f95 100644 --- a/src/compas_slicer/slicers/slice_utilities/scalar_field_contours.py +++ b/src/compas_slicer/slicers/slice_utilities/scalar_field_contours.py @@ -10,7 +10,7 @@ if TYPE_CHECKING: from compas.datastructures import Mesh -__all__ = ['ScalarFieldContours'] +__all__ = ["ScalarFieldContours"] class ScalarFieldContours: @@ -34,7 +34,7 @@ def 
compute(self) -> None: """Extract zero-level isolines from scalar field.""" from compas_cgal.isolines import isolines - results = isolines(self.mesh, 'scalar_field', isovalues=[0.0]) + results = isolines(self.mesh, "scalar_field", isovalues=[0.0]) for pts in results: points = [Point(*p) for p in pts.tolist()] diff --git a/src/compas_slicer/slicers/slice_utilities/uv_contours.py b/src/compas_slicer/slicers/slice_utilities/uv_contours.py index 9c838bc5..506e31c3 100644 --- a/src/compas_slicer/slicers/slice_utilities/uv_contours.py +++ b/src/compas_slicer/slicers/slice_utilities/uv_contours.py @@ -15,7 +15,7 @@ if TYPE_CHECKING: from compas.datastructures import Mesh -__all__ = ['UVContours'] +__all__ = ["UVContours"] class UVContours(ContoursBase): @@ -25,16 +25,20 @@ def __init__(self, mesh: Mesh, p1: tuple[float, float], p2: tuple[float, float]) self.p2 = p2 # tuple (u,v); second point in uv domain defining the cutting line def uv(self, vkey: int) -> tuple[float, float]: - uv: tuple[float, float] = self.mesh.vertex[vkey]['uv'] + uv: tuple[float, float] = self.mesh.vertex[vkey]["uv"] return uv def edge_is_intersected(self, v1: int, v2: int) -> bool: - """ Returns True if the edge v1,v2 intersects the line in the uv domain, False otherwise. """ + """Returns True if the edge v1,v2 intersects the line in the uv domain, False otherwise.""" p = intersection_line_line_xy((self.p1, self.p2), (self.uv(v1), self.uv(v2))) - return bool(p and is_point_on_segment_xy(p, (self.uv(v1), self.uv(v2))) and is_point_on_segment_xy(p, (self.p1, self.p2))) + return bool( + p + and is_point_on_segment_xy(p, (self.uv(v1), self.uv(v2))) + and is_point_on_segment_xy(p, (self.p1, self.p2)) + ) def find_zero_crossing_data(self, v1: int, v2: int) -> list[float] | None: - """ Finds the position of the zero-crossing on the edge u,v. 
""" + """Finds the position of the zero-crossing on the edge u,v.""" p = intersection_line_line_xy((self.p1, self.p2), (self.uv(v1), self.uv(v2))) d1, d2 = distance_point_point_xy(self.uv(v1), p), distance_point_point_xy(self.uv(v2), p) if d1 + d2 > 0: diff --git a/src/compas_slicer/slicers/uv_slicer.py b/src/compas_slicer/slicers/uv_slicer.py index 7f7c34bc..2f22b317 100644 --- a/src/compas_slicer/slicers/uv_slicer.py +++ b/src/compas_slicer/slicers/uv_slicer.py @@ -15,7 +15,7 @@ from compas.datastructures import Mesh -__all__ = ['UVSlicer'] +__all__ = ["UVSlicer"] class UVSlicer(BaseSlicer): @@ -45,7 +45,7 @@ def __init__( no_of_isocurves: int, config: InterpolationConfig | None = None, ) -> None: - logger.info('UVSlicer') + logger.info("UVSlicer") BaseSlicer.__init__(self, mesh) self.vkey_to_uv = vkey_to_uv @@ -57,13 +57,13 @@ def __init__( u_arr = np.array(u) * float(no_of_isocurves + 1) vkey_to_i = self.mesh.key_index() - mesh.update_default_vertex_attributes({'uv': 0}) + mesh.update_default_vertex_attributes({"uv": 0}) for vkey in mesh.vertices(): - mesh.vertex_attribute(vkey, 'uv', (u_arr[vkey_to_i[vkey]], v[vkey_to_i[vkey]])) + mesh.vertex_attribute(vkey, "uv", (u_arr[vkey_to_i[vkey]], v[vkey_to_i[vkey]])) def generate_paths(self) -> None: """Generate isocontours.""" - paths_type = 'flat' # 'spiral' # 'zigzag' + paths_type = "flat" # 'spiral' # 'zigzag' v_left, v_right = 0.0, 1.0 - 1e-5 max_dist = self.config.vertical_layers_max_centroid_dist @@ -75,7 +75,7 @@ def generate_paths(self) -> None: u_val = float(i) if i == 0: u_val += 0.05 # contours are a bit tricky in the edges - if paths_type == 'spiral': + if paths_type == "spiral": u1, u2 = u_val, u_val + 1.0 else: # 'flat' u1 = u2 = u_val diff --git a/src/compas_slicer/utilities/__init__.py b/src/compas_slicer/utilities/__init__.py index 16bd8fb9..8244dac7 100644 --- a/src/compas_slicer/utilities/__init__.py +++ b/src/compas_slicer/utilities/__init__.py @@ -4,4 +4,4 @@ from .terminal_command import * # noqa: F401 F403 from .utils import * # noqa: F401 E402 F403 -__all__ = [name for name in dir() if not name.startswith('_')] +__all__ = [name for name in dir() if not name.startswith("_")] diff --git a/src/compas_slicer/utilities/attributes_transfer.py b/src/compas_slicer/utilities/attributes_transfer.py index cda72f0d..8ab9619c 100644 --- a/src/compas_slicer/utilities/attributes_transfer.py +++ b/src/compas_slicer/utilities/attributes_transfer.py @@ -14,14 +14,14 @@ from compas_slicer.geometry import PrintPointsCollection - -__all__ = ['transfer_mesh_attributes_to_printpoints'] +__all__ = ["transfer_mesh_attributes_to_printpoints"] ###################### # PrintPoints Attributes ###################### + def transfer_mesh_attributes_to_printpoints( mesh: Mesh, printpoints: PrintPointsCollection, @@ -45,7 +45,7 @@ def transfer_mesh_attributes_to_printpoints( The collection of printpoints to transfer attributes to. """ - logger.info('Transferring mesh attributes to the printpoints.') + logger.info("Transferring mesh attributes to the printpoints.") all_pts = [ppt.pt for ppt in printpoints.iter_printpoints()] @@ -62,9 +62,8 @@ def transfer_mesh_attributes_to_printpoints( def is_reserved_attribute(attr: str) -> bool: - """ Returns True if the attribute name is a reserved, false otherwise. 
""" - taken_attributes = ['x', 'y', 'z', 'uv', - 'scalar_field'] + """Returns True if the attribute name is a reserved, false otherwise.""" + taken_attributes = ["x", "y", "z", "uv", "scalar_field"] return attr in taken_attributes @@ -86,9 +85,10 @@ def transfer_mesh_attributes_to_point(mesh: Mesh, fkey: int, proj_pt: list[float """ vs = mesh.face_vertices(fkey) - bar_coords = barycentric_coordinates(proj_pt, triangle=(mesh.vertex_coordinates(vs[0]), - mesh.vertex_coordinates(vs[1]), - mesh.vertex_coordinates(vs[2]))) + bar_coords = barycentric_coordinates( + proj_pt, + triangle=(mesh.vertex_coordinates(vs[0]), mesh.vertex_coordinates(vs[1]), mesh.vertex_coordinates(vs[2])), + ) # get face attributes face_attrs = mesh.face_attributes(fkey) @@ -120,5 +120,5 @@ def check_that_attribute_can_be_multiplied(attr_name: str, value: Any) -> bool: return True except TypeError as err: raise ValueError( - f'Attention! The following vertex attribute cannot be multiplied with a scalar. {attr_name} : {type(value)!s} ' + f"Attention! The following vertex attribute cannot be multiplied with a scalar. {attr_name} : {type(value)!s} " ) from err diff --git a/src/compas_slicer/utilities/terminal_command.py b/src/compas_slicer/utilities/terminal_command.py index 9686ae9c..d0d7aedc 100644 --- a/src/compas_slicer/utilities/terminal_command.py +++ b/src/compas_slicer/utilities/terminal_command.py @@ -1,9 +1,10 @@ """ TerminalCommand class is used to run commands from python as if we are in a shell/cmd """ + import subprocess as p -__all__ = ['TerminalCommand'] +__all__ = ["TerminalCommand"] class TerminalCommand: @@ -22,8 +23,8 @@ def __init__(self, cmd, cwd=None, env=None): process = p.Popen(cmd, stdout=p.PIPE, stderr=p.PIPE, shell=True, cwd=cwd, env=env) stdout, stderr = process.communicate() - self.stdout = stdout.decode('utf8') - self.stderr = stderr.decode('utf8') + self.stdout = stdout.decode("utf8") + self.stderr = stderr.decode("utf8") self.return_code = process.returncode process.kill() diff --git a/src/compas_slicer/utilities/utils.py b/src/compas_slicer/utilities/utils.py index c6de5543..07e444b8 100644 --- a/src/compas_slicer/utilities/utils.py +++ b/src/compas_slicer/utilities/utils.py @@ -29,36 +29,38 @@ from compas_slicer.geometry import PrintPoint, PrintPointsCollection -__all__ = ['remap', - 'remap_unbound', - 'get_output_directory', - 'save_to_json', - 'load_from_json', - 'is_jsonable', - 'get_jsonable_attributes', - 'save_to_text_file', - 'flattened_list_of_dictionary', - 'interrupt', - 'point_list_to_dict', - 'point_list_from_dict', - 'get_closest_mesh_vkey_to_pt', - 'get_mesh_cotmatrix', - 'get_mesh_cotans', - 'get_mesh_massmatrix', - 'get_mesh_cotmatrix_igl', - 'get_mesh_cotans_igl', - 'get_closest_pt_index', - 'get_closest_pt', - 'pull_pts_to_mesh_faces', - 'get_mesh_vertex_coords_with_attribute', - 'get_dict_key_from_value', - 'find_next_printpoint', - 'find_previous_printpoint', - 'smooth_vectors', - 'get_normal_of_path_on_xy_plane', - 'get_all_files_with_name', - 'get_closest_mesh_normal_to_pt', - 'check_package_is_installed'] +__all__ = [ + "remap", + "remap_unbound", + "get_output_directory", + "save_to_json", + "load_from_json", + "is_jsonable", + "get_jsonable_attributes", + "save_to_text_file", + "flattened_list_of_dictionary", + "interrupt", + "point_list_to_dict", + "point_list_from_dict", + "get_closest_mesh_vkey_to_pt", + "get_mesh_cotmatrix", + "get_mesh_cotans", + "get_mesh_massmatrix", + "get_mesh_cotmatrix_igl", + "get_mesh_cotans_igl", + "get_closest_pt_index", + 
"get_closest_pt", + "pull_pts_to_mesh_faces", + "get_mesh_vertex_coords_with_attribute", + "get_dict_key_from_value", + "find_next_printpoint", + "find_previous_printpoint", + "smooth_vectors", + "get_normal_of_path_on_xy_plane", + "get_all_files_with_name", + "get_closest_mesh_normal_to_pt", + "check_package_is_installed", +] def remap(input_val: float, in_from: float, in_to: float, out_from: float, out_to: float) -> float: @@ -95,7 +97,7 @@ def get_output_directory(path: str | Path) -> Path: The path to the 'output' directory. """ - output_dir = Path(path) / 'output' + output_dir = Path(path) / "output" output_dir.mkdir(exist_ok=True) return output_dir @@ -196,9 +198,8 @@ def smooth_vectors(vectors: list[Vector], strength: float, iterations: int) -> l ####################################### # json -def save_to_json( - data: dict[str, Any] | dict[int, Any] | list[Any], filepath: str | Path, name: str -) -> None: + +def save_to_json(data: dict[str, Any] | dict[int, Any] | list[Any], filepath: str | Path, name: str) -> None: """Save data to JSON file. Parameters @@ -258,13 +259,14 @@ def get_jsonable_attributes(attributes_dict: dict[str, Any]) -> dict[str, Any]: if isinstance(attr, np.ndarray): jsonable_attr[attr_key] = list(attr) else: - jsonable_attr[attr_key] = 'non serializable attribute' + jsonable_attr[attr_key] = "non serializable attribute" return jsonable_attr ####################################### # text file + def save_to_text_file(data: str, filepath: str | Path, name: str) -> None: """Save text to file. @@ -286,6 +288,7 @@ def save_to_text_file(data: str, filepath: str | Path, name: str) -> None: ####################################### # mesh utils + def check_triangular_mesh(mesh: Mesh) -> None: """Check if mesh is triangular, raise TypeError if not. @@ -322,7 +325,7 @@ def get_closest_mesh_vkey_to_pt(mesh: Mesh, pt: Point) -> int: Closest vertex key. """ - vertex_tupples = [(v_key, Point(data['x'], data['y'], data['z'])) for v_key, data in mesh.vertices(data=True)] + vertex_tupples = [(v_key, Point(data["x"], data["y"], data["z"])) for v_key, data in mesh.vertices(data=True)] vertex_tupples = sorted(vertex_tupples, key=lambda v_tupple: distance_point_point_sqrd(pt, v_tupple[1])) closest_vkey: int = vertex_tupples[0][0] return closest_vkey @@ -428,6 +431,7 @@ def get_normal_of_path_on_xy_plane(k: int, point: Point, path: SlicerPath, mesh: ####################################### # mesh matrix utils (NumPy implementations) + def get_mesh_cotmatrix(mesh: Mesh, fix_boundaries: bool = True) -> csr_matrix: """Get the cotangent Laplacian matrix of the mesh. @@ -494,7 +498,7 @@ def cotangent(a: NDArray, b: NDArray) -> NDArray: # Zero out rows for boundary vertices boundary_mask = np.zeros(n_vertices, dtype=bool) for i, (_vkey, vdata) in enumerate(mesh.vertices(data=True)): - if vdata.get('boundary', 0) > 0: + if vdata.get("boundary", 0) > 0: boundary_mask[i] = True if np.any(boundary_mask): @@ -590,6 +594,7 @@ def get_mesh_massmatrix(mesh: Mesh) -> csr_matrix: ####################################### # dict utils + def point_list_to_dict(pts_list: list[Point | Vector]) -> dict[int, list[float]]: """Convert list of points/vectors to dict for JSON. 
@@ -671,9 +676,7 @@ def get_dict_key_from_value(dictionary: dict[Any, Any], val: Any) -> Any | None:
     return None
 
 
-def find_next_printpoint(
-    printpoints: PrintPointsCollection, i: int, j: int, k: int
-) -> PrintPoint | None:
+def find_next_printpoint(printpoints: PrintPointsCollection, i: int, j: int, k: int) -> PrintPoint | None:
     """
     Returns the next printpoint from the current printpoint if it exists, otherwise returns None.
 
@@ -706,9 +709,7 @@ def find_next_printpoint(
     return next_ppt
 
 
-def find_previous_printpoint(
-    printpoints: PrintPointsCollection, i: int, j: int, k: int
-) -> PrintPoint | None:
+def find_previous_printpoint(printpoints: PrintPointsCollection, i: int, j: int, k: int) -> PrintPoint | None:
     """
     Returns the previous printpoint from the current printpoint if it exists, otherwise returns None.
 
@@ -744,22 +745,22 @@ def find_previous_printpoint(
 
 #######################################
 # control flow
 
+
 def interrupt() -> None:
     """
     Interrupts the flow of the code while it is running.
     It asks for the user to press a enter to continue or abort.
     """
     value = input("Press enter to continue, Press 1 to abort ")
-    if isinstance(value, str) and value == '1':
+    if isinstance(value, str) and value == "1":
         raise ValueError("Aborted")
 
 
 #######################################
 # load all files with name
 
-def get_all_files_with_name(
-    startswith: str, endswith: str, DATA_PATH: str | Path
-) -> list[str]:
+
+def get_all_files_with_name(startswith: str, endswith: str, DATA_PATH: str | Path) -> list[str]:
     """
     Finds all the filenames in the DATA_PATH that start and end with the provided strings
 
@@ -774,9 +775,8 @@ def get_all_files_with_name(
     list[str]
         All the filenames
     """
-    files = [f.name for f in Path(DATA_PATH).iterdir()
-             if f.name.startswith(startswith) and f.name.endswith(endswith)]
-    logger.info(f'Reloading: {files}')
+    files = [f.name for f in Path(DATA_PATH).iterdir() if f.name.startswith(startswith) and f.name.endswith(endswith)]
+    logger.info(f"Reloading: {files}")
     return files
 
 
@@ -785,11 +785,12 @@ def get_all_files_with_name(
 
 
 def check_package_is_installed(package_name: str) -> None:
-    """ Throws an error if igl python bindings are not installed in the current environment. """
-    packages = TerminalCommand('conda list').get_split_output_strings()
+    """Raises PluginNotInstalledError if the given package is not found in the current conda environment."""
+    packages = TerminalCommand("conda list").get_split_output_strings()
     if package_name not in packages:
-        raise PluginNotInstalledError(" ATTENTION! Package : " + package_name +
-                                      " is missing! Please follow installation guide to install it.")
+        raise PluginNotInstalledError(
+            " ATTENTION! Package : " + package_name + " is missing! Please follow installation guide to install it."
+ ) if __name__ == "__main__": diff --git a/src/compas_slicer/visualization/visualization.py b/src/compas_slicer/visualization/visualization.py index 5f488ce8..e116e7e5 100644 --- a/src/compas_slicer/visualization/visualization.py +++ b/src/compas_slicer/visualization/visualization.py @@ -1,4 +1,5 @@ """Visualization utilities for compas_slicer using compas_viewer.""" + from __future__ import annotations import sys @@ -87,5 +88,5 @@ def plot_networkx_graph(G: nx.Graph) -> None: import matplotlib.pyplot as plt plt.subplot(121) - nx.draw(G, with_labels=True, font_weight='bold', node_color=range(len(list(G.nodes())))) + nx.draw(G, with_labels=True, font_weight="bold", node_color=range(len(list(G.nodes())))) plt.show() diff --git a/src/compas_slicer_ghpython/install.py b/src/compas_slicer_ghpython/install.py index c17b6653..dead4e1c 100644 --- a/src/compas_slicer_ghpython/install.py +++ b/src/compas_slicer_ghpython/install.py @@ -38,9 +38,7 @@ def after_rhino_install(installed_packages): dst = Path(dstdir) / src.name shutil.copyfile(src, dst) - results.append( - ("compas_slicer_ghpython", f"Installed {len(userobjects)} GH User Objects on {dstdir}", True) - ) + results.append(("compas_slicer_ghpython", f"Installed {len(userobjects)} GH User Objects on {dstdir}", True)) except PermissionError as err: raise Exception("Please close all instances of Rhino first and then rerun the command") from err diff --git a/src/compas_slicer_ghpython/visualization.py b/src/compas_slicer_ghpython/visualization.py index c1af17fd..7db4b605 100644 --- a/src/compas_slicer_ghpython/visualization.py +++ b/src/compas_slicer_ghpython/visualization.py @@ -11,8 +11,9 @@ ####################################### # --- Slicer + def load_slicer(path, folder_name, json_name): - """ Loads slicer data. """ + """Loads slicer data.""" data = load_json_file(path, folder_name, json_name) mesh = None @@ -21,9 +22,8 @@ def load_slicer(path, folder_name, json_name): all_points = [] if data: - - if 'mesh' in data: - compas_mesh = Mesh.__from_data__(data['mesh']) + if "mesh" in data: + compas_mesh = Mesh.__from_data__(data["mesh"]) artist = MeshArtist(compas_mesh) artist.show_mesh = True artist.show_vertices = False @@ -31,25 +31,25 @@ def load_slicer(path, folder_name, json_name): artist.show_faces = False mesh = artist.draw() else: - print('No mesh has been saved in the json file.') + print("No mesh has been saved in the json file.") - if 'layers' in data: - layers_data = data['layers'] + if "layers" in data: + layers_data = data["layers"] for i in range(len(layers_data)): paths_nested_list.append([]) # save each layer on a different list layer_data = layers_data[str(i)] - paths_data = layer_data['paths'] + paths_data = layer_data["paths"] for j in range(len(paths_data)): path_data = paths_data[str(j)] pts = [] - are_closed.append(path_data['is_closed']) + are_closed.append(path_data["is_closed"]) - if len(path_data['points']) > 2: # ignore smaller curves that throw errors - for k in range(len(path_data['points'])): - pt = path_data['points'][str(k)] + if len(path_data["points"]) > 2: # ignore smaller curves that throw errors + for k in range(len(path_data["points"])): + pt = path_data["points"][str(k)] pt = rs.AddPoint(pt[0], pt[1], pt[2]) # re-create points pts.append(pt) all_points.extend(pts) @@ -57,9 +57,9 @@ def load_slicer(path, folder_name, json_name): paths_nested_list[-1].append(path) else: - print('No layers have been saved in the json file. Is this the correct json?') + print("No layers have been saved in the json file. 
Is this the correct json?") - print(f'The slicer contains {len(paths_nested_list)} layers. ') + print(f"The slicer contains {len(paths_nested_list)} layers. ") paths_nested_list = list_to_ghtree(paths_nested_list) return mesh, paths_nested_list, are_closed, all_points @@ -67,6 +67,7 @@ def load_slicer(path, folder_name, json_name): ####################################### # --- Printpoints + class PrintPointGH: def __init__(self, pt): self.pt = pt @@ -92,20 +93,31 @@ def __init__(self): self.paths = [] -def load_nested_printpoints(path, folder_name, json_name, load_frames, load_layer_heights, load_up_vectors, - load_normals, load_closest_support_pt, load_velocities, load_wait_times, - load_blend_radiuses, load_extruder_toggles): - """ Loads a dict of compas_slicer printpoints. """ +def load_nested_printpoints( + path, + folder_name, + json_name, + load_frames, + load_layer_heights, + load_up_vectors, + load_normals, + load_closest_support_pt, + load_velocities, + load_wait_times, + load_blend_radiuses, + load_extruder_toggles, +): + """Loads a dict of compas_slicer printpoints.""" data = load_json_file(path, folder_name, json_name) layers = [] if data: for i in range(len(data)): - layer_key = 'layer_' + str(i) + layer_key = "layer_" + str(i) layer = LayerGH() for j in range(len(data[layer_key])): - path_key = 'path_' + str(j) + path_key = "path_" + str(j) path = PathGH() for k in range(len(data[layer_key][path_key])): ppt_data = data[layer_key][path_key][str(k)] @@ -120,10 +132,14 @@ def load_nested_printpoints(path, folder_name, json_name, load_frames, load_laye ppt.layer_height = ppt_data["layer_height"] if load_up_vectors: - ppt.up_vector = rg.Vector3d(ppt_data["up_vector"][0], ppt_data["up_vector"][1], ppt_data["up_vector"][2]) + ppt.up_vector = rg.Vector3d( + ppt_data["up_vector"][0], ppt_data["up_vector"][1], ppt_data["up_vector"][2] + ) if load_normals: - ppt.mesh_normal = rg.Vector3d(ppt_data["mesh_normal"][0], ppt_data["mesh_normal"][1], ppt_data["mesh_normal"][2]) + ppt.mesh_normal = rg.Vector3d( + ppt_data["mesh_normal"][0], ppt_data["mesh_normal"][1], ppt_data["mesh_normal"][2] + ) if load_closest_support_pt: cp = ppt_data["closest_support_pt"] @@ -148,7 +164,7 @@ def load_nested_printpoints(path, folder_name, json_name, load_frames, load_laye def load_printpoints(path, folder_name, json_name): - """ Loads a dict of compas_slicer printpoints. """ + """Loads a dict of compas_slicer printpoints.""" data = load_json_file(path, folder_name, json_name) # geometry data @@ -201,18 +217,28 @@ def load_printpoints(path, folder_name, json_name): blend_radiuses.append(data_point["blend_radius"]) extruder_toggles.append(data_point["extruder_toggle"]) - return points, frames, layer_heights, up_vectors, mesh_normals, closest_support, velocities, wait_times, \ - blend_radiuses, extruder_toggles + return ( + points, + frames, + layer_heights, + up_vectors, + mesh_normals, + closest_support, + velocities, + wait_times, + blend_radiuses, + extruder_toggles, + ) ####################################### # --- Lightweight path visualization + def lightweight_path_visualization(points, extruder_toggles, diameter, pipe_resolution): - """ Visualize print paths with simple lines or pipes. 
""" + """Visualize print paths with simple lines or pipes.""" # check input - assert len(points) == len(extruder_toggles), \ - 'Wrong length of input lists' + assert len(points) == len(extruder_toggles), "Wrong length of input lists" print_path_pipes = [] travel_path_lines = [] @@ -232,19 +258,21 @@ def lightweight_path_visualization(points, extruder_toggles, diameter, pipe_reso ####################################### # --- Render path visualization -def render_path_visualization(points, mesh_normals, layer_heights, up_vectors, extruder_toggles, cross_section, - planar_printing): - """ Visualize print paths with simple loft surfaces. """ + +def render_path_visualization( + points, mesh_normals, layer_heights, up_vectors, extruder_toggles, cross_section, planar_printing +): + """Visualize print paths with simple loft surfaces.""" # check input - assert len(points) == len(mesh_normals) == len(layer_heights) == len(up_vectors) == len(extruder_toggles), \ - 'Wrong length of input lists' + assert len(points) == len(mesh_normals) == len(layer_heights) == len(up_vectors) == len(extruder_toggles), ( + "Wrong length of input lists" + ) loft_surfaces = [] travel_path_lines = [] if points[0] and mesh_normals[0] and layer_heights[0] and up_vectors[0]: # check if any of the values are None - if planar_printing: # then make sure that all normals lie on the xy plane for n in mesh_normals: n[2] = 0 @@ -278,7 +306,7 @@ def render_path_visualization(points, mesh_normals, layer_heights, up_vectors, e travel_path_lines.append(line) # add to travel path list else: - print('At least one of the inputs that you have provided are invalid. ') + print("At least one of the inputs that you have provided are invalid. ") return loft_surfaces, travel_path_lines @@ -286,19 +314,20 @@ def render_path_visualization(points, mesh_normals, layer_heights, up_vectors, e ####################################### # --- Tool visualization + def tool_visualization(origin_coords, mesh, planes, i): - """ Visualize example tool motion. """ + """Visualize example tool motion.""" if len(planes) == 0: - print('Please provide valid planes') + print("Please provide valid planes") return None, None if not planes[0]: - print('Please provide valid planes') + print("Please provide valid planes") return None, None i = min(i, len(planes) - 1) # make sure i doesn't go beyond available number of planes passed_path = None - assert planes[0], 'The planes you have provided are invalid.' + assert planes[0], "The planes you have provided are invalid." origin = [float(origin_coords[0]), float(origin_coords[1]), float(origin_coords[2])] o = rg.Point3d(origin[0], origin[1], origin[2]) @@ -312,7 +341,7 @@ def tool_visualization(origin_coords, mesh, planes, i): T = rg.Transform.PlaneToPlane(ee_frame, target_frame) mesh = rs.TransformObject(rs.CopyObject(mesh), T) - passed_path = rs.AddPolyline([plane.Origin for plane in planes[:i + 1]]) + passed_path = rs.AddPolyline([plane.Origin for plane in planes[: i + 1]]) return mesh, passed_path @@ -320,9 +349,10 @@ def tool_visualization(origin_coords, mesh, planes, i): ####################################### # --- Create_targets (Curved slicing) + def load_multiple_meshes(starts_with, ends_with, path, folder_name): - """ Load all the meshes that have the specified name, and print them in different colors. 
""" - output_dir = Path(path) / folder_name / 'output' + """Load all the meshes that have the specified name, and print them in different colors.""" + output_dir = Path(path) / folder_name / "output" filenames = get_files_with_name(starts_with, ends_with, str(output_dir)) meshes = [Mesh.from_obj(str(output_dir / filename)) for filename in filenames] @@ -343,6 +373,7 @@ def load_multiple_meshes(starts_with, ends_with, path, folder_name): ####################################### # --- Load json points + def load_json_points(path, folder_name, json_name): """ Loads a json file that stores a dictionary of N points in the format: @@ -361,6 +392,7 @@ def load_json_points(path, folder_name, json_name): ####################################### # --- Load json vectors + def load_json_vectors(path, folder_name, json_name): """ Loads a json file from the 'output' folder, @@ -380,6 +412,7 @@ def load_json_vectors(path, folder_name, json_name): ####################################### # --- Load json polylines + def load_json_polylines(path, folder_name, json_name): """ Loads a json file that stores a dictionary of N polylines in the format: @@ -407,6 +440,7 @@ def load_json_polylines(path, folder_name, json_name): ####################################### # --- Load json BaseBoundaries + def load_base_boundaries(path, folder_name, json_name): """ Loads a json file that stores a dictionary of BaseBoundary classes in the format: @@ -419,8 +453,8 @@ def load_base_boundaries(path, folder_name, json_name): number_of_boundaries = len(data) for i in range(len(data)): - p = data[str(i)]['points'] - v = data[str(i)]['up_vectors'] + p = data[str(i)]["points"] + v = data[str(i)]["up_vectors"] points.extend([rg.Point3d(p[key][0], p[key][1], p[key][2]) for key in p]) vectors.extend([rg.Vector3d(v[key][0], v[key][1], v[key][2]) for key in v]) @@ -431,13 +465,14 @@ def load_base_boundaries(path, folder_name, json_name): ####################################### # --- Load json BaseBoundaries + def distance_fields_interpolation(path, folder_name, json_name1, json_name2, weight): - """ Simple interpolation of the distance fields that are stored in the two json files. """ + """Simple interpolation of the distance fields that are stored in the two json files.""" distances_LOW = load_json_file(path, folder_name, json_name1) distances_HIGH = load_json_file(path, folder_name, json_name2) if distances_LOW and distances_HIGH: - assert (len(distances_LOW) == len(distances_HIGH)), 'Wrong number of distances provided. ' + assert len(distances_LOW) == len(distances_HIGH), "Wrong number of distances provided. " return [d2 * weight - d1 * (1 - weight) for d1, d2 in zip(distances_LOW, distances_HIGH)] @@ -445,16 +480,17 @@ def distance_fields_interpolation(path, folder_name, json_name1, json_name2, wei # --- utilities ############################################## + def missing_input(): - """ Deals with cases where the user has not defined all the necessary inputs. """ - print('Please provide all the inputs') + """Deals with cases where the user has not defined all the necessary inputs.""" + print("Please provide all the inputs") def load_json_file(path, folder_name, json_name, in_output_folder=True): - """ Loads data from json. 
""" + """Loads data from json.""" base = Path(path) / folder_name if in_output_folder: - filename = base / 'output' / json_name + filename = base / "output" / json_name else: filename = base / json_name data = None @@ -469,36 +505,35 @@ def load_json_file(path, folder_name, json_name, in_output_folder=True): def save_json_file(data, path, folder_name, json_name): - """ Saves data to json. """ + """Saves data to json.""" filename = Path(path) / folder_name / json_name filename.write_text(json.dumps(data, indent=3, sort_keys=True)) print(f"Saved to Json: '{filename}'") def get_closest_point_index(pt, pts): - """ Closest point index of the pts from pt. """ + """Closest point index of the pts from pt.""" distances = [rs.Distance(p, pt) for p in pts] min_index = distances.index(min(distances)) return min_index def distance_of_pt_from_crv(pt, crv): - """ Smallest distance from point to curve. """ + """Smallest distance from point to curve.""" param = rs.CurveClosestPoint(crv, pt) cp = rs.EvaluateCurve(crv, param) return rs.Distance(pt, cp) def get_files_with_name(startswith, endswith, DATA_PATH): - """ Find all files with the specified start and end in the data path. """ - files = [f.name for f in Path(DATA_PATH).iterdir() - if f.name.startswith(startswith) and f.name.endswith(endswith)] - print(f'Found {len(files)} files with the given criteria : {files}') + """Find all files with the specified start and end in the data path.""" + files = [f.name for f in Path(DATA_PATH).iterdir() if f.name.startswith(startswith) and f.name.endswith(endswith)] + print(f"Found {len(files)} files with the given criteria : {files}") return files def get_color(i, total): - """ Returns a color per index interpolating the colorspace of 5 colors that are hardcoded (c1 .. c5). """ + """Returns a color per index interpolating the colorspace of 5 colors that are hardcoded (c1 .. c5).""" i, total = float(i), float(total) c1 = rg.Vector3d(234, 38, 0.0) # 0.00 @@ -526,7 +561,7 @@ def get_color(i, total): def remap_unbound(input_val, in_from, in_to, out_from, out_to): - """ Remap numbers without clamping values. """ + """Remap numbers without clamping values.""" out_range = out_to - out_from in_range = in_to - in_from in_val = input_val - in_from @@ -536,7 +571,7 @@ def remap_unbound(input_val, in_from, in_to, out_from, out_to): def blend_union_list(values, r): - """ Returns a blend union of the elements of the list, with blend radius r. """ + """Returns a blend union of the elements of the list, with blend radius r.""" d_result = 9999999 # very big number for d in values: d_result = blend_union(d_result, d, r) @@ -544,7 +579,7 @@ def blend_union_list(values, r): def blend_union(da, db, r): - """ Blend union of the distances da, db with blend radius r. 
""" + """Blend union of the distances da, db with blend radius r.""" e = max(r - abs(da - db), 0) return min(da, db) - e * e * 0.25 / r diff --git a/tests/test_examples.py b/tests/test_examples.py index b20cb06d..d16c4927 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -3,19 +3,19 @@ import pytest -EXAMPLES_DIR = Path(__file__).parent.parent / 'examples' +EXAMPLES_DIR = Path(__file__).parent.parent / "examples" examples = [ - ('1_planar_slicing_simple', 'example_1_planar_slicing_simple'), - ('2_curved_slicing', 'ex2_curved_slicing'), - ('3_planar_slicing_vertical_sorting', 'example_3_planar_vertical_sorting'), - ('4_gcode_generation', 'example_4_gcode'), - ('5_non_planar_slicing_on_custom_base', 'scalar_field_slicing'), - ('6_attributes_transfer', 'example_6_attributes_transfer'), + ("1_planar_slicing_simple", "example_1_planar_slicing_simple"), + ("2_curved_slicing", "ex2_curved_slicing"), + ("3_planar_slicing_vertical_sorting", "example_3_planar_vertical_sorting"), + ("4_gcode_generation", "example_4_gcode"), + ("5_non_planar_slicing_on_custom_base", "scalar_field_slicing"), + ("6_attributes_transfer", "example_6_attributes_transfer"), ] -@pytest.mark.parametrize('folder,module', examples) +@pytest.mark.parametrize("folder,module", examples) def test_example(folder, module): """Run example as integration test.""" example_path = str(EXAMPLES_DIR / folder) diff --git a/tests/test_performance.py b/tests/test_performance.py index 4e6b825d..19288e40 100644 --- a/tests/test_performance.py +++ b/tests/test_performance.py @@ -213,9 +213,7 @@ def test_face_gradient_medium_mesh(self, benchmark, medium_mesh): V, F, _, face_normals, face_areas = mesh_to_arrays(medium_mesh) scalar_field = V[:, 2].copy() - result = benchmark( - face_gradient_from_scalar_field, V, F, scalar_field, face_normals, face_areas - ) + result = benchmark(face_gradient_from_scalar_field, V, F, scalar_field, face_normals, face_areas) assert result.shape == (len(F), 3) @@ -278,7 +276,7 @@ def test_batch_closest_should_be_fast(self): batch_closest_points(query, target) elapsed = (time.perf_counter() - start) / 10 - assert elapsed < 0.05, f"batch_closest_points too slow: {elapsed*1000:.1f}ms" + assert elapsed < 0.05, f"batch_closest_points too slow: {elapsed * 1000:.1f}ms" def test_vertex_gradient_should_be_fast(self, medium_mesh): """Vertex gradient on 2k face mesh should complete in < 20ms.""" @@ -292,4 +290,4 @@ def test_vertex_gradient_should_be_fast(self, medium_mesh): vertex_gradient_from_face_gradient(V, F, face_gradient, face_areas) elapsed = (time.perf_counter() - start) / 10 - assert elapsed < 0.02, f"vertex_gradient too slow: {elapsed*1000:.1f}ms" + assert elapsed < 0.02, f"vertex_gradient too slow: {elapsed * 1000:.1f}ms" diff --git a/tests/test_planar_print_organization_horizontal_layers.py b/tests/test_planar_print_organization_horizontal_layers.py index 607397d5..f31c1407 100644 --- a/tests/test_planar_print_organization_horizontal_layers.py +++ b/tests/test_planar_print_organization_horizontal_layers.py @@ -12,12 +12,12 @@ from compas_slicer.print_organization.print_organization_utilities.extruder_toggle import check_assigned_extruder_toggle from compas.datastructures import Mesh -DATA_PATH = Path(__file__).parent / 'tests_data' -stl_to_test = ['distorted_v_closed_low_res.obj'] +DATA_PATH = Path(__file__).parent / "tests_data" +stl_to_test = ["distorted_v_closed_low_res.obj"] def create_setup(filename): - """ Setting up the stage for testing. 
""" + """Setting up the stage for testing.""" compas_mesh = Mesh.from_obj(DATA_PATH / filename) slicer = PlanarSlicer(compas_mesh, layer_height=20) slicer.slice_model() @@ -30,25 +30,27 @@ def create_setup(filename): def test_planar_set_extruder_toggle_for_horizontal_layers(): - """ Tests set_extruder_toggle on planar slicer. """ + """Tests set_extruder_toggle on planar slicer.""" for filename in stl_to_test: slicer, print_organizer = create_setup(filename) pp_dict = print_organizer.printpoints_dict set_extruder_toggle(print_organizer, slicer) - assert check_assigned_extruder_toggle(print_organizer), \ - "Not all extruder toggles have been assigned after using 'set_extruder_toggle()'. \nFilename : " + \ - str(filename) + assert check_assigned_extruder_toggle(print_organizer), ( + "Not all extruder toggles have been assigned after using 'set_extruder_toggle()'. \nFilename : " + + str(filename) + ) for i, layer in enumerate(slicer.layers): - layer_key = 'layer_%d' % i - assert not isinstance(layer, compas_slicer.geometry.VerticalLayer), \ + layer_key = "layer_%d" % i + assert not isinstance(layer, compas_slicer.geometry.VerticalLayer), ( "You are testing vertical layers on a test for planar layers. \nFilename : " + str(filename) + ) # --------------- check each individual path for j, path in enumerate(layer.paths): - path_key = 'path_%d' % j + path_key = "path_%d" % j # (1) --- Find how many trues and falses exist in the path path_extruder_toggles = [pp.extruder_toggle for pp in pp_dict[layer_key][path_key]] @@ -77,28 +79,50 @@ def test_planar_set_extruder_toggle_for_horizontal_layers(): # (3) Check if path has the correct number of interruptions that you decided on step (2) if path_should_be_interrupted_at_end: - assert path_Falses == 1, \ - "On an path that should be interrupted there should be 1 extruder_toggle = " \ - "False, instead you have %d Falses.\n The error came up on layer %d out of " \ - "total %d layers, \n path %d out of total %d paths, \n with %d printpoints. " \ - "" % (path_Falses, i, len(slicer.layers) - 1, j, len(slicer.layers[i].paths) - 1, - len(path_extruder_toggles)) + "\nFilename: " + str(filename) - assert path_extruder_toggles[-1] is False, \ - "Last printpoint of open path does not have extruder_toggle = False. \n The error is on layer " \ - "%d out of total %d layers, \n path %d of total %d paths,\n with %d printpoints. " \ - % (i, len(slicer.layers) - 1, j, len(slicer.layers[i].paths) - 1, - len(path_extruder_toggles)) + "\nFilename: " + str(filename) + assert path_Falses == 1, ( + "On an path that should be interrupted there should be 1 extruder_toggle = " + "False, instead you have %d Falses.\n The error came up on layer %d out of " + "total %d layers, \n path %d out of total %d paths, \n with %d printpoints. " + "" + % ( + path_Falses, + i, + len(slicer.layers) - 1, + j, + len(slicer.layers[i].paths) - 1, + len(path_extruder_toggles), + ) + + "\nFilename: " + + str(filename) + ) + assert path_extruder_toggles[-1] is False, ( + "Last printpoint of open path does not have extruder_toggle = False. \n The error is on layer " + "%d out of total %d layers, \n path %d of total %d paths,\n with %d printpoints. 
" + % (i, len(slicer.layers) - 1, j, len(slicer.layers[i].paths) - 1, len(path_extruder_toggles)) + + "\nFilename: " + + str(filename) + ) else: - assert path_Falses == 0, \ - "On an path that should NOT be interrupted there should be 0 extruder_toggle " \ - "= False, instead you have %d Falses.\n The error came up on layer %d out " \ - "of total %d layers, \n path %d out of total %d paths, \n with %d " \ - "printpoints. " % (path_Falses, i, len(slicer.layers) - 1, j, len(slicer.layers[i].paths) - 1, - len(path_extruder_toggles)) + "\nFilename: " + str(filename) + assert path_Falses == 0, ( + "On an path that should NOT be interrupted there should be 0 extruder_toggle " + "= False, instead you have %d Falses.\n The error came up on layer %d out " + "of total %d layers, \n path %d out of total %d paths, \n with %d " + "printpoints. " + % ( + path_Falses, + i, + len(slicer.layers) - 1, + j, + len(slicer.layers[i].paths) - 1, + len(path_extruder_toggles), + ) + + "\nFilename: " + + str(filename) + ) def test_planar_add_safety_printpoints_for_horizontal_layers(): - """ Tests add_safety_printpoints on planar slicer. """ + """Tests add_safety_printpoints on planar slicer.""" for filename in stl_to_test: slicer, print_organizer = create_setup(filename) @@ -122,20 +146,21 @@ def test_planar_add_safety_printpoints_for_horizontal_layers(): # (3) find resulting number of ppts resulting_ppts_number = print_organizer.number_of_printpoints - assert initial_ppts_number + 2 * total_interruptions == resulting_ppts_number, \ + assert initial_ppts_number + 2 * total_interruptions == resulting_ppts_number, ( "Wrong number of safety points added on file : " + str(filename) + ) def test_planar_set_linear_velocity_constant_for_horizontal_layers(): - """ Tests set_linear_velocity on planar slicer, with constant value. """ + """Tests set_linear_velocity on planar slicer, with constant value.""" pass def test_planar_set_blend_radius_for_horizontal_layers(): - """ Tests set_blend_radius on planar slicer. """ + """Tests set_blend_radius on planar slicer.""" pass -if __name__ == '__main__': +if __name__ == "__main__": test_planar_set_extruder_toggle_for_horizontal_layers() test_planar_add_safety_printpoints_for_horizontal_layers() diff --git a/tests/test_planar_slicing.py b/tests/test_planar_slicing.py index 6bb5bbca..1efe0b2f 100644 --- a/tests/test_planar_slicing.py +++ b/tests/test_planar_slicing.py @@ -6,19 +6,19 @@ from compas_slicer.geometry import Path as SlicerPath from compas_slicer.slicers import PlanarSlicer -DATA_PATH = Path(__file__).parent / 'tests_data' +DATA_PATH = Path(__file__).parent / "tests_data" -compas_mesh = Mesh.from_obj(DATA_PATH / 'cylinder.obj') +compas_mesh = Mesh.from_obj(DATA_PATH / "cylinder.obj") layer_height = 15.0 -z = [compas_mesh.vertex_attribute(key, 'z') for key in compas_mesh.vertices()] +z = [compas_mesh.vertex_attribute(key, "z") for key in compas_mesh.vertices()] min_z, max_z = min(z), max(z) d = abs(min_z - max_z) no_of_layers = int(d / layer_height) + 1 def test_planar_slicing_success(): - """ Tests simple planar slicing. 
""" + """Tests simple planar slicing.""" slicer = PlanarSlicer(compas_mesh, layer_height=layer_height) slicer.slice_model() @@ -29,9 +29,10 @@ def test_planar_slicing_success(): for i in range(len(slicer.layers)): assert len(slicer.layers[i].paths) == 1, "There is a layer with empty Contours list at index %d" % i assert isinstance(slicer.layers[i].paths[0], SlicerPath), "Wrong class type in Layer.Contour list" - assert slicer.layers[i].paths[0].is_closed, "Path resulting from slicing of cylinder using 'planar_compas' is " \ - "open. It should be closed " + assert slicer.layers[i].paths[0].is_closed, ( + "Path resulting from slicing of cylinder using 'planar_compas' is open. It should be closed " + ) -if __name__ == '__main__': +if __name__ == "__main__": pass