• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

mkofler96 / DeepSDFStruct / 20274137249

16 Dec 2025 03:54PM UTC coverage: 80.284%. Remained the same
20274137249

push

github

web-flow
Merge pull request #10 from mkofler96/formatting-all

formatting

328 of 398 branches covered (82.41%)

Branch coverage included in aggregate %.

2779 of 3472 relevant lines covered (80.04%)

0.8 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

79.23
DeepSDFStruct/sampling.py
1
"""
2
SDF Sampling and Dataset Generation
3
====================================
4

5
This module provides tools for sampling points from SDF representations and
6
generating datasets for training neural networks (DeepSDF models). It supports
7
various sampling strategies to create well-distributed training data.
8

9
Key Features
10
------------
11

12
Sampling Strategies
13
    - Uniform sampling in a bounding box
14
    - Surface-focused sampling near the zero level set
15
    - Importance sampling based on SDF gradients
16
    - Sphere-based sampling patterns
17
    - Combined strategies for balanced datasets
18

19
Dataset Generation
20
    - Batch processing of multiple geometries
21
    - Automatic data normalization and standardization
22
    - Support for multiple geometry classes
23
    - Metadata tracking (version, sampling parameters)
24
    - Export to formats compatible with DeepSDF training
25

26
The module is designed to generate high-quality training data for implicit
27
neural representations, with careful attention to sampling near surfaces
28
where accurate reconstruction is most critical.
29

30
Classes
31
-------
32
SampledSDF
33
    Container for sampled points and their SDF values, with utilities
34
    for splitting by sign and visualization.
35
    
36
DataSetInfo
37
    TypedDict for dataset metadata (name, classes, sampling strategy, etc.).
38
    
39
Functions
40
---------
41
process_single_geometry
42
    Process a single geometry to generate training samples.
43
generate_dataset
44
    Batch process multiple geometries to create a complete dataset.
45
sample_sdf_*
46
    Various sampling strategies for different use cases.
47
"""
48

49
import os
1✔
50
import vtk
1✔
51
import numpy as np
1✔
52
import json
1✔
53
import pathlib
1✔
54
import typing
1✔
55
import gustaf as gus
1✔
56
import trimesh
1✔
57
from DeepSDFStruct.SDF import SDFfromMesh, SDFBase
1✔
58
import DeepSDFStruct
1✔
59

60
# from analysis.problems.homogenization import computeHomogenizedMaterialProperties
61
import splinepy
1✔
62
import torch
1✔
63
from collections import defaultdict
1✔
64
from tqdm import tqdm
1✔
65
import logging
1✔
66
import datetime
1✔
67
from importlib.metadata import version
1✔
68

69
# Package-level logger: messages from this module propagate under the
# DeepSDFStruct logger namespace so applications can configure them centrally.
logger = logging.getLogger(DeepSDFStruct.__name__)
70

71

72
class DataSetInfo(typing.TypedDict):
    """Metadata for a generated SDF dataset.

    Attributes
    ----------
    dataset_name : str
        Unique identifier for the dataset.
    class_names : list of str
        Names of geometry classes in the dataset.
    sampling_strategy : str
        Description of how points were sampled.
    date_created : str
        ISO format timestamp of dataset creation.
    stds : list of float
        Standard deviations used for normalization.
    n_samples : int
        Number of sample points per geometry.
    add_surface_samples : bool
        Whether surface points were included.
    sdf_struct_version : str
        Version of DeepSDFStruct used to create the dataset.
    """

    # All keys are required (TypedDict defaults to total=True); this dict is
    # serialized verbatim to the dataset's summary.json.
    dataset_name: str
    class_names: list[str]
    sampling_strategy: str
    date_created: str
    stds: list[float]
    n_samples: int
    add_surface_samples: bool
    sdf_struct_version: str
103

104

105
class SphereParameters(typing.TypedDict):
    """Parameters defining a sampling sphere."""

    # Sphere center coordinates.
    cx: float
    cy: float
    cz: float
    # Sphere radius.
    r: float
112

113

114
class SampledSDF:
    """Point samples paired with their signed-distance values.

    Wraps a set of query points and the SDF values measured at them, and
    offers small conveniences for combining, splitting and visualizing the
    data.

    Parameters
    ----------
    samples : torch.Tensor
        Point coordinates of shape (N, 3).
    distances : torch.Tensor
        SDF values at the sample points, shape (N, 1).

    Attributes
    ----------
    samples : torch.Tensor
        The sampled point coordinates.
    distances : torch.Tensor
        The SDF distance values.

    Examples
    --------
    >>> import torch
    >>> from DeepSDFStruct.sampling import SampledSDF
    >>>
    >>> points = torch.rand(100, 3)
    >>> distances = torch.randn(100, 1)
    >>> sampled = SampledSDF(points, distances)
    >>>
    >>> # Split by sign
    >>> inside, outside = sampled.split_pos_neg()
    >>> print(f"Inside points: {inside.samples.shape[0]}")
    >>> print(f"Outside points: {outside.samples.shape[0]}")
    """

    samples: torch.Tensor
    distances: torch.Tensor

    def __init__(self, samples, distances):
        self.samples = samples
        self.distances = distances

    def _select(self, row_indices):
        # Build a new SampledSDF restricted to the given row indices.
        return SampledSDF(
            samples=self.samples[row_indices],
            distances=self.distances[row_indices],
        )

    def split_pos_neg(self):
        """Partition the samples by the sign of their distance.

        Returns
        -------
        pos : SampledSDF
            Samples with non-negative distances (outside or on surface).
        neg : SampledSDF
            Samples with negative distances (inside geometry).
        """
        outside_rows = torch.where(self.distances >= 0.0)[0]
        inside_rows = torch.where(self.distances < 0.0)[0]
        return self._select(outside_rows), self._select(inside_rows)

    def create_gus_plottable(self):
        """Build a gustaf Vertices object carrying the distances as vertex data.

        Returns
        -------
        gustaf.Vertices
            Vertices with distance values stored as vertex data.
        """
        plottable = gus.Vertices(vertices=self.samples)
        plottable.vertex_data["distance"] = self.distances
        return plottable

    @property
    def stacked(self):
        """Samples and distances fused into one tensor.

        Returns
        -------
        torch.Tensor
            Tensor of shape (N, 4) with [x, y, z, distance] per row.
        """
        return torch.hstack((self.samples, self.distances))

    def __add__(self, other):
        """Concatenate this dataset with another ``SampledSDF``.

        Parameters
        ----------
        other : SampledSDF
            Dataset appended after this one.

        Returns
        -------
        SampledSDF
            Combined dataset containing all samples from both objects.
        """
        merged_samples = torch.vstack((self.samples, other.samples))
        merged_distances = torch.vstack((self.distances, other.distances))
        return SampledSDF(samples=merged_samples, distances=merged_distances)
226

227

228
def process_single_geometry(args):
    """Sample one geometry and persist the result as an ``.npz`` file.

    All parameters arrive packed in a single tuple so the function can be
    mapped over a worker pool.  The last two entries are callables: one that
    turns the geometry into an SDF, and one that samples that SDF into
    positive/negative point sets.  Skips work (with a warning) when the
    target file already exists and ``show`` is False.
    """
    (
        class_name,
        instance_id,
        geometry,
        outdir,
        dataset_name,
        unify_multipatches,
        n_faces,
        n_samples,
        sampling_strategy,
        show,
        get_sdf_from_geometry,
        sample_sdf,
    ) = args

    logger.info(f"processing {instance_id} in geometry list {class_name}")

    target_folder = pathlib.Path(outdir) / dataset_name / class_name
    target_file = target_folder / f"{instance_id}.npz"

    if not os.path.exists(target_folder):
        os.makedirs(target_folder, exist_ok=True)

    # Avoid recomputing an instance that was already written, unless the
    # caller asked to visualize the sampling.
    if os.path.isfile(target_file) and not show:
        logger.warning(f"File {target_file} already exists")
        return

    sdf = get_sdf_from_geometry(geometry, n_faces, unify_multipatches)
    pos, neg = sample_sdf(
        sdf, show=show, n_samples=n_samples, sampling_strategy=sampling_strategy
    )

    np.savez(target_file, neg=neg.stacked, pos=pos.stacked)
263

264

265
class SDFSampler:
    """Batch sampler that turns collections of geometries into SDF datasets.

    Geometries are registered per class via :meth:`add_class`, converted to
    signed distance functions, and sampled via :meth:`process_geometries`,
    which writes one ``.npz`` file per instance plus a ``summary.json``
    with dataset metadata.  :meth:`write_json` emits a split file listing
    all instances.

    Parameters
    ----------
    outdir : str or pathlib.Path
        Root directory under which the dataset folder is created.
    splitdir : str or pathlib.Path
        Directory for split JSON files (see :meth:`write_json`).
    dataset_name : str
        Name of the dataset subfolder created under ``outdir``.
    unify_multipatches : bool, optional
        If True, multipatch splines are unioned into a single SDF.
    stds : list[float], optional
        Noise standard deviations for surface sampling.  Defaults to
        ``[0.05, 0.025]``.
    overwrite_existing : bool, optional
        Allow reuse of an existing dataset folder instead of raising.

    Raises
    ------
    IsADirectoryError
        If the dataset folder exists and ``overwrite_existing`` is False.
    """

    def __init__(
        self,
        outdir,
        splitdir,
        dataset_name,
        unify_multipatches=True,
        stds=None,
        overwrite_existing=False,
    ) -> None:
        self.outdir = outdir
        self.splitdir = splitdir
        self.dataset_name = dataset_name
        self.unify_multipatches = unify_multipatches
        # class_name -> {instance_id: geometry}
        self.geometries = {}
        # Copy into a fresh list: the previous mutable default argument
        # ([0.05, 0.025]) was shared between all instances.
        self.stds = [0.05, 0.025] if stds is None else list(stds)
        folder_name = pathlib.Path(outdir) / dataset_name
        if os.path.exists(folder_name):
            if not overwrite_existing:
                raise IsADirectoryError(
                    f"Dataset {folder_name} already exists. "
                    "Set overwrite_existing to true to overwrite."
                )
        else:
            os.makedirs(folder_name)

    def add_class(self, geom_list: list, class_name: str) -> None:
        """Register a list of geometries under a class name.

        Each geometry receives an instance id of the form
        ``{class_name}_{index:05}``.
        """
        instances = {}
        for i, geom in enumerate(geom_list):
            instance_name = f"{class_name}_{i:05}"
            instances[instance_name] = geom
        self.geometries[class_name] = instances

    def get_SDF_list(self, n_faces=100) -> list[SDFBase]:
        """Build an SDF object for every registered geometry.

        Parameters
        ----------
        n_faces : int, optional
            Mesh resolution used when extracting faces from splines.

        Returns
        -------
        list[SDFBase]
            One SDF per geometry instance, in registration order.
        """
        sdf_list = []
        for class_name, instance_list in self.geometries.items():
            logger.info(f"processing geometry list {class_name}")
            for instance_id, geometry in tqdm(
                instance_list.items(), desc="Processing instances"
            ):
                sdf = self.get_sdf_from_geometry(
                    geometry, n_faces, self.unify_multipatches
                )
                sdf_list.append(sdf)
        return sdf_list

    def process_geometries(
        self,
        sampling_strategy="uniform",
        n_faces=100,
        n_samples: int = 100_000,
        unify_multipatches=True,
        add_surface_samples=True,
        also_save_vtk=False,
    ):
        """Sample every registered geometry and write per-instance files.

        For each instance a ``.npz`` file with ``pos``/``neg`` arrays of
        [x, y, z, sdf] rows is written; existing files are skipped with a
        warning.  A ``summary.json`` describing the dataset is written at
        the end.

        Parameters
        ----------
        sampling_strategy : str, optional
            Strategy name forwarded to ``random_sample_sdf``.
        n_faces : int, optional
            Mesh resolution for spline-based geometries.
        n_samples : int, optional
            Number of random samples per geometry (default 100_000; the
            previous default ``1e5`` was a float despite the int annotation).
        unify_multipatches : bool, optional
            NOTE(review): currently unused — ``self.unify_multipatches``
            from the constructor is applied instead; confirm intent.
        add_surface_samples : bool, optional
            Additionally draw noisy surface samples (trimesh input only).
        also_save_vtk : bool, optional
            Also export each instance as a ``.vtp`` point cloud.
        """
        for class_name, instance_list in self.geometries.items():
            logger.info(f"processing geometry list {class_name}")
            for instance_id, geometry in tqdm(
                instance_list.items(), desc="Processing instances"
            ):
                file_name = f"{instance_id}.npz"

                folder_name = pathlib.Path(self.outdir) / self.dataset_name / class_name
                fname = folder_name / file_name
                if not os.path.exists(folder_name):
                    os.makedirs(folder_name)
                if os.path.isfile(fname):
                    logger.warning(f"File {fname} already exists")
                    continue
                sdf = self.get_sdf_from_geometry(
                    geometry, n_faces, self.unify_multipatches
                )
                sampled_sdf = random_sample_sdf(
                    sdf,
                    bounds=(-1, 1),
                    n_samples=int(n_samples),
                    type=sampling_strategy,
                )
                if add_surface_samples:
                    if not isinstance(geometry, trimesh.Trimesh):
                        logger.warning(
                            "Add surface samples was specified, but geometry "
                            f"is not given as a trimesh.Trimesh but as {type(geometry)}"
                        )
                    else:
                        surf_samples = sample_mesh_surface(
                            sdf,
                            sdf.mesh,
                            int(n_samples // 2),
                            self.stds,
                            device="cpu",
                            dtype=torch.float32,
                        )
                        sampled_sdf += surf_samples
                pos, neg = sampled_sdf.split_pos_neg()

                np.savez(fname, neg=neg.stacked, pos=pos.stacked)
                if also_save_vtk:
                    save_points_to_vtp(
                        fname.with_suffix(".vtp"), neg=neg.stacked, pos=pos.stacked
                    )
        summary = DataSetInfo(
            dataset_name=self.dataset_name,
            class_names=list(self.geometries.keys()),
            sampling_strategy=sampling_strategy,
            date_created=datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
            stds=self.stds,
            n_samples=n_samples,
            add_surface_samples=add_surface_samples,
            sdf_struct_version=version("DeepSDFStruct"),
        )
        with open(
            str(pathlib.Path(self.outdir) / self.dataset_name / "summary.json"), "w"
        ) as f:
            json.dump(summary, f, indent=4)

    def get_sdf_from_geometry(
        self,
        geometry,
        n_faces: int,
        unify_multipatches: bool = True,
        threshold: float = 1e-5,
    ) -> SDFfromMesh:
        """Convert a geometry into a mesh-based SDF.

        Parameters
        ----------
        geometry : splinepy.Multipatch or trimesh.Trimesh
            The geometry to convert.
        n_faces : int
            Face-extraction resolution for spline geometries.
        unify_multipatches : bool, optional
            Union all patches of a multipatch into a single SDF.
        threshold : float, optional
            Threshold forwarded to ``SDFfromMesh``.

        Raises
        ------
        NotImplementedError
            For unsupported geometry types.
        """
        if isinstance(geometry, splinepy.Multipatch):
            if unify_multipatches:
                patch_meshs = []
                for patch in geometry.patches:
                    patch_faces = patch.extract.faces(n_faces)
                    patch_mesh = trimesh.Trimesh(
                        vertices=patch_faces.vertices, faces=patch_faces.faces
                    )
                    # add all patches as meshs to one boolean addition
                    patch_meshs.append(SDFfromMesh(patch_mesh))
                # Union via the SDF '+' operator.
                # NOTE(review): `threshold` is not forwarded to the per-patch
                # SDFfromMesh instances here — confirm whether it should be.
                sdf_geom = patch_meshs[0]
                for pm in patch_meshs[1:]:
                    sdf_geom = sdf_geom + pm
            else:
                sdf_geom = SDFfromMesh(
                    geometry.extract.faces(n_faces), threshold=threshold
                )
        elif isinstance(geometry, trimesh.Trimesh):
            sdf_geom = SDFfromMesh(geometry, threshold=threshold)
        else:
            raise NotImplementedError(
                f"Geometry of type {type(geometry)} not supported yet."
            )

        return sdf_geom

    def get_meshs_from_folder(self, foldername, mesh_type) -> list:
        """
        Reads all mesh files of a given type (extension) from a folder using meshio.

        Parameters
        ----------
        foldername : str
            Path to the folder containing the mesh files.
        mesh_type : str
            Mesh file extension (e.g., 'vtk', 'obj', 'stl', 'msh', 'xdmf').

        Returns
        -------
        list[trimesh.Trimesh]
            A list of trimesh.Trimesh objects loaded from the folder.
        """
        meshes = []

        # Normalize extension (remove dot if present)
        mesh_type = mesh_type.lstrip(".")

        # Iterate through all files in the folder
        for filename in tqdm(os.listdir(foldername), desc="Loading meshs"):
            if filename.lower().endswith("." + mesh_type.lower()):
                filepath = os.path.join(foldername, filename)
                try:
                    faces = gus.io.meshio.load(filepath)
                    trim = trimesh.Trimesh(faces.vertices, faces.elements)
                    meshes.append(trim)
                    # Log the actual path (the placeholder here had been lost).
                    logger.info(f"Loaded mesh: {filepath}")
                except ValueError as e:
                    logger.warning(f"Could not read {filepath}: {e}")

        if not meshes:
            # Use the module logger rather than print, consistent with the
            # rest of this class.
            logger.warning(f"No .{mesh_type} meshes found in {foldername}.")

        return meshes

    def write_json(self, json_fname):
        """Write a DeepSDF-style split file listing all registered instances.

        The resulting JSON maps ``dataset_name -> class_name -> [ids]`` and
        is written to ``{self.splitdir}/{json_fname}``, creating the parent
        directory if needed.
        """
        json_content = defaultdict(lambda: defaultdict(list))
        for class_name, instance_list in self.geometries.items():
            for instance_id, geometry in instance_list.items():
                file_name = f"{instance_id}"
                json_content[self.dataset_name][class_name].append(file_name)
        json_fname = pathlib.Path(f"{self.splitdir}/{json_fname}")
        if not json_fname.parent.is_dir():
            os.makedirs(json_fname.parent)
        with open(json_fname, "w", encoding="utf-8") as f:
            json.dump(json_content, f, indent=4)
468

469

470
def move(t_mesh, new_center):
    """Translate *t_mesh* in place so its bounding-box centroid is *new_center*."""
    offset = new_center - t_mesh.bounding_box.centroid
    t_mesh.vertices += offset
472

473

474
def noisy_sample(t_mesh, std, count):
    """Sample ``count`` surface points from a mesh, perturbed by Gaussian noise.

    Parameters
    ----------
    t_mesh : trimesh.Trimesh
        Mesh providing a ``sample(n)`` method returning an (n, 3) array.
    std : float
        Standard deviation of the added Gaussian noise.
    count : int or float
        Number of points to draw (converted to int).

    Returns
    -------
    numpy.ndarray
        Array of shape (count, 3) of noisy surface samples.
    """
    count = int(count)
    # Use NumPy for the noise: Trimesh.sample returns a NumPy array, and
    # `torch.random.normal` (used previously) does not exist and raised
    # AttributeError.
    return t_mesh.sample(count) + np.random.normal(scale=std, size=(count, 3))
478

479

480
def random_points(count):
    """Draw ``count`` random points inside the unit sphere centered at (0, 0, 0).

    Uses rejection sampling: points are drawn uniformly in [-1, 1]^3 and
    those outside the unit ball are discarded.  If too few survive, the
    whole draw is retried.

    Parameters
    ----------
    count : int
        Number of points to return.

    Returns
    -------
    torch.Tensor
        Tensor of shape (count, 3) with all points of norm <= 1.
    """
    count = int(count)
    # Oversample 3x; about 52% (pi/6) of cube samples land inside the ball.
    # torch.rand replaces the non-existent `torch.random.uniform`.
    points = torch.rand((count * 3, 3)) * 2.0 - 1.0
    points = points[torch.linalg.norm(points, dim=1) <= 1]
    if points.shape[0] < count:
        print("Too little random sampling points. Resampling.......")
        # Retry and actually return the result (the previous code dropped it
        # and passed an invalid `boundary` keyword).
        return random_points(count)
    # Keep a random subset of exactly `count` accepted points
    # (torch.randperm replaces the non-existent `torch.random.choice`).
    return points[torch.randperm(points.shape[0])[:count]]
491

492

493
def random_points_cube(count, box_size):
    """Draw uniform random points in a cube of edge ``box_size`` centered at (0, 0, 0).

    Parameters
    ----------
    count : int
        Number of points to generate.
    box_size : float
        Edge length; coordinates lie in [-box_size/2, box_size/2).

    Returns
    -------
    torch.Tensor
        Tensor of shape (count, 3).
    """
    # torch.rand is uniform on [0, 1); shift and scale it into the cube.
    # (The previous `torch.random.uniform` call does not exist in PyTorch.)
    return torch.rand((int(count), 3)) * box_size - box_size / 2
497

498

499
def random_sample_sdf(
    sdf, bounds, n_samples, type="uniform", device="cpu", dtype=torch.float32
):
    """Draw random query points and evaluate an SDF at them.

    Parameters
    ----------
    sdf : callable
        Signed distance function mapping an (N, 3) tensor to distances.
    bounds : sequence of two floats
        Lower and upper bound of the sampling interval per coordinate.
    n_samples : int
        Number of points to draw.
    type : str, optional
        Sampling strategy: "uniform" (box), "plane" (uniform in the
        z = 0 plane) or "spherical_gaussian" (unit-sphere surface with
        small Gaussian jitter).
    device, dtype : optional
        Torch placement/dtype for the generated samples.

    Returns
    -------
    SampledSDF
        The sampled points together with their SDF values.

    Raises
    ------
    ValueError
        If ``type`` is not a known sampling strategy (previously an unknown
        type fell through to an UnboundLocalError).
    """
    bounds = torch.tensor(bounds, dtype=dtype, device=device)
    if type == "plane":
        # Uniform points in the xy-plane with z fixed to 0.
        # (torch.rand replaces the non-existent `torch.random.uniform`.)
        samples = (
            torch.rand((n_samples, 2), device=device, dtype=dtype)
            * (bounds[1] - bounds[0])
            + bounds[0]
        )
        samples = torch.hstack(
            (samples, torch.zeros((n_samples, 1), device=device, dtype=dtype))
        )
    elif type == "spherical_gaussian":
        # Points normalized onto the unit sphere, then jittered slightly.
        # (torch.randn replaces the non-existent `torch.random.randn` /
        # `torch.random.normal` calls.)
        samples = torch.randn(n_samples, 3, device=device, dtype=dtype)
        samples /= torch.linalg.norm(samples, axis=1).reshape(-1, 1)
        samples = samples + torch.randn(n_samples, 3, device=device, dtype=dtype) * 0.01
    elif type == "uniform":
        samples = (
            torch.rand((n_samples, 3), device=device, dtype=dtype)
            * (bounds[1] - bounds[0])
            + bounds[0]
        )
    else:
        raise ValueError(f"Unknown sampling type: {type}")
    distances = sdf(samples)
    return SampledSDF(samples=samples, distances=distances)
521

522

523
def sample_mesh_surface(
    sdf: SDFBase,
    mesh: gus.Faces,
    n_samples: int,
    stds: list[float],
    device="cpu",
    dtype=torch.float32,
) -> SampledSDF:
    """
    Draw noisy samples around a mesh surface and evaluate an SDF at them.

    Surface points are drawn via ``trimesh.Trimesh.sample`` and, once per
    entry in ``stds``, perturbed by isotropic Gaussian noise; all perturbed
    point sets are stacked and queried against the SDF.

    Args:
        sdf (SDFBase): A callable SDF object mapping 3D points to signed distances.
        mesh (gus.Faces): A mesh object providing vertices and faces.
        n_samples (int): Number of surface points to sample.
        stds (list[float]): Standard deviations for the Gaussian noise.
            - Typical values: [0.05, 0.0015].
            - Larger values spread samples farther from the surface; smaller
              values keep them closer.
        device (str, optional): Torch device for the generated tensors.
        dtype (torch.dtype, optional): Tensor dtype (default: torch.float32).

    Returns:
        SampledSDF: An object containing:
            - samples (torch.Tensor): The perturbed points of shape
              (n_samples * len(stds), 3).
            - distances (torch.Tensor): The SDF values at those points.
    """
    surface_mesh = trimesh.Trimesh(mesh.vertices, mesh.faces)
    base_points = torch.tensor(
        surface_mesh.sample(n_samples), dtype=dtype, device=device
    )

    # One noisy copy of the surface samples per requested std.
    noisy_sets = []
    for sigma in stds:
        jitter = torch.randn((n_samples, 3), device=device, dtype=dtype) * sigma
        noisy_sets.append(base_points + jitter)

    queries = torch.vstack(noisy_sets)
    return SampledSDF(samples=queries, distances=sdf(queries))
568

569

570
def save_points_to_vtp(filename, neg, pos):
    """
    Save pos/neg SDF sample points as a .vtp point cloud using vtkPolyData.

    Each point carries its SDF value in a scalar array named "SDF".

    Parameters
    ----------
    filename : str or pathlib.Path
        Output path for the XML PolyData (.vtp) file.
    neg, pos : array-like of shape (N, 4)
        Rows of [x, y, z, sdf] for the inside/outside samples.
    """
    # Combine points: positives first, then negatives.
    all_points = np.vstack((pos, neg))
    coords = all_points[:, :3]
    sdf_vals = all_points[:, 3]

    # --- Create vtkPoints ---
    vtk_points = vtk.vtkPoints()
    for pt in coords:
        vtk_points.InsertNextPoint(pt)

    # --- Create PolyData ---
    polydata = vtk.vtkPolyData()
    polydata.SetPoints(vtk_points)

    # Add vertex cells (required for points in PolyData)
    verts = vtk.vtkCellArray()
    for i in range(len(coords)):
        verts.InsertNextCell(1)
        verts.InsertCellPoint(i)
    polydata.SetVerts(verts)

    # --- Add SDF scalar values ---
    vtk_array = vtk.vtkDoubleArray()
    vtk_array.SetName("SDF")
    vtk_array.SetNumberOfValues(len(sdf_vals))
    for i, val in enumerate(sdf_vals):
        vtk_array.SetValue(i, val)
    polydata.GetPointData().SetScalars(vtk_array)

    # --- Write XML PolyData (.vtp) ---
    writer = vtk.vtkXMLPolyDataWriter()
    # Callers pass a pathlib.Path (fname.with_suffix(".vtp")); VTK's
    # SetFileName expects a string, so convert explicitly.
    writer.SetFileName(str(filename))
    writer.SetInputData(polydata)
    writer.Write()

    # Include the actual target path in the log message (the interpolation
    # had been lost).
    logger.debug(f"Saved {len(coords)} points with SDF to '{filename}'")
611

612

613
def augment_by_FFD(
    meshs: list[trimesh.Trimesh],
    n_control_points: int = 5,
    std_dev_fraction: float | None = 0.05,
    n_transformations: int = 10,
    save_meshs=False,
) -> list[trimesh.Trimesh]:
    """
    Augment a list of meshes by applying random free-form deformations (FFD).

    For every input mesh, ``n_transformations`` deformed copies are created
    by jittering the control points of a trivariate spline that embeds the
    mesh.  The jitter standard deviation is ``std_dev_fraction`` times the
    control-point spacing along each axis.

    Parameters
    ----------
    meshs : list[trimesh.Trimesh]
        Meshes to augment.
    n_control_points : int, optional
        Control points per spline axis.
    std_dev_fraction : float, optional
        Fraction of the control-point spacing used as noise std.
    n_transformations : int, optional
        Number of deformed copies generated per input mesh.
    save_meshs : bool, optional
        If True, export every deformed mesh to ``tmp/mesh_{i}_{j}.obj``.

    Returns
    -------
    list[trimesh.Trimesh]
        The deformed meshes (``len(meshs) * n_transformations`` entries).
    """
    new_meshs = []

    for i_mesh, mesh in enumerate(tqdm(meshs, desc="Augmenting meshs")):
        bbox = mesh.bounds  # shape (2, 3)
        # Compute approximate spacing between control points along each axis
        spacing = (bbox[1] - bbox[0]) / (n_control_points - 1)
        # Use a fraction of the spacing as per-axis noise std
        std_dev_local = std_dev_fraction * spacing

        for i_FFD in range(n_transformations):
            ffd = splinepy.FFD()
            ffd.mesh = gus.Faces(mesh.vertices, mesh.faces)
            # Refine the spline along all three axes so the deformation
            # is local, then raise the degree for smoothness.
            inner_knots = np.linspace(0, 1, n_control_points)[1:-1]
            for axis in range(3):
                ffd.spline.insert_knots(axis, inner_knots)
            ffd.spline.elevate_degrees([0, 1, 2])
            ffd.spline.control_points += np.random.normal(
                loc=0.0, scale=std_dev_local, size=ffd.spline.control_points.shape
            )
            new_meshs.append(trimesh.Trimesh(ffd.mesh.vertices, ffd.mesh.faces))
            if save_meshs:
                # Make sure the directory exists.  (A redundant
                # `save_meshs = True` self-assignment was removed here.)
                os.makedirs("tmp", exist_ok=True)
                gus.io.meshio.export(f"tmp/mesh_{i_mesh}_{i_FFD}.obj", ffd.mesh)

    return new_meshs
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc