From 158a1ca2c94e892802e42be1773df11ac75f883d Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Mon, 3 Mar 2025 06:16:31 +0000 Subject: [PATCH 01/30] Basic python wrapper --- .devcontainer/devcontainer.json | 22 ++ .gitignore | 274 +++++++++++++++++++++ python/.vscode/settings.json | 11 + python/README.md | 176 ++++++++++++++ python/meshoptimizer/__init__.py | 261 ++++++++++++++++++++ python/meshoptimizer/_loader.py | 227 ++++++++++++++++++ python/meshoptimizer/decoder.py | 187 +++++++++++++++ python/meshoptimizer/encoder.py | 121 ++++++++++ python/meshoptimizer/optimizer.py | 247 +++++++++++++++++++ python/meshoptimizer/simplifier.py | 359 ++++++++++++++++++++++++++++ python/meshoptimizer/utils.py | 139 +++++++++++ python/setup.py | 52 ++++ python/tests/__init__.py | 3 + python/tests/run_tests.py | 38 +++ python/tests/test_encoding.py | 160 +++++++++++++ python/tests/test_mesh_integrity.py | 197 +++++++++++++++ python/tests/test_optimization.py | 219 +++++++++++++++++ python/tests/test_simplification.py | 262 ++++++++++++++++++++ 18 files changed, 2955 insertions(+) create mode 100644 .devcontainer/devcontainer.json create mode 100644 python/.vscode/settings.json create mode 100644 python/README.md create mode 100644 python/meshoptimizer/__init__.py create mode 100644 python/meshoptimizer/_loader.py create mode 100644 python/meshoptimizer/decoder.py create mode 100644 python/meshoptimizer/encoder.py create mode 100644 python/meshoptimizer/optimizer.py create mode 100644 python/meshoptimizer/simplifier.py create mode 100644 python/meshoptimizer/utils.py create mode 100644 python/setup.py create mode 100644 python/tests/__init__.py create mode 100644 python/tests/run_tests.py create mode 100644 python/tests/test_encoding.py create mode 100644 python/tests/test_mesh_integrity.py create mode 100644 python/tests/test_optimization.py create mode 100644 python/tests/test_simplification.py diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 000000000..8c6f3fb3d --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,22 @@ +// For format details, see https://aka.ms/devcontainer.json. For config options, see the +// README at: https://github.com/devcontainers/templates/tree/main/src/python +{ + "name": "Python 3", + // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile + "image": "mcr.microsoft.com/devcontainers/python:1-3.12-bullseye" + + // Features to add to the dev container. More info: https://containers.dev/features. + // "features": {}, + + // Use 'forwardPorts' to make a list of ports inside the container available locally. + // "forwardPorts": [], + + // Use 'postCreateCommand' to run commands after the container is created. + // "postCreateCommand": "pip3 install --user -r requirements.txt", + + // Configure tool-specific properties. + // "customizations": {}, + + // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. 
+ // "remoteUser": "root" +} diff --git a/.gitignore b/.gitignore index 75ea27b87..cdcafb1db 100644 --- a/.gitignore +++ b/.gitignore @@ -11,3 +11,277 @@ # Test files /data/ + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# UV +# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +#uv.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/latest/usage/project/#working-with-version-control +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +# Ruff stuff: +.ruff_cache/ + +# PyPI configuration file +.pypirc + +# Prerequisites +*.d + +# Compiled Object files +*.slo +*.lo +*.o +*.obj + +# Precompiled Headers +*.gch +*.pch + +# Compiled Dynamic libraries +*.so +*.dylib +*.dll + +# Fortran module files +*.mod +*.smod + +# Compiled Static libraries +*.lai +*.la +*.a +*.lib + +# Executables +*.exe +*.out +*.app + +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +*~ + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +# Windows thumbnail cache files +Thumbs.db +Thumbs.db:encryptable +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk diff --git a/python/.vscode/settings.json b/python/.vscode/settings.json new file mode 100644 index 000000000..e9e6a805e --- /dev/null +++ b/python/.vscode/settings.json @@ -0,0 +1,11 @@ +{ + "python.testing.unittestArgs": [ + "-v", + "-s", + "./tests", + "-p", + "test_*.py" + ], + "python.testing.pytestEnabled": false, + "python.testing.unittestEnabled": true +} \ No newline at end of file diff --git a/python/README.md b/python/README.md new file mode 100644 index 000000000..c9a6b4a78 --- /dev/null +++ b/python/README.md @@ -0,0 +1,176 @@ +# Meshoptimizer Python + +Python bindings for the [meshoptimizer](https://github.com/zeux/meshoptimizer) library, which provides algorithms for optimizing 3D meshes for GPU rendering. + +## Installation + +```bash +pip install meshoptimizer +``` + +Or install from source: + +```bash +git clone https://github.com/zeux/meshoptimizer.git +cd meshoptimizer/python +pip install -e . +``` + +## Features + +- Vertex cache optimization +- Overdraw optimization +- Vertex fetch optimization +- Mesh simplification +- Vertex/index buffer compression and decompression +- And more... 
+ +## Usage + +### Basic Usage + +```python +import numpy as np +from meshoptimizer import Mesh + +# Create a simple mesh (a cube) +vertices = np.array([ + # positions + [-0.5, -0.5, -0.5], + [0.5, -0.5, -0.5], + [0.5, 0.5, -0.5], + [-0.5, 0.5, -0.5], + [-0.5, -0.5, 0.5], + [0.5, -0.5, 0.5], + [0.5, 0.5, 0.5], + [-0.5, 0.5, 0.5] +], dtype=np.float32) + +indices = np.array([ + 0, 1, 2, 2, 3, 0, # front + 1, 5, 6, 6, 2, 1, # right + 5, 4, 7, 7, 6, 5, # back + 4, 0, 3, 3, 7, 4, # left + 3, 2, 6, 6, 7, 3, # top + 4, 5, 1, 1, 0, 4 # bottom +], dtype=np.uint32) + +# Create a mesh +mesh = Mesh(vertices, indices) + +# Optimize the mesh +mesh.optimize_vertex_cache() +mesh.optimize_overdraw() +mesh.optimize_vertex_fetch() + +# Simplify the mesh +mesh.simplify(target_ratio=0.5) # Keep 50% of triangles + +# Encode the mesh for efficient transmission +encoded = mesh.encode() + +# Decode the mesh +decoded = Mesh.decode( + encoded, + vertex_count=len(mesh.vertices), + vertex_size=mesh.vertices.itemsize * mesh.vertices.shape[1], + index_count=len(mesh.indices) +) +``` + +### Low-level API + +If you need more control, you can use the low-level API directly: + +```python +import numpy as np +from meshoptimizer import ( + optimize_vertex_cache, + optimize_overdraw, + optimize_vertex_fetch, + simplify, + encode_vertex_buffer, + decode_vertex_buffer, + encode_index_buffer, + decode_index_buffer +) + +# Optimize vertex cache +optimized_indices = np.zeros_like(indices) +optimize_vertex_cache(optimized_indices, indices, len(indices), len(vertices)) + +# Optimize overdraw +optimized_indices2 = np.zeros_like(indices) +optimize_overdraw( + optimized_indices2, + optimized_indices, + vertices, + len(indices), + len(vertices), + vertices.itemsize * vertices.shape[1], + 1.05 # threshold +) + +# And so on... +``` + +## Notes on Index Encoding/Decoding + +When using the index buffer encoding and decoding functions, note that the decoded indices may not exactly match the original indices, even though the mesh geometry remains the same. This is due to how the meshoptimizer library internally encodes and decodes the indices. The triangles may be in a different order, but the resulting mesh is still valid and represents the same geometry. + +## API Reference + +### High-level API + +#### `Mesh` class + +- `__init__(vertices, indices=None)`: Initialize a mesh with vertices and optional indices. +- `optimize_vertex_cache()`: Optimize the mesh for vertex cache efficiency. +- `optimize_overdraw(threshold=1.05)`: Optimize the mesh for overdraw. +- `optimize_vertex_fetch()`: Optimize the mesh for vertex fetch efficiency. +- `simplify(target_ratio=0.25, target_error=0.01, options=0)`: Simplify the mesh. +- `encode()`: Encode the mesh for efficient transmission. +- `decode(encoded_data, vertex_count, vertex_size, index_count=None, index_size=4)` (class method): Decode an encoded mesh. + +### Low-level API + +#### Vertex Remapping + +- `generate_vertex_remap(destination, indices, index_count, vertices, vertex_count, vertex_size)`: Generate vertex remap table. +- `remap_vertex_buffer(destination, vertices, vertex_count, vertex_size, remap)`: Remap vertex buffer. +- `remap_index_buffer(destination, indices, index_count, remap)`: Remap index buffer. + +#### Optimization + +- `optimize_vertex_cache(destination, indices, index_count, vertex_count)`: Optimize vertex cache. +- `optimize_vertex_cache_strip(destination, indices, index_count, vertex_count)`: Optimize vertex cache for strip-like caches. 
+- `optimize_vertex_cache_fifo(destination, indices, index_count, vertex_count, cache_size)`: Optimize vertex cache for FIFO caches. +- `optimize_overdraw(destination, indices, vertex_positions, index_count, vertex_count, vertex_positions_stride, threshold)`: Optimize overdraw. +- `optimize_vertex_fetch(destination_vertices, indices, source_vertices, index_count, vertex_count, vertex_size)`: Optimize vertex fetch. +- `optimize_vertex_fetch_remap(destination, indices, index_count, vertex_count)`: Generate vertex remap to optimize vertex fetch. + +#### Simplification + +- `simplify(destination, indices, vertex_positions, index_count, vertex_count, vertex_positions_stride, target_index_count, target_error, options, result_error)`: Simplify mesh. +- `simplify_with_attributes(destination, indices, vertex_positions, vertex_attributes, attribute_weights, index_count, vertex_count, vertex_positions_stride, vertex_attributes_stride, attribute_count, vertex_lock, target_index_count, target_error, options, result_error)`: Simplify mesh with attribute metric. +- `simplify_sloppy(destination, indices, vertex_positions, index_count, vertex_count, vertex_positions_stride, target_index_count, target_error, result_error)`: Simplify mesh (sloppy). +- `simplify_points(destination, vertex_positions, vertex_colors, vertex_count, vertex_positions_stride, vertex_colors_stride, color_weight, target_vertex_count)`: Simplify point cloud. +- `simplify_scale(vertex_positions, vertex_count, vertex_positions_stride)`: Get the scale factor for simplification error. + +#### Encoding/Decoding + +- `encode_vertex_buffer(vertices, vertex_count, vertex_size)`: Encode vertex buffer. +- `encode_index_buffer(indices, index_count, vertex_count)`: Encode index buffer. +- `encode_vertex_version(version)`: Set vertex encoder format version. +- `encode_index_version(version)`: Set index encoder format version. +- `decode_vertex_buffer(destination, vertex_count, vertex_size, buffer)`: Decode vertex buffer. +- `decode_index_buffer(destination, index_count, index_size, buffer)`: Decode index buffer. +- `decode_vertex_version(buffer)`: Get encoded vertex format version. +- `decode_index_version(buffer)`: Get encoded index format version. +- `decode_filter_oct(buffer, count, stride)`: Apply octahedral filter to decoded data. +- `decode_filter_quat(buffer, count, stride)`: Apply quaternion filter to decoded data. +- `decode_filter_exp(buffer, count, stride)`: Apply exponential filter to decoded data. + +## License + +MIT License \ No newline at end of file diff --git a/python/meshoptimizer/__init__.py b/python/meshoptimizer/__init__.py new file mode 100644 index 000000000..48493b03c --- /dev/null +++ b/python/meshoptimizer/__init__.py @@ -0,0 +1,261 @@ +""" +Python wrapper for the meshoptimizer library. + +This package provides Python bindings for the meshoptimizer C++ library, +which offers various algorithms for optimizing 3D meshes for GPU rendering. 
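+
+Example (an illustrative sketch; `vertices` and `indices` are numpy arrays
+supplied by the caller, e.g. the cube from the README):
+
+    >>> from meshoptimizer import Mesh
+    >>> mesh = Mesh(vertices, indices)
+    >>> mesh.optimize_vertex_cache().optimize_overdraw().optimize_vertex_fetch()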
+""" + +from .encoder import ( + encode_vertex_buffer, + encode_index_buffer, + encode_vertex_version, + encode_index_version, +) + +from .decoder import ( + decode_vertex_buffer, + decode_index_buffer, + decode_vertex_version, + decode_index_version, + decode_filter_oct, + decode_filter_quat, + decode_filter_exp, +) + +from .optimizer import ( + optimize_vertex_cache, + optimize_vertex_cache_strip, + optimize_vertex_cache_fifo, + optimize_overdraw, + optimize_vertex_fetch, + optimize_vertex_fetch_remap, +) + +from .simplifier import ( + simplify, + simplify_with_attributes, + simplify_sloppy, + simplify_points, + simplify_scale, + SIMPLIFY_LOCK_BORDER, + SIMPLIFY_SPARSE, + SIMPLIFY_ERROR_ABSOLUTE, + SIMPLIFY_PRUNE, +) + +from .utils import ( + generate_vertex_remap, + remap_vertex_buffer, + remap_index_buffer, +) + +import numpy as np +from typing import Optional, Union, Dict, Any, ClassVar, Type, TypeVar + +T = TypeVar('T', bound='Mesh') + +class Mesh: + """ + A class representing a 3D mesh with optimization capabilities. + """ + + def __init__(self, vertices: np.ndarray, indices: Optional[np.ndarray] = None) -> None: + """ + Initialize a mesh with vertices and optional indices. + + Args: + vertices: numpy array of vertex data + indices: numpy array of indices (optional) + """ + self.vertices = np.asarray(vertices, dtype=np.float32) + self.indices = np.asarray(indices, dtype=np.uint32) if indices is not None else None + self.vertex_count = len(vertices) + self.index_count = len(indices) if indices is not None else 0 + + def optimize_vertex_cache(self) -> 'Mesh': + """ + Optimize the mesh for vertex cache efficiency. + + Returns: + self (for method chaining) + """ + if self.indices is None: + raise ValueError("Mesh has no indices to optimize") + + # Create output array + optimized_indices = np.zeros_like(self.indices) + + # Call optimization function + optimize_vertex_cache( + optimized_indices, + self.indices, + self.index_count, + self.vertex_count + ) + + self.indices = optimized_indices + return self + + def optimize_overdraw(self, threshold: float = 1.05) -> 'Mesh': + """ + Optimize the mesh for overdraw. + + Args: + threshold: threshold for optimization (default: 1.05) + + Returns: + self (for method chaining) + """ + if self.indices is None: + raise ValueError("Mesh has no indices to optimize") + + # Create output array + optimized_indices = np.zeros_like(self.indices) + + # Call optimization function + optimize_overdraw( + optimized_indices, + self.indices, + self.vertices, + self.index_count, + self.vertex_count, + self.vertices.itemsize * self.vertices.shape[1], + threshold + ) + + self.indices = optimized_indices + return self + + def optimize_vertex_fetch(self) -> 'Mesh': + """ + Optimize the mesh for vertex fetch efficiency. + + Returns: + self (for method chaining) + """ + if self.indices is None: + raise ValueError("Mesh has no indices to optimize") + + # Create output array + optimized_vertices = np.zeros_like(self.vertices) + + # Call optimization function + unique_vertex_count = optimize_vertex_fetch( + optimized_vertices, + self.indices, + self.vertices, + self.index_count, + self.vertex_count, + self.vertices.itemsize * self.vertices.shape[1] + ) + + self.vertices = optimized_vertices[:unique_vertex_count] + self.vertex_count = unique_vertex_count + return self + + def simplify(self, target_ratio: float = 0.25, target_error: float = 0.01, options: int = 0) -> 'Mesh': + """ + Simplify the mesh. 
+ + Args: + target_ratio: ratio of triangles to keep (default: 0.25) + target_error: target error (default: 0.01) + options: simplification options (default: 0) + + Returns: + self (for method chaining) + """ + if self.indices is None: + raise ValueError("Mesh has no indices to simplify") + + # Calculate target index count + target_index_count = int(self.index_count * target_ratio) + + # Create output array + simplified_indices = np.zeros(self.index_count, dtype=np.uint32) + + # Call simplification function + result_error = np.array([0.0], dtype=np.float32) + new_index_count = simplify( + simplified_indices, + self.indices, + self.vertices, + self.index_count, + self.vertex_count, + self.vertices.itemsize * self.vertices.shape[1], + target_index_count, + target_error, + options, + result_error + ) + + self.indices = simplified_indices[:new_index_count] + self.index_count = new_index_count + return self + + def encode(self) -> Dict[str, bytes]: + """ + Encode the mesh for efficient transmission. + + Returns: + Dictionary with encoded vertices and indices + """ + # Encode vertices + encoded_vertices = encode_vertex_buffer( + self.vertices, + self.vertex_count, + self.vertices.itemsize * self.vertices.shape[1] + ) + + # Encode indices if present + encoded_indices = None + if self.indices is not None: + encoded_indices = encode_index_buffer( + self.indices, + self.index_count, + self.vertex_count + ) + + return { + 'vertices': encoded_vertices, + 'indices': encoded_indices + } + + @classmethod + def decode(cls: Type[T], encoded_data: Dict[str, bytes], + vertex_count: int, vertex_size: int, + index_count: Optional[int] = None, + index_size: int = 4) -> T: + """ + Decode an encoded mesh. + + Args: + encoded_data: Dictionary with encoded vertices and indices + vertex_count: Number of vertices + vertex_size: Size of each vertex in bytes + index_count: Number of indices (optional) + index_size: Size of each index in bytes (default: 4) + + Returns: + Decoded Mesh object + """ + # Decode vertices using the new function that returns a numpy array + vertices = decode_vertex_buffer( + vertex_count, + vertex_size, + encoded_data['vertices'] + ) + + # Decode indices if present using the new function that returns a numpy array + indices = None + if encoded_data['indices'] is not None and index_count is not None: + indices = decode_index_buffer( + index_count, + index_size, + encoded_data['indices'] + ) + + return cls(vertices, indices) + +# Version information +__version__ = '0.1.0' \ No newline at end of file diff --git a/python/meshoptimizer/_loader.py b/python/meshoptimizer/_loader.py new file mode 100644 index 000000000..05db9c381 --- /dev/null +++ b/python/meshoptimizer/_loader.py @@ -0,0 +1,227 @@ +""" +Library loader for meshoptimizer. 
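+
+Locates the compiled shared library next to this file, loads it with
+ctypes.CDLL, and declares argument/return types for the C entry points
+used by the wrapper modules.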
+""" +import ctypes +import os +import sys +import platform +import glob +from typing import Optional, List, Any +import numpy as np + +def find_library() -> str: + """Find the meshoptimizer shared library.""" + # Get the directory of this file + this_dir = os.path.dirname(os.path.abspath(__file__)) + + # Look for any _meshoptimizer*.so or _meshoptimizer*.pyd file + if platform.system() == 'Windows': + pattern = os.path.join(this_dir, '_meshoptimizer*.pyd') + else: + pattern = os.path.join(this_dir, '_meshoptimizer*.so') + + lib_files = glob.glob(pattern) + + if lib_files: + return lib_files[0] + + raise ImportError(f"Could not find meshoptimizer library in {this_dir}") + +# Load the library +try: + lib_path = find_library() + lib = ctypes.CDLL(lib_path) +except ImportError as e: + # If the library is not found, provide a helpful error message + print(f"Error loading meshoptimizer library: {e}") + print("Make sure the library is properly installed.") + raise + +# Define function signatures +def setup_function_signatures() -> None: + """Set up the function signatures for the library.""" + # Vertex remap functions + lib.meshopt_generateVertexRemap.argtypes = [ + ctypes.POINTER(ctypes.c_uint), # destination + ctypes.POINTER(ctypes.c_uint), # indices + ctypes.c_size_t, # index_count + ctypes.c_void_p, # vertices + ctypes.c_size_t, # vertex_count + ctypes.c_size_t # vertex_size + ] + lib.meshopt_generateVertexRemap.restype = ctypes.c_size_t + + lib.meshopt_remapVertexBuffer.argtypes = [ + ctypes.c_void_p, # destination + ctypes.c_void_p, # vertices + ctypes.c_size_t, # vertex_count + ctypes.c_size_t, # vertex_size + ctypes.POINTER(ctypes.c_uint) # remap + ] + lib.meshopt_remapVertexBuffer.restype = None + + lib.meshopt_remapIndexBuffer.argtypes = [ + ctypes.POINTER(ctypes.c_uint), # destination + ctypes.POINTER(ctypes.c_uint), # indices + ctypes.c_size_t, # index_count + ctypes.POINTER(ctypes.c_uint) # remap + ] + lib.meshopt_remapIndexBuffer.restype = None + + # Vertex cache optimization + lib.meshopt_optimizeVertexCache.argtypes = [ + ctypes.POINTER(ctypes.c_uint), # destination + ctypes.POINTER(ctypes.c_uint), # indices + ctypes.c_size_t, # index_count + ctypes.c_size_t # vertex_count + ] + lib.meshopt_optimizeVertexCache.restype = None + + # Overdraw optimization + lib.meshopt_optimizeOverdraw.argtypes = [ + ctypes.POINTER(ctypes.c_uint), # destination + ctypes.POINTER(ctypes.c_uint), # indices + ctypes.c_size_t, # index_count + ctypes.POINTER(ctypes.c_float), # vertex_positions + ctypes.c_size_t, # vertex_count + ctypes.c_size_t, # vertex_positions_stride + ctypes.c_float # threshold + ] + lib.meshopt_optimizeOverdraw.restype = None + + # Vertex fetch optimization + lib.meshopt_optimizeVertexFetch.argtypes = [ + ctypes.c_void_p, # destination + ctypes.POINTER(ctypes.c_uint), # indices + ctypes.c_size_t, # index_count + ctypes.c_void_p, # vertices + ctypes.c_size_t, # vertex_count + ctypes.c_size_t # vertex_size + ] + lib.meshopt_optimizeVertexFetch.restype = ctypes.c_size_t + + # Simplification + lib.meshopt_simplify.argtypes = [ + ctypes.POINTER(ctypes.c_uint), # destination + ctypes.POINTER(ctypes.c_uint), # indices + ctypes.c_size_t, # index_count + ctypes.POINTER(ctypes.c_float), # vertex_positions + ctypes.c_size_t, # vertex_count + ctypes.c_size_t, # vertex_positions_stride + ctypes.c_size_t, # target_index_count + ctypes.c_float, # target_error + ctypes.c_uint, # options + ctypes.POINTER(ctypes.c_float) # result_error + ] + lib.meshopt_simplify.restype = ctypes.c_size_t + + # 
Simplification scale + lib.meshopt_simplifyScale.argtypes = [ + ctypes.POINTER(ctypes.c_float), # vertex_positions + ctypes.c_size_t, # vertex_count + ctypes.c_size_t # vertex_positions_stride + ] + lib.meshopt_simplifyScale.restype = ctypes.c_float # Return type is float + + # Encoding + lib.meshopt_encodeVertexBufferBound.argtypes = [ + ctypes.c_size_t, # vertex_count + ctypes.c_size_t # vertex_size + ] + lib.meshopt_encodeVertexBufferBound.restype = ctypes.c_size_t + + lib.meshopt_encodeVertexBuffer.argtypes = [ + ctypes.POINTER(ctypes.c_ubyte), # buffer + ctypes.c_size_t, # buffer_size + ctypes.c_void_p, # vertices + ctypes.c_size_t, # vertex_count + ctypes.c_size_t # vertex_size + ] + lib.meshopt_encodeVertexBuffer.restype = ctypes.c_size_t + + lib.meshopt_encodeIndexBufferBound.argtypes = [ + ctypes.c_size_t, # index_count + ctypes.c_size_t # vertex_count + ] + lib.meshopt_encodeIndexBufferBound.restype = ctypes.c_size_t + + lib.meshopt_encodeIndexBuffer.argtypes = [ + ctypes.POINTER(ctypes.c_ubyte), # buffer + ctypes.c_size_t, # buffer_size + ctypes.POINTER(ctypes.c_uint), # indices + ctypes.c_size_t # index_count + ] + lib.meshopt_encodeIndexBuffer.restype = ctypes.c_size_t + + # Decoding + lib.meshopt_decodeVertexBuffer.argtypes = [ + ctypes.c_void_p, # destination + ctypes.c_size_t, # vertex_count + ctypes.c_size_t, # vertex_size + ctypes.POINTER(ctypes.c_ubyte), # buffer + ctypes.c_size_t # buffer_size + ] + lib.meshopt_decodeVertexBuffer.restype = ctypes.c_int + + lib.meshopt_decodeIndexBuffer.argtypes = [ + ctypes.c_void_p, # destination + ctypes.c_size_t, # index_count + ctypes.c_size_t, # index_size + ctypes.POINTER(ctypes.c_ubyte), # buffer + ctypes.c_size_t # buffer_size + ] + lib.meshopt_decodeIndexBuffer.restype = ctypes.c_int + + # Encoding/Decoding versions + lib.meshopt_encodeVertexVersion.argtypes = [ctypes.c_int] + lib.meshopt_encodeVertexVersion.restype = None + + lib.meshopt_encodeIndexVersion.argtypes = [ctypes.c_int] + lib.meshopt_encodeIndexVersion.restype = None + + lib.meshopt_decodeVertexVersion.argtypes = [ + ctypes.POINTER(ctypes.c_ubyte), # buffer + ctypes.c_size_t # buffer_size + ] + lib.meshopt_decodeVertexVersion.restype = ctypes.c_int + + lib.meshopt_decodeIndexVersion.argtypes = [ + ctypes.POINTER(ctypes.c_ubyte), # buffer + ctypes.c_size_t # buffer_size + ] + lib.meshopt_decodeIndexVersion.restype = ctypes.c_int + + # Simplify sloppy + lib.meshopt_simplifySloppy.argtypes = [ + ctypes.POINTER(ctypes.c_uint), # destination + ctypes.POINTER(ctypes.c_uint), # indices + ctypes.c_size_t, # index_count + ctypes.POINTER(ctypes.c_float), # vertex_positions + ctypes.c_size_t, # vertex_count + ctypes.c_size_t, # vertex_positions_stride + ctypes.c_size_t, # target_index_count + ctypes.c_float, # target_error + ctypes.POINTER(ctypes.c_float) # result_error + ] + lib.meshopt_simplifySloppy.restype = ctypes.c_size_t + + # Simplify points + lib.meshopt_simplifyPoints.argtypes = [ + ctypes.POINTER(ctypes.c_uint), # destination + ctypes.POINTER(ctypes.c_float), # vertex_positions + ctypes.c_size_t, # vertex_count + ctypes.c_size_t, # vertex_positions_stride + ctypes.POINTER(ctypes.c_float), # vertex_colors + ctypes.c_size_t, # vertex_colors_stride + ctypes.c_float, # color_weight + ctypes.c_size_t # target_vertex_count + ] + lib.meshopt_simplifyPoints.restype = ctypes.c_size_t + +# Set up function signatures +try: + setup_function_signatures() +except AttributeError as e: + print(f"Error setting up function signatures: {e}") + print("The library might be missing some 
expected functions.") + raise \ No newline at end of file diff --git a/python/meshoptimizer/decoder.py b/python/meshoptimizer/decoder.py new file mode 100644 index 000000000..addfebdad --- /dev/null +++ b/python/meshoptimizer/decoder.py @@ -0,0 +1,187 @@ +""" +Decoder functions for meshoptimizer. +""" +import ctypes +from typing import Optional, Union, Tuple, Any, List +import numpy as np +from ._loader import lib + +def decode_vertex_buffer(vertex_count: int, + vertex_size: int, + buffer: Union[bytes, np.ndarray]) -> np.ndarray: + """ + Decode vertex buffer data. + + Args: + vertex_count: number of vertices + vertex_size: size of each vertex in bytes + buffer: encoded buffer as bytes + + Returns: + Numpy array containing the decoded vertex data + """ + # Convert buffer to numpy array if it's not already + buffer_array = np.frombuffer(buffer, dtype=np.uint8) + + # Create destination array + # Calculate the number of float32 elements needed + float_count = vertex_count * vertex_size // 4 + destination = np.zeros(float_count, dtype=np.float32) + + # Call C function + result = lib.meshopt_decodeVertexBuffer( + destination.ctypes.data_as(ctypes.c_void_p), + vertex_count, + vertex_size, + buffer_array.ctypes.data_as(ctypes.POINTER(ctypes.c_ubyte)), + len(buffer_array) + ) + + if result != 0: + raise RuntimeError(f"Failed to decode vertex buffer: error code {result}") + + # Reshape the array if vertex_size indicates multiple components per vertex + components_per_vertex = vertex_size // 4 # Assuming float32 (4 bytes) components + if components_per_vertex > 1: + destination = destination.reshape(vertex_count, components_per_vertex) + + return destination + +def decode_index_buffer(index_count: int, + index_size: int, + buffer: Union[bytes, np.ndarray]) -> np.ndarray: + """ + Decode index buffer data. + + Args: + index_count: number of indices + index_size: size of each index in bytes (2 or 4) + buffer: encoded buffer as bytes + + Returns: + Numpy array containing the decoded index data + """ + # Convert buffer to numpy array if it's not already + buffer_array = np.frombuffer(buffer, dtype=np.uint8) + + # Create destination array + destination = np.zeros(index_count, dtype=np.uint32) + + # Call C function + result = lib.meshopt_decodeIndexBuffer( + destination.ctypes.data_as(ctypes.c_void_p), + index_count, + index_size, + buffer_array.ctypes.data_as(ctypes.POINTER(ctypes.c_ubyte)), + len(buffer_array) + ) + + if result != 0: + raise RuntimeError(f"Failed to decode index buffer: error code {result}") + + return destination + +def decode_vertex_version(buffer: Union[bytes, np.ndarray]) -> int: + """ + Get encoded vertex format version. + + Args: + buffer: encoded buffer as bytes + + Returns: + Format version of the encoded vertex buffer, or -1 if the buffer header is invalid + """ + # Convert buffer to numpy array if it's not already + buffer_array = np.frombuffer(buffer, dtype=np.uint8) + + return lib.meshopt_decodeVertexVersion( + buffer_array.ctypes.data_as(ctypes.POINTER(ctypes.c_ubyte)), + len(buffer_array) + ) + +def decode_index_version(buffer: Union[bytes, np.ndarray]) -> int: + """ + Get encoded index format version. 
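+
+    Example (illustrative; `encoded` holds bytes produced by
+    encode_index_buffer):
+        >>> version = decode_index_version(encoded)
+        >>> assert version >= 0  # -1 would mean an invalid header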
+ + Args: + buffer: encoded buffer as bytes + + Returns: + Format version of the encoded index buffer, or -1 if the buffer header is invalid + """ + # Convert buffer to numpy array if it's not already + buffer_array = np.frombuffer(buffer, dtype=np.uint8) + + return lib.meshopt_decodeIndexVersion( + buffer_array.ctypes.data_as(ctypes.POINTER(ctypes.c_ubyte)), + len(buffer_array) + ) + +def decode_filter_oct(buffer: np.ndarray, count: int, stride: int) -> np.ndarray: + """ + Apply octahedral filter to decoded data. + + Args: + buffer: numpy array of decoded data + count: number of elements + stride: stride between elements in bytes + + Returns: + Numpy array with the filter applied (a copy of the input buffer) + """ + # Create a copy of the buffer to avoid modifying the original + result_buffer = buffer.copy() + + lib.meshopt_decodeFilterOct( + result_buffer.ctypes.data_as(ctypes.c_void_p), + count, + stride + ) + + return result_buffer + +def decode_filter_quat(buffer: np.ndarray, count: int, stride: int) -> np.ndarray: + """ + Apply quaternion filter to decoded data. + + Args: + buffer: numpy array of decoded data + count: number of elements + stride: stride between elements in bytes + + Returns: + Numpy array with the filter applied (a copy of the input buffer) + """ + # Create a copy of the buffer to avoid modifying the original + result_buffer = buffer.copy() + + lib.meshopt_decodeFilterQuat( + result_buffer.ctypes.data_as(ctypes.c_void_p), + count, + stride + ) + + return result_buffer + +def decode_filter_exp(buffer: np.ndarray, count: int, stride: int) -> np.ndarray: + """ + Apply exponential filter to decoded data. + + Args: + buffer: numpy array of decoded data + count: number of elements + stride: stride between elements in bytes + + Returns: + Numpy array with the filter applied (a copy of the input buffer) + """ + # Create a copy of the buffer to avoid modifying the original + result_buffer = buffer.copy() + + lib.meshopt_decodeFilterExp( + result_buffer.ctypes.data_as(ctypes.c_void_p), + count, + stride + ) + + return result_buffer \ No newline at end of file diff --git a/python/meshoptimizer/encoder.py b/python/meshoptimizer/encoder.py new file mode 100644 index 000000000..22bfc42f8 --- /dev/null +++ b/python/meshoptimizer/encoder.py @@ -0,0 +1,121 @@ +""" +Encoder functions for meshoptimizer. +""" +import ctypes +from typing import Optional, Union, Tuple +import numpy as np +from ._loader import lib + +def encode_vertex_buffer(vertices: np.ndarray, + vertex_count: Optional[int] = None, + vertex_size: Optional[int] = None) -> bytes: + """ + Encode vertex buffer data. 
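+
+    Example (illustrative; encodes an 8x3 float32 position array, with
+    vertex count and size derived automatically):
+        >>> vertices = np.zeros((8, 3), dtype=np.float32)
+        >>> encoded = encode_vertex_buffer(vertices)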
+ + Args: + vertices: numpy array of vertex data + vertex_count: number of vertices (optional, derived from vertices if not provided) + vertex_size: size of each vertex in bytes (optional, derived from vertices if not provided) + + Returns: + Encoded buffer as bytes + """ + # Convert vertices to numpy array if it's not already + vertices = np.asarray(vertices, dtype=np.float32) + + # Derive vertex_count and vertex_size if not provided + if vertex_count is None: + vertex_count = len(vertices) + + if vertex_size is None: + vertex_size = vertices.itemsize * vertices.shape[1] if len(vertices.shape) > 1 else vertices.itemsize + + # Calculate buffer size + bound = lib.meshopt_encodeVertexBufferBound(vertex_count, vertex_size) + + # Allocate buffer + buffer = np.zeros(bound, dtype=np.uint8) + + # Call C function + result_size = lib.meshopt_encodeVertexBuffer( + buffer.ctypes.data_as(ctypes.POINTER(ctypes.c_ubyte)), + bound, + vertices.ctypes.data_as(ctypes.c_void_p), + vertex_count, + vertex_size + ) + + if result_size == 0: + raise RuntimeError("Failed to encode vertex buffer") + + # Return only the used portion of the buffer + return bytes(buffer[:result_size]) + +def encode_index_buffer(indices: np.ndarray, + index_count: Optional[int] = None, + vertex_count: Optional[int] = None) -> bytes: + """ + Encode index buffer data. + + Args: + indices: numpy array of index data + index_count: number of indices (optional, derived from indices if not provided) + vertex_count: number of vertices (optional, derived from indices if not provided) + + Returns: + Encoded buffer as bytes + """ + # Convert indices to numpy array if it's not already + indices = np.asarray(indices, dtype=np.uint32) + + # Derive index_count if not provided + if index_count is None: + index_count = len(indices) + + # Derive vertex_count if not provided + if vertex_count is None: + vertex_count = np.max(indices) + 1 + + # Calculate buffer size + bound = lib.meshopt_encodeIndexBufferBound(index_count, vertex_count) + + # Allocate buffer + buffer = np.zeros(bound, dtype=np.uint8) + + # Call C function + result_size = lib.meshopt_encodeIndexBuffer( + buffer.ctypes.data_as(ctypes.POINTER(ctypes.c_ubyte)), + bound, + indices.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + index_count + ) + + if result_size == 0: + raise RuntimeError("Failed to encode index buffer") + + # Return only the used portion of the buffer + return bytes(buffer[:result_size]) + +def encode_vertex_version(version: int) -> None: + """ + Set vertex encoder format version. + + Args: + version: version number (0 or 1) + """ + if version not in (0, 1): + raise ValueError("Version must be 0 or 1") + + lib.meshopt_encodeVertexVersion(version) + +def encode_index_version(version: int) -> None: + """ + Set index encoder format version. + + Args: + version: version number (0 or 1) + """ + if version not in (0, 1): + raise ValueError("Version must be 0 or 1") + + lib.meshopt_encodeIndexVersion(version) \ No newline at end of file diff --git a/python/meshoptimizer/optimizer.py b/python/meshoptimizer/optimizer.py new file mode 100644 index 000000000..be241c9fb --- /dev/null +++ b/python/meshoptimizer/optimizer.py @@ -0,0 +1,247 @@ +""" +Optimization functions for meshoptimizer. +""" +import ctypes +from typing import Optional, Union, Tuple +import numpy as np +from ._loader import lib + +def optimize_vertex_cache(destination: np.ndarray, indices: np.ndarray, + index_count: Optional[int] = None, + vertex_count: Optional[int] = None) -> None: + """ + Optimize vertex cache. 
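+
+    Example (illustrative; `indices` is a uint32 triangle list such as the
+    README cube):
+        >>> destination = np.zeros_like(indices)
+        >>> optimize_vertex_cache(destination, indices)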
+ + Args: + destination: numpy array to store the optimized indices + indices: numpy array of index data + index_count: number of indices (optional, derived from indices if not provided) + vertex_count: number of vertices (optional, derived from indices if not provided) + + Returns: + None (destination is modified in-place) + """ + # Convert indices to numpy array if it's not already + indices = np.asarray(indices, dtype=np.uint32) + + # Derive index_count if not provided + if index_count is None: + index_count = len(indices) + + # Derive vertex_count if not provided + if vertex_count is None: + vertex_count = np.max(indices) + 1 + + # Call C function + lib.meshopt_optimizeVertexCache( + destination.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + indices.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + index_count, + vertex_count + ) + +def optimize_vertex_cache_strip(destination: np.ndarray, indices: np.ndarray, + index_count: Optional[int] = None, + vertex_count: Optional[int] = None) -> None: + """ + Optimize vertex cache for strip-like caches. + + Args: + destination: numpy array to store the optimized indices + indices: numpy array of index data + index_count: number of indices (optional, derived from indices if not provided) + vertex_count: number of vertices (optional, derived from indices if not provided) + + Returns: + None (destination is modified in-place) + """ + # Convert indices to numpy array if it's not already + indices = np.asarray(indices, dtype=np.uint32) + + # Derive index_count if not provided + if index_count is None: + index_count = len(indices) + + # Derive vertex_count if not provided + if vertex_count is None: + vertex_count = np.max(indices) + 1 + + # Call C function + lib.meshopt_optimizeVertexCacheStrip( + destination.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + indices.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + index_count, + vertex_count + ) + +def optimize_vertex_cache_fifo(destination: np.ndarray, indices: np.ndarray, + index_count: Optional[int] = None, + vertex_count: Optional[int] = None, + cache_size: int = 16) -> None: + """ + Optimize vertex cache for FIFO caches. + + Args: + destination: numpy array to store the optimized indices + indices: numpy array of index data + index_count: number of indices (optional, derived from indices if not provided) + vertex_count: number of vertices (optional, derived from indices if not provided) + cache_size: size of the cache (default: 16) + + Returns: + None (destination is modified in-place) + """ + # Convert indices to numpy array if it's not already + indices = np.asarray(indices, dtype=np.uint32) + + # Derive index_count if not provided + if index_count is None: + index_count = len(indices) + + # Derive vertex_count if not provided + if vertex_count is None: + vertex_count = np.max(indices) + 1 + + # Call C function + lib.meshopt_optimizeVertexCacheFifo( + destination.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + indices.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + index_count, + vertex_count, + cache_size + ) + +def optimize_overdraw(destination: np.ndarray, indices: np.ndarray, + vertex_positions: np.ndarray, + index_count: Optional[int] = None, + vertex_count: Optional[int] = None, + vertex_positions_stride: Optional[int] = None, + threshold: float = 1.05) -> None: + """ + Optimize overdraw. 
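+
+    Example (illustrative; `indices` should already be cache-optimized, as
+    in the README flow):
+        >>> destination = np.zeros_like(indices)
+        >>> optimize_overdraw(destination, indices, vertices, threshold=1.05)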
+ + Args: + destination: numpy array to store the optimized indices + indices: numpy array of index data + vertex_positions: numpy array of vertex position data + index_count: number of indices (optional, derived from indices if not provided) + vertex_count: number of vertices (optional, derived from vertex_positions if not provided) + vertex_positions_stride: stride of vertex positions in bytes (optional, derived from vertex_positions if not provided) + threshold: threshold for optimization (default: 1.05) + + Returns: + None (destination is modified in-place) + """ + # Convert indices to numpy array if it's not already + indices = np.asarray(indices, dtype=np.uint32) + + # Convert vertex_positions to numpy array if it's not already + vertex_positions = np.asarray(vertex_positions, dtype=np.float32) + + # Derive index_count if not provided + if index_count is None: + index_count = len(indices) + + # Derive vertex_count if not provided + if vertex_count is None: + vertex_count = len(vertex_positions) + + # Derive vertex_positions_stride if not provided + if vertex_positions_stride is None: + vertex_positions_stride = vertex_positions.itemsize * vertex_positions.shape[1] if len(vertex_positions.shape) > 1 else vertex_positions.itemsize + + # Call C function + lib.meshopt_optimizeOverdraw( + destination.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + indices.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + index_count, + vertex_positions.ctypes.data_as(ctypes.POINTER(ctypes.c_float)), + vertex_count, + vertex_positions_stride, + threshold + ) + +def optimize_vertex_fetch(destination_vertices: np.ndarray, indices: np.ndarray, + source_vertices: np.ndarray, + index_count: Optional[int] = None, + vertex_count: Optional[int] = None, + vertex_size: Optional[int] = None) -> int: + """ + Optimize vertex fetch. + + Args: + destination_vertices: numpy array to store the optimized vertices + indices: numpy array of index data + source_vertices: numpy array of vertex data + index_count: number of indices (optional, derived from indices if not provided) + vertex_count: number of vertices (optional, derived from source_vertices if not provided) + vertex_size: size of each vertex in bytes (optional, derived from source_vertices if not provided) + + Returns: + Number of unique vertices + """ + # Convert indices to numpy array if it's not already + indices = np.asarray(indices, dtype=np.uint32) + + # Convert source_vertices to numpy array if it's not already + source_vertices = np.asarray(source_vertices) + + # Derive index_count if not provided + if index_count is None: + index_count = len(indices) + + # Derive vertex_count if not provided + if vertex_count is None: + vertex_count = len(source_vertices) + + # Derive vertex_size if not provided + if vertex_size is None: + vertex_size = source_vertices.itemsize * source_vertices.shape[1] if len(source_vertices.shape) > 1 else source_vertices.itemsize + + # Call C function + result = lib.meshopt_optimizeVertexFetch( + destination_vertices.ctypes.data_as(ctypes.c_void_p), + indices.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + index_count, + source_vertices.ctypes.data_as(ctypes.c_void_p), + vertex_count, + vertex_size + ) + + return result + +def optimize_vertex_fetch_remap(destination: np.ndarray, indices: np.ndarray, + index_count: Optional[int] = None, + vertex_count: Optional[int] = None) -> int: + """ + Generate vertex remap to optimize vertex fetch. 
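+
+    Example (illustrative; `indices` and `vertex_count` come from an
+    existing mesh):
+        >>> remap = np.zeros(vertex_count, dtype=np.uint32)
+        >>> unique = optimize_vertex_fetch_remap(remap, indices)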
+ + Args: + destination: numpy array to store the remap table + indices: numpy array of index data + index_count: number of indices (optional, derived from indices if not provided) + vertex_count: number of vertices (optional, derived from indices if not provided) + + Returns: + Number of unique vertices + """ + # Convert indices to numpy array if it's not already + indices = np.asarray(indices, dtype=np.uint32) + + # Derive index_count if not provided + if index_count is None: + index_count = len(indices) + + # Derive vertex_count if not provided + if vertex_count is None: + vertex_count = np.max(indices) + 1 + + # Call C function + result = lib.meshopt_optimizeVertexFetchRemap( + destination.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + indices.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + index_count, + vertex_count + ) + + return result \ No newline at end of file diff --git a/python/meshoptimizer/simplifier.py b/python/meshoptimizer/simplifier.py new file mode 100644 index 000000000..cd458342f --- /dev/null +++ b/python/meshoptimizer/simplifier.py @@ -0,0 +1,359 @@ +""" +Simplification functions for meshoptimizer. +""" +import ctypes +from typing import Optional, Union, Tuple, List, Any +import numpy as np +from ._loader import lib + +# Simplification options +SIMPLIFY_LOCK_BORDER = 1 << 0 +SIMPLIFY_SPARSE = 1 << 1 +SIMPLIFY_ERROR_ABSOLUTE = 1 << 2 +SIMPLIFY_PRUNE = 1 << 3 + +def simplify(destination: np.ndarray, indices: np.ndarray, vertex_positions: np.ndarray, + index_count: Optional[int] = None, vertex_count: Optional[int] = None, + vertex_positions_stride: Optional[int] = None, target_index_count: Optional[int] = None, + target_error: float = 0.01, options: int = 0, + result_error: Optional[np.ndarray] = None) -> int: + """ + Simplify mesh. 
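+
+    Example (illustrative; halves the index count of an existing mesh):
+        >>> destination = np.zeros_like(indices)
+        >>> count = simplify(destination, indices, vertices,
+        ...                  target_index_count=len(indices) // 2)
+        >>> simplified = destination[:count]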
+ + Args: + destination: numpy array to store the simplified indices + indices: numpy array of index data + vertex_positions: numpy array of vertex position data + index_count: number of indices (optional, derived from indices if not provided) + vertex_count: number of vertices (optional, derived from vertex_positions if not provided) + vertex_positions_stride: stride of vertex positions in bytes (optional, derived from vertex_positions if not provided) + target_index_count: target number of indices (optional, defaults to 25% of original) + target_error: target error (default: 0.01) + options: simplification options (default: 0) + result_error: optional float to store the resulting error + + Returns: + Number of indices in the simplified mesh + """ + # Convert indices to numpy array if it's not already + indices = np.asarray(indices, dtype=np.uint32) + + # Convert vertex_positions to numpy array if it's not already + vertex_positions = np.asarray(vertex_positions, dtype=np.float32) + + # Derive index_count if not provided + if index_count is None: + index_count = len(indices) + + # Derive vertex_count if not provided + if vertex_count is None: + vertex_count = len(vertex_positions) + + # Derive vertex_positions_stride if not provided + if vertex_positions_stride is None: + vertex_positions_stride = vertex_positions.itemsize * vertex_positions.shape[1] if len(vertex_positions.shape) > 1 else vertex_positions.itemsize + + # Derive target_index_count if not provided + if target_index_count is None: + target_index_count = index_count // 4 # 25% of original + + # Create result_error_ptr if result_error is provided + if result_error is not None: + result_error_ptr = ctypes.pointer(ctypes.c_float(0.0)) + else: + result_error_ptr = ctypes.POINTER(ctypes.c_float)() + + # Call C function + result = lib.meshopt_simplify( + destination.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + indices.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + index_count, + vertex_positions.ctypes.data_as(ctypes.POINTER(ctypes.c_float)), + vertex_count, + vertex_positions_stride, + target_index_count, + target_error, + options, + result_error_ptr + ) + + # Update result_error if provided + if result_error is not None: + result_error[0] = result_error_ptr.contents.value + + return result + +def simplify_with_attributes(destination: np.ndarray, indices: np.ndarray, vertex_positions: np.ndarray, + vertex_attributes: np.ndarray, attribute_weights: np.ndarray, + index_count: Optional[int] = None, vertex_count: Optional[int] = None, + vertex_positions_stride: Optional[int] = None, + vertex_attributes_stride: Optional[int] = None, + attribute_count: Optional[int] = None, + vertex_lock: Optional[np.ndarray] = None, + target_index_count: Optional[int] = None, + target_error: float = 0.01, options: int = 0, + result_error: Optional[np.ndarray] = None) -> int: + """ + Simplify mesh with attribute metric. 
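+
+    Example (illustrative; `normals` is a hypothetical float32 Nx3
+    attribute array weighted uniformly):
+        >>> weights = np.array([1.0, 1.0, 1.0], dtype=np.float32)
+        >>> destination = np.zeros_like(indices)
+        >>> count = simplify_with_attributes(destination, indices, vertices,
+        ...                                  normals, weights)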
+ + Args: + destination: numpy array to store the simplified indices + indices: numpy array of index data + vertex_positions: numpy array of vertex position data + vertex_attributes: numpy array of vertex attribute data + attribute_weights: numpy array of attribute weights + index_count: number of indices (optional, derived from indices if not provided) + vertex_count: number of vertices (optional, derived from vertex_positions if not provided) + vertex_positions_stride: stride of vertex positions in bytes (optional, derived from vertex_positions if not provided) + vertex_attributes_stride: stride of vertex attributes in bytes (optional, derived from vertex_attributes if not provided) + attribute_count: number of attributes (optional, derived from attribute_weights if not provided) + vertex_lock: optional numpy array of vertex lock flags + target_index_count: target number of indices (optional, defaults to 25% of original) + target_error: target error (default: 0.01) + options: simplification options (default: 0) + result_error: optional float to store the resulting error + + Returns: + Number of indices in the simplified mesh + """ + # Convert indices to numpy array if it's not already + indices = np.asarray(indices, dtype=np.uint32) + + # Convert vertex_positions to numpy array if it's not already + vertex_positions = np.asarray(vertex_positions, dtype=np.float32) + + # Convert vertex_attributes to numpy array if it's not already + vertex_attributes = np.asarray(vertex_attributes, dtype=np.float32) + + # Convert attribute_weights to numpy array if it's not already + attribute_weights = np.asarray(attribute_weights, dtype=np.float32) + + # Derive index_count if not provided + if index_count is None: + index_count = len(indices) + + # Derive vertex_count if not provided + if vertex_count is None: + vertex_count = len(vertex_positions) + + # Derive vertex_positions_stride if not provided + if vertex_positions_stride is None: + vertex_positions_stride = vertex_positions.itemsize * vertex_positions.shape[1] if len(vertex_positions.shape) > 1 else vertex_positions.itemsize + + # Derive vertex_attributes_stride if not provided + if vertex_attributes_stride is None: + vertex_attributes_stride = vertex_attributes.itemsize * vertex_attributes.shape[1] if len(vertex_attributes.shape) > 1 else vertex_attributes.itemsize + + # Derive attribute_count if not provided + if attribute_count is None: + attribute_count = len(attribute_weights) + + # Derive target_index_count if not provided + if target_index_count is None: + target_index_count = index_count // 4 # 25% of original + + # Create result_error_ptr if result_error is provided + if result_error is not None: + result_error_ptr = ctypes.pointer(ctypes.c_float(0.0)) + else: + result_error_ptr = ctypes.POINTER(ctypes.c_float)() + + # Create vertex_lock_ptr if vertex_lock is provided + if vertex_lock is not None: + vertex_lock = np.asarray(vertex_lock, dtype=np.uint8) + vertex_lock_ptr = vertex_lock.ctypes.data_as(ctypes.POINTER(ctypes.c_ubyte)) + else: + vertex_lock_ptr = ctypes.POINTER(ctypes.c_ubyte)() + + # Call C function + result = lib.meshopt_simplifyWithAttributes( + destination.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + indices.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + index_count, + vertex_positions.ctypes.data_as(ctypes.POINTER(ctypes.c_float)), + vertex_count, + vertex_positions_stride, + vertex_attributes.ctypes.data_as(ctypes.POINTER(ctypes.c_float)), + vertex_attributes_stride, + 
attribute_weights.ctypes.data_as(ctypes.POINTER(ctypes.c_float)), + attribute_count, + vertex_lock_ptr, + target_index_count, + target_error, + options, + result_error_ptr + ) + + # Update result_error if provided + if result_error is not None: + result_error[0] = result_error_ptr.contents.value + + return result + +def simplify_sloppy(destination: np.ndarray, indices: np.ndarray, vertex_positions: np.ndarray, + index_count: Optional[int] = None, vertex_count: Optional[int] = None, + vertex_positions_stride: Optional[int] = None, + target_index_count: Optional[int] = None, target_error: float = 0.01, + result_error: Optional[np.ndarray] = None) -> int: + """ + Simplify mesh (sloppy). + + Args: + destination: numpy array to store the simplified indices + indices: numpy array of index data + vertex_positions: numpy array of vertex position data + index_count: number of indices (optional, derived from indices if not provided) + vertex_count: number of vertices (optional, derived from vertex_positions if not provided) + vertex_positions_stride: stride of vertex positions in bytes (optional, derived from vertex_positions if not provided) + target_index_count: target number of indices (optional, defaults to 25% of original) + target_error: target error (default: 0.01) + result_error: optional float to store the resulting error + + Returns: + Number of indices in the simplified mesh + """ + # Convert indices to numpy array if it's not already + indices = np.asarray(indices, dtype=np.uint32) + + # Convert vertex_positions to numpy array if it's not already + vertex_positions = np.asarray(vertex_positions, dtype=np.float32) + + # Derive index_count if not provided + if index_count is None: + index_count = len(indices) + + # Derive vertex_count if not provided + if vertex_count is None: + vertex_count = len(vertex_positions) + + # Derive vertex_positions_stride if not provided + if vertex_positions_stride is None: + vertex_positions_stride = vertex_positions.itemsize * vertex_positions.shape[1] if len(vertex_positions.shape) > 1 else vertex_positions.itemsize + + # Derive target_index_count if not provided + if target_index_count is None: + target_index_count = index_count // 4 # 25% of original + + # Create result_error_ptr if result_error is provided + if result_error is not None: + result_error_ptr = ctypes.pointer(ctypes.c_float(0.0)) + else: + result_error_ptr = ctypes.POINTER(ctypes.c_float)() + + # Call C function + result = lib.meshopt_simplifySloppy( + destination.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + indices.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + index_count, + vertex_positions.ctypes.data_as(ctypes.POINTER(ctypes.c_float)), + vertex_count, + vertex_positions_stride, + target_index_count, + ctypes.c_float(target_error), # Explicitly convert to c_float + result_error_ptr + ) + + # Update result_error if provided + if result_error is not None: + result_error[0] = result_error_ptr.contents.value + + return result + +def simplify_points(destination: np.ndarray, vertex_positions: np.ndarray, + vertex_colors: Optional[np.ndarray] = None, + vertex_count: Optional[int] = None, + vertex_positions_stride: Optional[int] = None, + vertex_colors_stride: Optional[int] = None, + color_weight: float = 1.0, + target_vertex_count: Optional[int] = None) -> int: + """ + Simplify point cloud. 
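+
+    Example (illustrative; keeps ~25% of a hypothetical Nx3 float32
+    `points` array):
+        >>> destination = np.zeros(len(points), dtype=np.uint32)
+        >>> count = simplify_points(destination, points)
+        >>> kept = points[destination[:count]]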
+ + Args: + destination: numpy array to store the simplified point indices + vertex_positions: numpy array of vertex position data + vertex_colors: numpy array of vertex color data (optional) + vertex_count: number of vertices (optional, derived from vertex_positions if not provided) + vertex_positions_stride: stride of vertex positions in bytes (optional, derived from vertex_positions if not provided) + vertex_colors_stride: stride of vertex colors in bytes (optional, derived from vertex_colors if not provided) + color_weight: weight of color in simplification (default: 1.0) + target_vertex_count: target number of vertices (optional, defaults to 25% of original) + + Returns: + Number of vertices in the simplified point cloud + """ + # Convert vertex_positions to numpy array if it's not already + vertex_positions = np.asarray(vertex_positions, dtype=np.float32) + + # Derive vertex_count if not provided + if vertex_count is None: + vertex_count = len(vertex_positions) + + # Derive vertex_positions_stride if not provided + if vertex_positions_stride is None: + vertex_positions_stride = vertex_positions.itemsize * vertex_positions.shape[1] if len(vertex_positions.shape) > 1 else vertex_positions.itemsize + + # Derive target_vertex_count if not provided + if target_vertex_count is None: + target_vertex_count = vertex_count // 4 # 25% of original + + # Handle vertex_colors + if vertex_colors is not None: + vertex_colors = np.asarray(vertex_colors, dtype=np.float32) + + # Derive vertex_colors_stride if not provided + if vertex_colors_stride is None: + vertex_colors_stride = vertex_colors.itemsize * vertex_colors.shape[1] if len(vertex_colors.shape) > 1 else vertex_colors.itemsize + + vertex_colors_ptr = vertex_colors.ctypes.data_as(ctypes.POINTER(ctypes.c_float)) + else: + vertex_colors_ptr = ctypes.POINTER(ctypes.c_float)() + vertex_colors_stride = 0 + + # Call C function + result = lib.meshopt_simplifyPoints( + destination.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + vertex_positions.ctypes.data_as(ctypes.POINTER(ctypes.c_float)), + vertex_count, + vertex_positions_stride, + vertex_colors_ptr, + vertex_colors_stride, + ctypes.c_float(color_weight), # Explicitly convert to c_float + target_vertex_count + ) + + return result + +def simplify_scale(vertex_positions: np.ndarray, + vertex_count: Optional[int] = None, + vertex_positions_stride: Optional[int] = None) -> float: + """ + Get the scale factor for simplification error. 
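+
+    Example (illustrative; converts the relative error reported by a prior
+    simplify(..., result_error=result_error) call into absolute units):
+        >>> scale = simplify_scale(vertices)
+        >>> absolute_error = result_error[0] * scale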
+
+    Args:
+        vertex_positions: numpy array of vertex position data
+        vertex_count: number of vertices (optional, derived from vertex_positions if not provided)
+        vertex_positions_stride: stride of vertex positions in bytes (optional, derived from vertex_positions if not provided)
+
+    Returns:
+        Scale factor for simplification error
+    """
+    # Convert vertex_positions to numpy array if it's not already
+    vertex_positions = np.asarray(vertex_positions, dtype=np.float32)
+
+    # Derive vertex_count if not provided
+    if vertex_count is None:
+        vertex_count = len(vertex_positions)
+
+    # Derive vertex_positions_stride if not provided
+    if vertex_positions_stride is None:
+        vertex_positions_stride = vertex_positions.itemsize * vertex_positions.shape[1] if len(vertex_positions.shape) > 1 else vertex_positions.itemsize
+
+    # Call C function
+    result = lib.meshopt_simplifyScale(
+        vertex_positions.ctypes.data_as(ctypes.POINTER(ctypes.c_float)),
+        vertex_count,
+        vertex_positions_stride
+    )
+
+    return result
\ No newline at end of file
diff --git a/python/meshoptimizer/utils.py b/python/meshoptimizer/utils.py
new file mode 100644
index 000000000..f5f782858
--- /dev/null
+++ b/python/meshoptimizer/utils.py
@@ -0,0 +1,141 @@
+"""
+Utility functions for meshoptimizer.
+"""
+import ctypes
+from typing import Optional, Union, Tuple, Any
+import numpy as np
+from ._loader import lib
+
+def generate_vertex_remap(destination: np.ndarray,
+                          indices: Optional[np.ndarray] = None,
+                          index_count: Optional[int] = None,
+                          vertices: Optional[np.ndarray] = None,
+                          vertex_count: Optional[int] = None,
+                          vertex_size: Optional[int] = None) -> int:
+    """
+    Generate vertex remap table.
+
+    Args:
+        destination: numpy array to store the remap table
+        indices: numpy array of index data (can be None for unindexed geometry)
+        index_count: number of indices (optional, derived from indices if not provided)
+        vertices: numpy array of vertex data
+        vertex_count: number of vertices (optional, derived from vertices if not provided)
+        vertex_size: size of each vertex in bytes (optional, derived from vertices if not provided)
+
+    Returns:
+        Number of unique vertices
+    """
+    # Convert indices to numpy array if it's not already and not None
+    if indices is not None:
+        indices = np.asarray(indices, dtype=np.uint32)
+
+        # Derive index_count if not provided
+        if index_count is None:
+            index_count = len(indices)
+
+    # Convert vertices to numpy array if it's not already
+    if vertices is not None:
+        vertices = np.asarray(vertices)
+
+        # Derive vertex_count if not provided
+        if vertex_count is None:
+            vertex_count = len(vertices)
+
+        # Derive vertex_size if not provided
+        if vertex_size is None:
+            vertex_size = vertices.itemsize * vertices.shape[1] if len(vertices.shape) > 1 else vertices.itemsize
+
+    # For unindexed geometry, the underlying C function expects
+    # index_count == vertex_count rather than 0 when indices is None
+    if indices is None and index_count is None:
+        index_count = vertex_count
+
+    # Call C function
+    result = lib.meshopt_generateVertexRemap(
+        destination.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)),
+        indices.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)) if indices is not None else None,
+        index_count,
+        vertices.ctypes.data_as(ctypes.c_void_p) if vertices is not None else None,
+        vertex_count,
+        vertex_size
+    )
+
+    return result
+
+def remap_vertex_buffer(destination: np.ndarray,
+                        vertices: np.ndarray,
+                        vertex_count: Optional[int] = None,
+                        vertex_size: Optional[int] = None,
+                        remap: Optional[np.ndarray] = None) -> None:
+    """
+    Remap vertex buffer.
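+
+    Typically used together with generate_vertex_remap(); a minimal sketch
+    (variable names are illustrative, not part of the upstream docs):
+
+        >>> remap = np.zeros(len(vertices), dtype=np.uint32)
+        >>> unique = generate_vertex_remap(remap, indices, vertices=vertices)
+        >>> new_vertices = np.zeros_like(vertices)
+        >>> remap_vertex_buffer(new_vertices, vertices, remap=remap)
+        >>> new_indices = np.zeros_like(indices)
+        >>> remap_index_buffer(new_indices, indices, remap=remap)
+        >>> new_vertices = new_vertices[:unique]  # keep only the unique vertices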
+ + Args: + destination: numpy array to store the remapped vertices + vertices: numpy array of vertex data + vertex_count: number of vertices (optional, derived from vertices if not provided) + vertex_size: size of each vertex in bytes (optional, derived from vertices if not provided) + remap: numpy array of remap data + + Returns: + None (destination is modified in-place) + """ + # Convert vertices to numpy array if it's not already + vertices = np.asarray(vertices) + + # Derive vertex_count if not provided + if vertex_count is None: + vertex_count = len(vertices) + + # Derive vertex_size if not provided + if vertex_size is None: + vertex_size = vertices.itemsize * vertices.shape[1] if len(vertices.shape) > 1 else vertices.itemsize + + # Convert remap to numpy array if it's not already and not None + if remap is not None: + remap = np.asarray(remap, dtype=np.uint32) + + # Call C function + lib.meshopt_remapVertexBuffer( + destination.ctypes.data_as(ctypes.c_void_p), + vertices.ctypes.data_as(ctypes.c_void_p), + vertex_count, + vertex_size, + remap.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)) if remap is not None else None + ) + +def remap_index_buffer(destination: np.ndarray, + indices: np.ndarray, + index_count: Optional[int] = None, + remap: Optional[np.ndarray] = None) -> None: + """ + Remap index buffer. + + Args: + destination: numpy array to store the remapped indices + indices: numpy array of index data + index_count: number of indices (optional, derived from indices if not provided) + remap: numpy array of remap data + + Returns: + None (destination is modified in-place) + """ + # Convert indices to numpy array if it's not already + indices = np.asarray(indices, dtype=np.uint32) + + # Derive index_count if not provided + if index_count is None: + index_count = len(indices) + + # Convert remap to numpy array if it's not already and not None + if remap is not None: + remap = np.asarray(remap, dtype=np.uint32) + + # Call C function + lib.meshopt_remapIndexBuffer( + destination.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + indices.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + index_count, + remap.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)) if remap is not None else None + ) \ No newline at end of file diff --git a/python/setup.py b/python/setup.py new file mode 100644 index 000000000..221bcc340 --- /dev/null +++ b/python/setup.py @@ -0,0 +1,52 @@ +from setuptools import setup, Extension, find_packages +import os +import platform + +# Determine source files +source_files = [ + '../src/clusterizer.cpp', + '../src/indexcodec.cpp', + '../src/indexgenerator.cpp', + '../src/overdrawanalyzer.cpp', + '../src/overdrawoptimizer.cpp', + '../src/simplifier.cpp', + '../src/spatialorder.cpp', + '../src/stripifier.cpp', + '../src/vcacheanalyzer.cpp', + '../src/vcacheoptimizer.cpp', + '../src/vertexcodec.cpp', + '../src/vertexfilter.cpp', + '../src/vfetchanalyzer.cpp', + '../src/vfetchoptimizer.cpp', + '../src/allocator.cpp', + '../src/quantization.cpp', + '../src/partition.cpp', +] + +# Define the extension module +meshoptimizer_module = Extension( + 'meshoptimizer._meshoptimizer', + sources=source_files, + include_dirs=['../src'], + extra_compile_args=['-std=c++11'], +) + +setup( + name='meshoptimizer', + version='0.1.0', + description='Python wrapper for meshoptimizer library', + author='Meshoptimizer Team', + author_email='example@example.com', + packages=find_packages(), + ext_modules=[meshoptimizer_module], + install_requires=[ + 'numpy', + ], + classifiers=[ + 'Development Status :: 3 
- Alpha', + 'Intended Audience :: Developers', + 'Topic :: Multimedia :: Graphics :: 3D Modeling', + 'License :: OSI Approved :: MIT License', + 'Programming Language :: Python :: 3', + ], +) \ No newline at end of file diff --git a/python/tests/__init__.py b/python/tests/__init__.py new file mode 100644 index 000000000..3c4693b71 --- /dev/null +++ b/python/tests/__init__.py @@ -0,0 +1,3 @@ +""" +Tests package for the meshoptimizer Python wrapper. +""" \ No newline at end of file diff --git a/python/tests/run_tests.py b/python/tests/run_tests.py new file mode 100644 index 000000000..77c7fb42c --- /dev/null +++ b/python/tests/run_tests.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python3 +""" +Test runner for the meshoptimizer Python wrapper. + +This script runs all the tests for the meshoptimizer Python wrapper. +""" +import unittest +import sys +import os + +# Add the parent directory to the path so we can import the meshoptimizer package +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) + +# Import the test modules +from test_encoding import TestEncoding +from test_optimization import TestOptimization +from test_simplification import TestSimplification +from test_mesh_integrity import TestMeshIntegrity + +if __name__ == '__main__': + # Create a test suite + test_suite = unittest.TestSuite() + + # Create a test loader + loader = unittest.TestLoader() + + # Add the test cases + test_suite.addTest(loader.loadTestsFromTestCase(TestEncoding)) + test_suite.addTest(loader.loadTestsFromTestCase(TestOptimization)) + test_suite.addTest(loader.loadTestsFromTestCase(TestSimplification)) + test_suite.addTest(loader.loadTestsFromTestCase(TestMeshIntegrity)) + + # Run the tests + runner = unittest.TextTestRunner(verbosity=2) + result = runner.run(test_suite) + + # Exit with non-zero code if tests failed + sys.exit(not result.wasSuccessful()) \ No newline at end of file diff --git a/python/tests/test_encoding.py b/python/tests/test_encoding.py new file mode 100644 index 000000000..9f3a93972 --- /dev/null +++ b/python/tests/test_encoding.py @@ -0,0 +1,160 @@ +""" +Tests for the meshoptimizer Python wrapper. + +This file contains tests to verify that the encoding/decoding process +preserves the mesh geometry correctly. +""" +import numpy as np +import unittest +from meshoptimizer import Mesh, encode_vertex_buffer, encode_index_buffer, decode_vertex_buffer, decode_index_buffer + +class TestEncoding(unittest.TestCase): + """Test encoding and decoding functionality.""" + + def setUp(self): + """Set up test data.""" + # Create a simple mesh (a cube) + self.vertices = np.array([ + # positions + [-0.5, -0.5, -0.5], + [0.5, -0.5, -0.5], + [0.5, 0.5, -0.5], + [-0.5, 0.5, -0.5], + [-0.5, -0.5, 0.5], + [0.5, -0.5, 0.5], + [0.5, 0.5, 0.5], + [-0.5, 0.5, 0.5] + ], dtype=np.float32) + + self.indices = np.array([ + 0, 1, 2, 2, 3, 0, # front + 1, 5, 6, 6, 2, 1, # right + 5, 4, 7, 7, 6, 5, # back + 4, 0, 3, 3, 7, 4, # left + 3, 2, 6, 6, 7, 3, # top + 4, 5, 1, 1, 0, 4 # bottom + ], dtype=np.uint32) + + self.mesh = Mesh(self.vertices, self.indices) + + def get_triangles_set(self, vertices, indices): + """ + Get a set of triangles from vertices and indices. + Each triangle is represented as a frozenset of tuples of vertex coordinates. + This makes the comparison invariant to vertex order within triangles. 
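+        For example, (v0, v1, v2) and (v2, v0, v1) produce the same frozenset
+        and therefore compare equal.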
+ """ + triangles = set() + for i in range(0, len(indices), 3): + # Get the three vertices of the triangle + v1 = tuple(vertices[indices[i]]) + v2 = tuple(vertices[indices[i+1]]) + v3 = tuple(vertices[indices[i+2]]) + # Create a frozenset of the vertices (order-invariant) + triangle = frozenset([v1, v2, v3]) + triangles.add(triangle) + return triangles + + def test_encode_decode_vertices(self): + """Test that encoding and decoding vertices preserves the data.""" + # Encode vertices + encoded_vertices = encode_vertex_buffer( + self.vertices, + len(self.vertices), + self.vertices.itemsize * self.vertices.shape[1] + ) + + # Decode vertices using the new function that returns a numpy array + decoded_vertices = decode_vertex_buffer( + len(self.vertices), + self.vertices.itemsize * self.vertices.shape[1], + encoded_vertices + ) + + # Check that the decoded vertices match the original + np.testing.assert_array_almost_equal(self.vertices, decoded_vertices) + + def test_encode_decode_mesh(self): + """Test that encoding and decoding a mesh preserves the geometry.""" + # Encode the mesh + encoded = self.mesh.encode() + + # Decode the mesh + decoded_mesh = Mesh.decode( + encoded, + vertex_count=len(self.mesh.vertices), + vertex_size=self.mesh.vertices.itemsize * self.mesh.vertices.shape[1], + index_count=len(self.mesh.indices) + ) + + # Check that the decoded vertices match the original + np.testing.assert_array_almost_equal(self.mesh.vertices, decoded_mesh.vertices) + + # The indices might not match exactly due to how the encoding/decoding works, + # but the geometry should be preserved. Let's check that by comparing + # the triangles. + + # Get the triangles from the original and decoded meshes + original_triangles = self.get_triangles_set(self.mesh.vertices, self.mesh.indices) + decoded_triangles = self.get_triangles_set(decoded_mesh.vertices, decoded_mesh.indices) + + # Check that the triangles match + self.assertEqual(original_triangles, decoded_triangles) + + def test_optimize_and_encode_decode(self): + """Test that optimizing and then encoding/decoding preserves the geometry.""" + # Optimize the mesh + optimized_mesh = Mesh(self.vertices.copy(), self.indices.copy()) + optimized_mesh.optimize_vertex_cache() + optimized_mesh.optimize_overdraw() + optimized_mesh.optimize_vertex_fetch() + + # Encode the optimized mesh + encoded = optimized_mesh.encode() + + # Decode the mesh + decoded_mesh = Mesh.decode( + encoded, + vertex_count=len(optimized_mesh.vertices), + vertex_size=optimized_mesh.vertices.itemsize * optimized_mesh.vertices.shape[1], + index_count=len(optimized_mesh.indices) + ) + + # Check that the decoded vertices match the optimized vertices + np.testing.assert_array_almost_equal(optimized_mesh.vertices, decoded_mesh.vertices) + + # Get the triangles from the optimized and decoded meshes + optimized_triangles = self.get_triangles_set(optimized_mesh.vertices, optimized_mesh.indices) + decoded_triangles = self.get_triangles_set(decoded_mesh.vertices, decoded_mesh.indices) + + # Check that the triangles match + self.assertEqual(optimized_triangles, decoded_triangles) + + def test_simplify_and_encode_decode(self): + """Test that simplifying and then encoding/decoding preserves the geometry.""" + # Simplify the mesh + simplified_mesh = Mesh(self.vertices.copy(), self.indices.copy()) + simplified_mesh.simplify(target_ratio=0.5) # Keep 50% of triangles + + # Encode the simplified mesh + encoded = simplified_mesh.encode() + + # Decode the mesh + decoded_mesh = Mesh.decode( + encoded, + 
vertex_count=len(simplified_mesh.vertices), + vertex_size=simplified_mesh.vertices.itemsize * simplified_mesh.vertices.shape[1], + index_count=len(simplified_mesh.indices) + ) + + # Check that the decoded vertices match the simplified vertices + np.testing.assert_array_almost_equal(simplified_mesh.vertices, decoded_mesh.vertices) + + # Get the triangles from the simplified and decoded meshes + simplified_triangles = self.get_triangles_set(simplified_mesh.vertices, simplified_mesh.indices) + decoded_triangles = self.get_triangles_set(decoded_mesh.vertices, decoded_mesh.indices) + + # Check that the triangles match + self.assertEqual(simplified_triangles, decoded_triangles) + +if __name__ == '__main__': + unittest.main() diff --git a/python/tests/test_mesh_integrity.py b/python/tests/test_mesh_integrity.py new file mode 100644 index 000000000..3d9469013 --- /dev/null +++ b/python/tests/test_mesh_integrity.py @@ -0,0 +1,197 @@ +""" +Tests for the meshoptimizer Python wrapper. + +This file contains tests to verify that the mesh vertices indexed by the indices +are the same before and after encoding/decoding, ensuring that the mesh geometry +is preserved correctly. +""" +import numpy as np +import unittest +from meshoptimizer import Mesh + +class TestMeshIntegrity(unittest.TestCase): + """Test mesh integrity during encoding/decoding.""" + + def setUp(self): + """Set up test data.""" + # Create a simple mesh (a cube) + self.vertices = np.array([ + # positions + [-0.5, -0.5, -0.5], + [0.5, -0.5, -0.5], + [0.5, 0.5, -0.5], + [-0.5, 0.5, -0.5], + [-0.5, -0.5, 0.5], + [0.5, -0.5, 0.5], + [0.5, 0.5, 0.5], + [-0.5, 0.5, 0.5] + ], dtype=np.float32) + + self.indices = np.array([ + 0, 1, 2, 2, 3, 0, # front + 1, 5, 6, 6, 2, 1, # right + 5, 4, 7, 7, 6, 5, # back + 4, 0, 3, 3, 7, 4, # left + 3, 2, 6, 6, 7, 3, # top + 4, 5, 1, 1, 0, 4 # bottom + ], dtype=np.uint32) + + self.mesh = Mesh(self.vertices, self.indices) + + def get_triangles_set(self, vertices, indices): + """ + Get a set of triangles from vertices and indices. + Each triangle is represented as a frozenset of tuples of vertex coordinates. + This makes the comparison invariant to vertex order within triangles. 
+ """ + triangles = set() + for i in range(0, len(indices), 3): + # Get the three vertices of the triangle + v1 = tuple(vertices[indices[i]]) + v2 = tuple(vertices[indices[i+1]]) + v3 = tuple(vertices[indices[i+2]]) + # Create a frozenset of the vertices (order-invariant) + triangle = frozenset([v1, v2, v3]) + triangles.add(triangle) + return triangles + + def test_mesh_integrity_encode_decode(self): + """Test that mesh vertices indexed by indices are preserved during encoding/decoding.""" + # Get the original triangles + original_triangles = self.get_triangles_set(self.mesh.vertices, self.mesh.indices) + + # Encode the mesh + encoded = self.mesh.encode() + + # Decode the mesh + decoded_mesh = Mesh.decode( + encoded, + vertex_count=len(self.mesh.vertices), + vertex_size=self.mesh.vertices.itemsize * self.mesh.vertices.shape[1], + index_count=len(self.mesh.indices) + ) + + # Get the decoded triangles + decoded_triangles = self.get_triangles_set(decoded_mesh.vertices, decoded_mesh.indices) + + # Check that the triangles match + self.assertEqual(original_triangles, decoded_triangles) + + def test_mesh_integrity_optimize_encode_decode(self): + """Test that mesh vertices indexed by indices are preserved during optimization, encoding, and decoding.""" + # Create a copy of the mesh + optimized_mesh = Mesh(self.vertices.copy(), self.indices.copy()) + + # Optimize the mesh + optimized_mesh.optimize_vertex_cache() + optimized_mesh.optimize_overdraw() + optimized_mesh.optimize_vertex_fetch() + + # Get the optimized triangles + optimized_triangles = self.get_triangles_set(optimized_mesh.vertices, optimized_mesh.indices) + + # Encode the optimized mesh + encoded = optimized_mesh.encode() + + # Decode the mesh + decoded_mesh = Mesh.decode( + encoded, + vertex_count=len(optimized_mesh.vertices), + vertex_size=optimized_mesh.vertices.itemsize * optimized_mesh.vertices.shape[1], + index_count=len(optimized_mesh.indices) + ) + + # Get the decoded triangles + decoded_triangles = self.get_triangles_set(decoded_mesh.vertices, decoded_mesh.indices) + + # Check that the triangles match + self.assertEqual(optimized_triangles, decoded_triangles) + + def test_mesh_integrity_simplify_encode_decode(self): + """Test that mesh vertices indexed by indices are preserved during simplification, encoding, and decoding.""" + # Create a more complex mesh (a sphere) + # Generate a sphere with 16 segments and 16 rings + segments = 16 + rings = 16 + vertices = [] + indices = [] + + # Generate vertices + for i in range(rings + 1): + v = i / rings + phi = v * np.pi + + for j in range(segments): + u = j / segments + theta = u * 2 * np.pi + + x = np.sin(phi) * np.cos(theta) + y = np.sin(phi) * np.sin(theta) + z = np.cos(phi) + + vertices.append([x, y, z]) + + # Generate indices + for i in range(rings): + for j in range(segments): + a = i * segments + j + b = i * segments + (j + 1) % segments + c = (i + 1) * segments + (j + 1) % segments + d = (i + 1) * segments + j + + # Two triangles per quad + indices.extend([a, b, c]) + indices.extend([a, c, d]) + + sphere_vertices = np.array(vertices, dtype=np.float32) + sphere_indices = np.array(indices, dtype=np.uint32) + sphere_mesh = Mesh(sphere_vertices, sphere_indices) + + # Simplify the mesh + simplified_mesh = Mesh(sphere_vertices.copy(), sphere_indices.copy()) + simplified_mesh.simplify(target_ratio=0.5) # Keep 50% of triangles + + # Get the simplified triangles + simplified_triangles = self.get_triangles_set(simplified_mesh.vertices, simplified_mesh.indices) + + # Encode the simplified 
mesh + encoded = simplified_mesh.encode() + + # Decode the mesh + decoded_mesh = Mesh.decode( + encoded, + vertex_count=len(simplified_mesh.vertices), + vertex_size=simplified_mesh.vertices.itemsize * simplified_mesh.vertices.shape[1], + index_count=len(simplified_mesh.indices) + ) + + # Get the decoded triangles + decoded_triangles = self.get_triangles_set(decoded_mesh.vertices, decoded_mesh.indices) + + # Check that the triangles match + self.assertEqual(simplified_triangles, decoded_triangles) + + def test_mesh_integrity_triangles(self): + """Test that mesh triangles are preserved during encoding/decoding.""" + # Get the original triangles + original_triangles = self.get_triangles_set(self.mesh.vertices, self.mesh.indices) + + # Encode the mesh + encoded = self.mesh.encode() + + # Decode the mesh + decoded_mesh = Mesh.decode( + encoded, + vertex_count=len(self.mesh.vertices), + vertex_size=self.mesh.vertices.itemsize * self.mesh.vertices.shape[1], + index_count=len(self.mesh.indices) + ) + + # Get the decoded triangles + decoded_triangles = self.get_triangles_set(decoded_mesh.vertices, decoded_mesh.indices) + + # Check that the triangles match + self.assertEqual(original_triangles, decoded_triangles) + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/python/tests/test_optimization.py b/python/tests/test_optimization.py new file mode 100644 index 000000000..d58d27444 --- /dev/null +++ b/python/tests/test_optimization.py @@ -0,0 +1,219 @@ +""" +Tests for the meshoptimizer Python wrapper. + +This file contains tests to verify that the optimization functions +work correctly and preserve the mesh geometry. +""" +import numpy as np +import unittest +from meshoptimizer import ( + Mesh, + optimize_vertex_cache, + optimize_overdraw, + optimize_vertex_fetch, + generate_vertex_remap, + remap_vertex_buffer, + remap_index_buffer +) + +class TestOptimization(unittest.TestCase): + """Test optimization functionality.""" + + def setUp(self): + """Set up test data.""" + # Create a simple mesh (a cube) + self.vertices = np.array([ + # positions + [-0.5, -0.5, -0.5], + [0.5, -0.5, -0.5], + [0.5, 0.5, -0.5], + [-0.5, 0.5, -0.5], + [-0.5, -0.5, 0.5], + [0.5, -0.5, 0.5], + [0.5, 0.5, 0.5], + [-0.5, 0.5, 0.5] + ], dtype=np.float32) + + self.indices = np.array([ + 0, 1, 2, 2, 3, 0, # front + 1, 5, 6, 6, 2, 1, # right + 5, 4, 7, 7, 6, 5, # back + 4, 0, 3, 3, 7, 4, # left + 3, 2, 6, 6, 7, 3, # top + 4, 5, 1, 1, 0, 4 # bottom + ], dtype=np.uint32) + + self.mesh = Mesh(self.vertices, self.indices) + + def get_triangles_set(self, vertices, indices): + """ + Get a set of triangles from vertices and indices. + Each triangle is represented as a frozenset of tuples of vertex coordinates. + This makes the comparison invariant to vertex order within triangles. 
+ """ + triangles = set() + for i in range(0, len(indices), 3): + # Get the three vertices of the triangle + v1 = tuple(vertices[indices[i]]) + v2 = tuple(vertices[indices[i+1]]) + v3 = tuple(vertices[indices[i+2]]) + # Create a frozenset of the vertices (order-invariant) + triangle = frozenset([v1, v2, v3]) + triangles.add(triangle) + return triangles + + def test_vertex_cache_optimization(self): + """Test vertex cache optimization.""" + # Optimize vertex cache + optimized_indices = np.zeros_like(self.indices) + optimize_vertex_cache( + optimized_indices, + self.indices, + len(self.indices), + len(self.vertices) + ) + + # Check that the number of indices is the same + self.assertEqual(len(self.indices), len(optimized_indices)) + + # Get the triangles from the original and optimized meshes + original_triangles = self.get_triangles_set(self.vertices, self.indices) + optimized_triangles = self.get_triangles_set(self.vertices, optimized_indices) + + # Check that the triangles match + self.assertEqual(original_triangles, optimized_triangles) + + def test_overdraw_optimization(self): + """Test overdraw optimization.""" + # Optimize overdraw + optimized_indices = np.zeros_like(self.indices) + optimize_overdraw( + optimized_indices, + self.indices, + self.vertices, + len(self.indices), + len(self.vertices), + self.vertices.itemsize * self.vertices.shape[1], + 1.05 + ) + + # Check that the number of indices is the same + self.assertEqual(len(self.indices), len(optimized_indices)) + + # Get the triangles from the original and optimized meshes + original_triangles = self.get_triangles_set(self.vertices, self.indices) + optimized_triangles = self.get_triangles_set(self.vertices, optimized_indices) + + # Check that the triangles match + self.assertEqual(original_triangles, optimized_triangles) + + def test_vertex_fetch_optimization(self): + """Test vertex fetch optimization.""" + # Optimize vertex fetch + optimized_vertices = np.zeros_like(self.vertices) + unique_vertex_count = optimize_vertex_fetch( + optimized_vertices, + self.indices, + self.vertices, + len(self.indices), + len(self.vertices), + self.vertices.itemsize * self.vertices.shape[1] + ) + + # Check that the number of unique vertices is less than or equal to the original + self.assertLessEqual(unique_vertex_count, len(self.vertices)) + + # For vertex fetch optimization, we can't directly compare triangles because + # the optimization reorders vertices for better cache locality. + # Instead, we'll check that the number of triangles is the same and + # that each vertex in the optimized mesh is present in the original mesh. 
+
+        # Check that all optimized vertices are present in the original vertices
+        for i in range(unique_vertex_count):
+            vertex = tuple(optimized_vertices[i])
+            # Check if this vertex exists in the original vertices
+            found = False
+            for j in range(len(self.vertices)):
+                if np.allclose(self.vertices[j], optimized_vertices[i]):
+                    found = True
+                    break
+            self.assertTrue(found, f"Vertex {vertex} not found in original vertices")
+
+        # Check that the in-place remapped indices reference valid unique vertices
+        self.assertTrue(np.all(self.indices < unique_vertex_count))
+
+    def test_vertex_remap(self):
+        """Test vertex remapping."""
+        # Generate vertex remap
+        remap = np.zeros(len(self.vertices), dtype=np.uint32)
+        unique_vertex_count = generate_vertex_remap(
+            remap,
+            self.indices,
+            len(self.indices),
+            self.vertices,
+            len(self.vertices),
+            self.vertices.itemsize * self.vertices.shape[1]
+        )
+
+        # Check that the number of unique vertices is less than or equal to the original
+        self.assertLessEqual(unique_vertex_count, len(self.vertices))
+
+        # Remap vertices
+        remapped_vertices = np.zeros_like(self.vertices)
+        remap_vertex_buffer(
+            remapped_vertices,
+            self.vertices,
+            len(self.vertices),
+            self.vertices.itemsize * self.vertices.shape[1],
+            remap
+        )
+
+        # Remap indices
+        remapped_indices = np.zeros_like(self.indices)
+        remap_index_buffer(
+            remapped_indices,
+            self.indices,
+            len(self.indices),
+            remap
+        )
+
+        # Get the triangles from the original and remapped meshes
+        original_triangles = self.get_triangles_set(self.vertices, self.indices)
+        remapped_triangles = self.get_triangles_set(remapped_vertices, remapped_indices)
+
+        # Check that the triangles match
+        self.assertEqual(original_triangles, remapped_triangles)
+
+    def test_mesh_optimization_chain(self):
+        """Test chaining multiple optimizations on a mesh."""
+        # Create a copy of the mesh
+        optimized_mesh = Mesh(self.vertices.copy(), self.indices.copy())
+
+        # Apply optimizations
+        optimized_mesh.optimize_vertex_cache()
+        optimized_mesh.optimize_overdraw()
+        optimized_mesh.optimize_vertex_fetch()
+
+        # Check that the optimized mesh has the same number of triangles
+        self.assertEqual(len(self.indices) // 3, len(optimized_mesh.indices) // 3)
+
+        # Check that the optimized mesh has the same or fewer vertices
+        self.assertLessEqual(len(optimized_mesh.vertices), len(self.vertices))
+
+        # For the full optimization chain, we can't directly compare triangles because
+        # the vertex fetch optimization reorders vertices for better cache locality.
+        # Instead, we'll check that each vertex in the optimized mesh is present in the original mesh.
+
+        # Check that all optimized vertices are present in the original vertices
+        for i in range(len(optimized_mesh.vertices)):
+            vertex = tuple(optimized_mesh.vertices[i])
+            # Check if this vertex exists in the original vertices
+            found = False
+            for j in range(len(self.vertices)):
+                if np.allclose(self.vertices[j], optimized_mesh.vertices[i]):
+                    found = True
+                    break
+            self.assertTrue(found, f"Vertex {vertex} not found in original vertices")
+
+if __name__ == '__main__':
+    unittest.main()
\ No newline at end of file
diff --git a/python/tests/test_simplification.py b/python/tests/test_simplification.py
new file mode 100644
index 000000000..603613aaa
--- /dev/null
+++ b/python/tests/test_simplification.py
@@ -0,0 +1,262 @@
+"""
+Tests for the meshoptimizer Python wrapper.
+
+This file contains tests to verify that the simplification functions
+work correctly and preserve the mesh geometry as much as possible.
+""" +import numpy as np +import unittest +from meshoptimizer import ( + Mesh, + simplify, + simplify_sloppy, + simplify_points, + simplify_scale, + SIMPLIFY_LOCK_BORDER, + SIMPLIFY_SPARSE, + SIMPLIFY_ERROR_ABSOLUTE, + SIMPLIFY_PRUNE +) + +class TestSimplification(unittest.TestCase): + """Test simplification functionality.""" + + def setUp(self): + """Set up test data.""" + # Create a simple mesh (a cube) + self.vertices = np.array([ + # positions + [-0.5, -0.5, -0.5], + [0.5, -0.5, -0.5], + [0.5, 0.5, -0.5], + [-0.5, 0.5, -0.5], + [-0.5, -0.5, 0.5], + [0.5, -0.5, 0.5], + [0.5, 0.5, 0.5], + [-0.5, 0.5, 0.5] + ], dtype=np.float32) + + self.indices = np.array([ + 0, 1, 2, 2, 3, 0, # front + 1, 5, 6, 6, 2, 1, # right + 5, 4, 7, 7, 6, 5, # back + 4, 0, 3, 3, 7, 4, # left + 3, 2, 6, 6, 7, 3, # top + 4, 5, 1, 1, 0, 4 # bottom + ], dtype=np.uint32) + + self.mesh = Mesh(self.vertices, self.indices) + + # Create a more complex mesh (a sphere) + # Generate a sphere with 8 segments and 8 rings + segments = 8 + rings = 8 + vertices = [] + indices = [] + + # Generate vertices + for i in range(rings + 1): + v = i / rings + phi = v * np.pi + + for j in range(segments): + u = j / segments + theta = u * 2 * np.pi + + x = np.sin(phi) * np.cos(theta) + y = np.sin(phi) * np.sin(theta) + z = np.cos(phi) + + vertices.append([x, y, z]) + + # Generate indices + for i in range(rings): + for j in range(segments): + a = i * segments + j + b = i * segments + (j + 1) % segments + c = (i + 1) * segments + (j + 1) % segments + d = (i + 1) * segments + j + + # Two triangles per quad + indices.extend([a, b, c]) + indices.extend([a, c, d]) + + self.sphere_vertices = np.array(vertices, dtype=np.float32) + self.sphere_indices = np.array(indices, dtype=np.uint32) + self.sphere_mesh = Mesh(self.sphere_vertices, self.sphere_indices) + + def test_simplify_basic(self): + """Test basic simplification.""" + # Simplify the mesh + simplified_indices = np.zeros_like(self.indices) + result_error = np.array([0.0], dtype=np.float32) + + new_index_count = simplify( + simplified_indices, + self.indices, + self.vertices, + len(self.indices), + len(self.vertices), + self.vertices.itemsize * self.vertices.shape[1], + len(self.indices) // 2, # Target 50% reduction + 0.01, # Target error + 0, # No options + result_error + ) + + # Check that the number of indices is reduced + self.assertLessEqual(new_index_count, len(self.indices)) + + # Check that the error is reasonable + self.assertGreaterEqual(result_error[0], 0.0) + + def test_simplify_options(self): + """Test simplification with different options.""" + # Test with SIMPLIFY_LOCK_BORDER option + simplified_indices = np.zeros_like(self.indices) + result_error = np.array([0.0], dtype=np.float32) + + new_index_count = simplify( + simplified_indices, + self.indices, + self.vertices, + len(self.indices), + len(self.vertices), + self.vertices.itemsize * self.vertices.shape[1], + len(self.indices) // 2, # Target 50% reduction + 0.01, # Target error + SIMPLIFY_LOCK_BORDER, # Lock border vertices + result_error + ) + + # Check that the number of indices is reduced + self.assertLessEqual(new_index_count, len(self.indices)) + + # Test with SIMPLIFY_SPARSE option + simplified_indices = np.zeros_like(self.indices) + result_error = np.array([0.0], dtype=np.float32) + + new_index_count = simplify( + simplified_indices, + self.indices, + self.vertices, + len(self.indices), + len(self.vertices), + self.vertices.itemsize * self.vertices.shape[1], + len(self.indices) // 2, # Target 50% reduction + 0.01, # 
Target error + SIMPLIFY_SPARSE, # Sparse simplification + result_error + ) + + # Check that the number of indices is reduced + self.assertLessEqual(new_index_count, len(self.indices)) + + def test_simplify_sloppy(self): + """Test sloppy simplification.""" + # Simplify the mesh (sloppy) + simplified_indices = np.zeros_like(self.sphere_indices) + result_error = np.array([0.0], dtype=np.float32) + + new_index_count = simplify_sloppy( + simplified_indices, + self.sphere_indices, + self.sphere_vertices, + len(self.sphere_indices), + len(self.sphere_vertices), + self.sphere_vertices.itemsize * self.sphere_vertices.shape[1], + len(self.sphere_indices) // 4, # Target 75% reduction + 0.01, # Target error + result_error + ) + + # Check that the number of indices is reduced + self.assertLessEqual(new_index_count, len(self.sphere_indices)) + + # Check that the error is reasonable + self.assertGreaterEqual(result_error[0], 0.0) + + def test_simplify_points(self): + """Test point cloud simplification.""" + # Create a point cloud + points = np.random.rand(100, 3).astype(np.float32) + + # Simplify the point cloud + simplified_points = np.zeros(50, dtype=np.uint32) + + new_point_count = simplify_points( + simplified_points, + points, + None, # No colors + len(points), + points.itemsize * points.shape[1], + 0, # No colors stride + 0.0, # No color weight + 50 # Target 50% reduction + ) + + # Check that the number of points is reduced + self.assertLessEqual(new_point_count, 50) + + # Test with colors + colors = np.random.rand(100, 3).astype(np.float32) + + simplified_points = np.zeros(50, dtype=np.uint32) + + new_point_count = simplify_points( + simplified_points, + points, + colors, + len(points), + points.itemsize * points.shape[1], + colors.itemsize * colors.shape[1], + 1.0, # Equal weight for colors + 50 # Target 50% reduction + ) + + # Check that the number of points is reduced + self.assertLessEqual(new_point_count, 50) + + def test_simplify_scale(self): + """Test simplification scale calculation.""" + # Calculate the scale + scale = simplify_scale( + self.vertices, + len(self.vertices), + self.vertices.itemsize * self.vertices.shape[1] + ) + + # Check that the scale is positive + self.assertGreater(scale, 0.0) + + def test_mesh_simplify(self): + """Test mesh simplification using the Mesh class.""" + # Create a copy of the sphere mesh + simplified_mesh = Mesh(self.sphere_vertices.copy(), self.sphere_indices.copy()) + + # Simplify the mesh + simplified_mesh.simplify(target_ratio=0.5) # Keep 50% of triangles + + # Check that the number of triangles is reduced + self.assertLessEqual(len(simplified_mesh.indices) // 3, len(self.sphere_indices) // 3) + + # Check that the mesh is still valid + # (Each triangle should have 3 unique vertices) + for i in range(0, len(simplified_mesh.indices), 3): + a = simplified_mesh.indices[i] + b = simplified_mesh.indices[i+1] + c = simplified_mesh.indices[i+2] + + # Check that indices are within bounds + self.assertLess(a, len(simplified_mesh.vertices)) + self.assertLess(b, len(simplified_mesh.vertices)) + self.assertLess(c, len(simplified_mesh.vertices)) + + # Check that the triangle has 3 unique vertices + # (This is not always true for simplified meshes, but it's a good sanity check) + # self.assertNotEqual(a, b) + # self.assertNotEqual(b, c) + # self.assertNotEqual(c, a) + +if __name__ == '__main__': + unittest.main() \ No newline at end of file From 8d2f21bd6e6596834c380d87907f51ccc748b8a7 Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Tue, 4 Mar 2025 20:38:33 +0000 
Subject: [PATCH 02/30] Added Python Github Action --- .github/workflows/python-publish.yml | 29 ++++++++ .github/workflows/release.yml | 22 ++++++ PYPI.md | 55 +++++++++++++++ python/setup.py | 101 ++++++++++++++++++++------- 4 files changed, 183 insertions(+), 24 deletions(-) create mode 100644 .github/workflows/python-publish.yml create mode 100644 PYPI.md diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml new file mode 100644 index 000000000..87bacf5df --- /dev/null +++ b/.github/workflows/python-publish.yml @@ -0,0 +1,29 @@ +name: Publish Python Package + +on: + release: + types: [created] + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.x' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build twine + - name: Build package + run: | + cd python + python -m build + - name: Publish package + uses: pypa/gh-action-pypi-publish@release/v1 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} + packages-dir: python/dist/ \ No newline at end of file diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 105672b13..b230e7bb9 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -67,3 +67,25 @@ jobs: with: name: meshoptimizer-npm path: js/meshoptimizer-*.tgz + + python: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.x' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build twine + - name: Build package + run: | + cd python + python -m build + - name: Store package + uses: actions/upload-artifact@v4 + with: + name: python-package + path: python/dist/* diff --git a/PYPI.md b/PYPI.md new file mode 100644 index 000000000..1a1f58a54 --- /dev/null +++ b/PYPI.md @@ -0,0 +1,55 @@ +# Publishing to PyPI + +This project includes GitHub Actions workflows to automatically build and publish the Python package to PyPI. + +## Automatic Publishing + +The Python package is automatically built and published to PyPI when a new GitHub release is created. This is handled by the `.github/workflows/python-publish.yml` workflow. + +### Requirements + +To publish to PyPI, you need to set up a PyPI API token: + +1. Create an account on [PyPI](https://pypi.org/) if you don't have one +2. Go to your account settings and create an API token with upload permissions for the meshoptimizer project +3. Add the token as a GitHub repository secret named `PYPI_API_TOKEN` + +### Creating a Release + +To trigger the publishing workflow: + +1. Go to the GitHub repository page +2. Click on "Releases" in the right sidebar +3. Click "Create a new release" +4. Enter a tag version (e.g., `v0.22.0`) +5. Enter a release title and description +6. Click "Publish release" + +The workflow will automatically build the Python package and upload it to PyPI. + +## Manual Building + +If you want to build the package manually: + +```bash +cd python +python -m pip install --upgrade pip +pip install build +python -m build +``` + +This will create distribution packages in the `python/dist/` directory. 
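+
+Before uploading, the built artifacts can be checked locally (an optional
+sanity step; `twine check` validates the package metadata and README
+rendering):
+
+```bash
+pip install twine
+python -m twine check dist/*
+```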
+ +## Manual Publishing + +To manually publish to PyPI: + +```bash +cd python +python -m pip install --upgrade pip +pip install build twine +python -m build +python -m twine upload dist/* +``` + +You will be prompted for your PyPI username and password. \ No newline at end of file diff --git a/python/setup.py b/python/setup.py index 221bcc340..5499f2a8e 100644 --- a/python/setup.py +++ b/python/setup.py @@ -1,52 +1,105 @@ from setuptools import setup, Extension, find_packages import os import platform +import sys + +# Read long description from README +with open(os.path.join(os.path.dirname(__file__), "README.md"), "r", encoding="utf-8") as f: + long_description = f.read() +import re + +# Read version from package or use a default +def get_version(): + try: + with open(os.path.join(os.path.dirname(__file__), '..', 'src', 'meshoptimizer.h'), 'r') as f: + content = f.read() + version_match = re.search(r'#define\s+MESHOPTIMIZER_VERSION\s+(\d+)', content) + if version_match: + version = int(version_match.group(1)) + major = version // 10000 + minor = (version // 100) % 100 + patch = version % 100 + return f"{major}.{minor}.{patch}" + except: + pass + return '0.1.0' # Default version if unable to extract + +# Get long description from README +def get_long_description(): + try: + with open(os.path.join(os.path.dirname(__file__), 'README.md'), 'r') as f: + return f.read() + except: + return 'Python wrapper for meshoptimizer library' # Determine source files source_files = [ - '../src/clusterizer.cpp', - '../src/indexcodec.cpp', - '../src/indexgenerator.cpp', - '../src/overdrawanalyzer.cpp', - '../src/overdrawoptimizer.cpp', - '../src/simplifier.cpp', - '../src/spatialorder.cpp', - '../src/stripifier.cpp', - '../src/vcacheanalyzer.cpp', - '../src/vcacheoptimizer.cpp', - '../src/vertexcodec.cpp', - '../src/vertexfilter.cpp', - '../src/vfetchanalyzer.cpp', - '../src/vfetchoptimizer.cpp', - '../src/allocator.cpp', - '../src/quantization.cpp', - '../src/partition.cpp', + os.path.join('..', 'src', f) for f in [ + 'allocator.cpp', + 'clusterizer.cpp', + 'indexcodec.cpp', + 'indexgenerator.cpp', + 'overdrawanalyzer.cpp', + 'overdrawoptimizer.cpp', + 'simplifier.cpp', + 'spatialorder.cpp', + 'stripifier.cpp', + 'vcacheanalyzer.cpp', + 'vcacheoptimizer.cpp', + 'vertexcodec.cpp', + 'vertexfilter.cpp', + 'vfetchanalyzer.cpp', + 'vfetchoptimizer.cpp', + 'quantization.cpp', + 'partition.cpp', + ] ] +# Platform-specific compile arguments +extra_compile_args = ['-std=c++11'] +if platform.system() != 'Windows': + extra_compile_args.append('-fPIC') +if platform.system() == 'Darwin': + extra_compile_args.extend(['-stdlib=libc++', '-mmacosx-version-min=10.9']) + +# Ensure build directories exist +build_temp_dir = os.path.join('build', f'temp.{platform.system().lower()}-{platform.machine()}-{sys.version_info[0]}.{sys.version_info[1]}') +os.makedirs(build_temp_dir, exist_ok=True) + # Define the extension module meshoptimizer_module = Extension( 'meshoptimizer._meshoptimizer', sources=source_files, - include_dirs=['../src'], - extra_compile_args=['-std=c++11'], + include_dirs=[os.path.join('..', 'src')], + extra_compile_args=extra_compile_args, + language='c++', ) setup( name='meshoptimizer', - version='0.1.0', + version=get_version(), description='Python wrapper for meshoptimizer library', - author='Meshoptimizer Team', - author_email='example@example.com', + long_description=get_long_description(), + long_description_content_type='text/markdown', + url='https://github.com/zeux/meshoptimizer', 
packages=find_packages(), ext_modules=[meshoptimizer_module], install_requires=[ - 'numpy', + 'numpy>=1.19.0', ], + python_requires='>=3.6', classifiers=[ - 'Development Status :: 3 - Alpha', + 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Topic :: Multimedia :: Graphics :: 3D Modeling', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', ], + keywords='mesh optimization graphics 3d', ) \ No newline at end of file From 25d6cf566fbd242d60e6befae48475b08c014555 Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 01:30:55 +0000 Subject: [PATCH 03/30] Refactor Python package workflow and setup configuration --- .github/workflows/python-publish.yml | 15 +++- .github/workflows/release.yml | 24 +----- .gitignore | 4 +- python/MANIFEST.in | 5 ++ python/pyproject.toml | 6 ++ python/setup.py | 123 ++++++++++++++++++--------- 6 files changed, 108 insertions(+), 69 deletions(-) create mode 100644 python/MANIFEST.in create mode 100644 python/pyproject.toml diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index 87bacf5df..ecca6399f 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -1,4 +1,4 @@ -name: Publish Python Package +name: Python Package on: release: @@ -9,21 +9,28 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + with: + fetch-depth: 0 # Required for setuptools_scm to determine version + - name: Set up Python uses: actions/setup-python@v4 with: - python-version: '3.x' + python-version: '3.10' + - name: Install dependencies run: | python -m pip install --upgrade pip pip install build twine + - name: Build package run: | cd python - python -m build + python -m build --sdist + - name: Publish package uses: pypa/gh-action-pypi-publish@release/v1 with: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} - packages-dir: python/dist/ \ No newline at end of file + packages-dir: python/dist/ + skip-existing: true \ No newline at end of file diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b230e7bb9..acaa1ca14 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -66,26 +66,4 @@ jobs: - uses: actions/upload-artifact@v4 with: name: meshoptimizer-npm - path: js/meshoptimizer-*.tgz - - python: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: '3.x' - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install build twine - - name: Build package - run: | - cd python - python -m build - - name: Store package - uses: actions/upload-artifact@v4 - with: - name: python-package - path: python/dist/* + path: js/meshoptimizer-*.tgz \ No newline at end of file diff --git a/.gitignore b/.gitignore index cdcafb1db..6c1458341 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +python/src/ # IDE integrations /.idea/ /.vs/ @@ -226,7 +227,8 @@ cython_debug/ .LSOverride # Icon must end with two \r -Icon +Icon + # Thumbnails ._* diff --git a/python/MANIFEST.in b/python/MANIFEST.in new file mode 100644 index 000000000..6510d1f97 --- /dev/null +++ b/python/MANIFEST.in @@ -0,0 +1,5 @@ +include README.md +include 
pyproject.toml +recursive-include src *.cpp *.h +recursive-include python/src *.cpp *.h +recursive-include ../src *.cpp *.h \ No newline at end of file diff --git a/python/pyproject.toml b/python/pyproject.toml new file mode 100644 index 000000000..193028a8e --- /dev/null +++ b/python/pyproject.toml @@ -0,0 +1,6 @@ +[build-system] +requires = ["setuptools>=42", "wheel", "setuptools_scm>=6.0"] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +root = ".." \ No newline at end of file diff --git a/python/setup.py b/python/setup.py index 5499f2a8e..103dd8913 100644 --- a/python/setup.py +++ b/python/setup.py @@ -2,58 +2,74 @@ import os import platform import sys - -# Read long description from README -with open(os.path.join(os.path.dirname(__file__), "README.md"), "r", encoding="utf-8") as f: - long_description = f.read() import re +# Get the directory containing this file (setup.py) +SETUP_DIR = os.path.dirname(os.path.abspath(__file__)) + # Read version from package or use a default def get_version(): try: - with open(os.path.join(os.path.dirname(__file__), '..', 'src', 'meshoptimizer.h'), 'r') as f: - content = f.read() - version_match = re.search(r'#define\s+MESHOPTIMIZER_VERSION\s+(\d+)', content) - if version_match: - version = int(version_match.group(1)) - major = version // 10000 - minor = (version // 100) % 100 - patch = version % 100 - return f"{major}.{minor}.{patch}" - except: - pass + # Try to read from the src directory first + version_file_paths = [ + os.path.join('src', 'meshoptimizer.h'), + os.path.join('..', 'src', 'meshoptimizer.h') + ] + + for path in version_file_paths: + full_path = os.path.join(SETUP_DIR, path) + if os.path.exists(full_path): + with open(full_path, 'r') as f: + content = f.read() + version_match = re.search(r'#define\s+MESHOPTIMIZER_VERSION\s+(\d+)', content) + if version_match: + version = int(version_match.group(1)) + major = version // 10000 + minor = (version // 100) % 100 + patch = version % 100 + return f"{major}.{minor}.{patch}" + except Exception as e: + print(f"Warning: Could not extract version: {e}") return '0.1.0' # Default version if unable to extract # Get long description from README def get_long_description(): try: - with open(os.path.join(os.path.dirname(__file__), 'README.md'), 'r') as f: - return f.read() - except: - return 'Python wrapper for meshoptimizer library' + readme_path = os.path.join(SETUP_DIR, 'README.md') + if os.path.exists(readme_path): + with open(readme_path, 'r', encoding='utf-8') as f: + return f.read() + except Exception as e: + print(f"Warning: Could not read README.md: {e}") + return 'Python wrapper for meshoptimizer library' # Determine source files -source_files = [ - os.path.join('..', 'src', f) for f in [ - 'allocator.cpp', - 'clusterizer.cpp', - 'indexcodec.cpp', - 'indexgenerator.cpp', - 'overdrawanalyzer.cpp', - 'overdrawoptimizer.cpp', - 'simplifier.cpp', - 'spatialorder.cpp', - 'stripifier.cpp', - 'vcacheanalyzer.cpp', - 'vcacheoptimizer.cpp', - 'vertexcodec.cpp', - 'vertexfilter.cpp', - 'vfetchanalyzer.cpp', - 'vfetchoptimizer.cpp', - 'quantization.cpp', - 'partition.cpp', +# Check if we're in the python directory or the root directory +if os.path.exists(os.path.join(SETUP_DIR, 'src')): + # Source files are in the python/src directory + source_files = [ + os.path.join('src', filename) for filename in [ + 'allocator.cpp', 'clusterizer.cpp', 'indexcodec.cpp', 'indexgenerator.cpp', + 'overdrawanalyzer.cpp', 'overdrawoptimizer.cpp', 'simplifier.cpp', + 'spatialorder.cpp', 'stripifier.cpp', 
'vcacheanalyzer.cpp', + 'vcacheoptimizer.cpp', 'vertexcodec.cpp', 'vertexfilter.cpp', + 'vfetchanalyzer.cpp', 'vfetchoptimizer.cpp', 'quantization.cpp', + 'partition.cpp' + ] ] -] +else: + # Source files are in the root src directory + source_files = [ + os.path.join('..', 'src', filename) for filename in [ + 'allocator.cpp', 'clusterizer.cpp', 'indexcodec.cpp', 'indexgenerator.cpp', + 'overdrawanalyzer.cpp', 'overdrawoptimizer.cpp', 'simplifier.cpp', + 'spatialorder.cpp', 'stripifier.cpp', 'vcacheanalyzer.cpp', + 'vcacheoptimizer.cpp', 'vertexcodec.cpp', 'vertexfilter.cpp', + 'vfetchanalyzer.cpp', 'vfetchoptimizer.cpp', 'quantization.cpp', + 'partition.cpp' + ] + ] + # Platform-specific compile arguments extra_compile_args = ['-std=c++11'] @@ -64,17 +80,38 @@ def get_long_description(): # Ensure build directories exist build_temp_dir = os.path.join('build', f'temp.{platform.system().lower()}-{platform.machine()}-{sys.version_info[0]}.{sys.version_info[1]}') -os.makedirs(build_temp_dir, exist_ok=True) +os.makedirs(os.path.join(SETUP_DIR, build_temp_dir), exist_ok=True) + +# Define include directories +include_dirs = [] +if os.path.exists(os.path.join(SETUP_DIR, 'src')): + include_dirs.append('src') +else: + include_dirs.append(os.path.join('..', 'src')) # Define the extension module meshoptimizer_module = Extension( 'meshoptimizer._meshoptimizer', sources=source_files, - include_dirs=[os.path.join('..', 'src')], + include_dirs=include_dirs, extra_compile_args=extra_compile_args, language='c++', ) +# Check if source files exist at the expected paths +def check_source_files_exist(): + for source_file in source_files: + if not os.path.exists(source_file): + print(f"Warning: Source file not found: {source_file}") + return False + return True + +# Verify source files exist +if not check_source_files_exist(): + print("Warning: Some source files were not found. 
This may cause build failures.") + print(f"Current directory: {os.getcwd()}") + print(f"Setup directory: {SETUP_DIR}") + setup( name='meshoptimizer', version=get_version(), @@ -88,6 +125,10 @@ def get_long_description(): 'numpy>=1.19.0', ], python_requires='>=3.6', + package_data={ + '': ['src/*.cpp', 'src/*.h'], + }, + include_package_data=True, classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', From 82bb6bedc45fffe3269867bbba26357c9d14e988 Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 01:37:51 +0000 Subject: [PATCH 04/30] Update Python workflow to comment out package publishing step and modify version extraction error handling --- .github/workflows/python-publish.yml | 14 +++++++------- python/setup.py | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index ecca6399f..1f27f4671 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -27,10 +27,10 @@ jobs: cd python python -m build --sdist - - name: Publish package - uses: pypa/gh-action-pypi-publish@release/v1 - with: - user: __token__ - password: ${{ secrets.PYPI_API_TOKEN }} - packages-dir: python/dist/ - skip-existing: true \ No newline at end of file + # - name: Publish package + # uses: pypa/gh-action-pypi-publish@release/v1 + # with: + # user: __token__ + # password: ${{ secrets.PYPI_API_TOKEN }} + # packages-dir: python/dist/ + # skip-existing: true \ No newline at end of file diff --git a/python/setup.py b/python/setup.py index 103dd8913..26e8f4e5b 100644 --- a/python/setup.py +++ b/python/setup.py @@ -30,7 +30,7 @@ def get_version(): return f"{major}.{minor}.{patch}" except Exception as e: print(f"Warning: Could not extract version: {e}") - return '0.1.0' # Default version if unable to extract + raise RuntimeError("Version not found. 
Please ensure meshoptimizer.h is present in the src directory.") # Get long description from README def get_long_description(): From cc4cc2827b37a484b3db5a8ad739f6e0b8b5506d Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 01:38:27 +0000 Subject: [PATCH 05/30] Update Python workflow to trigger on manual dispatch instead of release events --- .github/workflows/python-publish.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index 1f27f4671..90d6aa1aa 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -1,8 +1,12 @@ name: Python Package on: - release: - types: [created] + workflow_dispatch: + + +# on: +# release: +# types: [created] jobs: deploy: From c379104bb450040cd514f73c6f310b93e0acfcb7 Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 01:42:26 +0000 Subject: [PATCH 06/30] Remove version information from the Mesh class --- python/meshoptimizer/__init__.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/python/meshoptimizer/__init__.py b/python/meshoptimizer/__init__.py index 48493b03c..20855c191 100644 --- a/python/meshoptimizer/__init__.py +++ b/python/meshoptimizer/__init__.py @@ -257,5 +257,3 @@ def decode(cls: Type[T], encoded_data: Dict[str, bytes], return cls(vertices, indices) -# Version information -__version__ = '0.1.0' \ No newline at end of file From c0a2f4418ab6d73f0dcd738ce53a157dd38e9509 Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 01:46:23 +0000 Subject: [PATCH 07/30] Refactor setup.py to dynamically gather source files from the src directory --- python/setup.py | 34 ++++++++++++++-------------------- 1 file changed, 14 insertions(+), 20 deletions(-) diff --git a/python/setup.py b/python/setup.py index 26e8f4e5b..3dd9eb7e6 100644 --- a/python/setup.py +++ b/python/setup.py @@ -47,28 +47,22 @@ def get_long_description(): # Check if we're in the python directory or the root directory if os.path.exists(os.path.join(SETUP_DIR, 'src')): # Source files are in the python/src directory - source_files = [ - os.path.join('src', filename) for filename in [ - 'allocator.cpp', 'clusterizer.cpp', 'indexcodec.cpp', 'indexgenerator.cpp', - 'overdrawanalyzer.cpp', 'overdrawoptimizer.cpp', 'simplifier.cpp', - 'spatialorder.cpp', 'stripifier.cpp', 'vcacheanalyzer.cpp', - 'vcacheoptimizer.cpp', 'vertexcodec.cpp', 'vertexfilter.cpp', - 'vfetchanalyzer.cpp', 'vfetchoptimizer.cpp', 'quantization.cpp', - 'partition.cpp' - ] - ] + src_path = os.path.join(SETUP_DIR, 'src') else: # Source files are in the root src directory - source_files = [ - os.path.join('..', 'src', filename) for filename in [ - 'allocator.cpp', 'clusterizer.cpp', 'indexcodec.cpp', 'indexgenerator.cpp', - 'overdrawanalyzer.cpp', 'overdrawoptimizer.cpp', 'simplifier.cpp', - 'spatialorder.cpp', 'stripifier.cpp', 'vcacheanalyzer.cpp', - 'vcacheoptimizer.cpp', 'vertexcodec.cpp', 'vertexfilter.cpp', - 'vfetchanalyzer.cpp', 'vfetchoptimizer.cpp', 'quantization.cpp', - 'partition.cpp' - ] - ] + src_path = os.path.join(SETUP_DIR, '..', 'src') + +# Get all .cpp files from the src directory +source_files = [] +for filename in os.listdir(src_path): + if filename.endswith('.cpp'): + # Use relative path for the source files + rel_path = 'src' if os.path.exists(os.path.join(SETUP_DIR, 'src')) else os.path.join('..', 'src') + source_files.append(os.path.join(rel_path, filename)) + +# Make sure we have source files +if not source_files: + 
raise RuntimeError(f"No source files found in {src_path}") # Platform-specific compile arguments From 556a84656a6cc7300ca06f18d2eced989c16c333 Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 01:47:27 +0000 Subject: [PATCH 08/30] Re-enable package publishing step in Python workflow --- .github/workflows/python-publish.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index 90d6aa1aa..3da093b43 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -31,10 +31,10 @@ jobs: cd python python -m build --sdist - # - name: Publish package - # uses: pypa/gh-action-pypi-publish@release/v1 - # with: - # user: __token__ - # password: ${{ secrets.PYPI_API_TOKEN }} - # packages-dir: python/dist/ - # skip-existing: true \ No newline at end of file + - name: Publish package + uses: pypa/gh-action-pypi-publish@release/v1 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} + packages-dir: python/dist/ + skip-existing: true \ No newline at end of file From f933054412f25f1651661f753eac6da68fcf460a Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 02:25:36 +0000 Subject: [PATCH 09/30] Add Python workflow for package build and testing; remove deprecated test runner --- .github/workflows/release.yml | 35 ++++++++++++++++++++++++- python/setup.py | 48 +++++++++++++++++------------------ python/tests/run_tests.py | 38 --------------------------- 3 files changed, 58 insertions(+), 63 deletions(-) delete mode 100644 python/tests/run_tests.py diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index acaa1ca14..146aa7c65 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -66,4 +66,37 @@ jobs: - uses: actions/upload-artifact@v4 with: name: meshoptimizer-npm - path: js/meshoptimizer-*.tgz \ No newline at end of file + path: js/meshoptimizer-*.tgz + + python: + strategy: + matrix: + os: [ubuntu] #, macos, windows] + runs-on: ${{matrix.os}}-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 # Required for setuptools_scm to determine version + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.10' + + - name: Build package + run: | + python -m pip install --upgrade pip + cd python + pip install -e . 
+ + - name: Run tests + run: | + cd python + python -m unittest discover -v + + - name: Store package + uses: actions/upload-artifact@v4 + with: + name: python-package + path: python/dist/* + if: matrix.os == 'ubuntu' \ No newline at end of file diff --git a/python/setup.py b/python/setup.py index 3dd9eb7e6..7eb2289bb 100644 --- a/python/setup.py +++ b/python/setup.py @@ -7,30 +7,30 @@ # Get the directory containing this file (setup.py) SETUP_DIR = os.path.dirname(os.path.abspath(__file__)) -# Read version from package or use a default -def get_version(): - try: - # Try to read from the src directory first - version_file_paths = [ - os.path.join('src', 'meshoptimizer.h'), - os.path.join('..', 'src', 'meshoptimizer.h') - ] +# # Read version from package or use a default +# def get_version(): +# try: +# # Try to read from the src directory first +# version_file_paths = [ +# os.path.join('src', 'meshoptimizer.h'), +# os.path.join('..', 'src', 'meshoptimizer.h') +# ] - for path in version_file_paths: - full_path = os.path.join(SETUP_DIR, path) - if os.path.exists(full_path): - with open(full_path, 'r') as f: - content = f.read() - version_match = re.search(r'#define\s+MESHOPTIMIZER_VERSION\s+(\d+)', content) - if version_match: - version = int(version_match.group(1)) - major = version // 10000 - minor = (version // 100) % 100 - patch = version % 100 - return f"{major}.{minor}.{patch}" - except Exception as e: - print(f"Warning: Could not extract version: {e}") - raise RuntimeError("Version not found. Please ensure meshoptimizer.h is present in the src directory.") +# for path in version_file_paths: +# full_path = os.path.join(SETUP_DIR, path) +# if os.path.exists(full_path): +# with open(full_path, 'r') as f: +# content = f.read() +# version_match = re.search(r'#define\s+MESHOPTIMIZER_VERSION\s+(\d+)', content) +# if version_match: +# version = int(version_match.group(1)) +# major = version // 10000 +# minor = (version // 100) % 100 +# patch = version % 100 +# return f"{major}.{minor}.{patch}" +# except Exception as e: +# print(f"Warning: Could not extract version: {e}") +# raise RuntimeError("Version not found. Please ensure meshoptimizer.h is present in the src directory.") # Get long description from README def get_long_description(): @@ -108,7 +108,7 @@ def check_source_files_exist(): setup( name='meshoptimizer', - version=get_version(), + version="0.2.20a1", description='Python wrapper for meshoptimizer library', long_description=get_long_description(), long_description_content_type='text/markdown', diff --git a/python/tests/run_tests.py b/python/tests/run_tests.py deleted file mode 100644 index 77c7fb42c..000000000 --- a/python/tests/run_tests.py +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env python3 -""" -Test runner for the meshoptimizer Python wrapper. - -This script runs all the tests for the meshoptimizer Python wrapper. 
-""" -import unittest -import sys -import os - -# Add the parent directory to the path so we can import the meshoptimizer package -sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) - -# Import the test modules -from test_encoding import TestEncoding -from test_optimization import TestOptimization -from test_simplification import TestSimplification -from test_mesh_integrity import TestMeshIntegrity - -if __name__ == '__main__': - # Create a test suite - test_suite = unittest.TestSuite() - - # Create a test loader - loader = unittest.TestLoader() - - # Add the test cases - test_suite.addTest(loader.loadTestsFromTestCase(TestEncoding)) - test_suite.addTest(loader.loadTestsFromTestCase(TestOptimization)) - test_suite.addTest(loader.loadTestsFromTestCase(TestSimplification)) - test_suite.addTest(loader.loadTestsFromTestCase(TestMeshIntegrity)) - - # Run the tests - runner = unittest.TextTestRunner(verbosity=2) - result = runner.run(test_suite) - - # Exit with non-zero code if tests failed - sys.exit(not result.wasSuccessful()) \ No newline at end of file From 1ea80950d1e2ff4edf76107ef9fa4907643714fd Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 02:27:40 +0000 Subject: [PATCH 10/30] Fix indentation in release workflow for artifact upload step --- .github/workflows/release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 146aa7c65..d81a277a6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -97,6 +97,6 @@ jobs: - name: Store package uses: actions/upload-artifact@v4 with: - name: python-package - path: python/dist/* + name: python-package + path: python/dist/* if: matrix.os == 'ubuntu' \ No newline at end of file From 374793d69429417b7125bb75c2b76dab2c09d3b3 Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 02:28:44 +0000 Subject: [PATCH 11/30] Add macOS and Windows to the release workflow matrix --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d81a277a6..238515725 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -71,7 +71,7 @@ jobs: python: strategy: matrix: - os: [ubuntu] #, macos, windows] + os: [ubuntu, macos, windows] runs-on: ${{matrix.os}}-latest steps: - uses: actions/checkout@v4 From 6b79f412071f1c53f8ded09f9ec5fdd48dc9d684 Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 03:30:28 +0000 Subject: [PATCH 12/30] Enhance setup.py with platform-specific compile and link arguments for Windows and macOS --- .github/workflows/release.yml | 8 +- python/pyproject.toml | 2 +- python/python/src/module.cpp | 19 +++++ python/setup.py | 141 +++++++++++++++++++--------------- 4 files changed, 107 insertions(+), 63 deletions(-) create mode 100644 python/python/src/module.cpp diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 238515725..650ba0f43 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -83,9 +83,10 @@ jobs: with: python-version: '3.10' - - name: Build package + - name: Install package run: | python -m pip install --upgrade pip + pip install build cd python pip install -e . 
@@ -94,6 +95,11 @@ jobs: cd python python -m unittest discover -v + - name: Build package + run: | + python -m build --sdist + if: matrix.os == 'ubuntu' + - name: Store package uses: actions/upload-artifact@v4 with: diff --git a/python/pyproject.toml b/python/pyproject.toml index 193028a8e..84513fe46 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=42", "wheel", "setuptools_scm>=6.0"] +requires = ["setuptools>=42", "wheel", "setuptools_scm>=6.0", "numpy>=1.19.0"] build-backend = "setuptools.build_meta" [tool.setuptools_scm] diff --git a/python/python/src/module.cpp b/python/python/src/module.cpp new file mode 100644 index 000000000..4ff2e3a65 --- /dev/null +++ b/python/python/src/module.cpp @@ -0,0 +1,19 @@ + +#include + +// Add your Python module function definitions here +static PyMethodDef MeshoptimizerMethods[] = { + {NULL, NULL, 0, NULL} // Sentinel +}; + +static struct PyModuleDef meshoptimizermodule = { + PyModuleDef_HEAD_INIT, + "_meshoptimizer", + "Python bindings for meshoptimizer library.", + -1, + MeshoptimizerMethods +}; + +PyMODINIT_FUNC PyInit__meshoptimizer(void) { + return PyModule_Create(&meshoptimizermodule); +} diff --git a/python/setup.py b/python/setup.py index 7eb2289bb..9b70f8a84 100644 --- a/python/setup.py +++ b/python/setup.py @@ -4,34 +4,10 @@ import sys import re + # Get the directory containing this file (setup.py) SETUP_DIR = os.path.dirname(os.path.abspath(__file__)) -# # Read version from package or use a default -# def get_version(): -# try: -# # Try to read from the src directory first -# version_file_paths = [ -# os.path.join('src', 'meshoptimizer.h'), -# os.path.join('..', 'src', 'meshoptimizer.h') -# ] - -# for path in version_file_paths: -# full_path = os.path.join(SETUP_DIR, path) -# if os.path.exists(full_path): -# with open(full_path, 'r') as f: -# content = f.read() -# version_match = re.search(r'#define\s+MESHOPTIMIZER_VERSION\s+(\d+)', content) -# if version_match: -# version = int(version_match.group(1)) -# major = version // 10000 -# minor = (version // 100) % 100 -# patch = version % 100 -# return f"{major}.{minor}.{patch}" -# except Exception as e: -# print(f"Warning: Could not extract version: {e}") -# raise RuntimeError("Version not found. 
Please ensure meshoptimizer.h is present in the src directory.") - # Get long description from README def get_long_description(): try: @@ -44,44 +20,80 @@ def get_long_description(): return 'Python wrapper for meshoptimizer library' # Determine source files -# Check if we're in the python directory or the root directory -if os.path.exists(os.path.join(SETUP_DIR, 'src')): - # Source files are in the python/src directory - src_path = os.path.join(SETUP_DIR, 'src') -else: - # Source files are in the root src directory - src_path = os.path.join(SETUP_DIR, '..', 'src') - -# Get all .cpp files from the src directory -source_files = [] -for filename in os.listdir(src_path): - if filename.endswith('.cpp'): - # Use relative path for the source files - rel_path = 'src' if os.path.exists(os.path.join(SETUP_DIR, 'src')) else os.path.join('..', 'src') - source_files.append(os.path.join(rel_path, filename)) - -# Make sure we have source files -if not source_files: - raise RuntimeError(f"No source files found in {src_path}") +def get_source_files(): + # Check if we're in the python directory or the root directory + if os.path.exists('src'): + # Source files are in the src directory + src_path = 'src' + rel_path = 'src' + else: + # Source files are in the root src directory + src_path = os.path.join('..', 'src') + rel_path = os.path.join('..', 'src') + + # Get all .cpp files from the src directory + source_files = [] + if os.path.exists(src_path): + for filename in os.listdir(src_path): + if filename.endswith('.cpp'): + source_files.append(os.path.join(rel_path, filename)) + + # Add the module initialization file + source_files.append("python/src/module.cpp") + + # Make sure we have source files + if not source_files: + raise RuntimeError(f"No source files found in {src_path}") + + return source_files +# Get include directories +def get_include_dirs(): + include_dirs = [] + + if os.path.exists('src'): + include_dirs.append('src') + else: + include_dirs.append(os.path.join('..', 'src')) + + # Try to add numpy include directory if available, but don't fail if it's not + try: + import numpy + include_dirs.append(numpy.get_include()) + except ImportError: + # Create a class that will resolve numpy's include path during build + class numpy_include_dir(object): + def __str__(self): + import numpy + return numpy.get_include() + + include_dirs.append(numpy_include_dir()) + + return include_dirs -# Platform-specific compile arguments -extra_compile_args = ['-std=c++11'] -if platform.system() != 'Windows': - extra_compile_args.append('-fPIC') -if platform.system() == 'Darwin': - extra_compile_args.extend(['-stdlib=libc++', '-mmacosx-version-min=10.9']) - -# Ensure build directories exist -build_temp_dir = os.path.join('build', f'temp.{platform.system().lower()}-{platform.machine()}-{sys.version_info[0]}.{sys.version_info[1]}') -os.makedirs(os.path.join(SETUP_DIR, build_temp_dir), exist_ok=True) +# Platform-specific compile and link arguments +def get_build_args(): + is_windows = platform.system() == 'Windows' + is_macos = platform.system() == 'Darwin' + + extra_compile_args = [] + extra_link_args = [] + + if is_windows: + # Windows-specific flags (MSVC) + extra_compile_args = ['/std:c++14', '/O2', '/EHsc'] + else: + # Unix-like systems (Linux/Mac) + extra_compile_args = ['-std=c++11', '-O3'] + if is_macos: + extra_compile_args.extend(['-stdlib=libc++', '-mmacosx-version-min=10.9']) + + return extra_compile_args, extra_link_args -# Define include directories -include_dirs = [] -if 
os.path.exists(os.path.join(SETUP_DIR, 'src')): - include_dirs.append('src') -else: - include_dirs.append(os.path.join('..', 'src')) +# Get the source files and build arguments +source_files = get_source_files() +include_dirs = get_include_dirs() +extra_compile_args, extra_link_args = get_build_args() # Define the extension module meshoptimizer_module = Extension( @@ -89,6 +101,7 @@ def get_long_description(): sources=source_files, include_dirs=include_dirs, extra_compile_args=extra_compile_args, + extra_link_args=extra_link_args, language='c++', ) @@ -105,6 +118,7 @@ def check_source_files_exist(): print("Warning: Some source files were not found. This may cause build failures.") print(f"Current directory: {os.getcwd()}") print(f"Setup directory: {SETUP_DIR}") + print(f"Source files: {source_files}") setup( name='meshoptimizer', @@ -118,9 +132,14 @@ def check_source_files_exist(): install_requires=[ 'numpy>=1.19.0', ], + setup_requires=[ + 'setuptools>=42', + 'wheel', + 'numpy>=1.19.0', + ], python_requires='>=3.6', package_data={ - '': ['src/*.cpp', 'src/*.h'], + '': ['src/*.cpp', 'src/*.h', 'python/src/*.cpp', 'python/src/*.h'], }, include_package_data=True, classifiers=[ From 190a8f08c1de801bfdca8805bf3e4fc54538f6a4 Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 03:32:34 +0000 Subject: [PATCH 13/30] Remove unnecessary directory change in test execution step of release workflow --- .github/workflows/release.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 650ba0f43..b655e6c2d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -92,7 +92,6 @@ jobs: - name: Run tests run: | - cd python python -m unittest discover -v - name: Build package From e503ec774529733a57d1cdafaf76db2f4353a7ba Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 03:34:10 +0000 Subject: [PATCH 14/30] Comment out package build and storage steps for Ubuntu in release workflow --- .github/workflows/release.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b655e6c2d..c3aa64cde 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -94,14 +94,14 @@ jobs: run: | python -m unittest discover -v - - name: Build package - run: | - python -m build --sdist - if: matrix.os == 'ubuntu' + # - name: Build package + # run: | + # python -m build --sdist + # if: matrix.os == 'ubuntu' - - name: Store package - uses: actions/upload-artifact@v4 - with: - name: python-package - path: python/dist/* - if: matrix.os == 'ubuntu' \ No newline at end of file + # - name: Store package + # uses: actions/upload-artifact@v4 + # with: + # name: python-package + # path: python/dist/* + # if: matrix.os == 'ubuntu' \ No newline at end of file From 25d1093c42f576f71370924e9164382dd2d09fa1 Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 03:36:13 +0000 Subject: [PATCH 15/30] Change directory to 'python' before running tests in release workflow --- .github/workflows/release.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c3aa64cde..eaf45b9e2 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -92,6 +92,7 @@ jobs: - name: Run tests run: | + cd python python -m unittest discover -v # - name: Build package From cc875f9678125105b05a9a78ec350fabf050180c Mon Sep 17 00:00:00 
2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 04:15:51 +0000 Subject: [PATCH 16/30] Refactor get_source_files and get_build_args functions in setup.py for clarity and platform-specific support --- python/setup.py | 27 +++++++++++---------------- 1 file changed, 11 insertions(+), 16 deletions(-) diff --git a/python/setup.py b/python/setup.py index 9b70f8a84..296c01d41 100644 --- a/python/setup.py +++ b/python/setup.py @@ -56,19 +56,6 @@ def get_include_dirs(): else: include_dirs.append(os.path.join('..', 'src')) - # Try to add numpy include directory if available, but don't fail if it's not - try: - import numpy - include_dirs.append(numpy.get_include()) - except ImportError: - # Create a class that will resolve numpy's include path during build - class numpy_include_dir(object): - def __str__(self): - import numpy - return numpy.get_include() - - include_dirs.append(numpy_include_dir()) - return include_dirs # Platform-specific compile and link arguments @@ -78,22 +65,29 @@ def get_build_args(): extra_compile_args = [] extra_link_args = [] + define_macros = [] if is_windows: # Windows-specific flags (MSVC) extra_compile_args = ['/std:c++14', '/O2', '/EHsc'] + # Export functions for DLL + define_macros = [ + ('MESHOPTIMIZER_API', '__declspec(dllexport)'), + ('MESHOPTIMIZER_EXPERIMENTAL', '__declspec(dllexport)') + ] + extra_link_args = ['/DLL'] else: # Unix-like systems (Linux/Mac) - extra_compile_args = ['-std=c++11', '-O3'] + extra_compile_args = ['-std=c++11', '-O3', '-fPIC'] if is_macos: extra_compile_args.extend(['-stdlib=libc++', '-mmacosx-version-min=10.9']) - return extra_compile_args, extra_link_args + return extra_compile_args, extra_link_args, define_macros # Get the source files and build arguments source_files = get_source_files() include_dirs = get_include_dirs() -extra_compile_args, extra_link_args = get_build_args() +extra_compile_args, extra_link_args, define_macros = get_build_args() # Define the extension module meshoptimizer_module = Extension( @@ -102,6 +96,7 @@ def get_build_args(): include_dirs=include_dirs, extra_compile_args=extra_compile_args, extra_link_args=extra_link_args, + define_macros=define_macros, language='c++', ) From 5468a916d4fb4b5e2fbdc6abc7cafa43c3d67c74 Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 04:20:11 +0000 Subject: [PATCH 17/30] Simplify get_source_files function in setup.py by removing unnecessary checks and streamline include directory handling --- python/setup.py | 29 ++++------------------------- 1 file changed, 4 insertions(+), 25 deletions(-) diff --git a/python/setup.py b/python/setup.py index 296c01d41..560196e84 100644 --- a/python/setup.py +++ b/python/setup.py @@ -1,9 +1,6 @@ from setuptools import setup, Extension, find_packages import os import platform -import sys -import re - # Get the directory containing this file (setup.py) SETUP_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -22,21 +19,14 @@ def get_long_description(): # Determine source files def get_source_files(): # Check if we're in the python directory or the root directory - if os.path.exists('src'): - # Source files are in the src directory - src_path = 'src' - rel_path = 'src' - else: - # Source files are in the root src directory - src_path = os.path.join('..', 'src') - rel_path = os.path.join('..', 'src') - + src_path = os.path.join('..', 'src') + # Get all .cpp files from the src directory source_files = [] if os.path.exists(src_path): for filename in os.listdir(src_path): if filename.endswith('.cpp'): - 
source_files.append(os.path.join(rel_path, filename)) + source_files.append(os.path.join(src_path, filename)) # Add the module initialization file source_files.append("python/src/module.cpp") @@ -47,17 +37,6 @@ def get_source_files(): return source_files -# Get include directories -def get_include_dirs(): - include_dirs = [] - - if os.path.exists('src'): - include_dirs.append('src') - else: - include_dirs.append(os.path.join('..', 'src')) - - return include_dirs - # Platform-specific compile and link arguments def get_build_args(): is_windows = platform.system() == 'Windows' @@ -86,7 +65,7 @@ def get_build_args(): # Get the source files and build arguments source_files = get_source_files() -include_dirs = get_include_dirs() +include_dirs = [os.path.join('..', 'src')] extra_compile_args, extra_link_args, define_macros = get_build_args() # Define the extension module From 7d405e6b07eb0b52d494ee6aaae25bf5b690cc87 Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 04:24:34 +0000 Subject: [PATCH 18/30] Update README to include Python interface installation instructions and clean up GitHub Actions workflow --- .github/workflows/python-publish.yml | 5 ----- README.md | 2 +- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index 3da093b43..bdb99fa26 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -3,11 +3,6 @@ name: Python Package on: workflow_dispatch: - -# on: -# release: -# types: [created] - jobs: deploy: runs-on: ubuntu-latest diff --git a/README.md b/README.md index bee53c330..eec3fd375 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ When a GPU renders triangle meshes, various stages of the GPU pipeline have to process vertex and index data. The efficiency of these stages depends on the data you feed to them; this library provides algorithms to help optimize meshes for these stages, as well as algorithms to reduce the mesh complexity and storage overhead. -The library provides a C and C++ interface for all algorithms; you can use it from C/C++ or from other languages via FFI (such as P/Invoke). If you want to use this library from Rust, you should use [meshopt crate](https://crates.io/crates/meshopt). JavaScript interface for some algorithms is available through [meshoptimizer.js](https://www.npmjs.com/package/meshoptimizer). +The library provides a C and C++ interface for all algorithms; you can use it from C/C++ or from other languages via FFI (such as P/Invoke). If you want to use this library from Rust, you should use [meshopt crate](https://crates.io/crates/meshopt). JavaScript interface for some algorithms is available through [meshoptimizer.js](https://www.npmjs.com/package/meshoptimizer). Python interface is available through [pip install meshoptimizer](https://pypi.org/project/meshoptimizer/). [gltfpack](./gltf/README.md), which is a tool that can automatically optimize glTF files, is developed and distributed alongside the library. 
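The build flags in the patches above (the `__declspec(dllexport)` macros on Windows, `-fPIC` on Unix) exist because of how the wrapper consumes the compiled extension: the module defines no real CPython methods, it is built so that its shared object exports the meshoptimizer C symbols, and the pure-Python layer binds those symbols with ctypes at runtime. Below is a minimal sketch of that binding pattern; it assumes only that the extension imports as `meshoptimizer._meshoptimizer` and exports the public C API from `src/meshoptimizer.h`, and the loading details are illustrative rather than the wrapper's exact `_loader.py`:

```python
import ctypes
import numpy as np

# Import the compiled extension only to locate its shared object on disk.
import meshoptimizer._meshoptimizer as _ext

# Re-open the extension with ctypes to reach its exported C symbols.
lib = ctypes.CDLL(_ext.__file__)

# Declare one exported function from meshoptimizer.h:
# void meshopt_optimizeVertexCache(unsigned int* destination, const unsigned int* indices,
#                                  size_t index_count, size_t vertex_count)
lib.meshopt_optimizeVertexCache.restype = None
lib.meshopt_optimizeVertexCache.argtypes = [
    ctypes.POINTER(ctypes.c_uint),  # destination indices
    ctypes.POINTER(ctypes.c_uint),  # source indices
    ctypes.c_size_t,                # index_count
    ctypes.c_size_t,                # vertex_count
]

indices = np.array([0, 1, 2, 2, 3, 0], dtype=np.uint32)
destination = np.zeros_like(indices)

lib.meshopt_optimizeVertexCache(
    destination.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)),
    indices.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)),
    len(indices),
    4,  # vertex_count: the quad above uses vertices 0..3
)
```

Any other exported entry point can be bound the same way; this is also why the Windows build needs explicit export macros, while Linux and macOS shared objects export non-hidden symbols by default.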
From 61cd5346498520d9d3b1c01cbdbf5aea8c1e6d11 Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 07:04:18 +0000 Subject: [PATCH 19/30] Only have low level api --- python/README.md | 166 +++++++++++----------- python/meshoptimizer/__init__.py | 212 +--------------------------- python/meshoptimizer/decoder.py | 2 +- python/meshoptimizer/encoder.py | 2 +- python/meshoptimizer/simplifier.py | 2 +- python/meshoptimizer/utils.py | 2 +- python/tests/test_encoding.py | 98 +++---------- python/tests/test_mesh_integrity.py | 197 -------------------------- python/tests/test_optimization.py | 32 ----- python/tests/test_simplification.py | 34 ----- 10 files changed, 109 insertions(+), 638 deletions(-) delete mode 100644 python/tests/test_mesh_integrity.py diff --git a/python/README.md b/python/README.md index c9a6b4a78..178e04c3b 100644 --- a/python/README.md +++ b/python/README.md @@ -23,15 +23,28 @@ pip install -e . - Vertex fetch optimization - Mesh simplification - Vertex/index buffer compression and decompression +- Zip file storage for encoded meshes +- Numpy array compression and storage - And more... ## Usage -### Basic Usage +### Low-level API + +The meshoptimizer Python bindings provide a low-level API that directly maps to the C++ functions. All functions accept numpy arrays and have optional parameters that are automatically derived when not provided. ```python import numpy as np -from meshoptimizer import Mesh +from meshoptimizer import ( + optimize_vertex_cache, + optimize_overdraw, + optimize_vertex_fetch, + simplify, + encode_vertex_buffer, + decode_vertex_buffer, + encode_index_buffer, + decode_index_buffer +) # Create a simple mesh (a cube) vertices = np.array([ @@ -55,63 +68,64 @@ indices = np.array([ 4, 5, 1, 1, 0, 4 # bottom ], dtype=np.uint32) -# Create a mesh -mesh = Mesh(vertices, indices) - -# Optimize the mesh -mesh.optimize_vertex_cache() -mesh.optimize_overdraw() -mesh.optimize_vertex_fetch() - -# Simplify the mesh -mesh.simplify(target_ratio=0.5) # Keep 50% of triangles - -# Encode the mesh for efficient transmission -encoded = mesh.encode() - -# Decode the mesh -decoded = Mesh.decode( - encoded, - vertex_count=len(mesh.vertices), - vertex_size=mesh.vertices.itemsize * mesh.vertices.shape[1], - index_count=len(mesh.indices) -) -``` - -### Low-level API - -If you need more control, you can use the low-level API directly: - -```python -import numpy as np -from meshoptimizer import ( - optimize_vertex_cache, - optimize_overdraw, - optimize_vertex_fetch, - simplify, - encode_vertex_buffer, - decode_vertex_buffer, - encode_index_buffer, - decode_index_buffer -) - # Optimize vertex cache optimized_indices = np.zeros_like(indices) -optimize_vertex_cache(optimized_indices, indices, len(indices), len(vertices)) +optimize_vertex_cache(optimized_indices, indices) # vertex_count is automatically derived # Optimize overdraw optimized_indices2 = np.zeros_like(indices) optimize_overdraw( optimized_indices2, optimized_indices, + vertices +) # index_count, vertex_count, and vertex_positions_stride are automatically derived + +# Optimize vertex fetch +optimized_vertices = np.zeros_like(vertices) +unique_vertex_count = optimize_vertex_fetch( + optimized_vertices, + optimized_indices2, + vertices +) # index_count, vertex_count, and vertex_size are automatically derived + +print(f"Optimized mesh has {unique_vertex_count} unique vertices") + +# Simplify the mesh +simplified_indices = np.zeros(len(indices), dtype=np.uint32) +target_index_count = len(indices) // 2 # Keep 50% of 
triangles + +simplified_index_count = simplify( + simplified_indices, + optimized_indices2, vertices, - len(indices), - len(vertices), + target_index_count=target_index_count +) # index_count, vertex_count, and vertex_positions_stride are automatically derived + +print(f"Simplified mesh has {simplified_index_count} indices") + +# Encode the mesh for efficient transmission +encoded_vertices = encode_vertex_buffer(optimized_vertices[:unique_vertex_count]) +encoded_indices = encode_index_buffer(simplified_indices[:simplified_index_count]) + +print(f"Encoded vertex buffer size: {len(encoded_vertices)} bytes") +print(f"Encoded index buffer size: {len(encoded_indices)} bytes") + +# Decode the mesh +decoded_vertices = decode_vertex_buffer( + unique_vertex_count, vertices.itemsize * vertices.shape[1], - 1.05 # threshold + encoded_vertices ) -# And so on... +decoded_indices = decode_index_buffer( + simplified_index_count, + 4, # 4 bytes per index (uint32) + encoded_indices +) + +# Verify the decoded data +print(f"Decoded vertices shape: {decoded_vertices.shape}") +print(f"Decoded indices shape: {decoded_indices.shape}") ``` ## Notes on Index Encoding/Decoding @@ -120,51 +134,37 @@ When using the index buffer encoding and decoding functions, note that the decod ## API Reference -### High-level API - -#### `Mesh` class - -- `__init__(vertices, indices=None)`: Initialize a mesh with vertices and optional indices. -- `optimize_vertex_cache()`: Optimize the mesh for vertex cache efficiency. -- `optimize_overdraw(threshold=1.05)`: Optimize the mesh for overdraw. -- `optimize_vertex_fetch()`: Optimize the mesh for vertex fetch efficiency. -- `simplify(target_ratio=0.25, target_error=0.01, options=0)`: Simplify the mesh. -- `encode()`: Encode the mesh for efficient transmission. -- `decode(encoded_data, vertex_count, vertex_size, index_count=None, index_size=4)` (class method): Decode an encoded mesh. - -### Low-level API - -#### Vertex Remapping +### Vertex Remapping -- `generate_vertex_remap(destination, indices, index_count, vertices, vertex_count, vertex_size)`: Generate vertex remap table. -- `remap_vertex_buffer(destination, vertices, vertex_count, vertex_size, remap)`: Remap vertex buffer. -- `remap_index_buffer(destination, indices, index_count, remap)`: Remap index buffer. +- `generate_vertex_remap(destination, indices=None, index_count=None, vertices=None, vertex_count=None, vertex_size=None)`: Generate vertex remap table. +- `remap_vertex_buffer(destination, vertices, vertex_count=None, vertex_size=None, remap=None)`: Remap vertex buffer. +- `remap_index_buffer(destination, indices, index_count=None, remap=None)`: Remap index buffer. -#### Optimization +### Optimization -- `optimize_vertex_cache(destination, indices, index_count, vertex_count)`: Optimize vertex cache. -- `optimize_vertex_cache_strip(destination, indices, index_count, vertex_count)`: Optimize vertex cache for strip-like caches. -- `optimize_vertex_cache_fifo(destination, indices, index_count, vertex_count, cache_size)`: Optimize vertex cache for FIFO caches. -- `optimize_overdraw(destination, indices, vertex_positions, index_count, vertex_count, vertex_positions_stride, threshold)`: Optimize overdraw. -- `optimize_vertex_fetch(destination_vertices, indices, source_vertices, index_count, vertex_count, vertex_size)`: Optimize vertex fetch. -- `optimize_vertex_fetch_remap(destination, indices, index_count, vertex_count)`: Generate vertex remap to optimize vertex fetch. 
+- `optimize_vertex_cache(destination, indices, index_count=None, vertex_count=None)`: Optimize vertex cache. +- `optimize_vertex_cache_strip(destination, indices, index_count=None, vertex_count=None)`: Optimize vertex cache for strip-like caches. +- `optimize_vertex_cache_fifo(destination, indices, index_count=None, vertex_count=None, cache_size=16)`: Optimize vertex cache for FIFO caches. +- `optimize_overdraw(destination, indices, vertex_positions, index_count=None, vertex_count=None, vertex_positions_stride=None, threshold=1.05)`: Optimize overdraw. +- `optimize_vertex_fetch(destination_vertices, indices, source_vertices, index_count=None, vertex_count=None, vertex_size=None)`: Optimize vertex fetch. +- `optimize_vertex_fetch_remap(destination, indices, index_count=None, vertex_count=None)`: Generate vertex remap to optimize vertex fetch. -#### Simplification +### Simplification -- `simplify(destination, indices, vertex_positions, index_count, vertex_count, vertex_positions_stride, target_index_count, target_error, options, result_error)`: Simplify mesh. -- `simplify_with_attributes(destination, indices, vertex_positions, vertex_attributes, attribute_weights, index_count, vertex_count, vertex_positions_stride, vertex_attributes_stride, attribute_count, vertex_lock, target_index_count, target_error, options, result_error)`: Simplify mesh with attribute metric. -- `simplify_sloppy(destination, indices, vertex_positions, index_count, vertex_count, vertex_positions_stride, target_index_count, target_error, result_error)`: Simplify mesh (sloppy). -- `simplify_points(destination, vertex_positions, vertex_colors, vertex_count, vertex_positions_stride, vertex_colors_stride, color_weight, target_vertex_count)`: Simplify point cloud. -- `simplify_scale(vertex_positions, vertex_count, vertex_positions_stride)`: Get the scale factor for simplification error. +- `simplify(destination, indices, vertex_positions, index_count=None, vertex_count=None, vertex_positions_stride=None, target_index_count=None, target_error=0.01, options=0, result_error=None)`: Simplify mesh. +- `simplify_with_attributes(destination, indices, vertex_positions, vertex_attributes, attribute_weights, index_count=None, vertex_count=None, vertex_positions_stride=None, vertex_attributes_stride=None, attribute_count=None, vertex_lock=None, target_index_count=None, target_error=0.01, options=0, result_error=None)`: Simplify mesh with attribute metric. +- `simplify_sloppy(destination, indices, vertex_positions, index_count=None, vertex_count=None, vertex_positions_stride=None, target_index_count=None, target_error=0.01, result_error=None)`: Simplify mesh (sloppy). +- `simplify_points(destination, vertex_positions, vertex_colors=None, vertex_count=None, vertex_positions_stride=None, vertex_colors_stride=None, color_weight=1.0, target_vertex_count=None)`: Simplify point cloud. +- `simplify_scale(vertex_positions, vertex_count=None, vertex_positions_stride=None)`: Get the scale factor for simplification error. -#### Encoding/Decoding +### Encoding/Decoding -- `encode_vertex_buffer(vertices, vertex_count, vertex_size)`: Encode vertex buffer. -- `encode_index_buffer(indices, index_count, vertex_count)`: Encode index buffer. +- `encode_vertex_buffer(vertices, vertex_count=None, vertex_size=None)`: Encode vertex buffer. +- `encode_index_buffer(indices, index_count=None, vertex_count=None)`: Encode index buffer. - `encode_vertex_version(version)`: Set vertex encoder format version. 
- `encode_index_version(version)`: Set index encoder format version. -- `decode_vertex_buffer(destination, vertex_count, vertex_size, buffer)`: Decode vertex buffer. -- `decode_index_buffer(destination, index_count, index_size, buffer)`: Decode index buffer. +- `decode_vertex_buffer(vertex_count, vertex_size, buffer)`: Decode vertex buffer. +- `decode_index_buffer(index_count, index_size, buffer)`: Decode index buffer. - `decode_vertex_version(buffer)`: Get encoded vertex format version. - `decode_index_version(buffer)`: Get encoded index format version. - `decode_filter_oct(buffer, count, stride)`: Apply octahedral filter to decoded data. diff --git a/python/meshoptimizer/__init__.py b/python/meshoptimizer/__init__.py index 20855c191..bed400528 100644 --- a/python/meshoptimizer/__init__.py +++ b/python/meshoptimizer/__init__.py @@ -3,6 +3,9 @@ This package provides Python bindings for the meshoptimizer C++ library, which offers various algorithms for optimizing 3D meshes for GPU rendering. +It also provides utilities for compressing and storing numpy arrays. + +High-level functionality is available in the 'export' submodule. """ from .encoder import ( @@ -48,212 +51,3 @@ remap_vertex_buffer, remap_index_buffer, ) - -import numpy as np -from typing import Optional, Union, Dict, Any, ClassVar, Type, TypeVar - -T = TypeVar('T', bound='Mesh') - -class Mesh: - """ - A class representing a 3D mesh with optimization capabilities. - """ - - def __init__(self, vertices: np.ndarray, indices: Optional[np.ndarray] = None) -> None: - """ - Initialize a mesh with vertices and optional indices. - - Args: - vertices: numpy array of vertex data - indices: numpy array of indices (optional) - """ - self.vertices = np.asarray(vertices, dtype=np.float32) - self.indices = np.asarray(indices, dtype=np.uint32) if indices is not None else None - self.vertex_count = len(vertices) - self.index_count = len(indices) if indices is not None else 0 - - def optimize_vertex_cache(self) -> 'Mesh': - """ - Optimize the mesh for vertex cache efficiency. - - Returns: - self (for method chaining) - """ - if self.indices is None: - raise ValueError("Mesh has no indices to optimize") - - # Create output array - optimized_indices = np.zeros_like(self.indices) - - # Call optimization function - optimize_vertex_cache( - optimized_indices, - self.indices, - self.index_count, - self.vertex_count - ) - - self.indices = optimized_indices - return self - - def optimize_overdraw(self, threshold: float = 1.05) -> 'Mesh': - """ - Optimize the mesh for overdraw. - - Args: - threshold: threshold for optimization (default: 1.05) - - Returns: - self (for method chaining) - """ - if self.indices is None: - raise ValueError("Mesh has no indices to optimize") - - # Create output array - optimized_indices = np.zeros_like(self.indices) - - # Call optimization function - optimize_overdraw( - optimized_indices, - self.indices, - self.vertices, - self.index_count, - self.vertex_count, - self.vertices.itemsize * self.vertices.shape[1], - threshold - ) - - self.indices = optimized_indices - return self - - def optimize_vertex_fetch(self) -> 'Mesh': - """ - Optimize the mesh for vertex fetch efficiency. 
- - Returns: - self (for method chaining) - """ - if self.indices is None: - raise ValueError("Mesh has no indices to optimize") - - # Create output array - optimized_vertices = np.zeros_like(self.vertices) - - # Call optimization function - unique_vertex_count = optimize_vertex_fetch( - optimized_vertices, - self.indices, - self.vertices, - self.index_count, - self.vertex_count, - self.vertices.itemsize * self.vertices.shape[1] - ) - - self.vertices = optimized_vertices[:unique_vertex_count] - self.vertex_count = unique_vertex_count - return self - - def simplify(self, target_ratio: float = 0.25, target_error: float = 0.01, options: int = 0) -> 'Mesh': - """ - Simplify the mesh. - - Args: - target_ratio: ratio of triangles to keep (default: 0.25) - target_error: target error (default: 0.01) - options: simplification options (default: 0) - - Returns: - self (for method chaining) - """ - if self.indices is None: - raise ValueError("Mesh has no indices to simplify") - - # Calculate target index count - target_index_count = int(self.index_count * target_ratio) - - # Create output array - simplified_indices = np.zeros(self.index_count, dtype=np.uint32) - - # Call simplification function - result_error = np.array([0.0], dtype=np.float32) - new_index_count = simplify( - simplified_indices, - self.indices, - self.vertices, - self.index_count, - self.vertex_count, - self.vertices.itemsize * self.vertices.shape[1], - target_index_count, - target_error, - options, - result_error - ) - - self.indices = simplified_indices[:new_index_count] - self.index_count = new_index_count - return self - - def encode(self) -> Dict[str, bytes]: - """ - Encode the mesh for efficient transmission. - - Returns: - Dictionary with encoded vertices and indices - """ - # Encode vertices - encoded_vertices = encode_vertex_buffer( - self.vertices, - self.vertex_count, - self.vertices.itemsize * self.vertices.shape[1] - ) - - # Encode indices if present - encoded_indices = None - if self.indices is not None: - encoded_indices = encode_index_buffer( - self.indices, - self.index_count, - self.vertex_count - ) - - return { - 'vertices': encoded_vertices, - 'indices': encoded_indices - } - - @classmethod - def decode(cls: Type[T], encoded_data: Dict[str, bytes], - vertex_count: int, vertex_size: int, - index_count: Optional[int] = None, - index_size: int = 4) -> T: - """ - Decode an encoded mesh. - - Args: - encoded_data: Dictionary with encoded vertices and indices - vertex_count: Number of vertices - vertex_size: Size of each vertex in bytes - index_count: Number of indices (optional) - index_size: Size of each index in bytes (default: 4) - - Returns: - Decoded Mesh object - """ - # Decode vertices using the new function that returns a numpy array - vertices = decode_vertex_buffer( - vertex_count, - vertex_size, - encoded_data['vertices'] - ) - - # Decode indices if present using the new function that returns a numpy array - indices = None - if encoded_data['indices'] is not None and index_count is not None: - indices = decode_index_buffer( - index_count, - index_size, - encoded_data['indices'] - ) - - return cls(vertices, indices) - diff --git a/python/meshoptimizer/decoder.py b/python/meshoptimizer/decoder.py index addfebdad..7aa713d50 100644 --- a/python/meshoptimizer/decoder.py +++ b/python/meshoptimizer/decoder.py @@ -2,7 +2,7 @@ Decoder functions for meshoptimizer. 
""" import ctypes -from typing import Optional, Union, Tuple, Any, List +from typing import Union import numpy as np from ._loader import lib diff --git a/python/meshoptimizer/encoder.py b/python/meshoptimizer/encoder.py index 22bfc42f8..02f1a1728 100644 --- a/python/meshoptimizer/encoder.py +++ b/python/meshoptimizer/encoder.py @@ -2,7 +2,7 @@ Encoder functions for meshoptimizer. """ import ctypes -from typing import Optional, Union, Tuple +from typing import Optional import numpy as np from ._loader import lib diff --git a/python/meshoptimizer/simplifier.py b/python/meshoptimizer/simplifier.py index cd458342f..58d829c0c 100644 --- a/python/meshoptimizer/simplifier.py +++ b/python/meshoptimizer/simplifier.py @@ -2,7 +2,7 @@ Simplification functions for meshoptimizer. """ import ctypes -from typing import Optional, Union, Tuple, List, Any +from typing import Optional import numpy as np from ._loader import lib diff --git a/python/meshoptimizer/utils.py b/python/meshoptimizer/utils.py index f5f782858..491129d2f 100644 --- a/python/meshoptimizer/utils.py +++ b/python/meshoptimizer/utils.py @@ -2,7 +2,7 @@ Utility functions for meshoptimizer. """ import ctypes -from typing import Optional, Union, Tuple, Any +from typing import Optional import numpy as np from ._loader import lib diff --git a/python/tests/test_encoding.py b/python/tests/test_encoding.py index 9f3a93972..e68a778bb 100644 --- a/python/tests/test_encoding.py +++ b/python/tests/test_encoding.py @@ -6,7 +6,7 @@ """ import numpy as np import unittest -from meshoptimizer import Mesh, encode_vertex_buffer, encode_index_buffer, decode_vertex_buffer, decode_index_buffer +from meshoptimizer import encode_vertex_buffer, decode_vertex_buffer class TestEncoding(unittest.TestCase): """Test encoding and decoding functionality.""" @@ -35,7 +35,6 @@ def setUp(self): 4, 5, 1, 1, 0, 4 # bottom ], dtype=np.uint32) - self.mesh = Mesh(self.vertices, self.indices) def get_triangles_set(self, vertices, indices): """ @@ -73,88 +72,29 @@ def test_encode_decode_vertices(self): # Check that the decoded vertices match the original np.testing.assert_array_almost_equal(self.vertices, decoded_vertices) - def test_encode_decode_mesh(self): - """Test that encoding and decoding a mesh preserves the geometry.""" - # Encode the mesh - encoded = self.mesh.encode() - - # Decode the mesh - decoded_mesh = Mesh.decode( - encoded, - vertex_count=len(self.mesh.vertices), - vertex_size=self.mesh.vertices.itemsize * self.mesh.vertices.shape[1], - index_count=len(self.mesh.indices) - ) - - # Check that the decoded vertices match the original - np.testing.assert_array_almost_equal(self.mesh.vertices, decoded_mesh.vertices) - - # The indices might not match exactly due to how the encoding/decoding works, - # but the geometry should be preserved. Let's check that by comparing - # the triangles. 
-
-        # Get the triangles from the original and decoded meshes
-        original_triangles = self.get_triangles_set(self.mesh.vertices, self.mesh.indices)
-        decoded_triangles = self.get_triangles_set(decoded_mesh.vertices, decoded_mesh.indices)
-        
-        # Check that the triangles match
-        self.assertEqual(original_triangles, decoded_triangles)
-    
-    def test_optimize_and_encode_decode(self):
-        """Test that optimizing and then encoding/decoding preserves the geometry."""
-        # Optimize the mesh
-        optimized_mesh = Mesh(self.vertices.copy(), self.indices.copy())
-        optimized_mesh.optimize_vertex_cache()
-        optimized_mesh.optimize_overdraw()
-        optimized_mesh.optimize_vertex_fetch()
-        
-        # Encode the optimized mesh
-        encoded = optimized_mesh.encode()
-        
-        # Decode the mesh
-        decoded_mesh = Mesh.decode(
-            encoded,
-            vertex_count=len(optimized_mesh.vertices),
-            vertex_size=optimized_mesh.vertices.itemsize * optimized_mesh.vertices.shape[1],
-            index_count=len(optimized_mesh.indices)
+    def test_encode_decode_indices(self):
+        """Test that encoding and decoding indices preserves the data."""
+        # Encode indices
+        encoded_indices = encode_index_buffer(
+            self.indices,
+            len(self.indices),
+            len(self.vertices)
         )
-        
-        # Check that the decoded vertices match the optimized vertices
-        np.testing.assert_array_almost_equal(optimized_mesh.vertices, decoded_mesh.vertices)
-        
-        # Get the triangles from the optimized and decoded meshes
-        optimized_triangles = self.get_triangles_set(optimized_mesh.vertices, optimized_mesh.indices)
-        decoded_triangles = self.get_triangles_set(decoded_mesh.vertices, decoded_mesh.indices)
-        
-        # Check that the triangles match
-        self.assertEqual(optimized_triangles, decoded_triangles)
-    
-    def test_simplify_and_encode_decode(self):
-        """Test that simplifying and then encoding/decoding preserves the geometry."""
-        # Simplify the mesh
-        simplified_mesh = Mesh(self.vertices.copy(), self.indices.copy())
-        simplified_mesh.simplify(target_ratio=0.5)  # Keep 50% of triangles
-        
-        # Encode the simplified mesh
-        encoded = simplified_mesh.encode()
-        
-        # Decode the mesh
-        decoded_mesh = Mesh.decode(
-            encoded,
-            vertex_count=len(simplified_mesh.vertices),
-            vertex_size=simplified_mesh.vertices.itemsize * simplified_mesh.vertices.shape[1],
-            index_count=len(simplified_mesh.indices)
+        # Decode indices using the new function that returns a numpy array
+        decoded_indices = decode_index_buffer(
+            len(self.indices),
+            self.indices.itemsize,
+            encoded_indices
        )
-        
-        # Check that the decoded vertices match the simplified vertices
-        np.testing.assert_array_almost_equal(simplified_mesh.vertices, decoded_mesh.vertices)
-        
-        # Get the triangles from the simplified and decoded meshes
-        simplified_triangles = self.get_triangles_set(simplified_mesh.vertices, simplified_mesh.indices)
-        decoded_triangles = self.get_triangles_set(decoded_mesh.vertices, decoded_mesh.indices)
-        
+        # Check that the decoded indices match the original
+        np.testing.assert_array_almost_equal(self.indices, decoded_indices)
         # Check that the triangles match
-        self.assertEqual(simplified_triangles, decoded_triangles)
+        original_triangles = self.get_triangles_set(self.vertices, self.indices)
+        decoded_triangles = self.get_triangles_set(self.vertices, decoded_indices)
+        self.assertEqual(original_triangles, decoded_triangles)
+    
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/python/tests/test_mesh_integrity.py b/python/tests/test_mesh_integrity.py
deleted file mode 100644
index 3d9469013..000000000
--- 
a/python/tests/test_mesh_integrity.py +++ /dev/null @@ -1,197 +0,0 @@ -""" -Tests for the meshoptimizer Python wrapper. - -This file contains tests to verify that the mesh vertices indexed by the indices -are the same before and after encoding/decoding, ensuring that the mesh geometry -is preserved correctly. -""" -import numpy as np -import unittest -from meshoptimizer import Mesh - -class TestMeshIntegrity(unittest.TestCase): - """Test mesh integrity during encoding/decoding.""" - - def setUp(self): - """Set up test data.""" - # Create a simple mesh (a cube) - self.vertices = np.array([ - # positions - [-0.5, -0.5, -0.5], - [0.5, -0.5, -0.5], - [0.5, 0.5, -0.5], - [-0.5, 0.5, -0.5], - [-0.5, -0.5, 0.5], - [0.5, -0.5, 0.5], - [0.5, 0.5, 0.5], - [-0.5, 0.5, 0.5] - ], dtype=np.float32) - - self.indices = np.array([ - 0, 1, 2, 2, 3, 0, # front - 1, 5, 6, 6, 2, 1, # right - 5, 4, 7, 7, 6, 5, # back - 4, 0, 3, 3, 7, 4, # left - 3, 2, 6, 6, 7, 3, # top - 4, 5, 1, 1, 0, 4 # bottom - ], dtype=np.uint32) - - self.mesh = Mesh(self.vertices, self.indices) - - def get_triangles_set(self, vertices, indices): - """ - Get a set of triangles from vertices and indices. - Each triangle is represented as a frozenset of tuples of vertex coordinates. - This makes the comparison invariant to vertex order within triangles. - """ - triangles = set() - for i in range(0, len(indices), 3): - # Get the three vertices of the triangle - v1 = tuple(vertices[indices[i]]) - v2 = tuple(vertices[indices[i+1]]) - v3 = tuple(vertices[indices[i+2]]) - # Create a frozenset of the vertices (order-invariant) - triangle = frozenset([v1, v2, v3]) - triangles.add(triangle) - return triangles - - def test_mesh_integrity_encode_decode(self): - """Test that mesh vertices indexed by indices are preserved during encoding/decoding.""" - # Get the original triangles - original_triangles = self.get_triangles_set(self.mesh.vertices, self.mesh.indices) - - # Encode the mesh - encoded = self.mesh.encode() - - # Decode the mesh - decoded_mesh = Mesh.decode( - encoded, - vertex_count=len(self.mesh.vertices), - vertex_size=self.mesh.vertices.itemsize * self.mesh.vertices.shape[1], - index_count=len(self.mesh.indices) - ) - - # Get the decoded triangles - decoded_triangles = self.get_triangles_set(decoded_mesh.vertices, decoded_mesh.indices) - - # Check that the triangles match - self.assertEqual(original_triangles, decoded_triangles) - - def test_mesh_integrity_optimize_encode_decode(self): - """Test that mesh vertices indexed by indices are preserved during optimization, encoding, and decoding.""" - # Create a copy of the mesh - optimized_mesh = Mesh(self.vertices.copy(), self.indices.copy()) - - # Optimize the mesh - optimized_mesh.optimize_vertex_cache() - optimized_mesh.optimize_overdraw() - optimized_mesh.optimize_vertex_fetch() - - # Get the optimized triangles - optimized_triangles = self.get_triangles_set(optimized_mesh.vertices, optimized_mesh.indices) - - # Encode the optimized mesh - encoded = optimized_mesh.encode() - - # Decode the mesh - decoded_mesh = Mesh.decode( - encoded, - vertex_count=len(optimized_mesh.vertices), - vertex_size=optimized_mesh.vertices.itemsize * optimized_mesh.vertices.shape[1], - index_count=len(optimized_mesh.indices) - ) - - # Get the decoded triangles - decoded_triangles = self.get_triangles_set(decoded_mesh.vertices, decoded_mesh.indices) - - # Check that the triangles match - self.assertEqual(optimized_triangles, decoded_triangles) - - def test_mesh_integrity_simplify_encode_decode(self): - """Test that 
mesh vertices indexed by indices are preserved during simplification, encoding, and decoding.""" - # Create a more complex mesh (a sphere) - # Generate a sphere with 16 segments and 16 rings - segments = 16 - rings = 16 - vertices = [] - indices = [] - - # Generate vertices - for i in range(rings + 1): - v = i / rings - phi = v * np.pi - - for j in range(segments): - u = j / segments - theta = u * 2 * np.pi - - x = np.sin(phi) * np.cos(theta) - y = np.sin(phi) * np.sin(theta) - z = np.cos(phi) - - vertices.append([x, y, z]) - - # Generate indices - for i in range(rings): - for j in range(segments): - a = i * segments + j - b = i * segments + (j + 1) % segments - c = (i + 1) * segments + (j + 1) % segments - d = (i + 1) * segments + j - - # Two triangles per quad - indices.extend([a, b, c]) - indices.extend([a, c, d]) - - sphere_vertices = np.array(vertices, dtype=np.float32) - sphere_indices = np.array(indices, dtype=np.uint32) - sphere_mesh = Mesh(sphere_vertices, sphere_indices) - - # Simplify the mesh - simplified_mesh = Mesh(sphere_vertices.copy(), sphere_indices.copy()) - simplified_mesh.simplify(target_ratio=0.5) # Keep 50% of triangles - - # Get the simplified triangles - simplified_triangles = self.get_triangles_set(simplified_mesh.vertices, simplified_mesh.indices) - - # Encode the simplified mesh - encoded = simplified_mesh.encode() - - # Decode the mesh - decoded_mesh = Mesh.decode( - encoded, - vertex_count=len(simplified_mesh.vertices), - vertex_size=simplified_mesh.vertices.itemsize * simplified_mesh.vertices.shape[1], - index_count=len(simplified_mesh.indices) - ) - - # Get the decoded triangles - decoded_triangles = self.get_triangles_set(decoded_mesh.vertices, decoded_mesh.indices) - - # Check that the triangles match - self.assertEqual(simplified_triangles, decoded_triangles) - - def test_mesh_integrity_triangles(self): - """Test that mesh triangles are preserved during encoding/decoding.""" - # Get the original triangles - original_triangles = self.get_triangles_set(self.mesh.vertices, self.mesh.indices) - - # Encode the mesh - encoded = self.mesh.encode() - - # Decode the mesh - decoded_mesh = Mesh.decode( - encoded, - vertex_count=len(self.mesh.vertices), - vertex_size=self.mesh.vertices.itemsize * self.mesh.vertices.shape[1], - index_count=len(self.mesh.indices) - ) - - # Get the decoded triangles - decoded_triangles = self.get_triangles_set(decoded_mesh.vertices, decoded_mesh.indices) - - # Check that the triangles match - self.assertEqual(original_triangles, decoded_triangles) - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/python/tests/test_optimization.py b/python/tests/test_optimization.py index d58d27444..526b605bc 100644 --- a/python/tests/test_optimization.py +++ b/python/tests/test_optimization.py @@ -7,7 +7,6 @@ import numpy as np import unittest from meshoptimizer import ( - Mesh, optimize_vertex_cache, optimize_overdraw, optimize_vertex_fetch, @@ -43,7 +42,6 @@ def setUp(self): 4, 5, 1, 1, 0, 4 # bottom ], dtype=np.uint32) - self.mesh = Mesh(self.vertices, self.indices) def get_triangles_set(self, vertices, indices): """ @@ -184,36 +182,6 @@ def test_vertex_remap(self): # Check that the triangles match self.assertEqual(original_triangles, remapped_triangles) - def test_mesh_optimization_chain(self): - """Test chaining multiple optimizations on a mesh.""" - # Create a copy of the mesh - optimized_mesh = Mesh(self.vertices.copy(), self.indices.copy()) - - # Apply optimizations - optimized_mesh.optimize_vertex_cache() 
- optimized_mesh.optimize_overdraw() - optimized_mesh.optimize_vertex_fetch() - - # Check that the optimized mesh has the same number of triangles - self.assertEqual(len(self.indices) // 3, len(optimized_mesh.indices) // 3) - - # Check that the optimized mesh has the same or fewer vertices - self.assertLessEqual(len(optimized_mesh.vertices), len(self.vertices)) - - # For the full optimization chain, we can't directly compare triangles because - # the vertex fetch optimization reorders vertices for better cache locality. - # Instead, we'll check that each vertex in the optimized mesh is present in the original mesh. - - # Check that all optimized vertices are present in the original vertices - for i in range(len(optimized_mesh.vertices)): - vertex = tuple(optimized_mesh.vertices[i]) - # Check if this vertex exists in the original vertices - found = False - for j in range(len(self.vertices)): - if np.allclose(self.vertices[j], optimized_mesh.vertices[i]): - found = True - break - self.assertTrue(found, f"Vertex {vertex} not found in original vertices") if __name__ == '__main__': unittest.main() \ No newline at end of file diff --git a/python/tests/test_simplification.py b/python/tests/test_simplification.py index 603613aaa..06a2f518c 100644 --- a/python/tests/test_simplification.py +++ b/python/tests/test_simplification.py @@ -7,15 +7,12 @@ import numpy as np import unittest from meshoptimizer import ( - Mesh, simplify, simplify_sloppy, simplify_points, simplify_scale, SIMPLIFY_LOCK_BORDER, SIMPLIFY_SPARSE, - SIMPLIFY_ERROR_ABSOLUTE, - SIMPLIFY_PRUNE ) class TestSimplification(unittest.TestCase): @@ -45,7 +42,6 @@ def setUp(self): 4, 5, 1, 1, 0, 4 # bottom ], dtype=np.uint32) - self.mesh = Mesh(self.vertices, self.indices) # Create a more complex mesh (a sphere) # Generate a sphere with 8 segments and 8 rings @@ -83,7 +79,6 @@ def setUp(self): self.sphere_vertices = np.array(vertices, dtype=np.float32) self.sphere_indices = np.array(indices, dtype=np.uint32) - self.sphere_mesh = Mesh(self.sphere_vertices, self.sphere_indices) def test_simplify_basic(self): """Test basic simplification.""" @@ -228,35 +223,6 @@ def test_simplify_scale(self): # Check that the scale is positive self.assertGreater(scale, 0.0) - - def test_mesh_simplify(self): - """Test mesh simplification using the Mesh class.""" - # Create a copy of the sphere mesh - simplified_mesh = Mesh(self.sphere_vertices.copy(), self.sphere_indices.copy()) - - # Simplify the mesh - simplified_mesh.simplify(target_ratio=0.5) # Keep 50% of triangles - - # Check that the number of triangles is reduced - self.assertLessEqual(len(simplified_mesh.indices) // 3, len(self.sphere_indices) // 3) - - # Check that the mesh is still valid - # (Each triangle should have 3 unique vertices) - for i in range(0, len(simplified_mesh.indices), 3): - a = simplified_mesh.indices[i] - b = simplified_mesh.indices[i+1] - c = simplified_mesh.indices[i+2] - - # Check that indices are within bounds - self.assertLess(a, len(simplified_mesh.vertices)) - self.assertLess(b, len(simplified_mesh.vertices)) - self.assertLess(c, len(simplified_mesh.vertices)) - - # Check that the triangle has 3 unique vertices - # (This is not always true for simplified meshes, but it's a good sanity check) - # self.assertNotEqual(a, b) - # self.assertNotEqual(b, c) - # self.assertNotEqual(c, a) if __name__ == '__main__': unittest.main() \ No newline at end of file From ced74a330cb74f4b4158787be82ef8490eba07a0 Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 07:10:04 +0000 
Subject: [PATCH 20/30] Bump version to 0.2.20a2 in setup.py

---
 python/setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/setup.py b/python/setup.py
index 560196e84..fc833a4aa 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -96,7 +96,7 @@ def check_source_files_exist():
 
 setup(
     name='meshoptimizer',
-    version="0.2.20a1",
+    version="0.2.20a2",
     description='Python wrapper for meshoptimizer library',
     long_description=get_long_description(),
     long_description_content_type='text/markdown',

From 2c99ca2d8bda72ef4a1135c01d5ad4cc3c15102c Mon Sep 17 00:00:00 2001
From: Afshawn Lotfi
Date: Wed, 5 Mar 2025 08:09:33 +0000
Subject: [PATCH 21/30] Refactor setup.py to generate module file from
 template and streamline source file handling; update .gitignore and
 MANIFEST.in for new structure

---
 .gitignore                          |  3 +-
 python/MANIFEST.in                  |  3 +-
 python/bindings/module.template.cpp | 40 ++++++++++++++++++++
 python/python/src/module.cpp        | 19 ----------
 python/setup.py                     | 58 ++++++++++++++---------------
 5 files changed, 72 insertions(+), 51 deletions(-)
 create mode 100644 python/bindings/module.template.cpp
 delete mode 100644 python/python/src/module.cpp

diff --git a/.gitignore b/.gitignore
index 6c1458341..1b384bab9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
-python/src/
+python/bindings/module.cpp
+python/src
 # IDE integrations
 /.idea/
 /.vs/
diff --git a/python/MANIFEST.in b/python/MANIFEST.in
index 6510d1f97..f31b83909 100644
--- a/python/MANIFEST.in
+++ b/python/MANIFEST.in
@@ -2,4 +2,5 @@ include README.md
 include pyproject.toml
 recursive-include src *.cpp *.h
 recursive-include python/src *.cpp *.h
-recursive-include ../src *.cpp *.h
\ No newline at end of file
+# Include the meshoptimizer source files
+graft ../src
\ No newline at end of file
diff --git a/python/bindings/module.template.cpp b/python/bindings/module.template.cpp
new file mode 100644
index 000000000..5c8a4f341
--- /dev/null
+++ b/python/bindings/module.template.cpp
@@ -0,0 +1,40 @@
+#include <Python.h>
+
+// Define MESHOPTIMIZER_IMPLEMENTATION to include the implementation
+#define MESHOPTIMIZER_IMPLEMENTATION
+// Include the meshoptimizer header
+#include "../../src/meshoptimizer.h"
+
+// Include all the implementation files
+{{SOURCE_IMPORTS}}
+
+// Define the Python module
+
+// Export all the C functions that are used by the Python wrapper
+// This ensures they are available when loaded via ctypes
+
+// Add your Python module function definitions here
+static PyMethodDef MeshoptimizerMethods[] = {
+    {NULL, NULL, 0, NULL} // Sentinel
+};
+
+static struct PyModuleDef meshoptimizermodule = {
+    PyModuleDef_HEAD_INIT,
+    "_meshoptimizer",
+    "Python bindings for meshoptimizer library.",
+    -1,
+    MeshoptimizerMethods
+};
+
+PyMODINIT_FUNC PyInit__meshoptimizer(void) {
+    PyObject* m = PyModule_Create(&meshoptimizermodule);
+    if (m == NULL)
+        return NULL;
+
+    // We don't need to add any methods to the module since we're using ctypes
+    // to access the C functions directly. The important part is that by including
+    // meshoptimizer.h and linking against the C++ code, the functions will be
+    // exported in the shared library.
+
+    return m;
+}
diff --git a/python/python/src/module.cpp b/python/python/src/module.cpp
deleted file mode 100644
index 4ff2e3a65..000000000
--- a/python/python/src/module.cpp
+++ /dev/null
@@ -1,19 +0,0 @@
-
-#include <Python.h>
-
-// Add your Python module function definitions here
-static PyMethodDef MeshoptimizerMethods[] = {
-    {NULL, NULL, 0, NULL} // Sentinel
-};
-
-static struct PyModuleDef meshoptimizermodule = {
-    PyModuleDef_HEAD_INIT,
-    "_meshoptimizer",
-    "Python bindings for meshoptimizer library.",
-    -1,
-    MeshoptimizerMethods
-};
-
-PyMODINIT_FUNC PyInit__meshoptimizer(void) {
-    return PyModule_Create(&meshoptimizermodule);
-}
diff --git a/python/setup.py b/python/setup.py
index fc833a4aa..fdf97aa35 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -16,25 +16,32 @@ def get_long_description():
         print(f"Warning: Could not read README.md: {e}")
         return 'Python wrapper for meshoptimizer library'
 
-# Determine source files
-def get_source_files():
-    # Check if we're in the python directory or the root directory
+# Determine source files and generate module file
+def generate_module_file():
     src_path = os.path.join('..', 'src')
-    
+
     # Get all .cpp files from the src directory
     source_files = []
     if os.path.exists(src_path):
-        for filename in os.listdir(src_path):
-            if filename.endswith('.cpp'):
-                source_files.append(os.path.join(src_path, filename))
+        source_files = [f"../../src/{f}" for f in os.listdir(src_path) if f.endswith('.cpp')]
+    
+    # Create the module.cpp file from template
+    module_template_path = os.path.join(SETUP_DIR, 'bindings', 'module.template.cpp')
+    # Create directory if it doesn't exist
 
-    # Add the module initialization file
-    source_files.append("python/src/module.cpp")
+    output_module_path = os.path.join(SETUP_DIR, 'bindings', 'module.cpp')
 
-    # Make sure we have source files
-    if not source_files:
-        raise RuntimeError(f"No source files found in {src_path}")
+    # Read template and insert source imports
+    with open(module_template_path, 'r') as template_file:
+        template_content = template_file.read()
 
+    source_imports = '\n'.join([f'#include "{src}"' for src in source_files])
+    module_content = template_content.replace('{{SOURCE_IMPORTS}}', source_imports)
+
+    # Write the resulting module file
+    with open(output_module_path, 'w') as module_file:
+        module_file.write(module_content)
+    
     return source_files
 
 # Platform-specific compile and link arguments
@@ -46,14 +53,19 @@ def get_build_args():
     extra_link_args = []
     define_macros = []
 
+    # Define macros for all platforms
+    define_macros = [
+        ('MESHOPTIMIZER_IMPLEMENTATION', '1')  # Include implementation in the build
+    ]
+
    if is_windows:
         # Windows-specific flags (MSVC)
         extra_compile_args = ['/std:c++14', '/O2', '/EHsc']
         # Export functions for DLL
-        define_macros = [
+        define_macros.extend([
             ('MESHOPTIMIZER_API', '__declspec(dllexport)'),
             ('MESHOPTIMIZER_EXPERIMENTAL', '__declspec(dllexport)')
-        ]
+        ])
         extra_link_args = ['/DLL']
     else:
         # Unix-like systems (Linux/Mac)
@@ -64,14 +76,13 @@ def get_build_args():
     return extra_compile_args, extra_link_args, define_macros
 
 # Get the source files and build arguments
-source_files = get_source_files()
 include_dirs = [os.path.join('..', 'src')]
 extra_compile_args, extra_link_args, define_macros = get_build_args()
 
 # Define the extension module
 meshoptimizer_module = Extension(
     'meshoptimizer._meshoptimizer',
-    sources=source_files,
+    sources= ["bindings/module.cpp"],
     include_dirs=include_dirs,
     extra_compile_args=extra_compile_args,
     extra_link_args=extra_link_args,
@@ -79,20 +90,7 @@ def get_build_args():
     language='c++',
 )
 
-# Check if source files exist at the expected paths
-def check_source_files_exist():
-    for source_file in source_files:
-        if not os.path.exists(source_file):
-            print(f"Warning: Source file not found: {source_file}")
-            return False
-    return True
-
-# Verify source files exist
-if not check_source_files_exist():
-    print("Warning: Some source files were not found. This may cause build failures.")
-    print(f"Current directory: {os.getcwd()}")
-    print(f"Setup directory: {SETUP_DIR}")
-    print(f"Source files: {source_files}")
+generate_module_file()
 
 setup(
     name='meshoptimizer',

From 5219e26b12b3247dbf82eb3198836c7e888e9e24 Mon Sep 17 00:00:00 2001
From: Afshawn Lotfi
Date: Wed, 5 Mar 2025 08:11:36 +0000
Subject: [PATCH 22/30] Bump version to 0.2.20a3 in setup.py

---
 python/setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/setup.py b/python/setup.py
index fdf97aa35..8460d2697 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -94,7 +94,7 @@ def get_build_args():
 
 setup(
     name='meshoptimizer',
-    version="0.2.20a2",
+    version="0.2.20a3",
     description='Python wrapper for meshoptimizer library',
     long_description=get_long_description(),
     long_description_content_type='text/markdown',

From b68cff5ca7a868fffafc1e44205720356227a772 Mon Sep 17 00:00:00 2001
From: Afshawn Lotfi
Date: Wed, 5 Mar 2025 18:37:03 +0000
Subject: [PATCH 23/30] Refactor Python bindings: replace module.template.cpp,
 update setup.py to generate module file from CMakeLists.txt, and improve
 memory allocation handling

---
 python/bindings/module.template.cpp | 40 ------------
 python/module.template.cpp          | 58 ++++++++++++++++++++++++++
 python/setup.py                     | 63 +++++++++++++++++++++++------
 3 files changed, 108 insertions(+), 53 deletions(-)
 delete mode 100644 python/bindings/module.template.cpp
 create mode 100644 python/module.template.cpp

diff --git a/python/bindings/module.template.cpp b/python/bindings/module.template.cpp
deleted file mode 100644
index 5c8a4f341..000000000
--- a/python/bindings/module.template.cpp
+++ /dev/null
@@ -1,40 +0,0 @@
-#include <Python.h>
-
-// Define MESHOPTIMIZER_IMPLEMENTATION to include the implementation
-#define MESHOPTIMIZER_IMPLEMENTATION
-// Include the meshoptimizer header
-#include "../../src/meshoptimizer.h"
-
-// Include all the implementation files
-{{SOURCE_IMPORTS}}
-
-// Define the Python module
-
-// Export all the C functions that are used by the Python wrapper
-// This ensures they are available when loaded via ctypes
-
-// Add your Python module function definitions here
-static PyMethodDef MeshoptimizerMethods[] = {
-    {NULL, NULL, 0, NULL} // Sentinel
-};
-
-static struct PyModuleDef meshoptimizermodule = {
-    PyModuleDef_HEAD_INIT,
-    "_meshoptimizer",
-    "Python bindings for meshoptimizer library.",
-    -1,
-    MeshoptimizerMethods
-};
-
-PyMODINIT_FUNC PyInit__meshoptimizer(void) {
-    PyObject* m = PyModule_Create(&meshoptimizermodule);
-    if (m == NULL)
-        return NULL;
-
-    // We don't need to add any methods to the module since we're using ctypes
-    // to access the C functions directly. The important part is that by including
-    // meshoptimizer.h and linking against the C++ code, the functions will be
-    // exported in the shared library.
-
-    return m;
-}
diff --git a/python/module.template.cpp b/python/module.template.cpp
new file mode 100644
index 000000000..c4d045e38
--- /dev/null
+++ b/python/module.template.cpp
@@ -0,0 +1,58 @@
+#include <Python.h>
+#include <numpy/arrayobject.h>
+#include <cstddef>
+
+// Define implementation before including the header
+#define MESHOPTIMIZER_NO_RESET_OVERRIDE
+#define MESHOPTIMIZER_IMPLEMENTATION
+#include "meshoptimizer.h"
+
+// Include all implementation files directly
+{{SOURCE_IMPORTS}}
+
+// Prevent namespace pollution
+namespace {
+
+void* fallback_allocate(size_t size) {
+    return PyMem_Malloc(size);
+}
+
+void fallback_deallocate(void* ptr) {
+    PyMem_Free(ptr);
+}
+
+void* (*allocate_fun)(size_t) = fallback_allocate;
+void (*deallocate_fun)(void*) = fallback_deallocate;
+
+PyObject* meshopt_set_allocator(PyObject* self, PyObject* args) {
+    meshopt_setAllocator(allocate_fun, deallocate_fun);
+    Py_RETURN_NONE;
+}
+
+PyMethodDef MeshoptMethods[] = {
+    {"set_allocator", meshopt_set_allocator, METH_NOARGS,
+     "Set the default memory allocator"},
+    {NULL, NULL, 0, NULL}
+};
+
+struct PyModuleDef meshopt_module = {
+    PyModuleDef_HEAD_INIT,
+    "_meshoptimizer",
+    "Python binding for meshoptimizer library",
+    -1,
+    MeshoptMethods
+};
+
+} // anonymous namespace
+
+PyMODINIT_FUNC PyInit__meshoptimizer(void) {
+    import_array();
+
+    PyObject* m = PyModule_Create(&meshopt_module);
+    if (m == NULL)
+        return NULL;
+
+    meshopt_setAllocator(allocate_fun, deallocate_fun);
+
+    return m;
+}
diff --git a/python/setup.py b/python/setup.py
index 8460d2697..e9d603ee5 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -1,9 +1,15 @@
 from setuptools import setup, Extension, find_packages
 import os
 import platform
+import sys
 
 # Get the directory containing this file (setup.py)
 SETUP_DIR = os.path.dirname(os.path.abspath(__file__))
+SRC_DIR = os.path.join(SETUP_DIR, 'src')
+
+# Create source directory if it doesn't exist
+if not os.path.exists(SRC_DIR):
+    os.makedirs(SRC_DIR)
 
 # Get long description from README
 def get_long_description():
@@ -16,20 +22,42 @@ def get_long_description():
         print(f"Warning: Could not read README.md: {e}")
         return 'Python wrapper for meshoptimizer library'
 
-# Determine source files and generate module file
-def generate_module_file():
-    src_path = os.path.join('..', 'src')
-    
-    # Get all .cpp files from the src directory
+# Parse CMakeLists.txt to get source files in the correct order
+def parse_cmake_sources():
+    cmake_path = os.path.join('..', 'CMakeLists.txt')
     source_files = []
-    if os.path.exists(src_path):
-        source_files = [f"../../src/{f}" for f in os.listdir(src_path) if f.endswith('.cpp')]
+    
+    if os.path.exists(cmake_path):
+        with open(cmake_path, 'r') as f:
+            content = f.read()
+        
+        # Find the SOURCES section
+        start = content.find('set(SOURCES')
+        if start != -1:
+            end = content.find(')', start)
+            if end != -1:
+                sources_section = content[start:end]
+                
+                # Extract file paths
+                for line in sources_section.split('\n'):
+                    line = line.strip()
+                    if line.startswith('src/') and line.endswith('.cpp'):
+                        # Convert to relative path from python directory
+                        source_files.append(line.replace('src/', ''))
+    
+    
+    return source_files
+
+# Determine source files and generate module file
+def generate_module_file():
+    # Get source files from CMakeLists.txt
+    source_files = parse_cmake_sources()
+    
     # Create the module.cpp file from template
-    module_template_path = os.path.join(SETUP_DIR, 'bindings', 'module.template.cpp')
+    module_template_path = os.path.join(SETUP_DIR, 'module.template.cpp')
     # Create directory if it doesn't exist
-    
output_module_path = os.path.join(SETUP_DIR, 'bindings', 'module.cpp') + output_module_path = os.path.join(SRC_DIR, 'module.cpp') # Read template and insert source imports with open(module_template_path, 'r') as template_file: @@ -40,7 +68,10 @@ def generate_module_file(): # Write the resulting module file with open(output_module_path, 'w') as module_file: + # Add a comment indicating this file is generated + module_file.write("// This file is automatically generated by setup.py\n") module_file.write(module_content) + return source_files @@ -75,14 +106,17 @@ def get_build_args(): return extra_compile_args, extra_link_args, define_macros +# Import numpy for include directory +import numpy as np + # Get the source files and build arguments -include_dirs = [os.path.join('..', 'src')] +include_dirs = [os.path.join('..', 'src'), np.get_include()] extra_compile_args, extra_link_args, define_macros = get_build_args() # Define the extension module meshoptimizer_module = Extension( 'meshoptimizer._meshoptimizer', - sources= ["bindings/module.cpp"], + sources= ["src/module.cpp"], include_dirs=include_dirs, extra_compile_args=extra_compile_args, extra_link_args=extra_link_args, @@ -90,7 +124,10 @@ def get_build_args(): language='c++', ) -generate_module_file() +try: + generate_module_file() +except Exception as e: + pass setup( name='meshoptimizer', @@ -111,7 +148,7 @@ def get_build_args(): ], python_requires='>=3.6', package_data={ - '': ['src/*.cpp', 'src/*.h', 'python/src/*.cpp', 'python/src/*.h'], + '': ['src/*.cpp', 'src/*.h'], }, include_package_data=True, classifiers=[ From 6879cc91b685dd286c05d48d07b131168267ea3e Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 18:37:07 +0000 Subject: [PATCH 24/30] Update MANIFEST.in to prune unnecessary directories and adjust source file inclusion in setup.py --- python/MANIFEST.in | 6 +++--- python/setup.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/python/MANIFEST.in b/python/MANIFEST.in index f31b83909..33000d439 100644 --- a/python/MANIFEST.in +++ b/python/MANIFEST.in @@ -1,6 +1,6 @@ include README.md include pyproject.toml recursive-include src *.cpp *.h -recursive-include python/src *.cpp *.h -# Include the meshoptimizer source files -graft ../src \ No newline at end of file +graft ../src +prune .vscode +prune tests \ No newline at end of file diff --git a/python/setup.py b/python/setup.py index e9d603ee5..16d502e02 100644 --- a/python/setup.py +++ b/python/setup.py @@ -43,7 +43,7 @@ def parse_cmake_sources(): line = line.strip() if line.startswith('src/') and line.endswith('.cpp'): # Convert to relative path from python directory - source_files.append(line.replace('src/', '')) + source_files.append(line) return source_files @@ -63,7 +63,7 @@ def generate_module_file(): with open(module_template_path, 'r') as template_file: template_content = template_file.read() - source_imports = '\n'.join([f'#include "{src}"' for src in source_files]) + source_imports = '\n'.join([f'#include "{src.replace('src/' , '')}"' for src in source_files]) module_content = template_content.replace('{{SOURCE_IMPORTS}}', source_imports) # Write the resulting module file From f1ef8e4f4ba7aa9328bf302484df151216a85d06 Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 18:38:47 +0000 Subject: [PATCH 25/30] Add debug print statement for source files in generate_module_file function --- python/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/setup.py b/python/setup.py index 
16d502e02..0c1a0b94a 100644 --- a/python/setup.py +++ b/python/setup.py @@ -52,7 +52,7 @@ def parse_cmake_sources(): def generate_module_file(): # Get source files from CMakeLists.txt source_files = parse_cmake_sources() - + print(source_files) # Create the module.cpp file from template module_template_path = os.path.join(SETUP_DIR, 'module.template.cpp') # Create directory if it doesn't exist From 3623568f7302a2fee261cb41cee8750fbf28084d Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 18:42:15 +0000 Subject: [PATCH 26/30] Remove debug print statement and fix string formatting in generate_module_file function --- python/setup.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/python/setup.py b/python/setup.py index 0c1a0b94a..881b203c3 100644 --- a/python/setup.py +++ b/python/setup.py @@ -52,7 +52,6 @@ def parse_cmake_sources(): def generate_module_file(): # Get source files from CMakeLists.txt source_files = parse_cmake_sources() - print(source_files) # Create the module.cpp file from template module_template_path = os.path.join(SETUP_DIR, 'module.template.cpp') # Create directory if it doesn't exist @@ -63,7 +62,7 @@ def generate_module_file(): with open(module_template_path, 'r') as template_file: template_content = template_file.read() - source_imports = '\n'.join([f'#include "{src.replace('src/' , '')}"' for src in source_files]) + source_imports = '\n'.join([f'#include "{src.replace("src/", "")}"' for src in source_files]) module_content = template_content.replace('{{SOURCE_IMPORTS}}', source_imports) # Write the resulting module file From 7b3f35eed29746539551a3a71752d9e8e8d9323d Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Wed, 5 Mar 2025 21:05:21 +0000 Subject: [PATCH 27/30] Working build --- python/MANIFEST.in | 22 ++++++++ python/setup.py | 131 +++++++++++++++++++++++++++++++++------------ 2 files changed, 120 insertions(+), 33 deletions(-) diff --git a/python/MANIFEST.in b/python/MANIFEST.in index 33000d439..b772965a1 100644 --- a/python/MANIFEST.in +++ b/python/MANIFEST.in @@ -1,6 +1,28 @@ include README.md include pyproject.toml +include module.template.cpp recursive-include src *.cpp *.h +# Include the parent src directory if building from repository graft ../src +# Include meshoptimizer.h header +include ../src/meshoptimizer.h +# Explicitly include all source files needed for the build +include src/allocator.cpp +include src/clusterizer.cpp +include src/indexcodec.cpp +include src/indexgenerator.cpp +include src/overdrawanalyzer.cpp +include src/overdrawoptimizer.cpp +include src/partition.cpp +include src/quantization.cpp +include src/simplifier.cpp +include src/spatialorder.cpp +include src/stripifier.cpp +include src/vcacheanalyzer.cpp +include src/vcacheoptimizer.cpp +include src/vertexcodec.cpp +include src/vertexfilter.cpp +include src/vfetchanalyzer.cpp +include src/vfetchoptimizer.cpp prune .vscode prune tests \ No newline at end of file diff --git a/python/setup.py b/python/setup.py index 881b203c3..8db1900df 100644 --- a/python/setup.py +++ b/python/setup.py @@ -11,6 +11,31 @@ if not os.path.exists(SRC_DIR): os.makedirs(SRC_DIR) +# Copy meshoptimizer.h header if it doesn't exist in src directory +def ensure_header_file(): + header_dest = os.path.join(SRC_DIR, 'meshoptimizer.h') + if not os.path.exists(header_dest): + # Try to find the header file + header_src = os.path.join('..', 'src', 'meshoptimizer.h') + if os.path.exists(header_src): + # Copy from parent directory + with open(header_src, 'r') as f: + content = 
f.read() + with open(header_dest, 'w') as f: + f.write(content) + print(f"Copied meshoptimizer.h from {header_src} to {header_dest}") + else: + # Check if it's in the current directory + header_src = os.path.join('src', 'meshoptimizer.h') + if os.path.exists(header_src): + with open(header_src, 'r') as f: + content = f.read() + with open(header_dest, 'w') as f: + f.write(content) + print(f"Copied meshoptimizer.h from {header_src} to {header_dest}") + else: + print("Warning: Could not find meshoptimizer.h header file") + # Get long description from README def get_long_description(): try: @@ -22,38 +47,55 @@ def get_long_description(): print(f"Warning: Could not read README.md: {e}") return 'Python wrapper for meshoptimizer library' -# Parse CMakeLists.txt to get source files in the correct order -def parse_cmake_sources(): - cmake_path = os.path.join('..', 'CMakeLists.txt') - source_files = [] +# Define source files explicitly to ensure they're included in the build +def get_source_files(): + # These are the source files needed for the Python extension + source_files = [ + 'src/allocator.cpp', + 'src/clusterizer.cpp', + 'src/indexcodec.cpp', + 'src/indexgenerator.cpp', + 'src/overdrawanalyzer.cpp', + 'src/overdrawoptimizer.cpp', + 'src/partition.cpp', + 'src/quantization.cpp', + 'src/simplifier.cpp', + 'src/spatialorder.cpp', + 'src/stripifier.cpp', + 'src/vcacheanalyzer.cpp', + 'src/vcacheoptimizer.cpp', + 'src/vertexcodec.cpp', + 'src/vertexfilter.cpp', + 'src/vfetchanalyzer.cpp', + 'src/vfetchoptimizer.cpp' + ] - if os.path.exists(cmake_path): - with open(cmake_path, 'r') as f: - content = f.read() - - # Find the SOURCES section - start = content.find('set(SOURCES') - if start != -1: - end = content.find(')', start) - if end != -1: - sources_section = content[start:end] - - # Extract file paths - for line in sources_section.split('\n'): - line = line.strip() - if line.startswith('src/') and line.endswith('.cpp'): - # Convert to relative path from python directory - source_files.append(line) + # Check if we're building from an sdist package + if not os.path.exists(os.path.join('..', 'src')): + # We're in an sdist package, source files should be in the package + return source_files + + # We're building from the repository, verify files exist + for i, src_file in enumerate(source_files): + # Check if file exists in parent directory + if os.path.exists(os.path.join('..', src_file)): + continue + # If not, check if it exists in the current directory + elif os.path.exists(src_file): + source_files[i] = src_file + else: + print(f"Warning: Source file {src_file} not found") - return source_files # Determine source files and generate module file def generate_module_file(): - # Get source files from CMakeLists.txt - source_files = parse_cmake_sources() + # Get source files + source_files = get_source_files() # Create the module.cpp file from template module_template_path = os.path.join(SETUP_DIR, 'module.template.cpp') + if not os.path.exists(module_template_path): + return [] # Create directory if it doesn't exist output_module_path = os.path.join(SRC_DIR, 'module.cpp') @@ -62,7 +104,26 @@ def generate_module_file(): with open(module_template_path, 'r') as template_file: template_content = template_file.read() - source_imports = '\n'.join([f'#include "{src.replace("src/", "")}"' for src in source_files]) + # Copy source files to src directory if needed + for src_file in source_files: + src_basename = os.path.basename(src_file) + dest_path = os.path.join(SRC_DIR, src_basename) + + # If we're 
building from the repository, copy the files
+        if os.path.exists(os.path.join('..', src_file)):
+            with open(os.path.join('..', src_file), 'r') as f:
+                content = f.read()
+            with open(dest_path, 'w') as f:
+                f.write(content)
+        # If we're in an sdist package, the files might be in the current directory
+        elif os.path.exists(src_file):
+            with open(src_file, 'r') as f:
+                content = f.read()
+            with open(dest_path, 'w') as f:
+                f.write(content)
+    
+    # Generate includes for the module.cpp file
+    source_imports = '\n'.join([f'#include "{os.path.basename(src)}"' for src in source_files])
     module_content = template_content.replace('{{SOURCE_IMPORTS}}', source_imports)
 
     # Write the resulting module file
@@ -70,7 +131,6 @@ def generate_module_file():
         # Add a comment indicating this file is generated
         module_file.write("// This file is automatically generated by setup.py\n")
         module_file.write(module_content)
-    
     return source_files
@@ -108,14 +168,24 @@ def get_build_args():
 # Import numpy for include directory
 import numpy as np
 
+# Ensure header file is available
+ensure_header_file()
+
+# Generate the module file with source files
+source_files = generate_module_file()
+
 # Get the source files and build arguments
-include_dirs = [os.path.join('..', 'src'), np.get_include()]
+include_dirs = [SRC_DIR, np.get_include()]
+# Also include parent src directory if it exists
+if os.path.exists(os.path.join('..', 'src')):
+    include_dirs.append(os.path.join('..', 'src'))
+
 extra_compile_args, extra_link_args, define_macros = get_build_args()
 
 # Define the extension module
 meshoptimizer_module = Extension(
     'meshoptimizer._meshoptimizer',
-    sources= ["src/module.cpp"],
+    sources=["src/module.cpp"],
     include_dirs=include_dirs,
     extra_compile_args=extra_compile_args,
     extra_link_args=extra_link_args,
@@ -123,11 +193,6 @@ def get_build_args():
     language='c++',
 )
 
-try:
-    generate_module_file()
-except Exception as e:
-    pass
-
 setup(
     name='meshoptimizer',
     version="0.2.20a3",

From affa323b9922070db9239e1d770dc3723f0ee6d0 Mon Sep 17 00:00:00 2001
From: Afshawn Lotfi
Date: Wed, 5 Mar 2025 21:05:25 +0000
Subject: [PATCH 28/30] Removed unneeded files

---
 python/MANIFEST.in | 20 --------------------
 1 file changed, 20 deletions(-)

diff --git a/python/MANIFEST.in b/python/MANIFEST.in
index b772965a1..85ea249cd 100644
--- a/python/MANIFEST.in
+++ b/python/MANIFEST.in
@@ -4,25 +4,5 @@ include module.template.cpp
 recursive-include src *.cpp *.h
 # Include the parent src directory if building from repository
 graft ../src
-# Include meshoptimizer.h header
-include ../src/meshoptimizer.h
-# Explicitly include all source files needed for the build
-include src/allocator.cpp
-include src/clusterizer.cpp
-include src/indexcodec.cpp
-include src/indexgenerator.cpp
-include src/overdrawanalyzer.cpp
-include src/overdrawoptimizer.cpp
-include src/partition.cpp
-include src/quantization.cpp
-include src/simplifier.cpp
-include src/spatialorder.cpp
-include src/stripifier.cpp
-include src/vcacheanalyzer.cpp
-include src/vcacheoptimizer.cpp
-include src/vertexcodec.cpp
-include src/vertexfilter.cpp
-include src/vfetchanalyzer.cpp
-include src/vfetchoptimizer.cpp
 prune .vscode
 prune tests
\ No newline at end of file

From ffc03342c7ff12f53446aa4b59e2cd70ce4c3b11 Mon Sep 17 00:00:00 2001
From: Afshawn Lotfi
Date: Wed, 5 Mar 2025 22:04:02 +0000
Subject: [PATCH 29/30] Bump version to 0.2.20a4 in setup.py

---
 python/setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/setup.py b/python/setup.py
index 8db1900df..1f87fc350 100644
--- 
a/python/setup.py +++ b/python/setup.py @@ -195,7 +195,7 @@ def get_build_args(): setup( name='meshoptimizer', - version="0.2.20a3", + version="0.2.20a4", description='Python wrapper for meshoptimizer library', long_description=get_long_description(), long_description_content_type='text/markdown', From 76ea72f0164b33459c2890981262108b1a815af9 Mon Sep 17 00:00:00 2001 From: Afshawn Lotfi Date: Sat, 15 Mar 2025 23:13:52 +0000 Subject: [PATCH 30/30] Add encoding and decoding for index sequences; update setup.py version to 0.2.20a5 --- python/meshoptimizer/__init__.py | 2 ++ python/meshoptimizer/_loader.py | 23 ++++++++++++++ python/meshoptimizer/decoder.py | 34 +++++++++++++++++++++ python/meshoptimizer/encoder.py | 47 ++++++++++++++++++++++++++++- python/setup.py | 2 +- python/tests/test_encoding.py | 51 ++++++++++++++++++++++++-------- 6 files changed, 144 insertions(+), 15 deletions(-) diff --git a/python/meshoptimizer/__init__.py b/python/meshoptimizer/__init__.py index bed400528..bc706fb68 100644 --- a/python/meshoptimizer/__init__.py +++ b/python/meshoptimizer/__init__.py @@ -11,6 +11,7 @@ from .encoder import ( encode_vertex_buffer, encode_index_buffer, + encode_index_sequence, encode_vertex_version, encode_index_version, ) @@ -18,6 +19,7 @@ from .decoder import ( decode_vertex_buffer, decode_index_buffer, + decode_index_sequence, decode_vertex_version, decode_index_version, decode_filter_oct, diff --git a/python/meshoptimizer/_loader.py b/python/meshoptimizer/_loader.py index 05db9c381..90c7242d5 100644 --- a/python/meshoptimizer/_loader.py +++ b/python/meshoptimizer/_loader.py @@ -153,6 +153,20 @@ def setup_function_signatures() -> None: ] lib.meshopt_encodeIndexBuffer.restype = ctypes.c_size_t + lib.meshopt_encodeIndexSequenceBound.argtypes = [ + ctypes.c_size_t, # index_count + ctypes.c_size_t # vertex_count + ] + lib.meshopt_encodeIndexSequenceBound.restype = ctypes.c_size_t + + lib.meshopt_encodeIndexSequence.argtypes = [ + ctypes.POINTER(ctypes.c_ubyte), # buffer + ctypes.c_size_t, # buffer_size + ctypes.POINTER(ctypes.c_uint), # indices + ctypes.c_size_t # index_count + ] + lib.meshopt_encodeIndexSequence.restype = ctypes.c_size_t + # Decoding lib.meshopt_decodeVertexBuffer.argtypes = [ ctypes.c_void_p, # destination @@ -172,6 +186,15 @@ def setup_function_signatures() -> None: ] lib.meshopt_decodeIndexBuffer.restype = ctypes.c_int + lib.meshopt_decodeIndexSequence.argtypes = [ + ctypes.c_void_p, # destination + ctypes.c_size_t, # index_count + ctypes.c_size_t, # index_size + ctypes.POINTER(ctypes.c_ubyte), # buffer + ctypes.c_size_t # buffer_size + ] + lib.meshopt_decodeIndexSequence.restype = ctypes.c_int + # Encoding/Decoding versions lib.meshopt_encodeVertexVersion.argtypes = [ctypes.c_int] lib.meshopt_encodeVertexVersion.restype = None diff --git a/python/meshoptimizer/decoder.py b/python/meshoptimizer/decoder.py index 7aa713d50..7bcfbdf4f 100644 --- a/python/meshoptimizer/decoder.py +++ b/python/meshoptimizer/decoder.py @@ -117,6 +117,40 @@ def decode_index_version(buffer: Union[bytes, np.ndarray]) -> int: len(buffer_array) ) +def decode_index_sequence(index_count: int, + index_size: int, + buffer: Union[bytes, np.ndarray]) -> np.ndarray: + """ + Decode index sequence data. 
+ + Args: + index_count: number of indices + index_size: size of each index in bytes (2 or 4) + buffer: encoded buffer as bytes + + Returns: + Numpy array containing the decoded index data + """ + # Convert buffer to numpy array if it's not already + buffer_array = np.frombuffer(buffer, dtype=np.uint8) + + # Create destination array + destination = np.zeros(index_count, dtype=np.uint32) + + # Call C function + result = lib.meshopt_decodeIndexSequence( + destination.ctypes.data_as(ctypes.c_void_p), + index_count, + index_size, + buffer_array.ctypes.data_as(ctypes.POINTER(ctypes.c_ubyte)), + len(buffer_array) + ) + + if result != 0: + raise RuntimeError(f"Failed to decode index sequence: error code {result}") + + return destination + def decode_filter_oct(buffer: np.ndarray, count: int, stride: int) -> np.ndarray: """ Apply octahedral filter to decoded data. diff --git a/python/meshoptimizer/encoder.py b/python/meshoptimizer/encoder.py index 02f1a1728..cafc5b238 100644 --- a/python/meshoptimizer/encoder.py +++ b/python/meshoptimizer/encoder.py @@ -118,4 +118,49 @@ def encode_index_version(version: int) -> None: if version not in (0, 1): raise ValueError("Version must be 0 or 1") - lib.meshopt_encodeIndexVersion(version) \ No newline at end of file + lib.meshopt_encodeIndexVersion(version) + +def encode_index_sequence(indices: np.ndarray, + index_count: Optional[int] = None, + vertex_count: Optional[int] = None) -> bytes: + """ + Encode index sequence data. + + Args: + indices: numpy array of index data + index_count: number of indices (optional, derived from indices if not provided) + vertex_count: number of vertices (optional, derived from indices if not provided) + + Returns: + Encoded buffer as bytes + """ + # Convert indices to numpy array if it's not already + indices = np.asarray(indices, dtype=np.uint32) + + # Derive index_count if not provided + if index_count is None: + index_count = len(indices) + + # Derive vertex_count if not provided + if vertex_count is None: + vertex_count = np.max(indices) + 1 + + # Calculate buffer size + bound = lib.meshopt_encodeIndexSequenceBound(index_count, vertex_count) + + # Allocate buffer + buffer = np.zeros(bound, dtype=np.uint8) + + # Call C function + result_size = lib.meshopt_encodeIndexSequence( + buffer.ctypes.data_as(ctypes.POINTER(ctypes.c_ubyte)), + bound, + indices.ctypes.data_as(ctypes.POINTER(ctypes.c_uint)), + index_count + ) + + if result_size == 0: + raise RuntimeError("Failed to encode index sequence") + + # Return only the used portion of the buffer + return bytes(buffer[:result_size]) \ No newline at end of file diff --git a/python/setup.py b/python/setup.py index 1f87fc350..83463c72c 100644 --- a/python/setup.py +++ b/python/setup.py @@ -195,7 +195,7 @@ def get_build_args(): setup( name='meshoptimizer', - version="0.2.20a4", + version="0.2.20a5", description='Python wrapper for meshoptimizer library', long_description=get_long_description(), long_description_content_type='text/markdown', diff --git a/python/tests/test_encoding.py b/python/tests/test_encoding.py index e68a778bb..2e2830166 100644 --- a/python/tests/test_encoding.py +++ b/python/tests/test_encoding.py @@ -6,7 +6,11 @@ """ import numpy as np import unittest -from meshoptimizer import encode_vertex_buffer, decode_vertex_buffer +from meshoptimizer import ( + encode_vertex_buffer, decode_vertex_buffer, + encode_index_buffer, decode_index_buffer, + encode_index_sequence, decode_index_sequence +) class TestEncoding(unittest.TestCase): """Test encoding and decoding 
functionality.""" @@ -72,28 +76,49 @@ def test_encode_decode_vertices(self): # Check that the decoded vertices match the original np.testing.assert_array_almost_equal(self.vertices, decoded_vertices) - def encode_decode_indices(self): + def test_encode_decode_index_buffer(self): """Test that encoding and decoding indices preserves the data.""" # Encode indices - encoded_indices = encode_vertex_buffer( - self.indices, - len(self.indices), - self.indices.itemsize * self.indices.shape[1] + encoded_indices = encode_index_buffer( + self.indices, + len(self.indices), + len(self.vertices) ) - # Decode indices using the new function that returns a numpy array - decoded_indices = decode_vertex_buffer( - len(self.indices), - self.indices.itemsize * self.indices.shape[1], + # Decode indices + decoded_indices = decode_index_buffer( + len(self.indices), + 4, # 4 bytes for uint32 encoded_indices ) - # Check that the decoded indices match the original - np.testing.assert_array_almost_equal(self.indices, decoded_indices) - # Check that the triangles match + # The encoding/decoding process may reorder indices for optimization + # So we don't check that the indices match exactly, but that they represent the same triangles original_triangles = self.get_triangles_set(self.vertices, self.indices) decoded_triangles = self.get_triangles_set(self.vertices, decoded_indices) self.assertEqual(original_triangles, decoded_triangles) + + def test_encode_decode_index_sequence(self): + """Test that encoding and decoding index sequence preserves the data.""" + # Encode index sequence + encoded_sequence = encode_index_sequence( + self.indices, + len(self.indices), + len(self.vertices) + ) + + # Decode index sequence + decoded_sequence = decode_index_sequence( + len(self.indices), + 4, # 4 bytes for uint32 + encoded_sequence + ) + + # The encoding/decoding process may reorder indices for optimization + # So we don't check that the indices match exactly, but that they represent the same triangles + original_triangles = self.get_triangles_set(self.vertices, self.indices) + decoded_triangles = self.get_triangles_set(self.vertices, decoded_sequence) + self.assertEqual(original_triangles, decoded_triangles) if __name__ == '__main__':