v0.3.0 #16

Merged · 8 commits · Sep 18, 2024
22 changes: 11 additions & 11 deletions .github/workflows/wheels.yml
@@ -15,7 +15,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-22.04, windows-2022, macos-14]
os: [ubuntu-latest, windows-latest, macos-13, macos-latest]
fail-fast: false

steps:
@@ -27,14 +27,14 @@ jobs:
- uses: actions/setup-python@v5

- name: Install cibuildwheel
run: python -m pip install cibuildwheel==2.15
run: python -m pip install cibuildwheel==2.20.0

- name: Build wheels
run: python -m cibuildwheel --output-dir wheelhouse

- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
name: wheels
name: cibw-wheels-${{ matrix.os }}-${{ strategy.job-index }}
path: ./wheelhouse/*.whl

publish:
@@ -48,20 +48,20 @@ jobs:
with:
submodules: True

- uses: actions/setup-python@v4
- uses: actions/setup-python@v5

- uses: actions/download-artifact@v3
- uses: actions/download-artifact@v4
with:
name: wheels
path: wheelhouse
pattern: cibw-*
path: dist
merge-multiple: true

- name: Publish to PyPI
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
# build the source package and upload the sdist and wheels to PyPI
run: |
python -m pip install twine
python setup.py sdist
python -m pip install twine build --upgrade
python -m build --sdist
twine upload dist/*
twine upload wheelhouse/*.whl
19 changes: 10 additions & 9 deletions CMakeLists.txt
@@ -16,18 +16,19 @@ find_package(nanobind CONFIG REQUIRED)

nanobind_add_module(pgeof_ext NOMINSIZE STABLE_ABI LTO src/pgeof_ext.cpp)

# for nanobind 2
# nanobind_add_stub(
# pgeof_ext_stub
# MODULE pgeof_ext
# OUTPUT pgeof_ext.pyi
# PYTHON_PATH $<TARGET_FILE_DIR:pgeof_ext>
# DEPENDS pgeof_ext
# )
nanobind_add_stub(
pgeof_ext_stub
MODULE pgeof_ext
OUTPUT pgeof_ext.pyi
MARKER_FILE py.typed
PYTHON_PATH $<TARGET_FILE_DIR:pgeof_ext>
DEPENDS pgeof_ext
)

# All libs are header only.
# It's faster to include them like this than to use exported targets
# (i.e. add_subdirectory(...))
target_include_directories(pgeof_ext PRIVATE "include" "third_party/eigen" "third_party/nanoflann/include" "third_party/taskflow")

install(TARGETS pgeof_ext LIBRARY DESTINATION pgeof)
install(TARGETS pgeof_ext LIBRARY DESTINATION pgeof)
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/pgeof_ext.pyi ${CMAKE_CURRENT_BINARY_DIR}/py.typed DESTINATION pgeof)
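Note: with the move to nanobind 2, stub generation is now active and the generated `pgeof_ext.pyi` plus the `py.typed` marker are installed into the `pgeof` package next to the compiled extension, so type checkers and IDEs pick up the C++ signatures. A minimal check of an installed build might look like this (a sketch, assuming Python ≥ 3.9 and a wheel built from this branch):

```python
# Sketch: verify the typing artifacts produced by nanobind_add_stub ship with the package.
from importlib import resources

pkg = resources.files("pgeof")
print((pkg / "py.typed").is_file())       # PEP 561 marker -> type checkers trust the stubs
print((pkg / "pgeof_ext.pyi").is_file())  # generated signatures for the C++ extension
```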
4 changes: 2 additions & 2 deletions include/nn_search.hpp
@@ -36,7 +36,7 @@ static std::pair<nb::ndarray<nb::numpy, uint32_t, nb::ndim<2>>, nb::ndarray<nb::

if (knn > data.rows()) { throw std::invalid_argument("knn size is greater than the data point cloud size"); }

kd_tree_t kd_tree(3, data, 10);
kd_tree_t kd_tree(3, data, 10, 0);
const Eigen::Index n_points = query.rows();
uint32_t* indices = new uint32_t[knn * n_points];
nb::capsule owner_indices(indices, [](void* p) noexcept { delete[] (uint32_t*)p; });
@@ -94,7 +94,7 @@ static std::pair<nb::ndarray<nb::numpy, int32_t, nb::ndim<2>>, nb::ndarray<nb::n
throw std::invalid_argument("max knn size is greater than the data point cloud size");
}

kd_tree_t kd_tree(3, data, 10);
kd_tree_t kd_tree(3, data, 10, 0);
const real_t sq_search_radius = search_radius * search_radius;

const Eigen::Index n_points = query.rows();
37 changes: 22 additions & 15 deletions include/pgeof.hpp
@@ -73,9 +73,10 @@ static inline void flush() { std::cout << std::endl; };
* @return the geometric features associated with each point's neighborhood in a (num_points, features_count) nd::array.
*/
template <typename real_t = float, const size_t feature_count = 11>
static nb::ndarray<nb::numpy, real_t, nb::shape<nb::any, feature_count>> compute_geometric_features(
RefCloud<real_t> xyz, nb::ndarray<const uint32_t, nb::ndim<1>> nn, nb::ndarray<const uint32_t, nb::ndim<1>> nn_ptr,
const size_t k_min, const bool verbose)
static nb::ndarray<nb::numpy, real_t, nb::shape<-1, static_cast<nb::ssize_t>(feature_count)>>
compute_geometric_features(
RefCloud<real_t> xyz, nb::ndarray<const uint32_t, nb::ndim<1>> nn,
nb::ndarray<const uint32_t, nb::ndim<1>> nn_ptr, const size_t k_min, const bool verbose)
{
if (k_min < 1) { throw std::invalid_argument("k_min should be > 1"); }
// Each point can be treated in parallel
@@ -111,7 +112,8 @@ static nb::ndarray<nb::numpy, real_t, nb::shape<nb::any, feature_count>> compute
// Final print to start on a new line
if (verbose) log::flush();
const size_t shape[2] = {n_points, feature_count};
return nb::ndarray<nb::numpy, real_t, nb::shape<nb::any, feature_count>>(features, 2, shape, owner_features);
return nb::ndarray<nb::numpy, real_t, nb::shape<-1, static_cast<nb::ssize_t>(feature_count)>>(
features, 2, shape, owner_features);
}
/**
* Convenience function that check that scales are well ordered in increasing order.
@@ -155,9 +157,10 @@ static bool check_scales(const std::vector<uint32_t>& k_scales)
* nd::array
*/
template <typename real_t, const size_t feature_count = 11>
static nb::ndarray<nb::numpy, real_t, nb::shape<nb::any, nb::any, feature_count>> compute_geometric_features_multiscale(
RefCloud<real_t> xyz, nb::ndarray<const uint32_t, nb::ndim<1>> nn, nb::ndarray<const uint32_t, nb::ndim<1>> nn_ptr,
const std::vector<uint32_t>& k_scales, const bool verbose)
static nb::ndarray<nb::numpy, real_t, nb::shape<-1, -1, static_cast<nb::ssize_t>(feature_count)>>
compute_geometric_features_multiscale(
RefCloud<real_t> xyz, nb::ndarray<const uint32_t, nb::ndim<1>> nn,
nb::ndarray<const uint32_t, nb::ndim<1>> nn_ptr, const std::vector<uint32_t>& k_scales, const bool verbose)
{
if (!check_scales(k_scales))
{
@@ -203,7 +206,7 @@ static nb::ndarray<nb::numpy, real_t, nb::shape<nb::any, nb::any, feature_count>
if (verbose) log::flush();

const size_t shape[3] = {n_points, n_scales, feature_count};
return nb::ndarray<nb::numpy, real_t, nb::shape<nb::any, nb::any, feature_count>>(
return nb::ndarray<nb::numpy, real_t, nb::shape<-1, -1, static_cast<nb::ssize_t>(feature_count)>>(
features, 3, shape, owner_features);
}

@@ -238,9 +241,11 @@ static nb::ndarray<nb::numpy, real_t, nb::shape<nb::any, nb::any, feature_count>
* @return Geometric features associated with each point's neighborhood in a (num_points, features_count) nd::array
*/
template <typename real_t, const size_t feature_count = 12>
static nb::ndarray<nb::numpy, real_t, nb::shape<nb::any, feature_count>> compute_geometric_features_optimal(
RefCloud<real_t> xyz, nb::ndarray<const uint32_t, nb::ndim<1>> nn, nb::ndarray<const uint32_t, nb::ndim<1>> nn_ptr,
const uint32_t k_min, const uint32_t k_step, const uint32_t k_min_search, const bool verbose)
static nb::ndarray<nb::numpy, real_t, nb::shape<-1, static_cast<nb::ssize_t>(feature_count)>>
compute_geometric_features_optimal(
RefCloud<real_t> xyz, nb::ndarray<const uint32_t, nb::ndim<1>> nn,
nb::ndarray<const uint32_t, nb::ndim<1>> nn_ptr, const uint32_t k_min, const uint32_t k_step,
const uint32_t k_min_search, const bool verbose)
{
if (k_min < 1 && k_min_search < 1) { throw std::invalid_argument("k_min and k_min_search should be > 1"); }
// Each point can be treated in parallel
@@ -300,7 +305,8 @@ static nb::ndarray<nb::numpy, real_t, nb::shape<nb::any, feature_count>> compute
if (verbose) log::flush();

const size_t shape[2] = {n_points, feature_count};
return nb::ndarray<nb::numpy, real_t, nb::shape<nb::any, feature_count>>(features, 2, shape, owner_features);
return nb::ndarray<nb::numpy, real_t, nb::shape<-1, static_cast<nb::ssize_t>(feature_count)>>(
features, 2, shape, owner_features);
}

/**
@@ -317,14 +323,14 @@ static nb::ndarray<nb::numpy, real_t, nb::shape<nb::any, feature_count>> compute
* @return Geometric features associated with each point's neighborhood in a (num_points, features_count) nd::array
*/
template <typename real_t>
static nb::ndarray<nb::numpy, real_t, nb::shape<nb::any, nb::any>> compute_geometric_features_selected(
static nb::ndarray<nb::numpy, real_t, nb::shape<-1, -1>> compute_geometric_features_selected(
RefCloud<real_t> xyz, const real_t search_radius, const uint32_t max_knn,
const std::vector<EFeatureID>& selected_features)
{
using kd_tree_t = nanoflann::KDTreeEigenMatrixAdaptor<RefCloud<real_t>, 3, nanoflann::metric_L2_Simple>;
// TODO: where knn < num of points

kd_tree_t kd_tree(3, xyz, 10);
kd_tree_t kd_tree(3, xyz, 10, 0);
const size_t feature_count = selected_features.size();
const Eigen::Index n_points = xyz.rows();
real_t sq_search_radius = search_radius * search_radius;
@@ -364,6 +370,7 @@ static nb::ndarray<nb::numpy, real_t, nb::shape<nb::any, nb::any>> compute_geome
});
executor.run(taskflow).get();

return nb::ndarray<nb::numpy, real_t, nb::shape<nb::any, nb::any>>(features, {static_cast<size_t>(n_points), feature_count}, owner_features);
return nb::ndarray<nb::numpy, real_t, nb::shape<-1, -1>>(
features, {static_cast<size_t>(n_points), feature_count}, owner_features);
}
} // namespace pgeof
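Note: the return-type changes above only swap nanobind 1's `nb::any` for the `-1` wildcard that nanobind 2 uses for a dynamic dimension; from Python the arrays keep the same shapes as before, e.g. `(num_points, 11)` for the single-scale features. A quick shape check, sketched after the calls used in `tests/test_pgeof.py` (assumes `pgeof`, NumPy and SciPy are installed):

```python
import numpy as np
import pgeof
from scipy.spatial import KDTree

rng = np.random.default_rng(0)
xyz = rng.random(size=(1000, 3)).astype("float32")

k = 20
_, nn = KDTree(xyz).query(xyz, k=k, workers=-1)        # (1000, k) neighbor indices
nn_ptr = np.arange(0, nn.size + 1, k, dtype="uint32")  # CSR row pointers: 0, k, 2k, ...
nn = np.ascontiguousarray(nn.ravel().astype("uint32"))

features = pgeof.compute_features(xyz, nn, nn_ptr, 20, False)
print(features.shape)  # expected (1000, 11): dynamic first dim, fixed feature_count
```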
50 changes: 47 additions & 3 deletions pyproject.toml
@@ -1,10 +1,13 @@
[build-system]
requires = ["scikit-build-core >=0.4.3", "nanobind == 1.9.2"]
requires = ["scikit-build-core >=0.4.3",
"nanobind == 2.1.0",
"typing_extensions;python_version < '3.11'"
]
build-backend = "scikit_build_core.build"

[project]
name = "pgeof"
version = "0.2.0"
version = "0.3.0"
readme = "README.md"
description = "Compute the geometric features associated with each point's neighborhood:"
requires-python = ">=3.8,<3.13"
@@ -36,6 +39,47 @@ build-dir = "build/{wheel_tag}"

cmake.build-type = "Release"

# make the sdist a lot lighter by removing some useless files from third_party
# ⚠️ be sure to keep copyright and license files
sdist.exclude = [
"third_party/eigen/bench",
"third_party/eigen/demos",
"third_party/eigen/doc",
"third_party/taskflow/3rd-party",
"third_party/taskflow/benchmarks",
"third_party/taskflow/docs",
"third_party/taskflow/doxygen",
"third_party/taskflow/examples",
"third_party/taskflow/sandbox",
"third_party/taskflow/unittests",
]

[tool.ruff]
target-version = "py310"
line-length = 120

[tool.ruff.lint]
# TODO Add D, PTH, RET; disabled for now as they collide with initial choices
select = ["E", "W", "YTT", "NPY", "PYI", "Q", "F", "B", "I", "SIM", "RUF"]
# TODO: for now we ignore the "line too long" error (E501)
# because our comments are too long
# code formatting will take care of the line length in code anyway
ignore = [
"E501",
# Ignore docstring in public package and module
"D100",
"D104",
# Blank line before class
"D203",
# multiline summary second line
"D213",
# yoda conditions
"SIM300",
]

[tool.ruff.lint.isort]
known-first-party = ["pgeof"]

[tool.tox]
legacy_tox_ini = """
[tox]
@@ -67,4 +111,4 @@ archs = ["auto64"] # limit to 64bits builds

# Needed for full C++17 support
[tool.cibuildwheel.macos.environment]
MACOSX_DEPLOYMENT_TARGET = "10.14"
MACOSX_DEPLOYMENT_TARGET = "11.0"
3 changes: 1 addition & 2 deletions src/pgeof_ext.cpp
@@ -1,7 +1,6 @@

#include <nanobind/nanobind.h>
#include <nanobind/ndarray.h>
#include <nanobind/stl/pair.h>
#include <nanobind/stl/vector.h>

#include "nn_search.hpp"
@@ -176,4 +175,4 @@ NB_MODULE(pgeof_ext, m)
:param selected_features: List of selected features. See EFeatureID
:return: Geometric features associated with each point's neighborhood in a (num_points, features_count) numpy array.
)");
}
}
3 changes: 2 additions & 1 deletion tests/bench_jakteristics.py
@@ -11,7 +11,8 @@

@pytest.fixture
def random_point_cloud():
return np.random.rand(1000000, 3) * 100
rng = np.random.default_rng()
return rng.uniform(0, 100, size=(1000000, 3))


@pytest.mark.benchmark(group="feature-computation-jak", disable_gc=True, warmup=True)
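Note: the benchmark fixtures now use NumPy's `Generator` API instead of the legacy `np.random.rand`. A small equivalence sketch (variable names are illustrative; seeding the generator makes benchmark inputs reproducible):

```python
import numpy as np

legacy = np.random.rand(1_000_000, 3) * 100            # old style: module-level RandomState
rng = np.random.default_rng(seed=42)                   # new style: explicit, seedable Generator
modern = rng.uniform(0.0, 100.0, size=(1_000_000, 3))  # same [0, 100) range as the old fixture
# rng.random((1_000_000, 3)) * 100 is an equally valid spelling
```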
3 changes: 2 additions & 1 deletion tests/bench_knn.py
@@ -7,7 +7,8 @@

@pytest.fixture
def random_point_cloud():
return np.random.rand(100000, 3).astype("float32")
rng = np.random.default_rng()
return rng.random(size=(100000, 3)).astype("float32")


@pytest.mark.benchmark(group="knn", disable_gc=True, warmup=True)
5 changes: 3 additions & 2 deletions tests/helpers.py
@@ -4,7 +4,8 @@

def random_nn(num_points, k):
# Generate a random synthetic point cloud
xyz = np.random.rand(num_points, 3)
rng = np.random.default_rng()
xyz = rng.random(size=(num_points, 3))

# Converting k-nearest neighbors to CSR format
kneigh = KDTree(xyz).query(xyz, k=k, workers=-1)
@@ -20,4 +21,4 @@ def random_nn(num_points, k):
xyz = np.ascontiguousarray(xyz)
nn_ptr = np.ascontiguousarray(nn_ptr)
nn = np.ascontiguousarray(nn)
return xyz, nn, nn_ptr
return xyz, nn, nn_ptr
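Note: `random_nn` returns the neighborhoods in the CSR-style layout the `compute_features*` bindings expect: the neighbors of point `i` are `nn[nn_ptr[i]:nn_ptr[i + 1]]`. A small illustration (assumes the `tests` directory is importable):

```python
from helpers import random_nn

xyz, nn, nn_ptr = random_nn(1000, 10)
i = 0
neighbors_of_i = nn[nn_ptr[i]:nn_ptr[i + 1]]  # the 10 neighbor indices of point 0
print(xyz.shape, neighbors_of_i.shape)        # (1000, 3) (10,)
```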
8 changes: 5 additions & 3 deletions tests/test_pgeof.py
@@ -7,7 +7,8 @@

def test_knn():
knn = 10
xyz = np.random.rand(1000, 3)
rng = np.random.default_rng()
xyz = rng.random(size=(1000, 3))
xyz = xyz.astype("float32")
tree = KDTree(xyz)
_, k_legacy = tree.query(xyz, k=knn, workers=-1)
@@ -18,7 +19,8 @@ def test_knn():
def test_radius_search():
knn = 10
radius = 0.2
xyz = np.random.rand(1000, 3)
rng = np.random.default_rng()
xyz = rng.random(size=(1000, 3))
xyz = xyz.astype("float32")
tree = KDTree(xyz)
_, k_legacy = tree.query(xyz, k=knn, distance_upper_bound=radius, workers=-1)
@@ -43,4 +45,4 @@ def test_pgeof_multiscale():
simple = pgeof.compute_features(xyz, nn, nn_ptr, 50, False)
multi_simple = pgeof.compute_features_multiscale(xyz, nn, nn_ptr, [20], False)
np.testing.assert_allclose(multi[:, 0], multi_simple[:, 0], 1e-1, 1e-5)
np.testing.assert_allclose(multi[:, 1], simple, 1e-1, 1e-5)
np.testing.assert_allclose(multi[:, 1], simple, 1e-1, 1e-5)