diff --git a/.dockerignore b/.dockerignore
index 83f5d8131..f25ca6e52 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -11,8 +11,8 @@
 !doc
 !LICENSE
 !opensfm
+!pyproject.toml
 !README.md
-!requirements.txt
 !setup.cfg
 !setup.py
 !viewer
diff --git a/.github/workflows/conda.yml b/.github/workflows/conda.yml
index 1f9161e21..1dc394936 100644
--- a/.github/workflows/conda.yml
+++ b/.github/workflows/conda.yml
@@ -14,7 +14,7 @@ jobs:
     runs-on: ${{ matrix.os }}

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
         with:
           submodules: true

@@ -26,7 +26,7 @@ jobs:

       - name: Build OpenSfM
         shell: bash -l {0}
-        run: python setup.py build
+        run: pip install -e .[test]

       - name: Run C++ tests
         shell: bash -l {0}
diff --git a/.github/workflows/docker_ubuntu24.yml b/.github/workflows/docker_ubuntu24.yml
index 85cbf5ba4..5ae9c70b0 100644
--- a/.github/workflows/docker_ubuntu24.yml
+++ b/.github/workflows/docker_ubuntu24.yml
@@ -17,7 +17,7 @@ jobs:
       run: docker build . --file Dockerfile.ubuntu24 --tag mapillary/opensfm.ubuntu24:$GITHUB_SHA

     - name: Run C++ tests
-      run: docker run mapillary/opensfm.ubuntu24:$GITHUB_SHA /bin/sh -c "cd cmake_build && ctest"
+      run: docker run mapillary/opensfm.ubuntu24:$GITHUB_SHA /bin/bash -c "cd cmake_build && ctest --output-on-failure"

     - name: Run Python tests
-      run: docker run mapillary/opensfm.ubuntu24:$GITHUB_SHA python3 -m pytest
+      run: docker run mapillary/opensfm.ubuntu24:$GITHUB_SHA python -m pytest -v
diff --git a/.gitignore b/.gitignore
index 8843455c0..ea19cfda8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,6 +6,8 @@ xcode
 launch.json
 .vscode
 .idea
+.claude
+uv.lock

 # Ignore generated files
 /build
diff --git a/Dockerfile.ubuntu24 b/Dockerfile.ubuntu24
index 91d7fd7ff..6e4cb8ee2 100644
--- a/Dockerfile.ubuntu24
+++ b/Dockerfile.ubuntu24
@@ -11,14 +11,8 @@ RUN apt-get update \
         libopencv-dev \
         libceres-dev \
         python3-dev \
-        python3-numpy \
-        python3-opencv \
         python3-pip \
         python3-venv \
-        python3-pyproj \
-        python3-scipy \
-        python3-yaml \
-        curl \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

@@ -26,30 +20,11 @@ RUN apt-get update \
 RUN python3 -m venv /opt/venv
 ENV PATH="/opt/venv/bin:$PATH"

-# Install Python packages in the virtual environment
-RUN pip install \
-    cloudpickle==3.1.1 \
-    ExifRead==3.5.1 \
-    Flask==3.1.2 \
-    fpdf2==2.8.4 \
-    joblib==1.5.2 \
-    matplotlib==3.10.6 \
-    networkx==3.5 \
-    numpy==1.26.4 \
-    opencv-python==4.11.0.86 \
-    pillow==11.3.0 \
-    pyproj==3.7.2 \
-    pytest==8.4.2 \
-    python-dateutil==2.9.0.post0 \
-    PyYAML==6.0.3 \
-    scipy==1.16.2 \
-    setuptools==80.9.0 \
-    Sphinx==6.2.1 \
-    wheel==0.45.1 \
-    xmltodict==1.0.2
-
 COPY . /source/OpenSfM
 WORKDIR /source/OpenSfM

-RUN python3 setup.py build
+# Build and install OpenSfM using pip with pyproject.toml
+# C++ tests are built automatically (OPENSFM_BUILD_TESTS=ON in pyproject.toml)
+# and will be available in cmake_build/ directory for running with ctest
+RUN pip install --no-cache-dir -e .[test]
diff --git a/conda.yml b/conda.yml
index 828bcf01c..1e163ceff 100644
--- a/conda.yml
+++ b/conda.yml
@@ -10,23 +10,3 @@ dependencies:
   - ceres-solver=2.1
   - conda-forge::llvm-openmp
   - conda-forge::cxx-compiler
-  - pip
-  - pip:
-    - cloudpickle==3.1.1
-    - ExifRead==3.3.1
-    - Flask==3.1.1
-    - fpdf2==2.8.3
-    - joblib==1.5.1
-    - matplotlib==3.5.1
-    - networkx==3.4.2
-    - numpy==1.21.5
-    - Pillow==9.0.1
-    - pyproj>=3.3.0
-    - pytest==8.4.0
-    - python-dateutil==2.8.1
-    - PyYAML>=5.4.1
-    - scipy==1.8.0
-    - Sphinx==4.2.0
-    - xmltodict==0.14.2
-    - wheel
-    - sphinx_rtd_theme
diff --git a/doc/Makefile b/doc/Makefile
new file mode 100644
index 000000000..69ba4bb7d
--- /dev/null
+++ b/doc/Makefile
@@ -0,0 +1,33 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS ?=
+SPHINXBUILD ?= sphinx-build
+SOURCEDIR = source
+BUILDDIR = ../build/doc
+PORT ?= 8000
+
+# Put it first so that "make" without argument is like "make help".
+help:
+	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile serve clean
+
+# Serve the documentation locally
+serve:
+	@echo "Serving documentation at http://localhost:$(PORT)/"
+	@echo "Press Ctrl+C to stop the server"
+	@python -m http.server --directory $(BUILDDIR)/html $(PORT)
+
+# Clean build artifacts
+clean:
+	@echo "Cleaning build directory: $(BUILDDIR)"
+	@rm -rf $(BUILDDIR)
+	@echo "Done"
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/doc/source/annotation_tool.rst b/doc/source/annotation_tool.rst
index 1813f6186..b86d9471c 100644
--- a/doc/source/annotation_tool.rst
+++ b/doc/source/annotation_tool.rst
@@ -49,9 +49,9 @@ Main toolbox
 ~~~~~~~~~~~~

 The main toolbox contains the list of existing control points as well as several controls.
-The basic controls are explained here. Scroll to :ref:`additional-controls` for information on the rest.
+The basic controls are explained here. Scroll to :ref:`advanced-features` for information on the rest.

-- The 'Load', 'Save' buttons save and load the ground control points into a ``ground_control_points.json`` file with :ref:`json-gcps`.
+- The 'Load', 'Save' buttons save and load the ground control points into a ``ground_control_points.json`` file (see the JSON file format section in the Ground Control Points documentation).
 - If there is a ``ground_control_points.json`` file in the dataset directory, it will be loaded upon launch.
 - Control points can be added or removed with the 'Add GCP' and 'Remove GCP' buttons. The active point can be selected from the dropdown.
 - By selecting a point in the list it becomes active and can be annotated on all images.
@@ -84,6 +84,7 @@ Assuming that you have a set of ground control points whose geodetic coordinates
    You can use ``data/berlin`` for this example.
 2. Generate a ``ground_control_points.json`` file with all your measured ground control points and place it in the root of the dataset See the example below.
    Note how the 'observations' is empty as we will generate those using the annotation tool.
+
 ::

     "points": [
@@ -93,6 +94,7 @@ Assuming that you have a set of ground control points whose geodetic coordinates
         "observations": []
     }
 ]
+
 3. Launch the annotation tool, note how the control points dropdown contains your ground control points.
 4. Scroll through all the images, annotating each GCP on all the locations where it is visible.
 5. Click on 'save' to overwrite the ``ground_control_points.json`` file with your annotations.
@@ -122,6 +124,8 @@
 The 'Flex' and 'Full' buttons produce additional analysis results
 and are explained in :ref:`two-reconstruction-annotation`

+.. _advanced-features:
+
 Advanced features
 -----------------

diff --git a/doc/source/building.rst b/doc/source/building.rst
index dd1ad3cf0..061625a8b 100644
--- a/doc/source/building.rst
+++ b/doc/source/building.rst
@@ -4,6 +4,25 @@
 Building
 ========

+Quick start
+-----------
+
+To build OpenSfM, follow these steps:
+
+1. Download the OpenSfM code from Github::
+
+    git clone --recursive https://github.com/mapillary/OpenSfM
+
+2. Install the dependencies (we recommend using conda)::
+
+    conda env create --file conda.yml --yes
+    conda activate opensfm
+
+3. Build OpenSfM::
+
+    pip install -e .
+
+
 Download
 --------

@@ -20,14 +39,14 @@ If you already have the code or you downloaded a release_, make sure to update t
 Install dependencies
 --------------------

-OpenSfM depends on multiple libraries (OpenCV_, `Ceres Solver`_, ...) and python packages that need to be installed before building it.
+OpenSfM depends on multiple libraries (OpenCV_, `Ceres Solver`_, ...) that need to be installed before building it.

 The way to install these dependencies depends on your system. We recommend using a virtual environment manager such as anaconda or miniconda, not to mess up with your current setup. Anaconda will take care of installing both systems and python dependencies.

 Installing dependencies using Conda (recommended)
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

-Creating a conda environment will take care of installing all dependencies.  Make sure you have conda or miniconda installed. From the project root directory, run::
+Creating a conda environment will take care of installing all dependencies. Make sure you have conda or miniconda installed. From the project root directory, run::

     conda env create --file conda.yml --yes

@@ -40,19 +59,19 @@ and you are ready to build OpenSfM.
 (Anaconda dependencies installation has been tested under MacOS (Sequoia), Ubuntu 24.04 and Fedora 42.)


 Installing dependencies on Ubuntu
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

-If you are not using conda, see this `Dockerfile `_ for the commands to install all dependencies on Ubuntu 20.04.
+If you are not using conda, see this `Dockerfile.ubuntu24 `_ for the commands to install all dependencies on Ubuntu 24.04.


 Installing dependencies on MacOSX
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

 While it is possible to install all dependencies using brew, we recommend using the conda instructions above instead.


 Installing dependencies on Windows
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

 Install git_.
@@ -73,39 +92,47 @@ Then install OpenCV, Ceres, SuiteSparse and LAPACK (this will take a while)::

     vcpkg install opencv4 ceres ceres[suitesparse] lapack suitesparse --triplet x64-windows

-Finally install the PIP requirements::
-
-    pip install -r requirements.txt
-

 Building the library
 --------------------

 Once the dependencies have been installed, you can build OpenSfM by running the following command from the main folder::

-    python setup.py build
+    pip install -e .
+
+This will first install the Python dependencies in your current Python environment, and then build OpenSfM and install it in editable mode.
+

 Building Docker images
 ----------------------

-Once dependencies have been installed, you can build OpenSfM Docker images by running the following command from the main folder::
-
-    docker build -t opensfm -f Dockerfile .
+You can also use OpenSfM inside Docker. We provide example Dockerfiles for Ubuntu 20.04 and 24.04. Build an image by running the following command from the main folder::

-To build an image using the Ceres 2 solver, use::
+    docker build -t opensfm.ubuntu24 -f Dockerfile.ubuntu24 .

-    docker build -t opensfm:ceres2 -f Dockerfile.ceres2 .


 Building the documentation
 --------------------------

-To build the documentation and browse it locally use::
-    pip install sphinx_rtd_theme
-    python setup.py build_doc
-    python -m http.server --directory build/doc/html/
+To build the documentation and browse it locally, first install Sphinx::
+
+    pip install -e .[docs]
+
+Then build the documentation using make::
+
+    cd doc
+    make html
+
+To browse the documentation locally::
+
+    make serve

 and browse `http://localhost:8000/ `_

+To clean the build artifacts::
+
+    make clean
+
 .. _Github: https://github.com/mapillary/OpenSfM
 .. _release: https://github.com/mapillary/OpenSfM/releases
diff --git a/doc/source/conf.py b/doc/source/conf.py
index ce672b700..7b716341b 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -50,7 +50,7 @@

 # General information about the project.
 project = "OpenSfM"
-copyright = "2021, Mapillary"
+copyright = "2025, Mapillary"
 author = "Mapillary"

 # The version info for the project you're documenting, acts as replacement for
@@ -67,7 +67,7 @@
 #
 # This is also used if you do content translation via gettext catalogs.
 # Usually you set "language" from the command line for these cases.
-language = None
+language = "en"

 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:
diff --git a/doc/source/gcp.rst b/doc/source/gcp.rst
index ad92fb99f..96d5dabf4 100644
--- a/doc/source/gcp.rst
+++ b/doc/source/gcp.rst
@@ -12,8 +12,6 @@ In the bundle adjustment step, GCP observations are used as a constraint to refi

 GPSs can be specified in two file formats. If existing, both are loaded.

-.. _json-gcps:
-
 JSON file format
 ~~~~~~~~~~~~~~~~
 GCPs can be specified by adding a text file named ``ground_control_points.json`` at the root folder of the dataset. The format of the file should be as follows::
diff --git a/doc/source/quality_report.rst b/doc/source/quality_report.rst
index 5faebacc0..e910ef3cb 100644
--- a/doc/source/quality_report.rst
+++ b/doc/source/quality_report.rst
@@ -52,12 +52,12 @@ Reconstruction Details

 |rec|

- - Average reprojection error (normalized/pixels): normalized (by features uncertainty) average norm of reprojection errors and same, but pixel-wise,
-   un-normalized, error. Errors bigger than 4 pixels are pruned out.
- - Average Track Length : average number of images in which a reconstructed points has been detected.
- - Average Track Length (> 2) : same as above but ignoring 2-images points.
+- Average reprojection error (normalized/pixels): normalized (by features uncertainty) average norm of reprojection errors and same, but pixel-wise,
+  un-normalized, error. Errors bigger than 4 pixels are pruned out.
+- Average Track Length : average number of images in which a reconstructed point has been detected.
+- Average Track Length (> 2) : same as above but ignoring 2-image points.

- |residual_histogram|
+|residual_histogram|

 The tables are the histogram of the certainty-normalized and un-normalized reprojection errors norm. Errors bigger than 4 pixels are pruned out.

diff --git a/doc/source/rig.rst b/doc/source/rig.rst
index c261ee906..11029d0d4 100644
--- a/doc/source/rig.rst
+++ b/doc/source/rig.rst
@@ -12,18 +12,19 @@ Coordinate Systems
 Rig are defined by a fixed assembly of cameras that are triggered at the same instant.

 The following terms define such assembly and capture in OpenSfM terminology :

- - A `RigCamera` is a camera of the rig assembly defined as a combination of an existing camera model (it refers only to its ID) and its pose wrt. the rig assembly coordinate frame. `RigCamera` are defined in the `rig_cameras.json` as the following::
+- A `RigCamera` is a camera of the rig assembly defined as a combination of an existing camera model (it refers only to its ID) and its pose wrt. the rig assembly coordinate frame. `RigCamera` are defined in the `rig_cameras.json` as the following::

-    {
-        "RIG_CAMERA_ID": {
-            "translation": translation of the rig frame wrt. the RigCamera frame
-            "rotation": rotation bringing a point from rig frame to the RigCamera frame
-            "camera": camera model ID of this RigCamera
-        },
-        ...
+    "RIG_CAMERA_ID":
+    {
+        "translation": translation of the rig frame wrt. the RigCamera frame
+        "rotation": rotation bringing a point from rig frame to the RigCamera frame
+        "camera": camera model ID of this RigCamera
+    },
+    ...
+    }

- - A `RigInstance` is a list of `Shots`, each of which correspond to a `RigCamera` of the `RigModel` and the actual pose of the `RigModel` in the world : it's indeed an instantiation of the `RigModel` by combining `Shots`. These instances are defined in the `rig_assignments.json` file as follows::
+- A `RigInstance` is a list of `Shots`, each of which correspond to a `RigCamera` of the `RigModel` and the actual pose of the `RigModel` in the world : it's indeed an instantiation of the `RigModel` by combining `Shots`. These instances are defined in the `rig_assignments.json` file as follows::

     {
         "RIG_INSTANCE_ID1": {
@@ -57,8 +58,7 @@ The following terms define such assembly and capture in OpenSfM terminology :
         ]
     },
     ...
-
-
+    }

 A picture is often worth many words : |rig_frame|

diff --git a/doc/source/sensor_database.rst b/doc/source/sensor_database.rst
index 63fdc1eb4..400649a16 100644
--- a/doc/source/sensor_database.rst
+++ b/doc/source/sensor_database.rst
@@ -9,7 +9,7 @@ Calibration Database
 Overview
 --------

-In order to produce accurate geometry, structure-from-motion (SfM) needs to have correct estimates of the imaging sensor geometry, such as : lens type (fisheye, perspective, spherical), focal, distorsion, principal point. Please refer to the `Geometric Models`_ section for a comprehensive list of camera internal parameters (calibration).
+In order to produce accurate geometry, structure-from-motion (SfM) needs to have correct estimates of the imaging sensor geometry, such as : lens type (fisheye, perspective, spherical), focal, distortion, principal point. Please refer to the :doc:`geometry` section for a comprehensive list of camera internal parameters (calibration).

 While reconstructing the scene (using incremental SfM), OpenSfM will adjust for the camera calibration values that best explain the seen geometry. However, in order to get optimal and failsafe results, it is recommended to have a first good guess of the calibration values.
 By default, OpenSfM will try to get these values by reading the image EXIFs, where the focal length can be red, and is one of the most important of the calibration values. However, sometimes, EXIFs does not contain such value, or it is erroneous, and/or it is better to have other values than just the focal length.
@@ -21,18 +21,18 @@ Here comes sensors databases to the rescue. These are files stored under ``opens
 sensor_data_detailed.json
 -------------------------

-This file contains physical sensor's width and height, in millimeters, for a given ``model make`` sensor (see `extract_metadata`_). It means that if only the focal length is available in the EXIFs, since we also have the sensor physical size, we know the full sensor geometry.
+This file contains physical sensor's width and height, in millimeters, for a given ``model make`` sensor (see ``extract_metadata`` command). It means that if only the focal length is available in the EXIFs, since we also have the sensor physical size, we know the full sensor geometry.

 sensor_data.json
 ----------------

-This file contains a multiplicative factor for a given ``model make`` sensor (see `extract_metadata`_). When applied to the EXIFs focal length, this factor gives the focal 35mm equivalent. Since we know the dimensions of 35mm equivalent (24x32 mm), we again know the full sensor geometry.
+This file contains a multiplicative factor for a given ``model make`` sensor (see ``extract_metadata`` command). When applied to the EXIFs focal length, this factor gives the focal 35mm equivalent. Since we know the dimensions of 35mm equivalent (24x32 mm), we again know the full sensor geometry.

 camera_calibration.json
 ------------------------

-This file contains the full definition (in OpenSfM format) of camera calibrations. Calibration are for a given ``make`` (see `extract_metadata`_), and then, they're further refined :
+This file contains the full definition (in OpenSfM format) of camera calibrations. Calibration are for a given ``make`` (see ``extract_metadata`` command), and then, they're further refined :

 - If ``ALL`` is specified, then the calibration is valid for all ``make model`` camera independant of their ``model`` value
 - If ``MODEL`` is specified, then calibrations are per actual ``model``
 - If ``FOCAL`` is specified, then calibrations are per focal length red from the EXIFs
diff --git a/doc/source/using.rst b/doc/source/using.rst
index 9cf364ead..3f83aa758 100644
--- a/doc/source/using.rst
+++ b/doc/source/using.rst
@@ -22,7 +22,7 @@
 First, build the OpenSfM Docker image, as described under "building".
 Then, start a Docker container.
 The following command mounts the `data/` folder to `/data/` inside the Docker container::

-    docker run -it -p 8080:8080 -v ${PWD}/data/:/data/ opensfm:ceres2
+    docker run -it -p 8080:8080 -v ${PWD}/data/:/data/ opensfm.ubuntu24 /bin/bash

 Once inside the running Docker container, start the reconstruction process with the usual command::
@@ -316,4 +316,4 @@ Checkout `the default configuration <_modules/opensfm/config.html>`_ to see the

 .. include:: gcp.rst

-.. _fisheye https://docs.opencv.org/master/db/d58/group__calib3d__fisheye.html
+.. _fisheye: https://docs.opencv.org/master/db/d58/group__calib3d__fisheye.html
diff --git a/export_pmvs.md b/export_pmvs.md
deleted file mode 100644
index e748868da..000000000
--- a/export_pmvs.md
+++ /dev/null
@@ -1,106 +0,0 @@
-# OpenSfM to PMVS dense point cloud reconstruction
-
-Download CMVS: http://www.di.ens.fr/cmvs/
-
-- [Install PMVS/CMVS](#pmvs-installation-hints-ubuntu)
-- [PMVS Inputs](#pmvs-inputs)
-- [Usage](#usage)
-
-### PMVS Installation Hints (Linux/Ubuntu)
-- Installation pointers here: http://www.di.ens.fr/cmvs/documentation.html
-
-- Type `make` in `cmvs/program/main`. Should make three binaries:
-  + `pmvs2`
-  + `cmvs`
-  + `genOptions`
-
-- Most dependencies installed with apt-get:
-
-  `sudo apt-get install libgsl0-dev libblas-dev libatlas-dev liblapack-dev liblapacke-dev`
-
-- Updated Graclus link: http://www.cs.utexas.edu/users/dml/Software/graclus.html
-
-#### Lapack Errors:
-http://mdda.net/oss-blog/2014-06/building-VisualSFM-on-FC20/
-
-    ERROR : ../base/numeric/mylapack.cc:6:25: fatal error: clapack/f2c.h: No such file or directory
-
-Update `../base/numeric/mylapack.cc`
-From:
-
-    extern "C" {
-    #include
-    #include
-    };
-To:
-
-    extern "C" {
-    //#include
-    //#include
-    #include
-    };
-    #define integer int
-
-Update `../base/numeric/mylapack.h`
-From:
-
-    static void lls(std::vector& A,
-                    std::vector& b,
-                    long int width, long int height);
-
-    static void lls(std::vector& A,
-                    std::vector& b,
-                    long int width, long int height);
-To:
-
-    static void lls(std::vector& A,
-                    std::vector& b,
-                    int width, int height);
-
-    static void lls(std::vector& A,
-                    std::vector& b,
-                    int width, int height);
-
-#### Accumulate Error:
-
-    ../base/cmvs/bundle.cc: In member function ‘int CMVS::Cbundle::addImagesSub(const std::vector >&)’:
-    ../base/cmvs/bundle.cc:1134:52: error: ‘accumulate’ was not declared in this scope
-    return accumulate(addnum.begin(), addnum.end(), 0);
-
-Add this to `../base/cmvs/bundle.cc`
-
-    #include
-
-#### Stdlib Error:
-    genOption.cc: In function ‘int main(int, char**)’:
-    genOption.cc:17:12: error: ‘exit’ was not declared in this scope
-
-Add this to `genOption.cc`
-
-    #include
-
-### PMVS Inputs
-
-These are the files that `export_pmvs` generates for PMVS from OpenSfM output. More info: http://www.di.ens.fr/pmvs/documentation.html
-
-- Images: `visualize/%08d.jpg` (radially undistorted)
-- Camera Parameters: `txt/%08d.txt`
-- Image Co-Visibility file: `vis.dat`
-- Options file: `options.txt` (includes mention of `vis.dat`)
-- Output directory: `models/`
-
-### Usage
-
-From the root OpenSfM directory, run:
-
-    bin/export_pmvs
-
-There will be an individual pmvs directory for each separate reconstruction.
-
-To perform the PMVS point cloud reconstruction, run:
-
-    ./pmvs2 /pmvs/recon0/ pmvs_options.txt
-
-This will generate files in `/pmvs/recon0/models/` including a `pmvs_options.txt.ply`
-
-**Important:** note that the trailing `/` in `recon0/` is needed. Otherwise PMVS will fail to find the options file and will give an `Unrecognizable option` warning.
diff --git a/opensfm/dataset.py b/opensfm/dataset.py
index 529fa31d3..d4aebfcbe 100644
--- a/opensfm/dataset.py
+++ b/opensfm/dataset.py
@@ -347,9 +347,13 @@ def load_matches(self, image: str) -> Dict[str, NDArray]:
         # as 'pickle.load' is RCE-prone. Will raise on any class other
         # than the numpy ones we allow.
         class MatchingUnpickler(pickle.Unpickler):
+            # Handle both numpy <2.0 (np.core) and numpy >=2.0 (np._core)
+            _multiarray = np._core.multiarray if hasattr(np, '_core') else np.core.multiarray
             modules_map = {
-                "numpy.core.multiarray._reconstruct": np.core.multiarray,
-                "numpy.core.multiarray.scalar": np.core.multiarray,
+                "numpy.core.multiarray._reconstruct": _multiarray,
+                "numpy.core.multiarray.scalar": _multiarray,
+                "numpy._core.multiarray._reconstruct": _multiarray,
+                "numpy._core.multiarray.scalar": _multiarray,
                 "numpy.ndarray": np,
                 "numpy.dtype": np,
             }
diff --git a/opensfm/exif.py b/opensfm/exif.py
index 7f5b6f75a..f59f630e1 100644
--- a/opensfm/exif.py
+++ b/opensfm/exif.py
@@ -530,7 +530,7 @@ def extract_opk(self, geo: Dict[str, Any]) -> Optional[Dict[str, Any]]:
             )
         )

-        if np.all(ypr) is not None:
+        if not np.any(ypr == None):
            ypr = np.radians(ypr)

            # Convert YPR --> OPK
diff --git a/opensfm/src/CMakeLists.txt b/opensfm/src/CMakeLists.txt
index d6b5ad80b..5f02694fd 100644
--- a/opensfm/src/CMakeLists.txt
+++ b/opensfm/src/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 3.0)
+cmake_minimum_required(VERSION 3.15)

 project(opensfm C CXX)

@@ -63,7 +63,16 @@ find_package(SuiteSparse)
 find_package(Eigen3 REQUIRED)
 find_package(Ceres)
 find_package(Gflags REQUIRED)
-find_package(Glog REQUIRED)
+
+# Try to find glog using its native CMake config first (modern systems)
+# Fall back to custom FindGlog.cmake for older systems (e.g., Ubuntu 20.04)
+find_package(glog QUIET CONFIG)
+if(glog_FOUND)
+  message(STATUS "Using glog native CMake config (modern glog)")
+else()
+  message(STATUS "glog CMake config not found, using FindGlog.cmake (Ubuntu 20.04 compatibility)")
+  find_package(Glog REQUIRED)
+endif()

 # Ceres2 exposes Ceres::ceres target.
 # Ceres1 exposes just ceres.
@@ -75,14 +84,23 @@ else()
   set(CERES_LIBRARIES ceres)
 endif()

-find_package(OpenCV)
+# Only link against the OpenCV components we actually need
+# This avoids pulling in unnecessary dependencies like viz (which requires VTK)
 # OpenCV's OpenCVConfig will enforce imgcodecs for < 3.0
-# (even if OPTIONAL_COMPONENTS) so we remove it as we don't need it
+# (even if OPTIONAL_COMPONENTS) so we handle version detection carefully
 # Cause is imread/imwrite moved to imgcodecs on > 3.0
-if(${OpenCV_VERSION} LESS 3.0)
-  find_package(OpenCV REQUIRED core imgproc calib3d)
+find_package(OpenCV QUIET)
+if(OpenCV_FOUND AND ${OpenCV_VERSION} VERSION_LESS "3.0")
+  find_package(OpenCV REQUIRED COMPONENTS core imgproc calib3d)
 else()
-  find_package(OpenCV REQUIRED core imgproc calib3d OPTIONAL_COMPONENTS imgcodecs)
+  find_package(OpenCV REQUIRED COMPONENTS core imgproc calib3d OPTIONAL_COMPONENTS imgcodecs)
+endif()
+
+# Override OpenCV_LIBS to only include the components we explicitly requested
+# This prevents linking against all OpenCV modules (especially viz which requires VTK)
+set(OpenCV_LIBS opencv_core opencv_imgproc opencv_calib3d)
+if(TARGET opencv_imgcodecs)
+  list(APPEND OpenCV_LIBS opencv_imgcodecs)
 endif()

 ####### Third party libraries #######
@@ -117,12 +135,13 @@ if (OPENSFM_BUILD_TESTS)
               third_party/gtest/gmock_gtest_all.cc
               third_party/gtest/gmock_main.cc)
   target_include_directories(gtest PRIVATE ${GFLAGS_INCLUDE_DIR})
+  target_link_libraries(gtest PRIVATE glog::glog)

   set(TEST_MAIN test_main)
   add_library(${TEST_MAIN} testing_main.cc)
   target_link_libraries(${TEST_MAIN}
                         ${GFLAGS_LIBRARY}
-                        ${GLOG_LIBRARY}
+                        glog::glog
                         gtest)
 endif()

diff --git a/opensfm/src/bundle/CMakeLists.txt b/opensfm/src/bundle/CMakeLists.txt
index 87b6a9587..9aa7a0b9a 100644
--- a/opensfm/src/bundle/CMakeLists.txt
+++ b/opensfm/src/bundle/CMakeLists.txt
@@ -58,3 +58,4 @@ target_link_libraries(pybundle PRIVATE
 set_target_properties(pybundle PROPERTIES
     LIBRARY_OUTPUT_DIRECTORY "${opensfm_SOURCE_DIR}/.."
 )
+install(TARGETS pybundle LIBRARY DESTINATION .)
diff --git a/opensfm/src/cmake/FindGlog.cmake b/opensfm/src/cmake/FindGlog.cmake
index 0dde218ee..b1244c15d 100644
--- a/opensfm/src/cmake/FindGlog.cmake
+++ b/opensfm/src/cmake/FindGlog.cmake
@@ -157,6 +157,16 @@ ENDIF (GLOG_LIBRARY AND
 IF (GLOG_FOUND)
   SET(GLOG_INCLUDE_DIRS ${GLOG_INCLUDE_DIR})
   SET(GLOG_LIBRARIES ${GLOG_LIBRARY})
+
+  # Create imported target for modern CMake usage
+  # This provides compatibility with systems that don't have glog CMake config
+  IF (NOT TARGET glog::glog)
+    ADD_LIBRARY(glog::glog UNKNOWN IMPORTED)
+    SET_TARGET_PROPERTIES(glog::glog PROPERTIES
+      IMPORTED_LOCATION "${GLOG_LIBRARY}"
+      INTERFACE_INCLUDE_DIRECTORIES "${GLOG_INCLUDE_DIR}"
+      INTERFACE_COMPILE_DEFINITIONS "GLOG_USE_GLOG_EXPORT")
+  ENDIF()
 ENDIF (GLOG_FOUND)

 # Handle REQUIRED / QUIET optional arguments.
diff --git a/opensfm/src/dense/CMakeLists.txt b/opensfm/src/dense/CMakeLists.txt
index 2728749df..da67193e8 100644
--- a/opensfm/src/dense/CMakeLists.txt
+++ b/opensfm/src/dense/CMakeLists.txt
@@ -27,3 +27,4 @@ target_link_libraries(pydense PRIVATE dense foundation)
 set_target_properties(pydense PROPERTIES
     LIBRARY_OUTPUT_DIRECTORY "${opensfm_SOURCE_DIR}/.."
 )
+install(TARGETS pydense LIBRARY DESTINATION .)
diff --git a/opensfm/src/features/CMakeLists.txt b/opensfm/src/features/CMakeLists.txt
index 6db5b3f10..430a6182b 100644
--- a/opensfm/src/features/CMakeLists.txt
+++ b/opensfm/src/features/CMakeLists.txt
@@ -27,3 +27,4 @@ target_link_libraries(pyfeatures
 set_target_properties(pyfeatures PROPERTIES
     LIBRARY_OUTPUT_DIRECTORY "${opensfm_SOURCE_DIR}/.."
 )
+install(TARGETS pyfeatures LIBRARY DESTINATION .)
diff --git a/opensfm/src/foundation/CMakeLists.txt b/opensfm/src/foundation/CMakeLists.txt
index f7737f327..9508d1882 100644
--- a/opensfm/src/foundation/CMakeLists.txt
+++ b/opensfm/src/foundation/CMakeLists.txt
@@ -17,7 +17,7 @@ target_link_libraries(foundation
     Eigen3::Eigen
   PRIVATE
     ${GFLAGS_LIBRARY}
-    ${GLOG_LIBRARY}
+    glog::glog
 )

 target_include_directories(foundation PUBLIC
diff --git a/opensfm/src/geo/CMakeLists.txt b/opensfm/src/geo/CMakeLists.txt
index 75620d069..ad62e5853 100644
--- a/opensfm/src/geo/CMakeLists.txt
+++ b/opensfm/src/geo/CMakeLists.txt
@@ -33,3 +33,4 @@ target_link_libraries(pygeo
 set_target_properties(pygeo PROPERTIES
     LIBRARY_OUTPUT_DIRECTORY "${opensfm_SOURCE_DIR}/.."
 )
+install(TARGETS pygeo LIBRARY DESTINATION .)
diff --git a/opensfm/src/geometry/CMakeLists.txt b/opensfm/src/geometry/CMakeLists.txt
index c8be4b9b7..8bdc5455a 100644
--- a/opensfm/src/geometry/CMakeLists.txt
+++ b/opensfm/src/geometry/CMakeLists.txt
@@ -53,3 +53,4 @@ target_link_libraries(pygeometry
 set_target_properties(pygeometry PROPERTIES
     LIBRARY_OUTPUT_DIRECTORY "${opensfm_SOURCE_DIR}/.."
 )
+install(TARGETS pygeometry LIBRARY DESTINATION .)
diff --git a/opensfm/src/geometry/triangulation.h b/opensfm/src/geometry/triangulation.h
index a97ae6a17..c620d7323 100644
--- a/opensfm/src/geometry/triangulation.h
+++ b/opensfm/src/geometry/triangulation.h
@@ -2,6 +2,7 @@

 #include

+#include
 #include
 #include
 #include
diff --git a/opensfm/src/map/CMakeLists.txt b/opensfm/src/map/CMakeLists.txt
index 74eb67420..58f931fb2 100644
--- a/opensfm/src/map/CMakeLists.txt
+++ b/opensfm/src/map/CMakeLists.txt
@@ -62,3 +62,4 @@ endif()
 set_target_properties(pymap PROPERTIES
     LIBRARY_OUTPUT_DIRECTORY "${opensfm_SOURCE_DIR}/.."
 )
+install(TARGETS pymap LIBRARY DESTINATION .)
diff --git a/opensfm/src/robust/CMakeLists.txt b/opensfm/src/robust/CMakeLists.txt
index 40bdf7a46..a9eb0f2f9 100644
--- a/opensfm/src/robust/CMakeLists.txt
+++ b/opensfm/src/robust/CMakeLists.txt
@@ -33,3 +33,4 @@ target_link_libraries(pyrobust
 set_target_properties(pyrobust PROPERTIES
     LIBRARY_OUTPUT_DIRECTORY "${opensfm_SOURCE_DIR}/.."
 )
+install(TARGETS pyrobust LIBRARY DESTINATION .)
diff --git a/opensfm/src/sfm/CMakeLists.txt b/opensfm/src/sfm/CMakeLists.txt
index a5bfe6632..ffc8d6e56 100644
--- a/opensfm/src/sfm/CMakeLists.txt
+++ b/opensfm/src/sfm/CMakeLists.txt
@@ -43,3 +43,4 @@ target_link_libraries(pysfm
 set_target_properties(pysfm PROPERTIES
     LIBRARY_OUTPUT_DIRECTORY "${opensfm_SOURCE_DIR}/.."
 )
+install(TARGETS pysfm LIBRARY DESTINATION .)
diff --git a/opensfm/transformations.py b/opensfm/transformations.py
index dfa59ed29..209aafc6f 100644
--- a/opensfm/transformations.py
+++ b/opensfm/transformations.py
@@ -235,7 +235,7 @@ def translation_from_matrix(matrix: NDArray) -> NDArray:
     True

     """
-    return numpy.array(matrix, copy=False)[:3, 3].copy()
+    return numpy.asarray(matrix)[:3, 3].copy()


 def reflection_matrix(point: NDArray, normal: NDArray) -> NDArray:
@@ -278,7 +278,7 @@ def reflection_from_matrix(
     True

     """
-    M = numpy.array(matrix, dtype=numpy.float64, copy=False)
+    M = numpy.asarray(matrix, dtype=numpy.float64)
     # normal: unit eigenvector corresponding to eigenvalue -1
     w, V = numpy.linalg.eig(M[:3, :3])
     i = numpy.where(abs(numpy.real(w) + 1.0) < 1e-8)[0]
@@ -342,7 +342,7 @@ def rotation_matrix(
     M[:3, :3] = R
     if point is not None:
         # rotation not around origin
-        point = numpy.array(point[:3], dtype=numpy.float64, copy=False)
+        point = numpy.asarray(point[:3], dtype=numpy.float64)
         M[:3, 3] = point - numpy.dot(R, point)
     return M

@@ -362,7 +362,7 @@ def rotation_from_matrix(
     True

     """
-    R = numpy.array(matrix, dtype=numpy.float64, copy=False)
+    R = numpy.asarray(matrix, dtype=numpy.float64)
     R33 = R[:3, :3]
     # direction: unit eigenvector of R33 corresponding to eigenvalue of 1
     w, W = numpy.linalg.eig(R33.T)
@@ -448,7 +448,7 @@ def scale_from_matrix(
     True

     """
-    M = numpy.array(matrix, dtype=numpy.float64, copy=False)
+    M = numpy.asarray(matrix, dtype=numpy.float64)
     M33 = M[:3, :3]
     factor = numpy.trace(M33) - 2.0
     try:
@@ -513,11 +513,11 @@ def projection_matrix(

     """
     M = numpy.identity(4)
-    point = numpy.array(point[:3], dtype=numpy.float64, copy=False)
+    point = numpy.asarray(point[:3], dtype=numpy.float64)
     normal = unit_vector(normal[:3])
     if perspective is not None:
         # perspective projection
-        perspective = numpy.array(perspective[:3], dtype=numpy.float64, copy=False)
+        perspective = numpy.asarray(perspective[:3], dtype=numpy.float64)
         M[0, 0] = M[1, 1] = M[2, 2] = numpy.dot(perspective - point, normal)
         M[:3, :3] -= numpy.outer(perspective, normal)
         if pseudo:
@@ -530,7 +530,7 @@ def projection_matrix(
         M[3, 3] = numpy.dot(perspective, normal)
     elif direction is not None:
         # parallel projection
-        direction = numpy.array(direction[:3], dtype=numpy.float64, copy=False)
+        direction = numpy.asarray(direction[:3], dtype=numpy.float64)
         scale = numpy.dot(direction, normal)
         M[:3, :3] -= numpy.outer(direction, normal) / scale
         M[:3, 3] = direction * (numpy.dot(point, normal) / scale)
@@ -575,7 +575,7 @@ def projection_from_matrix(
     True

     """
-    M = numpy.array(matrix, dtype=numpy.float64, copy=False)
+    M = numpy.asarray(matrix, dtype=numpy.float64)
     M33 = M[:3, :3]
     w, V = numpy.linalg.eig(M)
     i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
@@ -732,7 +732,7 @@ def shear_from_matrix(
     True

     """
-    M = numpy.array(matrix, dtype=numpy.float64, copy=False)
+    M = numpy.asarray(matrix, dtype=numpy.float64)
     M33 = M[:3, :3]
     # normal: cross independent eigenvectors corresponding to the eigenvalue 1
     w, V = numpy.linalg.eig(M33)
@@ -1103,8 +1103,8 @@ def superimposition_matrix(
     True

     """
-    v0 = numpy.array(v0, dtype=numpy.float64, copy=False)[:3]
-    v1 = numpy.array(v1, dtype=numpy.float64, copy=False)[:3]
+    v0 = numpy.asarray(v0, dtype=numpy.float64)[:3]
+    v1 = numpy.asarray(v1, dtype=numpy.float64)[:3]
     return affine_matrix_from_points(v0, v1, shear=False, scale=scale, usesvd=usesvd)


@@ -1210,7 +1210,7 @@ def euler_from_matrix(
     #  `Union[int, str]`.
     k = _NEXT_AXIS[i - parity + 1]

-    M = numpy.array(matrix, dtype=numpy.float64, copy=False)[:3, :3]
+    M = numpy.asarray(matrix, dtype=numpy.float64)[:3, :3]
     if repetition:
         # pyre-fixme[6]: For 1st argument expected `Union[ndarray[Any, dtype[Any]],
         #  tuple[ndarray[Any, dtype[Any]], ...]]` but got `Tuple[Union[int, str],
@@ -1429,7 +1429,7 @@ def quaternion_from_matrix(matrix: NDArray, isprecise: bool = False) -> NDArray:
     True

     """
-    M = numpy.array(matrix, dtype=numpy.float64, copy=False)[:4, :4]
+    M = numpy.asarray(matrix, dtype=numpy.float64)[:4, :4]
     if isprecise:
         q = numpy.empty((4,))
         t = numpy.trace(M)
@@ -1755,7 +1755,7 @@ def unit_vector(
         return data
     else:
         if out is not data:
-            out[:] = numpy.array(data, copy=False)
+            out[:] = numpy.asarray(data)
         data = out
     length = numpy.atleast_1d(numpy.sum(data * data, axis))
     numpy.sqrt(length, length)
@@ -1827,8 +1827,8 @@ def angle_between_vectors(
     True

     """
-    v0 = numpy.array(v0, dtype=numpy.float64, copy=False)
-    v1 = numpy.array(v1, dtype=numpy.float64, copy=False)
+    v0 = numpy.asarray(v0, dtype=numpy.float64)
+    v1 = numpy.asarray(v1, dtype=numpy.float64)
     dot = numpy.sum(v0 * v1, axis=axis)
     dot /= vector_norm(v0, axis=axis) * vector_norm(v1, axis=axis)
     dot = numpy.clip(dot, -1.0, 1.0)
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 000000000..5d67cf18a
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,107 @@
+[build-system]
+requires = ["scikit-build-core>=0.8.0", "pybind11>=2.10.0"]
+build-backend = "scikit_build_core.build"
+
+[project]
+name = "opensfm"
+version = "0.5.2"
+description = "A Structure from Motion library"
+readme = "README.md"
+requires-python = ">=3.8"
+license = {text = "BSD"}
+authors = [
+    {name = "Mapillary"},
+]
+
+dependencies = [
+    "cloudpickle>=0.4.0",
+    "exifread>=2.1.2",
+    "flask>=2.3.2",
+    "fpdf2>=2.4.6",
+    "joblib>=1.0.0",
+    "matplotlib",
+    "networkx>=2.5",
+    "numpy>=1.19",
+    "Pillow>=8.1.1",
+    "pyproj>=1.9.5.1",
+    "python-dateutil>=2.7",
+    "pyyaml>=5.4",
+    "scipy>=1.10.0",
+    "xmltodict>=0.10.2",
+    "opencv-python>=4.8.0",
+]
+
+[project.optional-dependencies]
+dev = [
+    "pytest>=7.0.0",
+    "wheel",
+]
+docs = [
+    "Sphinx>=4.2.0",
+    "sphinx_rtd_theme>=1.0.0",
+]
+test = [
+    "pytest>=7.0.0",
+]
+
+# Note: The original bin/opensfm and bin/opensfm_run_all are bash scripts
+# For Phase 1, we'll install them as scripts using scikit-build's script handling
+# TODO Phase 2: Create proper Python entry points by moving bin/opensfm_main.py
+# into the opensfm package (e.g., opensfm/__main__.py or opensfm/cli.py)
+# [project.scripts]
+# opensfm = "opensfm.cli:main"
+# opensfm_run_all = "opensfm.cli:run_all"
+
+[project.urls]
+Homepage = "https://github.com/mapillary/OpenSfM"
+Documentation = "https://docs.opensfm.org/"
+
+[tool.scikit-build]
+# CMake source directory containing CMakeLists.txt
+cmake.source-dir = "opensfm/src"
+cmake.build-type = "Release"
+
+# Use a fixed build directory (useful for running C++ tests)
+build-dir = "cmake_build"
+
+# Minimum CMake version
+cmake.version = ">=3.15"
+
+# Specify where to install the Python package
+wheel.install-dir = "opensfm"
+
+# Package discovery - automatically find Python packages
+wheel.packages = ["opensfm"]
+
+# Package data (JSON, YAML, NPZ files in opensfm/data/) is automatically
+# included because it's inside the opensfm package directory
+# CMake only installs compiled .so files; Python packaging handles the rest
+
+# Note: Script installation
+# The bin/opensfm and bin/opensfm_run_all bash scripts need to be installed
+# This will be handled via CMake install() commands or Python entry points
+
+# Include files in the sdist (source distribution)
+sdist.include = [
+    "opensfm/data/**",  # sensor_data.json, camera_calibration.yaml, bow/*.npz
+    "opensfm/src/**",   # CMake source files
+    "bin/**",           # All utility scripts
+]
+
+# Exclude unnecessary files from sdist
+sdist.exclude = [
+    ".git/**",
+    ".github/**",
+    "cmake_build/**",
+    "**/__pycache__/**",
+    "**/*.pyc",
+]
+
+# CMake configuration options
+[tool.scikit-build.cmake.define]
+# Build C++ tests (enabled for Docker/CI environments)
+OPENSFM_BUILD_TESTS = "ON"
+
+# Ensure we're using the correct Python executable
+# This will be automatically set by scikit-build-core
+# PYTHON_EXECUTABLE will be set automatically
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 86e1b1767..000000000
--- a/requirements.txt
+++ /dev/null
@@ -1,19 +0,0 @@
-cloudpickle==0.4.0
-exifread==2.1.2
-flask==2.3.2
-fpdf2==2.4.6
-joblib==0.14.1
-matplotlib
-networkx==2.5
-numpy>=1.19
-Pillow>=8.1.1
-pyproj>=1.9.5.1
-pytest==3.0.7
-python-dateutil>=2.7
-pyyaml>=5.4
-scipy>=1.10.0
-Sphinx==4.2.0
-xmltodict==0.10.2
-wheel
-opencv-python==4.5.1.48 ; sys_platform == "win32"
-opencv-python ; sys_platform == "linux"