diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..2c431b0b --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,9 @@ +# Please see the documentation for all configuration options: +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/workflows/official-docker-images.yml b/.github/workflows/deploy_docker.yml similarity index 91% rename from .github/workflows/official-docker-images.yml rename to .github/workflows/deploy_docker.yml index 795bd7c7..cc8fe198 100644 --- a/.github/workflows/official-docker-images.yml +++ b/.github/workflows/deploy_docker.yml @@ -1,4 +1,4 @@ -name: Build Official Docker Images +name: Deploy Official Docker Images on: push: @@ -12,14 +12,10 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 # Fetch all history for all tags and branches with: fetch-depth: 0 - - name: Set up Python 3.8 - uses: actions/setup-python@v2 - with: - python-version: 3.8 # Build - name: Build docker images diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy_wheels.yml similarity index 61% rename from .github/workflows/deploy.yml rename to .github/workflows/deploy_wheels.yml index 293d4da0..fa1115ee 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy_wheels.yml @@ -1,6 +1,6 @@ # Deploy tagged releases. -name: Deploy Release +name: Deploy Release Wheels on: push: @@ -17,7 +17,7 @@ concurrency: jobs: build_wheels: - name: Build wheel for cp${{ matrix.python }}-${{ matrix.builder }}_${{ matrix.arch }} + name: Deploy for cp${{ matrix.python }}-${{ matrix.builder }}_${{ matrix.arch }} runs-on: ${{ matrix.os }} strategy: # Ensure that a wheel builder finishes even if another fails. 
Useful for @@ -26,10 +26,6 @@ jobs: matrix: include: # Linux 64 bit manylinux - - os: ubuntu-latest - arch: x86_64 - python: 38 - builder: manylinux - os: ubuntu-latest arch: x86_64 python: 39 @@ -42,6 +38,10 @@ jobs: arch: x86_64 python: 311 builder: manylinux + - os: ubuntu-latest + arch: x86_64 + python: 312 + builder: manylinux # MacOS x86_64. The macos-13 runner is the last # Intel-based runner version. At some point we'll @@ -50,28 +50,66 @@ jobs: arch: x86_64 python: 39 builder: macosx + deploy: 13.0 - os: macos-13 arch: x86_64 python: 310 builder: macosx + deploy: 13.0 - os: macos-13 arch: x86_64 python: 311 builder: macosx + deploy: 13.0 + - os: macos-13 + arch: x86_64 + python: 312 + builder: macosx + deploy: 13.0 + + # MacOS arm64 + - os: macos-latest + arch: arm64 + python: 310 + builder: macosx + deploy: 14.0 + - os: macos-latest + arch: arm64 + python: 311 + builder: macosx + deploy: 14.0 + - os: macos-latest + arch: arm64 + python: 312 + builder: macosx + deploy: 14.0 env: CIBW_BUILD: cp${{ matrix.python }}-${{ matrix.builder }}_${{ matrix.arch }} CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.builder }}2014 CIBW_MANYLINUX_I686_IMAGE: ${{ matrix.builder }}2014 - CIBW_TEST_SKIP: "*-macosx_arm64" CIBW_BUILD_VERBOSITY: 3 - CIBW_ENVIRONMENT_LINUX: "CC=gcc CXX=g++ CFLAGS='-O3 -fPIC' CXXFLAGS='-O3 -fPIC -std=c++14' BOOST_ROOT=/usr/local" - CIBW_ENVIRONMENT_MACOS: "MACOSX_DEPLOYMENT_TARGET=13.0 CC=gcc-14 CXX=g++-14 CFLAGS='-O3 -fPIC' CXXFLAGS='-O3 -fPIC -std=c++14' BOOST_ROOT=/usr/local FLAC_ROOT=/usr/local BLAS_LIBRARIES=/usr/local/lib/libscipy_openblas.dylib" + CIBW_ENVIRONMENT_LINUX: > + CC=gcc + CXX=g++ + CFLAGS='-O3 -fPIC' + CXXFLAGS='-O3 -fPIC -std=c++17' + BOOST_ROOT=/usr/local + BLAS_LIBRARIES='-L/usr/local/lib -lopenblas -fopenmp -lm -lgfortran' + CIBW_ENVIRONMENT_MACOS: > + MACOSX_DEPLOYMENT_TARGET=${{ matrix.deploy }} + CC=clang + CXX=clang++ + CFLAGS='-O3 -fPIC' + CXXFLAGS='-O3 -fPIC -std=c++17 -stdlib=libc++' + BOOST_ROOT=/usr/local + 
FLAC_ROOT=/usr/local + BLAS_LIBRARIES='/usr/local/lib/libopenblas.dylib' CIBW_BEFORE_BUILD_LINUX: ./wheels/install_deps_linux.sh - CIBW_BEFORE_BUILD_MACOS: ./wheels/install_deps_osx.sh ${{ matrix.builder }}_${{ matrix.arch }} + CIBW_BEFORE_BUILD_MACOS: ./wheels/install_deps_osx.sh CIBW_REPAIR_WHEEL_COMMAND_LINUX: ./wheels/repair_wheel_linux.sh {dest_dir} {wheel} CIBW_REPAIR_WHEEL_COMMAND_MACOS: ./wheels/repair_wheel_macos.sh {dest_dir} {wheel} {delocate_archs} CIBW_BEFORE_TEST: export OMP_NUM_THREADS=2 - CIBW_TEST_REQUIRES: pytest + CIBW_TEST_REQUIRES: pytest pixell CIBW_TEST_COMMAND: > python -c 'import so3g.smurf.reader; from spt3g import core' && python -m pytest {package}/test @@ -82,11 +120,11 @@ jobs: - uses: actions/setup-python@v5 name: Install Python with: - python-version: '3.10' + python-version: '3.12' - name: Install cibuildwheel run: | - python -m pip install twine cibuildwheel==2.20.0 + python -m pip install twine cibuildwheel==2.22.0 - name: Build wheel run: | @@ -94,4 +132,4 @@ jobs: - name: Upload to PyPI run: | - python -m twine upload wheelhouse/so3g*cp${{ matrix.python }}-${{ matrix.builder }}*.whl + python -m twine upload wheelhouse/so3g*cp${{ matrix.python }}-${{ matrix.builder }}*${{ matrix.arch }}*.whl diff --git a/.github/workflows/test_conda.yml b/.github/workflows/test_conda.yml new file mode 100644 index 00000000..ada21eab --- /dev/null +++ b/.github/workflows/test_conda.yml @@ -0,0 +1,95 @@ +# In general, we try to run on: +# - The oldest supported python +# - The latest stable python that is the common default on most systems and conda +# - (During transitions) The newly released bleeding edge python + +name: Run Tests with Conda + +on: + workflow_dispatch: + push: + branches: + - master + pull_request: + branches: + - master + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + test: + name: Python-${{ matrix.python }} on ${{ matrix.arch }} + runs-on: ${{ matrix.os }} + defaults: + 
run: + shell: bash -l {0} + strategy: + fail-fast: false + matrix: + include: + - os: ubuntu-latest + python: "3.9" + arch: Linux-x86_64 + - os: ubuntu-latest + python: "3.11" + arch: Linux-x86_64 + - os: ubuntu-latest + python: "3.12" + arch: Linux-x86_64 + - os: macos-latest + python: "3.10" + arch: MacOSX-x86_64 + - os: macos-latest + python: "3.12" + arch: MacOSX-x86_64 + - os: macos-latest + python: "3.10" + arch: MacOSX-arm64 + - os: macos-latest + python: "3.12" + arch: MacOSX-arm64 + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Conda Base + run: | + sudo rm -rf /usr/share/miniconda \ + && sudo rm -rf /usr/local/miniconda \ + && curl -SL -o miniforge.sh https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-${{ matrix.arch }}.sh \ + && bash miniforge.sh -b -f -p ~/conda \ + && source ~/conda/etc/profile.d/conda.sh \ + && conda activate base \ + && conda update -n base --yes conda + + - name: Check Conda Config + run: | + source ~/conda/etc/profile.d/conda.sh \ + && conda activate base \ + && conda info \ + && conda list \ + && conda config --show-sources \ + && conda config --show + + - name: Install Dependencies + run: | + source ~/conda/etc/profile.d/conda.sh \ + && conda create --yes -n test python=${{ matrix.python }} \ + && conda activate test \ + && conda install --yes --file conda_dev_requirements.txt \ + && pip install -r test-requirements.txt + + - name: Install + run: | + source ~/conda/etc/profile.d/conda.sh \ + && conda activate test \ + && python3 -m pip install . 
+ + - name: Run Tests + run: | + source ~/conda/etc/profile.d/conda.sh \ + && conda activate test \ + && export OMP_NUM_THREADS=2 \ + && pytest ./test diff --git a/.github/workflows/pytest.yml b/.github/workflows/test_docker.yml similarity index 77% rename from .github/workflows/pytest.yml rename to .github/workflows/test_docker.yml index 0864e10d..56d677d8 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/test_docker.yml @@ -1,9 +1,13 @@ -name: Run Tests +name: Run Tests with Docker +# on: workflow_dispatch on: push: - branches-ignore: [ master ] - pull_request: + branches: + - master + # pull_request: + # branches: + # - master concurrency: group: ${{ github.workflow }}-${{ github.ref }} @@ -15,11 +19,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v2 - - name: Set up Python 3.8 - uses: actions/setup-python@v2 - with: - python-version: 3.8 + uses: actions/checkout@v4 - name: Build docker images run: | diff --git a/.github/workflows/test_wheels.yml b/.github/workflows/test_wheels.yml new file mode 100644 index 00000000..a4b47c73 --- /dev/null +++ b/.github/workflows/test_wheels.yml @@ -0,0 +1,134 @@ +name: Test Wheel Build + +on: + workflow_dispatch: + push: + branches: + - master + pull_request: + branches: + - master + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + build_wheels: + name: Build for cp${{ matrix.python }}-${{ matrix.builder }}_${{ matrix.arch }} + runs-on: ${{ matrix.os }} + strategy: + # Ensure that a wheel builder finishes even if another fails. Useful for + # debugging multiple problems in parallel. 
+ fail-fast: false + matrix: + include: + # Linux 64 bit manylinux + - os: ubuntu-latest + arch: x86_64 + python: 39 + builder: manylinux + - os: ubuntu-latest + arch: x86_64 + python: 310 + builder: manylinux + - os: ubuntu-latest + arch: x86_64 + python: 311 + builder: manylinux + - os: ubuntu-latest + arch: x86_64 + python: 312 + builder: manylinux + + # MacOS x86_64. The macos-13 runner is the last + # Intel-based runner version. At some point we'll + # need to switch to macos-14 and test cross compiling. + - os: macos-13 + arch: x86_64 + python: 39 + builder: macosx + deploy: 13.0 + - os: macos-13 + arch: x86_64 + python: 310 + builder: macosx + deploy: 13.0 + - os: macos-13 + arch: x86_64 + python: 311 + builder: macosx + deploy: 13.0 + - os: macos-13 + arch: x86_64 + python: 312 + builder: macosx + deploy: 13.0 + + # MacOS arm64 + - os: macos-latest + arch: arm64 + python: 310 + builder: macosx + deploy: 14.0 + - os: macos-latest + arch: arm64 + python: 311 + builder: macosx + deploy: 14.0 + - os: macos-latest + arch: arm64 + python: 312 + builder: macosx + deploy: 14.0 + env: + CIBW_BUILD: cp${{ matrix.python }}-${{ matrix.builder }}_${{ matrix.arch }} + CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.builder }}2014 + CIBW_MANYLINUX_I686_IMAGE: ${{ matrix.builder }}2014 + CIBW_BUILD_VERBOSITY: 3 + CIBW_ENVIRONMENT_LINUX: > + CC=gcc + CXX=g++ + CFLAGS='-O3 -fPIC' + CXXFLAGS='-O3 -fPIC -std=c++17' + BOOST_ROOT=/usr/local + BLAS_LIBRARIES='-L/usr/local/lib -lopenblas -fopenmp -lm -lgfortran' + CIBW_ENVIRONMENT_MACOS: > + MACOSX_DEPLOYMENT_TARGET=${{ matrix.deploy }} + CC=clang + CXX=clang++ + CFLAGS='-O3 -fPIC' + CXXFLAGS='-O3 -fPIC -std=c++17 -stdlib=libc++' + BOOST_ROOT=/usr/local + FLAC_ROOT=/usr/local + BLAS_LIBRARIES='/usr/local/lib/libopenblas.dylib' + CIBW_BEFORE_BUILD_LINUX: ./wheels/install_deps_linux.sh + CIBW_BEFORE_BUILD_MACOS: ./wheels/install_deps_osx.sh + CIBW_REPAIR_WHEEL_COMMAND_LINUX: ./wheels/repair_wheel_linux.sh {dest_dir} {wheel} + 
CIBW_REPAIR_WHEEL_COMMAND_MACOS: ./wheels/repair_wheel_macos.sh {dest_dir} {wheel} {delocate_archs} + CIBW_BEFORE_TEST: export OMP_NUM_THREADS=2 + CIBW_TEST_REQUIRES: pytest pixell + CIBW_TEST_COMMAND: > + python -c 'import so3g.smurf.reader; from spt3g import core' && + python -m pytest {package}/test + steps: + - name: Checkout + uses: actions/checkout@v4 + + - uses: actions/setup-python@v5 + name: Install Python + with: + python-version: '3.12' + + - name: Install cibuildwheel + run: | + python -m pip install cibuildwheel==2.22.0 + + - name: Build wheel + run: | + python -m cibuildwheel --output-dir wheelhouse + + - uses: actions/upload-artifact@v4 + with: + name: wheels_cp${{ matrix.python }}-${{ matrix.builder }} + path: ./wheelhouse/so3g*cp${{ matrix.python }}-${{ matrix.builder }}*${{ matrix.arch }}*.whl diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index af4d1dc5..a4b47c73 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -1,7 +1,13 @@ +name: Test Wheel Build -name: Test Binary Wheels - -on: workflow_dispatch +on: + workflow_dispatch: + push: + branches: + - master + pull_request: + branches: + - master concurrency: group: ${{ github.workflow }}-${{ github.ref }} @@ -9,7 +15,7 @@ concurrency: jobs: build_wheels: - name: Build wheel for cp${{ matrix.python }}-${{ matrix.builder }}_${{ matrix.arch }} + name: Build for cp${{ matrix.python }}-${{ matrix.builder }}_${{ matrix.arch }} runs-on: ${{ matrix.os }} strategy: # Ensure that a wheel builder finishes even if another fails. Useful for @@ -18,10 +24,6 @@ jobs: matrix: include: # Linux 64 bit manylinux - - os: ubuntu-latest - arch: x86_64 - python: 38 - builder: manylinux - os: ubuntu-latest arch: x86_64 python: 39 @@ -34,6 +36,10 @@ jobs: arch: x86_64 python: 311 builder: manylinux + - os: ubuntu-latest + arch: x86_64 + python: 312 + builder: manylinux # MacOS x86_64. The macos-13 runner is the last # Intel-based runner version. 
At some point we'll @@ -42,27 +48,66 @@ jobs: arch: x86_64 python: 39 builder: macosx + deploy: 13.0 - os: macos-13 arch: x86_64 python: 310 builder: macosx + deploy: 13.0 + - os: macos-13 + arch: x86_64 + python: 311 + builder: macosx + deploy: 13.0 - os: macos-13 arch: x86_64 + python: 312 + builder: macosx + deploy: 13.0 + + # MacOS arm64 + - os: macos-latest + arch: arm64 + python: 310 + builder: macosx + deploy: 14.0 + - os: macos-latest + arch: arm64 python: 311 builder: macosx + deploy: 14.0 + - os: macos-latest + arch: arm64 + python: 312 + builder: macosx + deploy: 14.0 env: CIBW_BUILD: cp${{ matrix.python }}-${{ matrix.builder }}_${{ matrix.arch }} CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.builder }}2014 CIBW_MANYLINUX_I686_IMAGE: ${{ matrix.builder }}2014 CIBW_BUILD_VERBOSITY: 3 - CIBW_ENVIRONMENT_LINUX: "CC=gcc CXX=g++ CFLAGS='-O3 -fPIC' CXXFLAGS='-O3 -fPIC -std=c++14' BOOST_ROOT=/usr/local" - CIBW_ENVIRONMENT_MACOS: "MACOSX_DEPLOYMENT_TARGET=13.0 CC=gcc-14 CXX=g++-14 CFLAGS='-O3 -fPIC' CXXFLAGS='-O3 -fPIC -std=c++14' BOOST_ROOT=/usr/local FLAC_ROOT=/usr/local BLAS_LIBRARIES=/usr/local/lib/libscipy_openblas.dylib" + CIBW_ENVIRONMENT_LINUX: > + CC=gcc + CXX=g++ + CFLAGS='-O3 -fPIC' + CXXFLAGS='-O3 -fPIC -std=c++17' + BOOST_ROOT=/usr/local + BLAS_LIBRARIES='-L/usr/local/lib -lopenblas -fopenmp -lm -lgfortran' + CIBW_ENVIRONMENT_MACOS: > + MACOSX_DEPLOYMENT_TARGET=${{ matrix.deploy }} + CC=clang + CXX=clang++ + CFLAGS='-O3 -fPIC' + CXXFLAGS='-O3 -fPIC -std=c++17 -stdlib=libc++' + BOOST_ROOT=/usr/local + FLAC_ROOT=/usr/local + BLAS_LIBRARIES='/usr/local/lib/libopenblas.dylib' CIBW_BEFORE_BUILD_LINUX: ./wheels/install_deps_linux.sh - CIBW_BEFORE_BUILD_MACOS: ./wheels/install_deps_osx.sh ${{ matrix.builder }}_${{ matrix.arch }} + CIBW_BEFORE_BUILD_MACOS: ./wheels/install_deps_osx.sh CIBW_REPAIR_WHEEL_COMMAND_LINUX: ./wheels/repair_wheel_linux.sh {dest_dir} {wheel} CIBW_REPAIR_WHEEL_COMMAND_MACOS: ./wheels/repair_wheel_macos.sh {dest_dir} {wheel} 
{delocate_archs} CIBW_BEFORE_TEST: export OMP_NUM_THREADS=2 - CIBW_TEST_REQUIRES: pytest + CIBW_TEST_REQUIRES: pytest pixell CIBW_TEST_COMMAND: > python -c 'import so3g.smurf.reader; from spt3g import core' && python -m pytest {package}/test @@ -73,11 +118,11 @@ jobs: - uses: actions/setup-python@v5 name: Install Python with: - python-version: '3.10' + python-version: '3.12' - name: Install cibuildwheel run: | - python -m pip install cibuildwheel==2.20.0 + python -m pip install cibuildwheel==2.22.0 - name: Build wheel run: | @@ -86,4 +131,4 @@ jobs: - uses: actions/upload-artifact@v4 with: name: wheels_cp${{ matrix.python }}-${{ matrix.builder }} - path: ./wheelhouse/so3g*cp${{ matrix.python }}-${{ matrix.builder }}*.whl + path: ./wheelhouse/so3g*cp${{ matrix.python }}-${{ matrix.builder }}*${{ matrix.arch }}*.whl diff --git a/CMakeLists.txt b/CMakeLists.txt index 144e2506..d22f8a19 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,15 +1,12 @@ -cmake_minimum_required (VERSION 3.1) +cmake_minimum_required(VERSION 3.17) project (so3g) include(local.cmake OPTIONAL) # cmake policies -- best to keep these in sync with spt3g! -if(POLICY CMP0060) # Suppress cmake stripping full paths from libraries in some cases - cmake_policy(SET CMP0060 NEW) -endif() -cmake_policy(SET CMP0012 NEW) # Allow use of true in boolean expressions -if(POLICY CMP0042) # Enable RPATH on OSX - cmake_policy(SET CMP0042 NEW) +# Don't warn about removal of FindBoost in cmake 3.30+ +if(POLICY CMP0167) + cmake_policy(SET CMP0167 NEW) endif() # Default to Release because we want that -O3. This is what spt3g_software does too. 
@@ -18,12 +15,20 @@ if(NOT CMAKE_BUILD_TYPE) "Choose the type of build, options are: None(CMAKE_CXX_FLAGS or CMAKE_C_FLAGS used) Debug Release RelWithDebInfo MinSizeRel" FORCE) endif(NOT CMAKE_BUILD_TYPE) +# Require C++ 17 (aligned with spt3g) +set(CMAKE_CXX_STANDARD 17) +set(CMAKE_CXX_STANDARD_REQUIRED ON) +set(CMAKE_CXX_EXTENSIONS OFF) + +# We are building libraries that will eventually be linked into shared +# modules. All code should be built with PIC. +set(CMAKE_POSITION_INDEPENDENT_CODE ON) + # For this to be found, make sure the spt3g build directory can be # searched; i.e. -DCMAKE_PREFIX_PATH=/path/to/spt3g_software/build find_package(Spt3g REQUIRED) -find_package(PythonInterp 3) -find_package(PythonLibs 3) +find_package(Python COMPONENTS Interpreter Development.Module REQUIRED) find_package(FLAC) find_package(GSL) @@ -36,22 +41,26 @@ else() endif() # Determine the location of site-packages. -execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())" OUTPUT_VARIABLE PYTHON_SITE_PACKAGES OUTPUT_STRIP_TRAILING_WHITESPACE) +execute_process(COMMAND ${Python_EXECUTABLE} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())" OUTPUT_VARIABLE PYTHON_SITE_PACKAGES OUTPUT_STRIP_TRAILING_WHITESPACE) # Numpy include directory? -execute_process(COMMAND ${PYTHON_EXECUTABLE} -c "import numpy; print(numpy.get_include())" - OUTPUT_VARIABLE NUMPY_INCLUDE_DIR OUTPUT_STRIP_TRAILING_WHITESPACE) +execute_process(COMMAND ${Python_EXECUTABLE} -c + "import numpy; print(numpy.get_include())" + OUTPUT_VARIABLE NUMPY_INCLUDE_DIR + OUTPUT_STRIP_TRAILING_WHITESPACE +) include_directories(${CMAKE_SOURCE_DIR}/include ${CMAKE_BINARY_DIR} ) include_directories(${NUMPY_INCLUDE_DIR}) # -# Define the so3g build target. This is a shared library. +# Define the libso3g build target. This is an internal library that holds all the +# linking dependencies. 
# set(CMAKE_LIBRARY_OUTPUT_DIRECTORY so3g) -add_library(so3g SHARED - src/main.cxx + +add_library(so3g OBJECT src/test.cxx src/hkagg.cxx src/G3Ndarray.cxx @@ -69,32 +78,25 @@ add_library(so3g SHARED src/array_ops.cxx ) -# We could disable the lib prefix on the output library... but let's not. -#set_target_properties(so3g PROPERTIES PREFIX "") +# Disable boost python auto_ptr warnings +target_compile_definitions(so3g PUBLIC BOOST_NO_AUTO_PTR) -# Make a list of .py files for the library. -file(GLOB MY_PYTHONS - "${CMAKE_CURRENT_SOURCE_DIR}/python/*.py") -file(GLOB MY_PYTHONS_HK - "${CMAKE_CURRENT_SOURCE_DIR}/python/hk/*.py") -file(GLOB MY_PYTHONS_PROJ - "${CMAKE_CURRENT_SOURCE_DIR}/python/proj/*.py") -file(GLOB MY_PYTHONS_SMURF - "${CMAKE_CURRENT_SOURCE_DIR}/python/smurf/*.py") +# Link to the core spt3g library. This brings in boost dependencies +# as well. +target_link_libraries(so3g PUBLIC spt3g::core) -# Provide list of libs to link against. -target_link_libraries(so3g spt3g::core) -# Link GSL +# Link to GSL target_include_directories(so3g PRIVATE ${GSL_INCLUDE_DIR}) -target_link_libraries(so3g ${GSL_LIBRARIES}) +target_link_libraries(so3g PUBLIC ${GSL_LIBRARIES}) -# You probably want to select openblas, so pass -DBLA_VENDOR=OpenBLAS +# Link to BLAS. You probably want to select openblas, so pass +# -DBLA_VENDOR=OpenBLAS find_package(BLAS REQUIRED) if(BLAS_FOUND) message("-- BLAS found: ${BLAS_LIBRARIES}") - target_link_libraries(so3g ${BLAS_LIBRARIES}) + target_link_libraries(so3g PUBLIC ${BLAS_LIBRARIES}) - # The BLAS library may or may not include the cblas_ bindings. + # The BLAS library may or may not include the cblas_* bindings. # This variable set is needed by check_function_exists; starting in # cmake v3.18 you can say BLAS::BLAS instead of the lib path... 
set(CMAKE_REQUIRED_LIBRARIES ${BLAS_LIBRARIES}) @@ -103,27 +105,46 @@ if(BLAS_FOUND) message("-- cblas bindings are included in the BLAS library") else() message("-- cblas bindings not found in BLAS; adding cblas.") - target_link_libraries(so3g cblas) + target_link_libraries(so3g PUBLIC cblas) endif() # On MacOS with clang linking to the Accelerate framework, the cblas - # headers are not always found. Handle this case. + # headers are not always found. Handle this case. Also note that the + # Accelerate framework has documented numerical problems- consider using + # a better BLAS/LAPACK implementation. if(BLAS_Accelerate_LIBRARY) - #string(REGEX REPLACE "^(.*)/System/Library/Frameworks/Accelerate.framework" "\\1" FRAMEWORK_INCLUDE "${BLAS_Accelerate_LIBRARY}") - #target_compile_options(so3g PRIVATE -F ${FRAMEWORK_INCLUDE}) target_include_directories(so3g PRIVATE ${BLAS_Accelerate_LIBRARY}/Versions/A/Frameworks/vecLib.framework/Headers) endif() endif(BLAS_FOUND) # This custom target generates _version.h, in the build tree. That is all. add_custom_target(so3g-version - COMMAND python ${CMAKE_CURRENT_SOURCE_DIR}/version_h.py - SO3G_VERSION_STRING ${CMAKE_CURRENT_BINARY_DIR}/_version.h - SOURCES version_h.py - ) + COMMAND python ${CMAKE_CURRENT_SOURCE_DIR}/version_h.py + SO3G_VERSION_STRING ${CMAKE_CURRENT_BINARY_DIR}/_version.h + SOURCES version_h.py +) add_dependencies(so3g so3g-version) +# Add the Python interface module +Python_add_library(_libso3g MODULE WITH_SOABI src/main.cxx) +set_target_properties(_libso3g PROPERTIES PREFIX "") +target_link_libraries(_libso3g PUBLIC so3g) +if (CMAKE_CXX_COMPILER_ID MATCHES "GNU") + #set_target_properties(_libso3g PROPERTIES LINK_FLAGS "-Wl,--no-as-needed") + target_link_options(_libso3g PUBLIC "LINKER:--no-as-needed") +endif() + +# Make a list of .py files for the library. 
+file(GLOB MY_PYTHONS + "${CMAKE_CURRENT_SOURCE_DIR}/python/*.py") +file(GLOB MY_PYTHONS_HK + "${CMAKE_CURRENT_SOURCE_DIR}/python/hk/*.py") +file(GLOB MY_PYTHONS_PROJ + "${CMAKE_CURRENT_SOURCE_DIR}/python/proj/*.py") +file(GLOB MY_PYTHONS_SMURF + "${CMAKE_CURRENT_SOURCE_DIR}/python/smurf/*.py") + # Define the install rules. if(DEFINED PYTHON_INSTALL_DEST) @@ -134,7 +155,7 @@ else() set(INSTALL_DEST ${PYTHON_SITE_PACKAGES}/so3g) endif() -install(TARGETS so3g +install(TARGETS _libso3g DESTINATION ${INSTALL_DEST}) install(FILES ${MY_PYTHONS} diff --git a/Dockerfile b/Dockerfile index 49abf5ad..ff1af2af 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,7 +2,7 @@ # A containerized so3g installation. # Build on spt3g base image -FROM simonsobs/spt3g:0.3-289-g4bd3275 +FROM simonsobs/spt3g:0.4-55-g682414d # Set locale ENV LANG C.UTF-8 @@ -12,7 +12,7 @@ RUN apt update && apt install -y \ build-essential \ automake \ gfortran \ - libopenblas-dev \ + libopenblas-openmp-dev \ libbz2-dev \ python-is-python3 diff --git a/README.rst b/README.rst index f4524991..8e4e8bba 100644 --- a/README.rst +++ b/README.rst @@ -22,6 +22,64 @@ so3g Glue functions and new classes for SO work in the spt3g paradigm. +Installation from Binary Packages +=================================== + +If you are just "using" `so3g` and not actively modifying the source, simply install the binary wheels from PyPI:: + + pip install so3g + +Building from Source +====================== + +When developing the `so3g` code, you will need to build from source. There are two methods documented here: (1) using a conda environment to provide python and all compiled dependencies and (2) using a virtualenv for python and OS packages for compiled dependencies. 
In both cases, the compiled dependencies include: + +- A C++ compiler supporting the c++17 standard + +- BLAS / LAPACK + +- Boost (at least version 1.87 for numpy-2 compatibility) + +- GSL + +- libFLAC + +Building with Conda Tools +---------------------------- + +This method is the most reliable, since we will be using a self-consistent set of dependencies and the same compilers that were used to build those. First, ensure that you have a conda base environment that uses the conda-forge channels. The easiest way to get this is to use the "mini-forge" installer (https://github.com/conda-forge/miniforge). + +Once you have the conda "base" environment installed, create a new environment for Simons Observatory work. We force the python version to 3.12, since the default (3.13) is still missing some of our dependencies:: + + conda create -n simons python==3.12 # <- Only do this once + conda activate simons + +Now install all of our dependencies (except for spt3g):: + + conda install --file conda_dev_requirements.txt + +Bundled SPT3G +~~~~~~~~~~~~~~~~~ + +If you are just testing a quick change, you can use `pip` to install so3g. This will download a copy of spt3g and bundle it into the installed package. The downside is that **every time** you run pip, it will re-build all of spt3g and so3g under the hood with cmake:: + + pip install -vv . + +Separate SPT3G +~~~~~~~~~~~~~~~~~ + +If you are going to be developing so3g and repeatedly building it, you probably want to install spt3g once. + + + +Building with OS Packages +---------------------------- + +Another option is to use a virtualenv for python packages and use the compilers and libraries from your OS to provide so3g dependencies. + + + + Environment Setup ================= @@ -51,7 +109,7 @@ environment. First, verify some info about your installation:: python3 --version which conda -Your python version should be at least 3.7.0. +Your python version should be at least 3.9.0. 
Does the location of python3 match the location of the conda command (are they in the same bin directory)? If so, then you are ready. If you do not have conda installed but would like to use it, you might consider installing the "miniforge" @@ -69,7 +127,7 @@ messing up the root environment:: Now install as many dependencies as possible from conda packages. These are listed in a text file in the top of this git repo:: - conda install --file conda_deps.txt + conda install --file conda_dev_requirements.txt Using a Virtualenv ------------------ @@ -313,7 +371,7 @@ one:: Testing -======= +======== The unit tests are not installed with the so3g package, so in order to run them you must have a git checkout of so3g (even if you installed so3g from diff --git a/conda_deps.txt b/conda_deps.txt deleted file mode 100644 index 43822872..00000000 --- a/conda_deps.txt +++ /dev/null @@ -1,14 +0,0 @@ -# Requirements to install with conda, if using -# that. -astropy -matplotlib -numpy -scipy -ephem -pytz -pyaml -sqlalchemy -pysqlite3 -tqdm -pytest -cmake diff --git a/conda_dev_requirements.txt b/conda_dev_requirements.txt new file mode 100644 index 00000000..2c293ded --- /dev/null +++ b/conda_dev_requirements.txt @@ -0,0 +1,34 @@ +# This is the set of conda packages needed when creating +# a development environment for building spt3g / so3g +# using conda compilers and packages to meet dependencies. +# +# For example, assuming you have a conda-forge base environment +# you can do: +# +# conda create -n dev python==3.12 +# conda activate dev +# conda install --file conda_dev_requirements.txt +# python3 -m pip install -vv . 
+# +# Build tools +compilers +cmake +# Compiled dependencies +libopenblas=*=*openmp* +libblas=*=*openblas +openblas=*=*openmp* +boost +libflac +gsl +# Python dependencies +numpy +scipy +astropy +matplotlib +ephem +pytz +pyaml +sqlalchemy +tqdm +# Eventually we should make a conda package for these: +# qpoint diff --git a/docker/so3g-setup.sh b/docker/so3g-setup.sh index 9e352e7b..f8594493 100644 --- a/docker/so3g-setup.sh +++ b/docker/so3g-setup.sh @@ -2,6 +2,10 @@ mkdir -p build cd build -cmake .. -make +cmake \ + -DCMAKE_VERBOSE_MAKEFILE=ON \ + -DCMAKE_BUILD_TYPE=Release \ + -DPython_EXECUTABLE=$(which python3) \ + .. +make -j 2 make install diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..d5882e5f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,50 @@ +[build-system] +requires = [ + "cmake>=3.17", + "setuptools", + "wheel", + "numpy", + # Astropy depends on numpy 1.x with python-3.9. Place + # a build-time dependency here so that we build with a + # compatible version of numpy. Remove this after dropping + # python-3.9 support. 
+ "astropy", +] +build-backend = "setuptools.build_meta" + +[project] +name = "so3g" +readme = "README.rst" +description = "Tools for Simons Observatory work with spt3g_software" +urls = {source = "https://github.com/simonsobs/so3g"} +license = {file = "LICENSE"} +requires-python = ">=3.9" +dependencies = [ + "numpy", + "scipy", + "astropy", + "matplotlib", + "ephem", + "pytz", + "pyaml", + "sqlalchemy", + "tqdm", + "qpoint", +] +dynamic=["version"] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Environment :: Console", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", + "Topic :: Scientific/Engineering :: Astronomy", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", +] + +[tool.pytest.ini_options] +addopts = [ + "--import-mode=importlib", +] diff --git a/python/__init__.py b/python/__init__.py index 790512da..aae25288 100644 --- a/python/__init__.py +++ b/python/__init__.py @@ -1,13 +1,6 @@ import os import numpy as np -# Verify that we are using numpy 1.x -npversion = np.version.version -npversplit = npversion.split(".") -if npversplit[0] != "1": - msg = f"so3g requires numpy version 1.x but found version {npversion}" - raise RuntimeError(msg) - if os.getenv('DOCS_BUILD') == '1': from ._libso3g_docstring_shells import * else: @@ -15,10 +8,8 @@ from . import spt3g from spt3g import core as spt3g_core - # Our library is called libso3g.{suffix}, but will load into module - # namespace so3g. - from .load_pybindings import load_pybindings - load_pybindings([__path__[0] + '/libso3g'], name='so3g') + # Load all symbols from our compiled extension. + from ._libso3g import * # Version is computed by versioneer. 
__version__ = version() diff --git a/python/hk/cli.py b/python/hk/cli.py index 17ab9839..2d0fc8e9 100644 --- a/python/hk/cli.py +++ b/python/hk/cli.py @@ -1,11 +1,11 @@ -import so3g -from spt3g import core import numpy as np import os import sys import csv import argparse +from .. import _libso3g as libso3g + _UNITS = { 'bytes': 1, @@ -246,7 +246,7 @@ def main(args=None): for filename in get_file_list(args): file_size = os.path.getsize(filename) clean_exit = True - r = so3g.G3IndexedReader(filename) + r = libso3g.G3IndexedReader(filename) while True: try: f = r.Process(None) @@ -274,7 +274,7 @@ def main(args=None): renamer = TokenCleanser(args.strip_tokens) for filename in get_file_list(args): - r = so3g.G3IndexedReader(filename) + r = libso3g.G3IndexedReader(filename) while True: start = r.Tell() f = r.Process(None) @@ -305,7 +305,7 @@ def main(args=None): renamer = TokenCleanser(args.strip_tokens) for filename in get_file_list(args): - r = so3g.G3IndexedReader(filename) + r = libso3g.G3IndexedReader(filename) while True: f = r.Process(None) if f is None or len(f) == 0: diff --git a/python/hk/getdata.py b/python/hk/getdata.py index 0ba5d508..7a6a2344 100644 --- a/python/hk/getdata.py +++ b/python/hk/getdata.py @@ -18,10 +18,11 @@ import numpy as np import datetime as dt - -import so3g +from .. import _libso3g as libso3g from spt3g import core +from .translator import HKTranslator + hk_logger = logging.getLogger(__name__) hk_logger.setLevel(logging.INFO) @@ -92,7 +93,7 @@ def __init__(self, field_groups=None): self.field_groups = list(field_groups) # A translator is used to update frames, on the fly, to the # modern schema assumed here. - self.translator = so3g.hk.HKTranslator() + self.translator = HKTranslator() def _get_groups(self, fields=None, start=None, end=None, short_match=False): @@ -136,7 +137,7 @@ def _get_groups(self, fields=None, start=None, end=None, in any group. 
""" - span = so3g.IntervalsDouble() + span = libso3g.IntervalsDouble() if start is None: start = span.domain[0] if end is None: @@ -314,7 +315,7 @@ def check_overlap(time_range): timelines = {} for filename, file_map in sorted(files.items()): hk_logger.debug('get_data: reading %s' % filename) - reader = so3g.G3IndexedReader(filename) + reader = libso3g.G3IndexedReader(filename) for byte_offset, frame_info in sorted(file_map.items()): # Seek and decode. hk_logger.debug('get_data: seeking to %i for %i block extractions' % @@ -484,7 +485,7 @@ def __init__(self, pre_proc_dir=None, pre_proc_mode=None): self.field_groups = [] self.frame_info = [] self.counter = -1 - self.translator = so3g.hk.HKTranslator() + self.translator = HKTranslator() self.pre_proc_dir = pre_proc_dir self.pre_proc_mode = pre_proc_mode @@ -516,7 +517,7 @@ def Process(self, f, index_info=None): vers = f.get('hkagg_version', 0) assert(vers == 2) - if f['hkagg_type'] == so3g.HKFrameType.session: + if f['hkagg_type'] == libso3g.HKFrameType.session: session_id = f['session_id'] if self.session_id is not None: if self.session_id != session_id: @@ -526,7 +527,7 @@ def Process(self, f, index_info=None): (session_id, f['start_time']), unit='HKScanner') self.session_id = session_id - elif f['hkagg_type'] == so3g.HKFrameType.status: + elif f['hkagg_type'] == libso3g.HKFrameType.status: # If a provider has disappeared, flush its information into a # FieldGroup. prov_cands = [_HKProvider.from_g3(p) for p in f['providers']] @@ -539,7 +540,7 @@ def Process(self, f, index_info=None): for prov_id in to_flush: self.flush([prov_id]) - elif f['hkagg_type'] == so3g.HKFrameType.data: + elif f['hkagg_type'] == libso3g.HKFrameType.data: # Data frame -- merge info for this provider. prov = self.providers[f['prov_id']] representatives = prov.blocks.keys() @@ -614,7 +615,7 @@ def process_file(self, filename, flush_after=True): with flush_after=False. 
""" - reader = so3g.G3IndexedReader(filename) + reader = libso3g.G3IndexedReader(filename) while True: info = {'filename': filename, 'byte_offset': reader.Tell()} @@ -711,7 +712,7 @@ class _FieldGroup: def __init__(self, prefix, fields, start, end, index_info): self.prefix = prefix self.fields = list(fields) - self.cover = so3g.IntervalsDouble().add_interval(start, end) + self.cover = libso3g.IntervalsDouble().add_interval(start, end) self.index_info = index_info def __repr__(self): try: diff --git a/python/hk/reframer.py b/python/hk/reframer.py index 458dd418..96514a3e 100644 --- a/python/hk/reframer.py +++ b/python/hk/reframer.py @@ -1,7 +1,13 @@ -import so3g -from spt3g import core +import sys import numpy as np +from .. import _libso3g as libso3g +from spt3g import core + +from .scanner import HKScanner +from .session import HKSessionHelper + + class _HKBlockBundle: def __init__(self): self.t = None @@ -33,7 +39,7 @@ def rebundle(self, flush_time): idx = 0 while idx < len(self.t) and self.t[idx] < flush_time: idx += 1 - out = so3g.IrregBlockDouble() + out = libso3g.IrregBlockDouble() out.t = np.array(self.t[:idx]) self.t = self.t[idx:] for k in self.chans.keys(): @@ -53,7 +59,7 @@ def __init__(self, t0, dt): def add(self, f): if self.sess is None: - self.sess = so3g.hk.HKSessionHelper(f['session_id']) + self.sess = HKSessionHelper(f['session_id']) self.prov_id = f['prov_id'] for b in f['blocks']: chans = b.data.keys() @@ -137,7 +143,7 @@ def __call__(self, f): output = [] - if f['hkagg_type'] == so3g.HKFrameType.session: + if f['hkagg_type'] == libso3g.HKFrameType.session: session_id = f['session_id'] if self.session_id is not None: if self.session_id != session_id: @@ -151,7 +157,7 @@ def __call__(self, f): self.session_id = session_id output.append(f) - elif f['hkagg_type'] == so3g.HKFrameType.status: + elif f['hkagg_type'] == libso3g.HKFrameType.status: # Only issue status if something has changed. changes = False # Flush any providers that are now expired. 
@@ -169,7 +175,7 @@ def __call__(self, f): if changes: output.append(f) - elif f['hkagg_type'] == so3g.HKFrameType.data: + elif f['hkagg_type'] == libso3g.HKFrameType.data: fb = self.providers[f['prov_id']] fb.add(f) if fb.ready(): @@ -182,9 +188,6 @@ def __call__(self, f): if __name__ == '__main__': - from so3g.hk import HKScanner, HKReframer - import sys - core.set_log_level(core.G3LogLevel.LOG_INFO) files = sys.argv[1:] diff --git a/python/hk/scanner.py b/python/hk/scanner.py index 9fabbfc1..09393bfb 100644 --- a/python/hk/scanner.py +++ b/python/hk/scanner.py @@ -1,8 +1,8 @@ -import so3g -from spt3g import core import numpy as np -from so3g import hk +from .. import _libso3g as libso3g +from spt3g import core + class HKScanner: """Module that scans and reports on HK archive contents and compliance. @@ -62,7 +62,7 @@ def __call__(self, f): vers = f.get('hkagg_version', 0) self.stats['versions'][vers] = self.stats['versions'].get(vers, 0) + 1 - if f['hkagg_type'] == so3g.HKFrameType.session: + if f['hkagg_type'] == libso3g.HKFrameType.session: session_id = f['session_id'] if self.session_id is not None: if self.session_id != session_id: @@ -73,7 +73,7 @@ def __call__(self, f): self.session_id = session_id self.stats['n_session'] += 1 - elif f['hkagg_type'] == so3g.HKFrameType.status: + elif f['hkagg_type'] == libso3g.HKFrameType.status: # Have any providers disappeared? now_prov_id = [p['prov_id'].value for p in f['providers']] for p, info in self.providers.items(): @@ -102,7 +102,7 @@ def __call__(self, f): 'block_streams_map': {}, # Map from field name to block name. 
} - elif f['hkagg_type'] == so3g.HKFrameType.data: + elif f['hkagg_type'] == libso3g.HKFrameType.data: info = self.providers[f['prov_id']] vers = f.get('hkagg_version', 0) diff --git a/python/hk/session.py b/python/hk/session.py index f5b79986..31aae2d2 100644 --- a/python/hk/session.py +++ b/python/hk/session.py @@ -1,9 +1,10 @@ -import so3g -from spt3g import core import time import os import binascii +from .. import _libso3g as libso3g +from spt3g import core + class HKSessionHelper: def __init__(self, session_id=None, start_time=None, hkagg_version=None, @@ -93,7 +94,7 @@ def session_frame(self): """ f = core.G3Frame() f.type = core.G3FrameType.Housekeeping - f['hkagg_type'] = so3g.HKFrameType.session + f['hkagg_type'] = libso3g.HKFrameType.session f['hkagg_version'] = self.hkagg_version f['session_id'] = self.session_id f['start_time'] = self.start_time @@ -110,7 +111,7 @@ def status_frame(self, timestamp=None): timestamp = time.time() f = core.G3Frame() f.type = core.G3FrameType.Housekeeping - f['hkagg_type'] = so3g.HKFrameType.status + f['hkagg_type'] = libso3g.HKFrameType.status f['hkagg_version'] = self.hkagg_version f['session_id'] = self.session_id f['timestamp'] = timestamp @@ -136,7 +137,7 @@ def data_frame(self, prov_id, timestamp=None): f = core.G3Frame() f.type = core.G3FrameType.Housekeeping f['hkagg_version'] = self.hkagg_version - f['hkagg_type'] = so3g.HKFrameType.data + f['hkagg_type'] = libso3g.HKFrameType.data f['session_id'] = self.session_id f['prov_id'] = prov_id f['timestamp'] = timestamp diff --git a/python/hk/translator.py b/python/hk/translator.py index d6c10741..232ab3bf 100644 --- a/python/hk/translator.py +++ b/python/hk/translator.py @@ -1,11 +1,11 @@ """Backwards compatibility for older SO HK schemas. """ - -import so3g -import so3g.hk +from .. 
import _libso3g as libso3g from spt3g import core +from .util import get_g3_time + class HKTranslator: """Translates SO Housekeeping frames from schema versions {v0, v1} to @@ -89,7 +89,7 @@ def Process(self, f): f['hkagg_version'] = self.target_version # No difference in Session/Status for v0, v1, v2. - if f.get('hkagg_type') != so3g.HKFrameType.data: + if f.get('hkagg_type') != libso3g.HKFrameType.data: return [f] if self.target_version == 0: @@ -103,7 +103,7 @@ def Process(self, f): # Now process the data blocks. for block in orig_blocks: new_block = core.G3TimesampleMap() - new_block.times = so3g.hk.util.get_g3_time(block.t) + new_block.times = get_g3_time(block.t) for k in block.data.keys(): v = block.data[k] new_block[k] = core.G3VectorDouble(v) diff --git a/python/hk/tree.py b/python/hk/tree.py index 643519bb..70c10efe 100644 --- a/python/hk/tree.py +++ b/python/hk/tree.py @@ -2,13 +2,13 @@ of attributes. """ - -from so3g.hk import getdata import time import os import yaml import logging +from .getdata import to_timestamp, HKArchiveScanner + logger = logging.getLogger(__name__) @@ -137,11 +137,11 @@ def __init__(self, start=None, stop=None, config=None, if start is None: start = now - 86400 else: - start = getdata.to_timestamp(start) + start = to_timestamp(start) if stop is None: stop = start + 86400 else: - stop = getdata.to_timestamp(stop) + stop = to_timestamp(stop) if aliases is None: aliases = {} @@ -170,7 +170,7 @@ def __init__(self, start=None, stop=None, config=None, # Walk the files -- same approach as load_ranges logger.debug('Scanning %s (pre_proc=%s)' % (data_dir, pre_proc_dir)) - hksc = getdata.HKArchiveScanner(pre_proc_dir=pre_proc_dir) + hksc = HKArchiveScanner(pre_proc_dir=pre_proc_dir) for folder in range(int(start / 1e5), int(stop / 1e5) + 1): base = os.path.join(data_dir, str(folder)) logger.debug(f' ... 
checking {base}') diff --git a/python/load_pybindings.py b/python/load_pybindings.py deleted file mode 100644 index ba956de8..00000000 --- a/python/load_pybindings.py +++ /dev/null @@ -1,51 +0,0 @@ -# -# Based on spt3g.core.load_bindings. -# -import platform, sys, os -try: - # Starting in spt3g 0.3-240-ga9d32d5, a custom loader is available. - from spt3g import dload - imp = None -except ImportError: - # The imp module is deprecated; to suppress warning, upgrade spt3g ^^^. - import imp - dload = None - -if platform.system().startswith('freebsd') or platform.system().startswith('FreeBSD'): - # C++ modules are extremely fragile when loaded with RTLD_LOCAL, - # which is what Python uses on FreeBSD by default, and maybe other - # systems. Convince it to use RTLD_GLOBAL. - - # See thread by Abrahams et al: - # http://mail.python.org/pipermail/python-dev/2002-May/024074.html - sys.setdlopenflags(0x102) - -def load_pybindings(paths, name=None, lib_suffix=None): - """ - Load all non-private items from the libraries in the list "paths". - Provide the full path to each library, but without extension. The - .so or .dylib will be appended depending on the system - architecture. The namespace into which the items are imported - will be determined from the first path, unless name= is explicitly - provided. 
- """ - if lib_suffix is None: - if platform.system().startswith('Darwin'): - # OSX compatibility requires .dylib suffix - lib_suffix = ".dylib" - else: - lib_suffix = ".so" - for path in paths: - if name is None: - name = os.path.split(path)[1] - # Save copy of current module def - mod = sys.modules[name] - if dload is None: - m = imp.load_dynamic(name, path + lib_suffix) - else: - m = dload.load_dynamic(name, name, path + lib_suffix) - sys.modules[name] = mod # Don't override Python mod with C++ - - for (k,v) in m.__dict__.items(): - if not k.startswith("_"): - mod.__dict__[k] = v diff --git a/python/proj/__init__.py b/python/proj/__init__.py index d0a9790c..5561ba53 100644 --- a/python/proj/__init__.py +++ b/python/proj/__init__.py @@ -1,6 +1,3 @@ -import so3g -from spt3g import core - from . import quat from . import util from . import mapthreads diff --git a/python/proj/coords.py b/python/proj/coords.py index 7ffb5fd6..8b6938b7 100644 --- a/python/proj/coords.py +++ b/python/proj/coords.py @@ -1,11 +1,12 @@ -import so3g -from . import quat -from .weather import weather_factory - from collections import OrderedDict import numpy as np +from .. import _libso3g as libso3g +from . import quat +from .weather import weather_factory + + DEG = np.pi / 180. @@ -231,7 +232,7 @@ def coords(self, det_offsets=None, output=None): """ # Get a projector, in CAR. - p = so3g.ProjEng_CAR_TQU_NonTiled((1, 1, 1., 1., 1., 1.)) + p = libso3g.ProjEng_CAR_TQU_NonTiled((1, 1, 1., 1., 1., 1.)) # Pre-process the offsets collapse = (det_offsets is None) if collapse: @@ -241,7 +242,7 @@ def coords(self, det_offsets=None, output=None): redict = isinstance(det_offsets, dict) if redict: keys, det_offsets = zip(*det_offsets.items()) - if isinstance(det_offsets[0], quat.quat): + if isinstance(det_offsets[0], quat.Quat): # Individual quat doesn't array() properly... 
det_offsets = np.array(quat.G3VectorQuat(det_offsets)) else: @@ -334,7 +335,7 @@ def attach(cls, sight_line, det_offsets): else: self.dets = det_offsets # Make sure it's a numpy array. This is dumb. - if isinstance(self.dets[0], quat.quat): + if isinstance(self.dets[0], quat.Quat): self.dets = quat.G3VectorQuat(self.dets) self.dets = np.array(self.dets) return self diff --git a/python/proj/mapthreads.py b/python/proj/mapthreads.py index 0440ea61..06ca999d 100644 --- a/python/proj/mapthreads.py +++ b/python/proj/mapthreads.py @@ -5,10 +5,15 @@ parallelization. """ - -import so3g import numpy as np +from .. import _libso3g as libso3g + +from . import quat +from .coords import Assembly +from .wcs import Projectionist + + def get_num_threads(n_threads=None): """Utility function for computing n_threads. If n_threads is not None, it is returned directly. But if it is None, then the OpenMP @@ -16,7 +21,7 @@ def get_num_threads(n_threads=None): """ if n_threads is None: - return so3g.useful_info()['omp_num_threads'] + return libso3g.useful_info()['omp_num_threads'] return n_threads def get_threads_domdir(sight, offs, shape, wcs, tile_shape=None, @@ -77,11 +82,11 @@ def get_threads_domdir(sight, offs, shape, wcs, tile_shape=None, active_tiles = [0] # The full assembly, for later. - asm_full = so3g.proj.Assembly.attach(sight, offs) + asm_full = Assembly.attach(sight, offs) # Get a Projectionist -- note it can be used with full or # representative assembly. - pmat = so3g.proj.wcs.Projectionist.for_tiled( + pmat = Projectionist.for_tiled( shape, wcs, tile_shape=tile_shape, active_tiles=active_tiles ) if active_tiles is None: @@ -93,9 +98,9 @@ def get_threads_domdir(sight, offs, shape, wcs, tile_shape=None, # For the scan direction map, use the "representative" subset # detectors, with polarization direction aligned parallel to # elevation. 
- xi, eta, gamma = so3g.proj.quat.decompose_xieta(offs_rep) - offs_xl = np.array(so3g.proj.quat.rotation_xieta(xi, eta, gamma*0 + 90*so3g.proj.DEG)) - asm_rep = so3g.proj.Assembly.attach(sight, offs_xl) + xi, eta, gamma = quat.decompose_xieta(offs_rep) + offs_xl = np.array(quat.rotation_xieta(xi, eta, gamma*0 + 90*quat.DEG)) + asm_rep = Assembly.attach(sight, offs_xl) sig = np.ones((len(offs_xl), len(asm_rep.Q)), dtype='float32') scan_maps = pmat.to_map(sig, asm_rep, comps='TQU') @@ -107,7 +112,7 @@ def get_threads_domdir(sight, offs, shape, wcs, tile_shape=None, phi = np.arctan2(U, Q) / 2 if plot_prefix: - text = 'Qf=%.2f Uf=%.2f phi=%.1f deg' % (Q/T, U/T, phi / so3g.proj.DEG) + text = 'Qf=%.2f Uf=%.2f phi=%.1f deg' % (Q/T, U/T, phi / quat.DEG) for label, _m in tile_iter(scan_maps): for i in range(3): pl.imshow(_m[i], origin='lower') diff --git a/python/proj/quat.py b/python/proj/quat.py index c092d732..c714b5df 100644 --- a/python/proj/quat.py +++ b/python/proj/quat.py @@ -1,10 +1,12 @@ import numpy as np -try: - from spt3g.core import quat, G3VectorQuat -except ImportError: - # Pre-Oct 2019 versions. - from spt3g.coordinateutils import quat, G3VectorQuat +from spt3g.core import Quat, G3VectorQuat + +# try: +# from spt3g.core import quat, G3VectorQuat +# except ImportError: +# # Pre-Oct 2019 versions. +# from spt3g.coordinateutils import quat, G3VectorQuat """We are using the spt3g quaternion containers, i.e. cu3g.G3VectorQuat and cu3g.quat. One way these are nice is that @@ -33,7 +35,7 @@ def euler(axis, angle): Returns ------- - quat or G3VectorQuat, depending on ndim(angle). + Quat or G3VectorQuat, depending on ndim(angle). """ # Either angle or axis or both can be vectors. 
angle = np.asarray(angle) @@ -43,7 +45,7 @@ def euler(axis, angle): q[..., 0] = c q[..., axis+1] = s if len(shape) == 1: - return quat(*q) + return Quat(*q) return G3VectorQuat(q) @@ -97,7 +99,7 @@ def decompose_iso(q): Parameters ---------- - q : quat or G3VectorQuat + q : Quat or G3VectorQuat The quaternion(s) to be decomposed. Returns @@ -106,7 +108,7 @@ def decompose_iso(q): The rotation angles, in radians. """ - if isinstance(q, quat): + if isinstance(q, Quat): a,b,c,d = q.a, q.b, q.c, q.d else: a,b,c,d = np.transpose(q) diff --git a/python/proj/ranges.py b/python/proj/ranges.py index a5fde7f2..5c132d7f 100644 --- a/python/proj/ranges.py +++ b/python/proj/ranges.py @@ -1,11 +1,13 @@ -import so3g import numpy as np +from .. import _libso3g as libso3g + + """Objects will self report as being of type "RangesInt32" rather than Ranges. But let's try to use so3g.proj.Ranges when testing types and making new ones and stuff.""" -Ranges = so3g.RangesInt32 +Ranges = libso3g.RangesInt32 class RangesMatrix(): diff --git a/python/proj/wcs.py b/python/proj/wcs.py index 3b355ccd..2af95756 100644 --- a/python/proj/wcs.py +++ b/python/proj/wcs.py @@ -1,11 +1,12 @@ -import so3g -from . import quat - import numpy as np +from .. import _libso3g as libso3g + +from . import quat from .ranges import Ranges, RangesMatrix from . 
import mapthreads + # For coordinate systems we use the following abbreviations: # # - DC: Detector coordinates @@ -128,7 +129,7 @@ def get_ProjEng(self, comps='TQU', proj_name=None, get=True, if not get: return projeng_name try: - projeng_cls = getattr(so3g, projeng_name) + projeng_cls = getattr(libso3g, projeng_name) except AttributeError: raise ValueError(f'There is no projector implemented for ' f'pixelization "{proj_name}", components ' @@ -445,7 +446,7 @@ def get_active_tiles(self, assembly, assign=False): tiles = np.nonzero(hits)[0] hits = hits[tiles] if assign is True: - assign = so3g.useful_info()['omp_num_threads'] + assign = libso3g.useful_info()['omp_num_threads'] if assign > 0: group_n = np.array([0 for g in range(assign)]) group_tiles = [[] for _ in group_n] @@ -720,7 +721,7 @@ def __init__(self): self._q_fp_to_celestial = None self.active_tiles = None self.proj_name = None - self.q_celestial_to_native = quat.quat(1,0,0,0) + self.q_celestial_to_native = quat.Quat(1,0,0,0) self.interpol = 'nearest' self.tiling = None @@ -771,7 +772,7 @@ def compute_nside_tile(self, assembly, nActivePerThread=5, nThreads=None): nActive = len(self.get_active_tiles(assembly)['active_tiles']) fsky = nActive / (12 * nside_tile0**2) if nThreads is None: - nThreads = so3g.useful_info()['omp_num_threads'] + nThreads = libso3g.useful_info()['omp_num_threads'] # nside_tile is smallest power of 2 satisfying nTile >= nActivePerThread * nthread / fsky self.nside_tile = int(2**np.ceil(0.5 * np.log2(nActivePerThread * nThreads / (12 * fsky)))) self.nside_tile = min(self.nside_tile, self.nside) diff --git a/python/quicksim.py b/python/quicksim.py index 6ae264f9..beb43ae9 100644 --- a/python/quicksim.py +++ b/python/quicksim.py @@ -8,16 +8,17 @@ can add realism or whatever. """ -import so3g +import numpy as np +from scipy.interpolate import InterpolatedUnivariateSpline as spline1d + +from . 
import _libso3g as libso3g from spt3g import core from spt3g import coordinateutils as cu3g -import numpy as np - -from scipy.interpolate import InterpolatedUnivariateSpline as spline1d -#from so3g import coords +# FIXME: The "coords.q_euler" function does not seem to exist anywhere... from so3g import coords + FT = core.G3FrameType def create_CES_observation(start_time, stop_time, az_min, az_max, el, @@ -118,7 +119,7 @@ def __call__(self, f): for i0, i1 in zip(swing_points[:-1], swing_points[1:]): f0 = core.G3Frame() f0.type = core.G3FrameType.Scan - benc = so3g.IrregBlockDouble() + benc = libso3g.IrregBlockDouble() benc.t = time_vec[i0:i1] benc.data['az'] = az_vec[i0:i1] benc.data['el'] = el_vec[i0:i1] diff --git a/python/smurf/reader.py b/python/smurf/reader.py index 1565bbe0..6f519182 100644 --- a/python/smurf/reader.py +++ b/python/smurf/reader.py @@ -1,12 +1,12 @@ -import so3g -from spt3g import core import numpy as np import pickle -import datetime, time -import sys, os import warnings import argparse +from .. import _libso3g as libso3g +from spt3g import core + + def g3_to_array(g3file, verbose=False): """ Takes a G3 file output from the SMuRF archiver and reads to a numpy array. diff --git a/python/smurf/smurf_archive.py b/python/smurf/smurf_archive.py index 17e07b36..6df79852 100644 --- a/python/smurf/smurf_archive.py +++ b/python/smurf/smurf_archive.py @@ -1,18 +1,19 @@ +import ast +from collections import namedtuple +import datetime as dt +from enum import Enum +import os + +import numpy as np +import yaml import sqlalchemy as db from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker, relationship, backref +from tqdm import tqdm +from .. 
import _libso3g as libso3g from spt3g import core -import so3g -import datetime as dt -import os -from tqdm import tqdm -import numpy as np -import yaml -import ast -from collections import namedtuple -from enum import Enum Base = declarative_base() @@ -164,7 +165,7 @@ def add_file(self, path, session): db_file = Files(path=path) session.add(db_file) - reader = so3g.G3IndexedReader(path) + reader = libso3g.G3IndexedReader(path) total_channels = 0 file_start, file_stop = None, None @@ -326,7 +327,7 @@ def load_data(self, start, end, show_pb=True, load_biases=True): for frame_info in tqdm(frames, total=num_frames, disable=(not show_pb)): file = frame_info.file.path if file != cur_file: - reader = so3g.G3IndexedReader(file) + reader = libso3g.G3IndexedReader(file) cur_file = file reader.Seek(frame_info.offset) @@ -400,7 +401,7 @@ def load_status(self, time, show_pb=False): for frame_info in tqdm(status_frames.all(), disable=(not show_pb)): file = frame_info.file.path if file != cur_file: - reader = so3g.G3IndexedReader(file) + reader = libso3g.G3IndexedReader(file) cur_file = file reader.Seek(frame_info.offset) frame = reader.Process(None)[0] diff --git a/python/soframe.py b/python/soframe.py index b104e901..a0888db1 100644 --- a/python/soframe.py +++ b/python/soframe.py @@ -15,7 +15,7 @@ with the config variable patch_g3frame. """ -import so3g +from . import _libso3g as libso3g from spt3g.core import G3Frame orig_getitem = None @@ -76,8 +76,8 @@ def set_frame_hooks(config={}): # Always do numpy. 
import numpy as np # Numpy arrays in frames - G3Frame.setitem_converters[np.ndarray] = lambda a: so3g.G3Ndarray(a) - G3Frame.getitem_converters[so3g.G3Ndarray] = lambda a: a.to_array() + G3Frame.setitem_converters[np.ndarray] = lambda a: libso3g.G3Ndarray(a) + G3Frame.getitem_converters[libso3g.G3Ndarray] = lambda a: a.to_array() has_astropy = False use_astropy = config.get('use_astropy', 'try') @@ -85,15 +85,15 @@ def set_frame_hooks(config={}): if astropy is not None: has_astropy = True G3Frame.setitem_converters[astropy.wcs.WCS] = \ - lambda a: so3g.G3WCS(a.to_header_string()) - G3Frame.getitem_converters[so3g.G3WCS] = \ + lambda a: libso3g.G3WCS(a.to_header_string()) + G3Frame.getitem_converters[libso3g.G3WCS] = \ lambda a: astropy.wcs.WCS(a.header) use_pixell = config.get('use_pixell', 'try') pixell = _try_import('pixell.enmap', use_pixell) if pixell is not None and has_astropy: G3Frame.setitem_converters[pixell.enmap.ndmap] = \ - lambda a: so3g.G3Ndmap(a, a.wcs.to_header_string()) - G3Frame.getitem_converters[so3g.G3Ndmap] = \ + lambda a: libso3g.G3Ndmap(a, a.wcs.to_header_string()) + G3Frame.getitem_converters[libso3g.G3Ndmap] = \ lambda a: pixell.enmap.ndmap(a.data.to_array(), astropy.wcs.WCS(a.wcs.header)) diff --git a/requirements.txt b/requirements.txt index b825a6b7..b890bf3f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,12 +1,11 @@ astropy matplotlib -numpy<2 +numpy scipy ephem pytz pyaml sqlalchemy -pysqlite3-wheels tqdm cmake qpoint diff --git a/setup.py b/setup.py index d614cc7d..eb46da55 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,6 @@ import os import sys -import sysconfig import re import subprocess as sp import glob @@ -13,7 +12,8 @@ from setuptools import setup, Extension from setuptools.command.build_ext import build_ext -from distutils.command.clean import clean + +import numpy as np # Absolute path to the directory with this file topdir = Path(__file__).resolve().parent @@ -40,10 +40,8 @@ def get_spt3g_version(): return ver 
upstream_spt3g_version = get_spt3g_version() -print(f"Using upstream spt3g_software version {upstream_spt3g_version}") # The name of the spt3g source and package dirs -spt3g_pkg_dir = os.path.join(topdir, "python", "spt3g_internal") spt3g_src_dir = os.path.join(topdir, "spt3g_software") @@ -66,8 +64,6 @@ def get_version(): def get_spt3g(): - # if os.path.isdir(spt3g_pkg_dir): - # return # We use git to get the repo, since spt3g uses git to get its version # information. if not os.path.isdir(spt3g_src_dir): @@ -102,23 +98,21 @@ def get_spt3g(): def extract_cmake_env(varprefix): cmake_opts = list() - cpat = re.compile(r"{}_(.*)".format(varprefix)) + cpat = re.compile(f"{varprefix}_(.*)") for k, v in os.environ.items(): mat = cpat.match(k) if mat is not None: - cmake_opts.append("-D{}={}".format(mat.group(1), v)) + cmake_opts.append(f"-D{mat.group(1)}={v}") return cmake_opts def build_common(src_dir, build_dir, install_dir, cmake_extra, debug, pkg, version): cmake_args = list() - #cmake_args = ["-DPYTHON_EXECUTABLE=" + sys.executable] - #cmake_args += ["-DBOOST_PYTHON_MAX_ARITY=20"] cfg = "Debug" if debug else "Release" cmake_args += ["-DCMAKE_BUILD_TYPE=" + cfg] cmake_args += ["-DCMAKE_VERBOSE_MAKEFILE=ON"] - cmake_args += ["-DCMAKE_INSTALL_PREFIX={}".format(install_dir)] - cmake_args.extend(extract_cmake_env("{}_BUILD".format(pkg))) + cmake_args += [f"-DCMAKE_INSTALL_PREFIX={install_dir}"] + cmake_args.extend(extract_cmake_env(f"{pkg}_BUILD")) cmake_args.extend(cmake_extra) build_args = ["--config", cfg] @@ -130,9 +124,12 @@ def build_common(src_dir, build_dir, install_dir, cmake_extra, debug, pkg, versi cxxcomp = env.get("CXX", None) cflags = env.get("CFLAGS", None) cxxflags = env.get("CXXFLAGS", "") - cxxflags = "{} -DVERSION_INFO='{}'".format(cxxflags, version) - if sys.platform.lower() == "darwin": - cmake_args += ["-DCMAKE_SHARED_LINKER_FLAGS='-undefined dynamic_lookup'"] + cxxflags = f"{cxxflags} -DVERSION_INFO='{version}'" + + # Add numpy includes + numpy_inc 
= np.get_include() + cxxflags += f" -I{numpy_inc}" + env["CXXFLAGS"] = cxxflags if ccomp is not None: @@ -148,11 +145,16 @@ def build_common(src_dir, build_dir, install_dir, cmake_extra, debug, pkg, versi # CMakeLists.txt is in the source dir cmake_list_dir = os.path.abspath(src_dir) - print("-" * 10, "Running {} CMake".format(pkg), "-" * 40) + print("-" * 10, f"Running {pkg} CMake", "-" * 40) + print(f"cmake {cmake_list_dir} {' '.join(cmake_args)}") sp.check_call(["cmake", cmake_list_dir] + cmake_args, cwd=build_dir, env=env) - print("-" * 10, "Building {}".format(pkg), "-" * 40) - cmake_cmd = ["cmake", "--build", "."] + build_args + ["--", "-j2"] + make_j = 2 + if "CPU_COUNT" in os.environ: + make_j = int(os.environ["CPU_COUNT"]) + + print("-" * 10, f"Building {pkg}", "-" * 40) + cmake_cmd = ["cmake", "--build", "."] + build_args + ["--", f"-j{make_j}"] sp.check_call(cmake_cmd, cwd=build_dir) cmake_cmd = ["cmake", "--install", "."] + build_args sp.check_call(cmake_cmd, cwd=build_dir) @@ -178,36 +180,6 @@ def build_so3g(src_dir, build_dir, install_dir, cmake_extra, debug): get_spt3g() -class RealClean(clean): - """Really clean up. - - Delete all temporary build directories when running `python setup.py clean`. - """ - - def run(self): - super().run() - clean_files = [ - "./build", - "./dist", - "./__pycache__", - "./*.egg-info", - spt3g_pkg_dir, - spt3g_src_dir, - "./include/_version.h", - ] - for cf in clean_files: - if not os.path.exists(cf): - continue - # Make paths absolute and relative to this path - apaths = glob.glob(os.path.abspath(cf)) - for path in apaths: - if os.path.isdir(path): - shutil.rmtree(path) - elif os.path.isfile(path): - os.remove(path) - return - - class CMakeExtension(Extension): """ This overrides the built-in extension class and essentially does nothing, @@ -223,12 +195,16 @@ class CMakeBuild(build_ext): Builds the full package using CMake. 
""" + def initialize_options(self): + super().initialize_options() + self.cmake_build_done = False + def run(self): """ Perform build_cmake before doing the 'normal' stuff """ for extension in self.extensions: - if extension.name == "so3g.libso3g": + if extension.name == "so3g._libso3g": # We just trigger this on one of the extensions. build_cmake() # will actually build everything. self.build_cmake() @@ -237,6 +213,8 @@ def run(self): # super().run() def build_cmake(self): + if self.cmake_build_done: + return try: out = sp.check_output(["cmake", "--version"]) except OSError: @@ -256,19 +234,14 @@ def build_cmake(self): # Use CMake to install to the distutils build location install_so3g = os.path.dirname( - Path(self.get_ext_fullpath("so3g.libso3g")).resolve().parents[0] + Path(self.get_ext_fullpath("so3g._libso3g")).resolve().parents[0] ) - # Use CMake to install spt3g python code into a subdirectory of so3g, but - # install the headers and other files to a separate location. + # Fake install directory passed to spt3g cmake. install_spt3g_fake = os.path.join(temp_build, "spt3g_install") - install_spt3g_py = install_so3g - # By default, the spt3g build system attempts to link to libpython, which - # should never be done when building wheels. This link resolution should - # only be done at runtime on the target system after installation. We - # have patched spt3g to not look for the python "Development" target, so - # here we specify the associated CMake variables directly. + # The cmake python discovery can be fragile. Here we override all the + # artifacts explicitly. 
py_exe = sys.executable py_maj = sys.version_info[0] py_min = sys.version_info[1] @@ -283,15 +256,10 @@ def build_cmake(self): dlist3g = [ f"-DPython_EXECUTABLE={py_exe}", f"-DPython_INCLUDE_DIRS={py_incl}", - f"-DPython_LIBRARIES=''", - f"-DPython_RUNTIME_LIBRARY_DIRS=''", - f"-DPython_LIBRARY_DIRS=''", f"-DPython_VERSION_MAJOR={py_maj}", f"-DPython_VERSION_MINOR={py_min}", - "-DBoost_ARCHITECTURE=-x64", - f"-DBoost_PYTHON_TYPE=python{py_maj}{py_min}", - "-DBoost_DEBUG=ON", - f"-DPYTHON_MODULE_DIR={install_spt3g_py}", + "-DDISABLE_NETCDF=ON", + "-DDISABLE_HDF5=ON", ] if "BOOST_ROOT" in os.environ: dlist3g.append(f"-DBOOST_ROOT={os.environ['BOOST_ROOT']}") @@ -306,7 +274,7 @@ def build_cmake(self): [ f"-DFLAC_LIBRARIES={flcroot}/lib/libFLAC.{flcext}", f"-DFLAC_INCLUDE_DIR={flcroot}/include", - f"-DFLAC_FOUND=1", + "-DFLAC_FOUND=1", ] ) @@ -320,71 +288,34 @@ def build_cmake(self): # Move spt3g python directory into place. Remove any stale copy of the # directory. - sh_ext = os.path.splitext(sysconfig.get_config_var("EXT_SUFFIX"))[1] - install_spt3g_internal = os.path.join(install_so3g, "so3g", "spt3g_internal") if os.path.isdir(install_spt3g_internal): - print(f"rmtree {install_spt3g_internal}") shutil.rmtree(install_spt3g_internal) - print(f"mv/rename {os.path.join(install_spt3g_py, 'spt3g')}, {install_spt3g_internal}") - os.rename(os.path.join(install_spt3g_py, "spt3g"), install_spt3g_internal) + os.rename(os.path.join(temp_spt3g, "spt3g"), install_spt3g_internal) build_so3g( topdir, temp_so3g, install_so3g, [ - "-DPYTHON_INSTALL_DEST={}".format(install_so3g), + f"-DPYTHON_INSTALL_DEST={install_so3g}", f"-DCMAKE_PREFIX_PATH={install_spt3g_fake}", ], self.debug, ) - + self.cmake_build_done = True ext_modules = [ - CMakeExtension("so3g.libso3g"), - CMakeExtension("so3g.spt3g_internal.libspt3g-core"), - CMakeExtension("so3g.spt3g_internal.libspt3g-dfmux"), - CMakeExtension("so3g.spt3g_internal.libspt3g-calibration"), - 
CMakeExtension("so3g.spt3g_internal.libspt3g-gcp"), - CMakeExtension("so3g.spt3g_internal.libspt3g-maps"), + CMakeExtension("so3g._libso3g"), ] - # Install the python scripts from spt3g -scripts = glob.glob(os.path.join(spt3g_src_dir, "*", "bin", "*")) - - -def readme(): - with open("README.rst") as f: - return f.read() - +raw_scripts = glob.glob(os.path.join(spt3g_src_dir, "*", "bin", "*")) +scripts = [x.removeprefix(f"{topdir}/") for x in raw_scripts] conf = dict() conf["name"] = "so3g" -conf["description"] = "Tools for Simons Observatory work with spt3g_software" -conf["long_description"] = readme() -conf["long_description_content_type"] = "text/x-rst" -conf["author"] = "Simons Observatory Collaboration" -conf["author_email"] = "so_software@simonsobservatory.org" -conf["license"] = "MIT" -conf["url"] = "https://github.com/simonsobs/so3g" conf["version"] = get_version() -conf["python_requires"] = ">=3.7.0" -conf["setup_requires"] = (["wheel", "cmake"],) -conf["install_requires"] = [ - "numpy<2", - "astropy", - "matplotlib", - "scipy", - "ephem", - "pytz", - "pyaml", - "sqlalchemy", - "pysqlite3-wheels", - "tqdm", - "qpoint", -] # Since the so3g python package is in a directory called "python", we can't use the # normal find_packages() function to recursively set these up. 
Instead we specify them @@ -396,25 +327,14 @@ def readme(): } for sub in ["hk", "proj", "smurf"]: - psub = "so3g.{}".format(sub) + psub = f"so3g.{sub}" pdir = os.path.join("python", sub) conf["packages"].append(psub) conf["package_dir"][psub] = pdir conf["ext_modules"] = ext_modules conf["scripts"] = scripts -conf["cmdclass"] = {"build_ext": CMakeBuild, "clean": RealClean} +conf["cmdclass"] = {"build_ext": CMakeBuild,} conf["zip_safe"] = False -conf["classifiers"] = [ - "Development Status :: 5 - Production/Stable", - "Environment :: Console", - "Intended Audience :: Science/Research", - "License :: OSI Approved :: BSD License", - "Operating System :: POSIX", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Topic :: Scientific/Engineering :: Astronomy", -] setup(**conf) diff --git a/src/G3IndexedReader.cxx b/src/G3IndexedReader.cxx index 4e8e6b6e..b3d88c50 100644 --- a/src/G3IndexedReader.cxx +++ b/src/G3IndexedReader.cxx @@ -3,16 +3,16 @@ #include #include "exceptions.h" -#include +#include G3IndexedReader::G3IndexedReader(std::string filename, int n_frames_to_read) : prefix_file_(false), n_frames_to_read_(n_frames_to_read), n_frames_read_(0) { - boost::filesystem::path fpath(filename); + std::filesystem::path fpath(filename); if (filename.find("://") == std::string::npos && - (!boost::filesystem::exists(fpath) || - !boost::filesystem::is_regular_file(fpath))) + (!std::filesystem::exists(fpath) || + !std::filesystem::is_regular_file(fpath))) log_fatal("Could not find file %s", filename.c_str()); StartFile(filename); } @@ -25,10 +25,10 @@ G3IndexedReader::G3IndexedReader(std::vector filename, int n_frames log_fatal("Empty file list provided to G3IndexedReader"); for (auto i = filename.begin(); i != filename.end(); i++){ - boost::filesystem::path fpath(*i); + std::filesystem::path fpath(*i); if (i->find("://") == std::string::npos && - (!boost::filesystem::exists(fpath) || - 
!boost::filesystem::is_regular_file(fpath))) + (!std::filesystem::exists(fpath) || + !std::filesystem::is_regular_file(fpath))) log_fatal("Could not find file %s", i->c_str()); filename_.push_back(*i); } @@ -106,7 +106,7 @@ PYBINDINGS("so3g") { using namespace boost::python; // Instead of EXPORT_G3MODULE since there are two constructors - class_, boost::shared_ptr, + class_, std::shared_ptr, boost::noncopyable>("G3IndexedReader", "Read frames from disk. Takes either the path to a file to read " "or an iterable of files to be read in sequence. If " diff --git a/src/G3Ndarray.cxx b/src/G3Ndarray.cxx index 5e1493aa..0fdc1756 100644 --- a/src/G3Ndarray.cxx +++ b/src/G3Ndarray.cxx @@ -67,7 +67,7 @@ template void G3Ndarray::save(A &ar, unsigned v) const // v is the ver // Copy over data into contiguous structure. It seems like the easiest way // to do this is to make a whole new numpy array PyArrayObject *contig = (PyArrayObject*) PyArray_NewCopy(data, NPY_CORDER); - ar & make_nvp("data", binary_data((char*)PyArray_DATA(contig), size*PyArray_DESCR(contig)->elsize)); + ar & make_nvp("data", binary_data((char*)PyArray_DATA(contig), size*PyArray_ITEMSIZE(contig))); Py_DECREF((PyObject*)contig); } @@ -86,7 +86,7 @@ template void G3Ndarray::load(A &ar, unsigned v) { // Make a new PyArrayObject with these properties Py_XDECREF(data); data = (PyArrayObject*) PyArray_SimpleNew(ndim, &shape[0], type_num); - ar & make_nvp("data", binary_data((char*)PyArray_DATA(data), size*PyArray_DESCR(data)->elsize)); + ar & make_nvp("data", binary_data((char*)PyArray_DATA(data), size*PyArray_ITEMSIZE(data))); } bp::object G3Ndarray::to_array() const { @@ -95,10 +95,10 @@ bp::object G3Ndarray::to_array() const { G3_SPLIT_SERIALIZABLE_CODE(G3Ndarray); -using namespace boost::python; - PYBINDINGS("so3g") { + using namespace boost::python; + EXPORT_FRAMEOBJECT(G3Ndarray, init<>(), "G3Ndarray default constructor") .def(init("Construct G3Ndarray from numpy array")) .def("to_array", 
&G3Ndarray::to_array, "Get the wrapped numpy array") diff --git a/src/healpix_bare.c b/src/healpix_bare.c index 747a05a2..58721c74 100644 --- a/src/healpix_bare.c +++ b/src/healpix_bare.c @@ -65,8 +65,8 @@ static t_hpc loc2hpc (tloc loc) double jm = (1.0-tp)*tmp; /* decreasing edge line index */ if (jp>1.) jp = 1.; /* for points too close to the boundary */ if (jm>1.) jm = 1.; - return (loc.z >= 0) ? (t_hpc){1.-jm, 1.-jp, ntt} - : (t_hpc){jp, jm, ntt+8}; + return (loc.z >= 0) ? (t_hpc){1.-jm, 1.-jp, (int32_t)ntt} + : (t_hpc){jp, jm, (int32_t)ntt+8}; } static tloc hpc2loc (t_hpc hpc) @@ -173,7 +173,7 @@ static int64_t hpd2nest (int64_t nside, t_hpd hpd) static t_hpd nest2hpd (int64_t nside, int64_t pix) { int64_t npface_=nside*nside, p2=pix&(npface_-1); - return (t_hpd){compress_bits(p2), compress_bits(p2>>1), pix/npface_}; + return (t_hpd){compress_bits(p2), compress_bits(p2>>1), (int32_t)(pix/npface_)}; } static int64_t hpd2ring (int64_t nside_, t_hpd hpd) @@ -215,7 +215,7 @@ static t_hpd ring2hpd (int64_t nside_, int64_t pix) int64_t irt = iring - (jrll[face]*nside_) + 1; int64_t ipt = 2*iphi- jpll[face]*iring -1; if (ipt>=2*nside_) ipt-=8*nside_; - return (t_hpd) {(ipt-irt)>>1, (-(ipt+irt))>>1, face}; + return (t_hpd) {(ipt-irt)>>1, (-(ipt+irt))>>1, (int32_t)face}; } else if (pix<(npix_-ncap_)) /* Equatorial region */ { @@ -231,7 +231,7 @@ static t_hpd ring2hpd (int64_t nside_, int64_t pix) int64_t irt = iring - (jrll[face]*nside_) + 1; int64_t ipt = 2*iphi- jpll[face]*nside_ - kshift -1; if (ipt>=2*nside_) ipt-=8*nside_; - return (t_hpd) {(ipt-irt)>>1, (-(ipt+irt))>>1, face}; + return (t_hpd) {(ipt-irt)>>1, (-(ipt+irt))>>1, (int32_t)face}; } else /* South Polar cap */ { @@ -242,7 +242,7 @@ static t_hpd ring2hpd (int64_t nside_, int64_t pix) int64_t irt = 4*nside_ - iring - (jrll[face]*nside_) + 1; int64_t ipt = 2*iphi- jpll[face]*iring -1; if (ipt>=2*nside_) ipt-=8*nside_; - return (t_hpd) {(ipt-irt)>>1, (-(ipt+irt))>>1, face}; + return (t_hpd) {(ipt-irt)>>1, 
(-(ipt+irt))>>1, (int32_t)face}; } } @@ -263,7 +263,7 @@ int64_t ring2nest(int64_t nside, int64_t ipring) static t_hpd loc2hpd (int64_t nside_, tloc loc) { t_hpc tmp = loc2hpc(loc); - return (t_hpd){(tmp.x*nside_), (tmp.y*nside_), tmp.f}; + return (t_hpd){((int64_t)(tmp.x*nside_)), ((int64_t)(tmp.y*nside_)), tmp.f}; } static tloc hpd2loc (int64_t nside_, t_hpd hpd) diff --git a/src/main.cxx b/src/main.cxx index 97b761ca..6f1fe9db 100644 --- a/src/main.cxx +++ b/src/main.cxx @@ -38,9 +38,6 @@ bp::object useful_info() { return output; } - - - PYBINDINGS("so3g") { bp::def("version", version); bp::def("useful_info", useful_info); @@ -51,7 +48,7 @@ static void* _so3g_import_array() { return NULL; } -BOOST_PYTHON_MODULE(so3g) { +BOOST_PYTHON_MODULE(_libso3g) { _so3g_import_array(); G3ModuleRegistrator::CallRegistrarsFor("so3g"); } diff --git a/test/test_proj_astro.py b/test/test_proj_astro.py index 35ebe948..5517a20f 100644 --- a/test/test_proj_astro.py +++ b/test/test_proj_astro.py @@ -238,7 +238,7 @@ def get_pyephem_radec(az, el, t, site, weather=None): esite.long = site.lon * DEG if weather is not None: weather.apply(esite) - d = datetime.datetime.utcfromtimestamp(t) + d = datetime.datetime.fromtimestamp(t, tz=datetime.timezone.utc) Xt = d.year, d.month, d.day, d.hour, d.minute, d.second+d.microsecond*1e-6 esite.date = ephem.date(Xt) return esite.radec_of(az, el) diff --git a/wheels/build_requirements.txt b/wheels/build_requirements.txt index 136da4ed..0f293d97 100644 --- a/wheels/build_requirements.txt +++ b/wheels/build_requirements.txt @@ -9,3 +9,4 @@ sqlalchemy pysqlite3-wheels tqdm pytest +qpoint diff --git a/wheels/install_deps_linux.sh b/wheels/install_deps_linux.sh index d02ed47e..9d62f872 100755 --- a/wheels/install_deps_linux.sh +++ b/wheels/install_deps_linux.sh @@ -13,7 +13,7 @@ echo "Wheel script directory = ${scriptdir}" # Install library dependencies yum update -y -yum install -y flac-devel bzip2-devel zlib-devel sqlite-devel netcdf-devel +yum install 
-y flac-devel bzip2-devel zlib-devel sqlite-devel # Build options @@ -22,7 +22,7 @@ CXX=g++ FC=gfortran CFLAGS="-O3 -fPIC -pthread" -CXXFLAGS="-O3 -fPIC -pthread -std=c++14" +CXXFLAGS="-O3 -fPIC -pthread -std=c++17" FCFLAGS="-O3 -fPIC -pthread" MAKEJ=2 @@ -37,25 +37,25 @@ pip install -v cmake wheel setuptools # In order to maximize ABI compatibility with numpy, build with the newest numpy # version containing the oldest ABI version compatible with the python we are using. +# NOTE: for now, we build with numpy 2.0.x, which is backwards compatible with +# numpy-1.x and forward compatible with numpy-2.x. pyver=$(python3 --version 2>&1 | awk '{print $2}' | sed -e "s#\(.*\)\.\(.*\)\..*#\1.\2#") -if [ ${pyver} == "3.7" ]; then - numpy_ver="1.20" -fi -if [ ${pyver} == "3.8" ]; then - numpy_ver="1.20" -fi -if [ ${pyver} == "3.9" ]; then - numpy_ver="1.20" -fi -if [ ${pyver} == "3.10" ]; then - numpy_ver="1.22" -fi -if [ ${pyver} == "3.11" ]; then - numpy_ver="1.24" -fi +# if [ ${pyver} == "3.8" ]; then +# numpy_ver="1.20" +# fi +# if [ ${pyver} == "3.9" ]; then +# numpy_ver="1.20" +# fi +# if [ ${pyver} == "3.10" ]; then +# numpy_ver="1.22" +# fi +# if [ ${pyver} == "3.11" ]; then +# numpy_ver="1.24" +# fi +numpy_ver="2.0.1" # Install build requirements. -CC="${CC}" CFLAGS="${CFLAGS}" pip install -v -r "${scriptdir}/build_requirements.txt" "numpy<${numpy_ver}" +CC="${CC}" CFLAGS="${CFLAGS}" pip install -v -r "${scriptdir}/../requirements.txt" "numpy<${numpy_ver}" # Install Openblas @@ -83,14 +83,14 @@ tar xzf ${openblas_pkg} \ # Install boost -boost_version=1_86_0 +boost_version=1_87_0 boost_dir=boost_${boost_version} boost_pkg=${boost_dir}.tar.bz2 echo "Fetching boost..." if [ ! -e ${boost_pkg} ]; then - curl -SL "https://archives.boost.io/release/1.86.0/source/${boost_pkg}" -o "${boost_pkg}" + curl -SL "https://archives.boost.io/release/1.87.0/source/${boost_pkg}" -o "${boost_pkg}" fi echo "Building boost..." 
diff --git a/wheels/install_deps_osx.sh b/wheels/install_deps_osx.sh index 06507c1f..475839a1 100755 --- a/wheels/install_deps_osx.sh +++ b/wheels/install_deps_osx.sh @@ -6,10 +6,6 @@ set -e -# Note: we are not cross-compiling here. This is for selecting the -# openblas tarball to fetch. -arch=$1 - # Location of this script pushd $(dirname $0) >/dev/null 2>&1 scriptdir=$(pwd) @@ -20,19 +16,24 @@ echo "Wheel script directory = ${scriptdir}" # FIXME: would be nice to switch to clang once spt3g / cereal # runtime registration works. -use_gcc=yes -# use_gcc=no +#use_gcc=yes +use_gcc=no +gcc_version=14 if [ "x${use_gcc}" = "xyes" ]; then - CC=gcc-14 - CXX=g++-14 + CC=gcc-${gcc_version} + CXX=g++-${gcc_version} CFLAGS="-O3 -fPIC" - CXXFLAGS="-O3 -fPIC -std=c++14" + CXXFLAGS="-O3 -fPIC -std=c++17" + OMPFLAGS="-fopenmp" else + export MACOSX_DEPLOYMENT_TARGET=$(python3 -c "import sysconfig as s; print(s.get_config_vars()['MACOSX_DEPLOYMENT_TARGET'])") + echo "Using MACOSX_DEPLOYMENT_TARGET=${MACOSX_DEPLOYMENT_TARGET}" CC=clang CXX=clang++ CFLAGS="-O3 -fPIC" - CXXFLAGS="-O3 -fPIC -std=c++11 -stdlib=libc++" + CXXFLAGS="-O3 -fPIC -std=c++17 -stdlib=libc++" + OMPFLAGS="" fi MAKEJ=2 @@ -40,13 +41,12 @@ MAKEJ=2 PREFIX=/usr/local # Install library dependencies with homebrew -brew install netcdf brew install sqlite3 brew install flac # Optionally install gcc if [ "x${use_gcc}" = "xyes" ]; then - brew install gcc@14 + brew install gcc@${gcc_version} fi # Update pip @@ -57,55 +57,65 @@ pip install -v cmake wheel setuptools # In order to maximize ABI compatibility with numpy, build with the newest numpy # version containing the oldest ABI version compatible with the python we are using. +# NOTE: for now, we build with numpy 2.0.x, which is backwards compatible with +# numpy-1.x and forward compatible with numpy-2.x. 
pyver=$(python3 --version 2>&1 | awk '{print $2}' | sed -e "s#\(.*\)\.\(.*\)\..*#\1.\2#") -if [ ${pyver} == "3.7" ]; then - numpy_ver="1.20" -fi -if [ ${pyver} == "3.8" ]; then - numpy_ver="1.20" -fi -if [ ${pyver} == "3.9" ]; then - numpy_ver="1.20" -fi -if [ ${pyver} == "3.10" ]; then - numpy_ver="1.22" -fi -if [ ${pyver} == "3.11" ]; then - numpy_ver="1.24" -fi +# if [ ${pyver} == "3.8" ]; then +# numpy_ver="1.20" +# fi +# if [ ${pyver} == "3.9" ]; then +# numpy_ver="1.20" +# fi +# if [ ${pyver} == "3.10" ]; then +# numpy_ver="1.22" +# fi +# if [ ${pyver} == "3.11" ]; then +# numpy_ver="1.24" +# fi +numpy_ver="2.0.1" # Install build requirements. -CC="${CC}" CFLAGS="${CFLAGS}" pip install -v -r "${scriptdir}/build_requirements.txt" "numpy<${numpy_ver}" "scipy_openblas32" +CC="${CC}" CFLAGS="${CFLAGS}" pip install -v -r "${scriptdir}/../requirements.txt" "numpy<${numpy_ver}" + +# Install Openblas -# We use the scipy openblas wheel to get the openblas to use. +openblas_version=0.3.28 +openblas_dir=OpenBLAS-${openblas_version} +openblas_pkg=${openblas_dir}.tar.gz -# First ensure that pkg-config is set to search somewhere -if [ -z "${PKG_CONFIG_PATH}" ]; then - export PKG_CONFIG_PATH="/usr/local/lib/pkgconfig" +if [ ! -e ${openblas_pkg} ]; then + echo "Fetching OpenBLAS..." + curl -SL https://github.com/xianyi/OpenBLAS/archive/v${openblas_version}.tar.gz -o ${openblas_pkg} fi -python3 -c "import scipy_openblas32; print(scipy_openblas32.get_pkg_config())" > ${PKG_CONFIG_PATH}/scipy-openblas.pc +echo "Building OpenBLAS..." 
-# To help delocate find the libraries, we copy them into /usr/local -python3 </dev/null 2>&1 \ + && make ${omp} NO_STATIC=1 \ + MAKE_NB_JOBS=${MAKEJ} \ + CC="${CC}" FC="${FC}" DYNAMIC_ARCH=1 TARGET=GENERIC \ + COMMON_OPT="${CFLAGS}" FCOMMON_OPT="${FCFLAGS}" \ + EXTRALIB="${OMPFLAGS}" libs netlib shared \ + && make NO_STATIC=1 DYNAMIC_ARCH=1 TARGET=GENERIC PREFIX="${PREFIX}" install \ + && popd >/dev/null 2>&1 # Install boost -boost_version=1_86_0 +boost_version=1_87_0 boost_dir=boost_${boost_version} boost_pkg=${boost_dir}.tar.bz2 echo "Fetching boost..." if [ ! -e ${boost_pkg} ]; then - curl -SL "https://archives.boost.io/release/1.86.0/source/${boost_pkg}" -o "${boost_pkg}" + curl -SL "https://archives.boost.io/release/1.87.0/source/${boost_pkg}" -o "${boost_pkg}" fi echo "Building boost..." diff --git a/wheels/repair_wheel_linux.sh b/wheels/repair_wheel_linux.sh index 98eaafaf..fdfde3ce 100755 --- a/wheels/repair_wheel_linux.sh +++ b/wheels/repair_wheel_linux.sh @@ -10,9 +10,8 @@ set -e dest_dir=$1 wheel=$2 -spt3g_build=$(ls -d /project/build/lib.linux*/so3g/spt3g_internal) -so3g_build=$(ls -d /project/build/lib.linux*/so3g) +spt3g_install=$(ls -d /project/build/temp.linux*/spt3g_install) -export LD_LIBRARY_PATH=${spt3g_build}:${so3g_build}:${LD_LIBRARY_PATH} +export LD_LIBRARY_PATH="/usr/local/lib":"/usr/local/lib64":"${spt3g_install}/lib":"${spt3g_install}/lib64":${LD_LIBRARY_PATH} auditwheel repair -w ${dest_dir} ${wheel} diff --git a/wheels/repair_wheel_macos.sh b/wheels/repair_wheel_macos.sh index b8fe4c74..1416c7af 100755 --- a/wheels/repair_wheel_macos.sh +++ b/wheels/repair_wheel_macos.sh @@ -11,9 +11,9 @@ dest_dir=$1 wheel=$2 delocate_archs=$3 -spt3g_libdir=$(ls -d /Users/runner/work/so3g/so3g/build/temp.macosx*/spt3g_install/lib) +spt3g_install=$(ls -d /project/build/temp.linux*/spt3g_install) -export DYLD_LIBRARY_PATH=/usr/local/lib:${spt3g_libdir}:${DYLD_LIBRARY_PATH} +export 
DYLD_LIBRARY_PATH="/usr/local/lib":"${spt3g_install}/lib":"${spt3g_install}/lib64":${DYLD_LIBRARY_PATH} delocate-listdeps ${wheel} \ && delocate-wheel --require-archs ${delocate_archs} -w ${dest_dir} ${wheel} diff --git a/wheels/spt3g.patch b/wheels/spt3g.patch deleted file mode 100644 index 18c42aaf..00000000 --- a/wheels/spt3g.patch +++ /dev/null @@ -1,68 +0,0 @@ -diff -urN spt3g_software_orig/cmake/Spt3gBoostPython.cmake spt3g_software_export/cmake/Spt3gBoostPython.cmake ---- spt3g_software_orig/cmake/Spt3gBoostPython.cmake 2024-08-22 10:25:14.077183587 -0700 -+++ spt3g_software_export/cmake/Spt3gBoostPython.cmake 2024-12-11 12:24:37.355444860 -0800 -@@ -1,7 +1,7 @@ - # Locate Python - - if(${CMAKE_VERSION} VERSION_GREATER_EQUAL 3.12) -- find_package(Python COMPONENTS Interpreter Development) -+ find_package(Python COMPONENTS Interpreter) - else() - find_package(PythonInterp) - find_package(PythonLibs ${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}) -diff -urN spt3g_software_orig/CMakeLists.txt spt3g_software_export/CMakeLists.txt ---- spt3g_software_orig/CMakeLists.txt 2024-08-22 10:24:59.301256298 -0700 -+++ spt3g_software_export/CMakeLists.txt 2024-12-11 12:24:37.364444816 -0800 -@@ -42,7 +42,7 @@ - - # Raise errors on every warning by default - # (use target-specific options to disable particular warnings) --set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Werror") -+#set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Werror") - - # Interface library for flags and library dependencies - add_library(spt3g INTERFACE) -diff -urN spt3g_software_orig/core/CMakeLists.txt spt3g_software_export/core/CMakeLists.txt ---- spt3g_software_orig/core/CMakeLists.txt 2024-08-06 11:34:45.598647939 -0700 -+++ spt3g_software_export/core/CMakeLists.txt 2024-12-11 12:24:37.364444816 -0800 -@@ -105,8 +105,8 @@ - add_spt3g_test(quaternions) - add_spt3g_test(timesample) - --add_spt3g_test_program(test -- SOURCE_FILES -- ${CMAKE_CURRENT_SOURCE_DIR}/tests/G3TimestreamTest.cxx -- 
${CMAKE_CURRENT_SOURCE_DIR}/tests/G3TimestreamMapTest.cxx -- USE_PROJECTS core) -+#add_spt3g_test_program(test -+# SOURCE_FILES -+# ${CMAKE_CURRENT_SOURCE_DIR}/tests/G3TimestreamTest.cxx -+# ${CMAKE_CURRENT_SOURCE_DIR}/tests/G3TimestreamMapTest.cxx -+# USE_PROJECTS core) -diff -urN spt3g_software_orig/core/src/dataio.cxx spt3g_software_export/core/src/dataio.cxx ---- spt3g_software_orig/core/src/dataio.cxx 2024-08-06 11:34:45.606647906 -0700 -+++ spt3g_software_export/core/src/dataio.cxx 2024-12-11 12:24:45.732404214 -0800 -@@ -146,8 +146,14 @@ - stream.push(fs); - } else { - // Simple file case -+ const char * bufcheck = getenv("SO3G_FILESYSTEM_BUFFER"); -+ // Use 20MB default -+ size_t so3g_buffer_size = 20971520; -+ if (bufcheck != nullptr) { -+ so3g_buffer_size = (size_t)atol(bufcheck); -+ } - stream.push(boost::iostreams::file_source(path, -- std::ios::binary)); -+ std::ios::binary), so3g_buffer_size); - } - - return fd; -diff -urN spt3g_software_orig/examples/CMakeLists.txt spt3g_software_export/examples/CMakeLists.txt ---- spt3g_software_orig/examples/CMakeLists.txt 2024-08-06 11:34:45.610647890 -0700 -+++ spt3g_software_export/examples/CMakeLists.txt 2024-12-11 12:24:37.365444811 -0800 -@@ -1,2 +1,2 @@ --add_executable(cppexample cppexample.cxx) --target_link_libraries(cppexample core) -+#add_executable(cppexample cppexample.cxx) -+#target_link_libraries(cppexample core) diff --git a/wheels/spt3g_circular_imports.patch b/wheels/spt3g_circular_imports.patch new file mode 100644 index 00000000..84bd0d4f --- /dev/null +++ b/wheels/spt3g_circular_imports.patch @@ -0,0 +1,875 @@ +diff -urN spt3g_software_orig/calibration/python/bolopropertiesutils.py spt3g_software/calibration/python/bolopropertiesutils.py +--- spt3g_software_orig/calibration/python/bolopropertiesutils.py 2025-01-14 23:48:46.997901524 -0800 ++++ spt3g_software/calibration/python/bolopropertiesutils.py 2025-01-14 23:49:10.239796804 -0800 +@@ -1,4 +1,4 @@ +-from . 
import BolometerProperties ++from .._libcalibration import BolometerProperties + from .. import core + import numpy as np + import re +diff -urN spt3g_software_orig/calibration/python/build_cal_frames.py spt3g_software/calibration/python/build_cal_frames.py +--- spt3g_software_orig/calibration/python/build_cal_frames.py 2025-01-14 23:48:46.998901519 -0800 ++++ spt3g_software/calibration/python/build_cal_frames.py 2025-01-14 23:49:10.240796800 -0800 +@@ -1,6 +1,10 @@ + from .. import core +-from . import BolometerProperties, BolometerPropertiesMap +-from . import PointingProperties, PointingPropertiesMap ++from .._libcalibration import ( ++ BolometerProperties, ++ BolometerPropertiesMap, ++ PointingProperties, ++ PointingPropertiesMap, ++) + import numpy, os, re + + ''' +@@ -24,13 +28,13 @@ + - Band Calibration (Key: 'BoloBands') + - Physical Name Data (Key: 'PhysicalBoloIDs') + ''' +- ++ + def __init__(self, drop_original_frames=True, fiducial_detectors=[], + bpm_name='NominalBolometerProperties', use_bpm_pointing=False): + ''' + If drop_original_frames is True, will drop all input Calibration frames. + +- If fiducial_detectors is set, will use the average of the position[s] of ++ If fiducial_detectors is set, will use the average of the position[s] of + whatever detector[s] are specified to center each set of relative offsets + encountered (NB: this recentering is done in a Cartesian way). 
If it is + *not* specified, five detectors near the middle of the focal plane present +@@ -107,7 +111,7 @@ + p.wafer_id = self.props[bolo]['wafer_id'] + if 'pixel_type' in self.props[bolo]: + p.pixel_type = self.props[bolo]['pixel_type'] +- ++ + boloprops[bolo] = p + + cframe = core.G3Frame(core.G3FrameType.Calibration) +@@ -301,7 +305,7 @@ + Expects to be passed frames from: + - Az tilt fit parameters (Keys: 'tiltAngle', 'tiltHA', 'tiltLat', 'tiltMag') + ''' +- ++ + def __init__(self, drop_original_frames=True): + ''' + If drop_original_frames is True, will drop all input Calibration frames. +@@ -322,10 +326,10 @@ + + if 'tiltHA' in self.props: + p['tiltHA'] = self.props['tiltHA'] +- ++ + if 'tiltMag' in self.props: + p['tiltMag'] = self.props['tiltMag'] +- ++ + if 'tiltAngle' in self.props: + p['tiltAngle'] = self.props['tiltAngle'] + +diff -urN spt3g_software_orig/core/python/dataextensions.py spt3g_software/core/python/dataextensions.py +--- spt3g_software_orig/core/python/dataextensions.py 2025-01-14 23:48:46.998901519 -0800 ++++ spt3g_software/core/python/dataextensions.py 2025-01-14 23:49:10.240796800 -0800 +@@ -1,4 +1,4 @@ +-from . import G3Bool, G3Int, G3Double, G3String ++from .._libcore import G3Bool, G3Int, G3Double, G3String + + __all__ = [] + +diff -urN spt3g_software_orig/core/python/docparser.py spt3g_software/core/python/docparser.py +--- spt3g_software_orig/core/python/docparser.py 2025-01-14 23:48:46.998901519 -0800 ++++ spt3g_software/core/python/docparser.py 2025-01-14 23:49:10.240796800 -0800 +@@ -1,5 +1,5 @@ + import sys, inspect, re, textwrap +-from . 
import G3Module, G3FrameObject ++from .._libcore import G3Module, G3FrameObject + + def format_doc(x, simple=False): + """ +diff -urN spt3g_software_orig/core/python/fileio.py spt3g_software/core/python/fileio.py +--- spt3g_software_orig/core/python/fileio.py 2025-01-14 23:48:46.999901515 -0800 ++++ spt3g_software/core/python/fileio.py 2025-01-14 23:49:10.241796795 -0800 +@@ -1,4 +1,4 @@ +-from .. import core ++from .._libcore import G3Reader, G3Frame, G3FrameType, G3Writer, G3MultiFileWriter + + __all__ = ["G3File"] + +@@ -16,7 +16,7 @@ + """ + + def __init__(self, path): +- self.reader = core.G3Reader(path) ++ self.reader = G3Reader(path) + + def __iter__(self): + return self +@@ -49,11 +49,11 @@ + + + def writer_exit(self, *args, **kwargs): +- fr = core.G3Frame(core.G3FrameType.EndProcessing) ++ fr = G3Frame(G3FrameType.EndProcessing) + self(fr) + + +-core.G3Writer.__enter__ = writer_enter +-core.G3Writer.__exit__ = writer_exit +-core.G3MultiFileWriter.__enter__ = writer_enter +-core.G3MultiFileWriter.__exit__ = writer_exit ++G3Writer.__enter__ = writer_enter ++G3Writer.__exit__ = writer_exit ++G3MultiFileWriter.__enter__ = writer_enter ++G3MultiFileWriter.__exit__ = writer_exit +diff -urN spt3g_software_orig/core/python/frameextensions.py spt3g_software/core/python/frameextensions.py +--- spt3g_software_orig/core/python/frameextensions.py 2025-01-14 23:48:46.999901515 -0800 ++++ spt3g_software/core/python/frameextensions.py 2025-01-14 23:49:10.241796795 -0800 +@@ -1,4 +1,4 @@ +-from . import G3Frame, G3FrameType ++from .._libcore import G3Frame, G3FrameType + + @staticmethod + def str_to_frame_types(types): +diff -urN spt3g_software_orig/core/python/g3decorators.py spt3g_software/core/python/g3decorators.py +--- spt3g_software_orig/core/python/g3decorators.py 2025-01-14 23:48:46.999901515 -0800 ++++ spt3g_software/core/python/g3decorators.py 2025-01-14 23:49:10.241796795 -0800 +@@ -1,4 +1,4 @@ +-from . 
import G3FrameType ++from .._libcore import G3FrameType + from copy import copy + import inspect + import textwrap +@@ -27,7 +27,7 @@ + This is a decorator for use with G3Modules written as functions. It enables + a function to use cached values from other types of frames in the processing + of a frame. +- ++ + To make that confusing sentence clearer with an example, in a lot of cases + we want to have a module that works on Scan frames, but have access to the + BolometerProperties. This decorator allows you to specify the information +@@ -47,7 +47,7 @@ + Key specifies the name of the argument that we pass the infromation to in + the decorated function. If the decorated function is called with Key as an + argument it will overwrite the value specified in the decorator. +- ++ + Value specifies the default path to look for the cached data. It will + search all of the frames that do not have the frame type 'type' for a key + with that value. This can be overridden when calling the decorated +@@ -74,7 +74,7 @@ + doc_append += ' %s = "%s"\n' % ( k, v ) + + class WrappedFunc: +- def __init__(self, *args, **kwargs): ++ def __init__(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + +diff -urN spt3g_software_orig/core/python/g3logging.py spt3g_software/core/python/g3logging.py +--- spt3g_software_orig/core/python/g3logging.py 2025-01-14 23:48:47.000901510 -0800 ++++ spt3g_software/core/python/g3logging.py 2025-01-14 23:49:10.241796795 -0800 +@@ -1,4 +1,4 @@ +-from . import G3Logger, G3LogLevel ++from .._libcore import G3Logger, G3LogLevel + import traceback + from functools import reduce + +@@ -27,12 +27,12 @@ + def log_fatal(*args, **kwargs): + ''' + Behaves like print but with logging behavior as described in logging.rst +- ++ + The only kwarg that it recognizes is "unit", for setting the logging unit. 
+ ''' + message = reduce(lambda a,b: '%s %s' % ( str(a), str(b)), args) + unit = kwargs.get('unit') +- unit = unit if unit != None else 'Python' ++ unit = unit if unit != None else 'Python' + tb = traceback.extract_stack(limit=2)[0] + G3Logger.global_logger.log(G3LogLevel.LOG_FATAL, unit, tb[0], tb[1], + tb[2], message) +@@ -42,7 +42,7 @@ + ''' + Set log level to the requested level. If unit is not None, set the + log level for the given logging unit only. +- ++ + Example: core.set_log_level(core.G3LogLevel.LOG_DEBUG, 'GCPMuxDataDecoder') + ''' + +diff -urN spt3g_software_orig/core/python/modconstruct.py spt3g_software/core/python/modconstruct.py +--- spt3g_software_orig/core/python/modconstruct.py 2025-01-14 23:48:47.000901510 -0800 ++++ spt3g_software/core/python/modconstruct.py 2025-01-14 23:50:06.884541584 -0800 +@@ -1,4 +1,13 @@ +-from . import G3Module, G3Pipeline, G3PipelineInfo, G3Frame, G3FrameType, G3Time, G3ModuleConfig, log_fatal ++from .._libcore import ( ++ G3Module, ++ G3Pipeline, ++ G3PipelineInfo, ++ G3Frame, ++ G3FrameType, ++ G3Time, ++ G3ModuleConfig, ++) ++from .g3logging import log_fatal + try: + from . import multiprocess + multiproc_avail = True +@@ -240,7 +249,7 @@ + name = self.nameprefix + name + + # Record module configuration for root objects +- if self.nameprefix == '': ++ if self.nameprefix == '': + modconfig = G3ModuleConfig() + modconfig.instancename = name + modconfig.modname = '%s.%s' % (callable.__module__, callable_name) +diff -urN spt3g_software_orig/core/python/multiprocess.py spt3g_software/core/python/multiprocess.py +--- spt3g_software_orig/core/python/multiprocess.py 2025-01-14 23:48:47.009901470 -0800 ++++ spt3g_software/core/python/multiprocess.py 2025-01-14 23:49:10.242796791 -0800 +@@ -1,7 +1,7 @@ + from multiprocessing import get_context + import socket, pickle, errno, struct, time + +-from . 
import G3FrameType, G3Frame ++from .._libcore import G3FrameType, G3Frame + + # Require fork to avoid pickling errors + ctx = get_context("fork") +@@ -84,7 +84,7 @@ + while bytestoread > len(inbuf): + inbuf += self.queue[0].recv(bytestoread - len(inbuf)) + self.queue[0].setblocking(False) +- ++ + allout += pickle.loads(inbuf) + self.callsqueued -= 1 + icount = 0 # Forward progress! Try again quick +@@ -128,7 +128,7 @@ + outbuf = pickle.dumps(out) + outbuf = struct.pack('i', len(outbuf)) + outbuf + self.queue[1].send(outbuf) +- ++ + if frame.type == G3FrameType.EndProcessing: + return + +diff -urN spt3g_software_orig/core/python/quatextensions.py spt3g_software/core/python/quatextensions.py +--- spt3g_software_orig/core/python/quatextensions.py 2025-01-14 23:48:47.009901470 -0800 ++++ spt3g_software/core/python/quatextensions.py 2025-01-14 23:49:10.242796791 -0800 +@@ -1,5 +1,5 @@ + import numpy as np +-from . import Quat, G3VectorQuat, G3TimestreamQuat ++from .._libcore import Quat, G3VectorQuat, G3TimestreamQuat + + __all__ = [] + +diff -urN spt3g_software_orig/core/python/timestreamextensions.py spt3g_software/core/python/timestreamextensions.py +--- spt3g_software_orig/core/python/timestreamextensions.py 2025-01-14 23:48:47.009901470 -0800 ++++ spt3g_software/core/python/timestreamextensions.py 2025-01-14 23:49:10.243796786 -0800 +@@ -1,7 +1,22 @@ + import numpy +-from . import G3Timestream, DoubleVector, G3VectorDouble, G3TimestreamMap, G3VectorTime, G3Time, IntVector, G3VectorInt, \ +- G3VectorComplexDouble, ComplexDoubleVector, BoolVector, G3VectorBool +-from . 
import G3Units, log_fatal, log_warn, usefulfunc, G3FrameObject ++from .._libcore import ( ++ G3Timestream, ++ DoubleVector, ++ G3VectorDouble, ++ G3TimestreamMap, ++ G3VectorTime, ++ G3Time, ++ IntVector, ++ G3VectorInt, ++ G3VectorComplexDouble, ++ ComplexDoubleVector, ++ BoolVector, ++ G3VectorBool, ++ G3Units, ++ G3FrameObject, ++) ++from .g3logging import log_fatal, log_warn ++from .funcconstruct import usefulfunc + + __all__ = ['concatenate_timestreams'] + +diff -urN spt3g_software_orig/core/python/util.py spt3g_software/core/python/util.py +--- spt3g_software_orig/core/python/util.py 2025-01-14 23:48:47.010901465 -0800 ++++ spt3g_software/core/python/util.py 2025-01-14 23:49:10.243796786 -0800 +@@ -1,4 +1,6 @@ +-from . import indexmod, pipesegment, G3FrameType, log_fatal, G3Reader, G3NetworkSender ++from .._libcore import G3FrameType, G3Reader, G3NetworkSender ++from .g3logging import log_fatal ++from .modconstruct import indexmod, pipesegment + + @indexmod + def Delete(frame, keys=[], type=None): +@@ -65,7 +67,7 @@ + + The frame data is stored in the variable names "frame". + +- If debug_start_func is not None, only starts a debug session when ++ If debug_start_func is not None, only starts a debug session when + debug_start_func(frame) == True + ''' + if type is None or frame.type == type: +@@ -181,7 +183,7 @@ + else: + self.metacache.append(frame) + return [] +- ++ + + del indexmod + del pipesegment +diff -urN spt3g_software_orig/dfmux/python/DataQualityTools.py spt3g_software/dfmux/python/DataQualityTools.py +--- spt3g_software_orig/dfmux/python/DataQualityTools.py 2025-01-14 23:48:47.010901465 -0800 ++++ spt3g_software/dfmux/python/DataQualityTools.py 2025-01-14 23:49:10.243796786 -0800 +@@ -1,5 +1,5 @@ + from .. import core +-from . 
import DfMuxMetaSample ++from .._libdfmux import DfMuxMetaSample + import numpy as np + + def get_empty_timepoint(sample_time): +@@ -15,7 +15,7 @@ + + def __call__(self, frame): + if frame.type != core.G3FrameType.Timepoint: +- return ++ return + if 'EventHeader' not in frame: + return + new_time = frame['EventHeader'] +diff -urN spt3g_software_orig/dfmux/python/HardwareMapTools.py spt3g_software/dfmux/python/HardwareMapTools.py +--- spt3g_software_orig/dfmux/python/HardwareMapTools.py 2025-01-14 23:48:47.010901465 -0800 ++++ spt3g_software/dfmux/python/HardwareMapTools.py 2025-01-14 23:49:10.243796786 -0800 +@@ -1,5 +1,5 @@ + from .. import core +-from . import DfMuxWiringMap, DfMuxChannelMapping ++from .._libdfmux import DfMuxWiringMap, DfMuxChannelMapping + import struct, socket + + ''' +@@ -29,7 +29,7 @@ + ''' + Insert a wiring map derived from a pydfmux hardware map into the data + stream ahead of what would otherwise be the first frame. +- ++ + Optionally filter for detectors described by the mask in + (see pydfmux documentation for hwm.channel_maps_from_pstring()) and + detectors in one of the states identified by the state argument. +@@ -40,7 +40,7 @@ + from pydfmux.core import dfmux as pydfmux + self.hwmf = core.G3Frame(core.G3FrameType.Wiring) + hwm = DfMuxWiringMap() +- ++ + if pathstring: + chan_map_query = pydfmux_hwm.channel_maps_from_pstring(pathstring) + else: +@@ -114,7 +114,7 @@ + ''' + Insert a calibration frame following any wiring frame containing a + BolometerPropertiesMap named "NominalBolometerProperties" that has +- the properties of each bolometer as defined by the given pydfmux ++ the properties of each bolometer as defined by the given pydfmux + hardware map. 
+ ''' + +diff -urN spt3g_software_orig/dfmux/python/Housekeeping.py spt3g_software/dfmux/python/Housekeeping.py +--- spt3g_software_orig/dfmux/python/Housekeeping.py 2025-01-14 23:48:47.010901465 -0800 ++++ spt3g_software/dfmux/python/Housekeeping.py 2025-01-14 23:49:10.243796786 -0800 +@@ -1,5 +1,5 @@ + from .. import core +-from . import DfMuxHousekeepingMap, HkBoardInfo, HkMezzanineInfo, HkModuleInfo, HkChannelInfo, DfMuxWiringMap, DfMuxChannelMapping ++from .._libdfmux import DfMuxHousekeepingMap, HkBoardInfo, HkMezzanineInfo, HkModuleInfo, HkChannelInfo, DfMuxWiringMap, DfMuxChannelMapping + + from .TuberClient import TuberClient + import socket, struct, time +@@ -345,7 +345,7 @@ + modhk.channels[k+1] = chanhk + mezzhk.modules[m+1] = modhk + boardhk.mezz[n+1] = mezzhk +- ++ + return boardhk + + @classmethod +diff -urN spt3g_software_orig/dfmux/python/LegacyHousekeeping.py spt3g_software/dfmux/python/LegacyHousekeeping.py +--- spt3g_software_orig/dfmux/python/LegacyHousekeeping.py 2025-01-14 23:48:47.011901461 -0800 ++++ spt3g_software/dfmux/python/LegacyHousekeeping.py 2025-01-14 23:49:10.243796786 -0800 +@@ -1,5 +1,11 @@ + from .. import core +-from . import DfMuxHousekeepingMap, HkBoardInfo, HkMezzanineInfo, HkModuleInfo, HkChannelInfo ++from .._libdfmux import ( ++ DfMuxHousekeepingMap, ++ HkBoardInfo, ++ HkMezzanineInfo, ++ HkModuleInfo, ++ HkChannelInfo, ++) + from .TuberClient import TuberClient + import socket, struct, time + +@@ -11,7 +17,7 @@ + any housekeeping frame that goes by containing the data as of the arrival + of the housekeeping frame. Use in conjunction with a + dfmux.PeriodicHousekeepingCollector to get data at fixed intervals. +- ++ + If collecting real-time data, you may want to set subprocess=True when + adding this module. 
+ ''' +@@ -123,7 +129,7 @@ + modhk.channels[chan] = chanhk + mezzhk.modules[mod] = modhk + boardhk.mezz[mezz] = mezzhk +- ++ + return boardhk + + @classmethod +diff -urN spt3g_software_orig/gcp/python/ARCExtractor.py spt3g_software/gcp/python/ARCExtractor.py +--- spt3g_software_orig/gcp/python/ARCExtractor.py 2025-01-14 23:48:47.011901461 -0800 ++++ spt3g_software/gcp/python/ARCExtractor.py 2025-01-14 23:49:10.244796782 -0800 +@@ -1,7 +1,14 @@ + import numpy as np + import copy + from .. import core +-from . import ACUStatus, ACUState, TrackerStatus, TrackerState, TrackerPointing, CalFile ++from .._libgcp import ( ++ ACUStatus, ++ ACUState, ++ TrackerStatus, ++ TrackerState, ++ TrackerPointing, ++ CalFile, ++) + + @core.usefulfunc + def UnitValue(caldict_entry): +@@ -10,15 +17,15 @@ + if 'UnitValue' in caldict_entry: + return caldict_entry['UnitValue'] + +- try: ++ try: + uname = caldict_entry['UnitName'] + if uname and uname != 'None': + try: + if '/' in uname: + unames = list(filter(None,uname.split('/'))) +- uvalue1 = getattr(core.G3Units, ++ uvalue1 = getattr(core.G3Units, + list(filter(None,unames[0].split(' ')))[0]) +- uvalue2 = getattr(core.G3Units, ++ uvalue2 = getattr(core.G3Units, + list(filter(None,unames[1].split(' ')))[0]) + uvalue = uvalue1 / uvalue2 + else: +@@ -220,7 +227,7 @@ + # XXX units for rates seem to be wrong. 
I think this is in encoder counts + t.az_rate = np.asarray(board['actual_rates'][0], dtype = float) + t.el_rate = np.asarray(board['actual_rates'][1], dtype = float) +- ++ + # Expected values + t.az_command = np.asarray(board['expected'][0]) + t.el_command = np.asarray(board['expected'][1]) +@@ -239,7 +246,7 @@ + t.in_control = core.BoolVector(board['inControl'][0]) + t.in_control_int = core.IntVector(board['inControl'][0]) + t.scan_flag = core.BoolVector(board['scan_flag'][0]) +- ++ + t.lst = np.asarray(board['lst'][0], dtype=float) + + t.source_acquired = np.asarray(board['off_source'][0]) +@@ -275,7 +282,7 @@ + + t.encoder_off_x = np.asarray([board['encoder_off'][0]], dtype=np.double) + t.encoder_off_y = np.asarray([board['encoder_off'][1]], dtype=np.double) +- ++ + t.low_limit_az = np.asarray([board['az_limits'][0]], dtype=np.double) + t.high_limit_az = np.asarray([board['az_limits'][1]], dtype=np.double) + t.low_limit_el = np.asarray([board['el_limits'][0]], dtype=np.double) +@@ -309,7 +316,7 @@ + t.linsens_avg_l2 = np.asarray(board['linear_sensor_avg'][1]) + t.linsens_avg_r1 = np.asarray(board['linear_sensor_avg'][2]) + t.linsens_avg_r2 = np.asarray(board['linear_sensor_avg'][3]) +- ++ + t.telescope_temp = np.asarray([f['array']['weather']['airTemperature'].value]) + t.telescope_pressure = np.asarray([f['array']['weather']['pressure'].value]) + +@@ -443,7 +450,7 @@ + bencherr[key].stop = stop + + info_items = ['benchFocus', 'benchDeadBand', 'benchAcquiredThreshold', +- 'benchPrimaryState', 'benchSecondaryState', ++ 'benchPrimaryState', 'benchSecondaryState', + 'benchFault', 'timeLocked'] + bench_info = core.G3TimestreamMap() + for i, key in enumerate(info_items): +@@ -458,7 +465,7 @@ + f['BenchErrors'] = bencherr + f['BenchInfo'] = bench_info + f['BenchSampleTime'] = board['benchSampleTime'][0] +- ++ + @core.indexmod + def UnpackCryoData(f): + ''' +@@ -545,7 +552,7 @@ + + @core.indexmod + def UnpackPTData(f): +- '''Extracts pulse tube status information to 
PTStatus key ++ '''Extracts pulse tube status information to PTStatus key + in frame''' + + if f.type != core.G3FrameType.GcpSlow: +@@ -614,7 +621,7 @@ + + @core.indexmod + def UnpackWeatherData(f): +- '''Extracts weather status information to Weather key ++ '''Extracts weather status information to Weather key + in frame''' + + if f.type != core.G3FrameType.GcpSlow: +diff -urN spt3g_software_orig/gcp/python/ARCFile.py spt3g_software/gcp/python/ARCFile.py +--- spt3g_software_orig/gcp/python/ARCFile.py 2025-01-14 23:48:47.011901461 -0800 ++++ spt3g_software/gcp/python/ARCFile.py 2025-01-14 23:49:10.244796782 -0800 +@@ -1,4 +1,4 @@ +-from . import ARCFileReader, ARCExtract ++from .._libgcp import ARCFileReader, ARCExtract + + class ARCFile(object): + '''Iterable class for ARC files, as created by GCP. Loop through frames by doing something like: +diff -urN spt3g_software_orig/gcp/python/InfluxDB.py spt3g_software/gcp/python/InfluxDB.py +--- spt3g_software_orig/gcp/python/InfluxDB.py 2025-01-14 23:48:47.012901456 -0800 ++++ spt3g_software/gcp/python/InfluxDB.py 2025-01-14 23:49:10.244796782 -0800 +@@ -3,7 +3,7 @@ + import datetime as dt + from .. import core + from ..core import G3Units as U +-from . import ARCExtractor ++from .._libgcp import ARCExtractor + + def build_field_list(fr): + """ +diff -urN spt3g_software_orig/maps/python/coordsysmodules.py spt3g_software/maps/python/coordsysmodules.py +--- spt3g_software_orig/maps/python/coordsysmodules.py 2025-01-14 23:48:47.012901456 -0800 ++++ spt3g_software/maps/python/coordsysmodules.py 2025-01-14 23:49:10.244796782 -0800 +@@ -1,9 +1,11 @@ + from .. import core + from ..core import G3TimestreamQuat +-from . import MapCoordReference +-from . import get_origin_rotator_timestream, get_boresight_rotator_timestream +-from . 
import get_fk5_j2000_to_gal_quat +- ++from .._libmaps import ( ++ MapCoordReference, ++ get_origin_rotator_timestream, ++ get_boresight_rotator_timestream, ++ get_fk5_j2000_to_gal_quat, ++) + + __all__ = [ + "FillCoordTransRotations", +diff -urN spt3g_software_orig/maps/python/fitsio.py spt3g_software/maps/python/fitsio.py +--- spt3g_software_orig/maps/python/fitsio.py 2025-01-14 23:48:47.012901456 -0800 ++++ spt3g_software/maps/python/fitsio.py 2025-01-14 23:49:10.244796782 -0800 +@@ -1,6 +1,13 @@ + from .. import core +-from . import HealpixSkyMap, FlatSkyMap, G3SkyMapWeights +-from . import MapPolType, MapPolConv, MapCoordReference, MapProjection ++from .._libmaps import ( ++ HealpixSkyMap, ++ FlatSkyMap, ++ G3SkyMapWeights, ++ MapPolType, ++ MapPolConv, ++ MapCoordReference, ++ MapProjection, ++) + + import numpy as np + import os +diff -urN spt3g_software_orig/maps/python/map_modules.py spt3g_software/maps/python/map_modules.py +--- spt3g_software_orig/maps/python/map_modules.py 2025-01-14 23:48:47.012901456 -0800 ++++ spt3g_software/maps/python/map_modules.py 2025-01-14 23:49:10.244796782 -0800 +@@ -1,4 +1,18 @@ +-from .. import core, maps ++from .. import core ++from . 
import ( ++ remove_weights, ++ remove_weights_t, ++ apply_weights, ++ apply_weights_t, ++ MapPolConv, ++ FlatSkyMap, ++ flatten_pol, ++ MapPolType, ++ G3SkyMapWeights, ++ G3SkyMap, ++ reproj_map, ++ G3SkyMapMask, ++) + import numpy as np + + __all__ = [ +@@ -57,10 +71,10 @@ + wmap = frame["Wpol"] + qmap = frame.pop("Q") + umap = frame.pop("U") +- maps.remove_weights(tmap, qmap, umap, wmap, zero_nans=zero_nans) ++ remove_weights(tmap, qmap, umap, wmap, zero_nans=zero_nans) + else: + wmap = frame["Wunpol"] +- maps.remove_weights_t(tmap, wmap, zero_nans=zero_nans) ++ remove_weights_t(tmap, wmap, zero_nans=zero_nans) + + frame["T"] = tmap + if "Wpol" in frame: +@@ -89,10 +103,10 @@ + wmap = frame["Wpol"] + qmap = frame.pop("Q") + umap = frame.pop("U") +- maps.apply_weights(tmap, qmap, umap, wmap) ++ apply_weights(tmap, qmap, umap, wmap) + else: + wmap = frame["Wunpol"] +- maps.apply_weights_t(tmap, wmap) ++ apply_weights_t(tmap, wmap) + + frame["T"] = tmap + if "Wpol" in frame: +@@ -103,7 +117,7 @@ + + + @core.indexmod +-def SetPolConv(frame, pol_conv=maps.MapPolConv.IAU): ++def SetPolConv(frame, pol_conv=MapPolConv.IAU): + """ + Set or change the polarization convention of the input polarized map frame. 
+ If switching between IAU and COSMO conventions, flip the sign of the U map +@@ -114,7 +128,7 @@ + # only polarized frames + return frame + +- if pol_conv == maps.MapPolConv.none or pol_conv is None: ++ if pol_conv == MapPolConv.none or pol_conv is None: + raise ValueError("Polarized maps must have pol_conv set to IAU or COSMO") + + tmap = frame.pop("T") +@@ -169,7 +183,7 @@ + if "Q" not in frame or "U" not in frame: + return + +- if any(not isinstance(frame[k], maps.FlatSkyMap) for k in "QU"): ++ if any(not isinstance(frame[k], FlatSkyMap) for k in "QU"): + return + + ValidateMaps(frame, ignore_missing_weights=True) +@@ -177,13 +191,13 @@ + + if "Wpol" in frame: + wmap = frame.pop("Wpol") +- maps.flatten_pol(qmap, umap, wmap, invert=invert) ++ flatten_pol(qmap, umap, wmap, invert=invert) + frame["Wpol"] = wmap + else: +- maps.flatten_pol(qmap, umap, invert=invert) ++ flatten_pol(qmap, umap, invert=invert) + + tmap.flat_pol = not invert +- ++ + frame["T"] = tmap + frame["Q"] = qmap + frame["U"] = umap +@@ -192,7 +206,7 @@ + + + @core.indexmod +-def MakeMapsPolarized(frame, pol_conv=maps.MapPolConv.IAU): ++def MakeMapsPolarized(frame, pol_conv=MapPolConv.IAU): + """ + Converts individual unpolarized maps to polarized versions of the same map, + with the given polarization convention +@@ -208,16 +222,16 @@ + del frame["Wunpol"] + + qmap = frame["T"].clone(False) +- qmap.pol_type = maps.MapPolType.Q ++ qmap.pol_type = MapPolType.Q + qmap.pol_conv = pol_conv + frame["Q"] = qmap + umap = frame["T"].clone(False) +- umap.pol_type = maps.MapPolType.U ++ umap.pol_type = MapPolType.U + umap.pol_conv = pol_conv + frame["U"] = umap + mask = wgt.to_mask().to_map() + +- wgt_out = maps.G3SkyMapWeights() ++ wgt_out = G3SkyMapWeights() + wgt_out.TT = wgt + wgt_out.TQ = wgt.clone(False) + wgt_out.TU = wgt.clone(False) +@@ -226,7 +240,7 @@ + wgt_out.UU = mask.clone(True) + + for k in wgt_out.keys(): +- wgt_out[k].pol_type = getattr(maps.MapPolType, k) ++ wgt_out[k].pol_type = 
getattr(MapPolType, k) + + frame["Wpol"] = wgt_out + +@@ -242,12 +256,12 @@ + return + + tmap = frame.pop("T") +- tmap.pol_conv = maps.MapPolConv.none ++ tmap.pol_conv = MapPolConv.none + frame["T"] = tmap + + wgt = frame.pop("Wpol").TT +- wgt.pol_conv = maps.MapPolConv.none +- wgt_out = maps.G3SkyMapWeights() ++ wgt.pol_conv = MapPolConv.none ++ wgt_out = G3SkyMapWeights() + wgt_out.TT = wgt + frame["Wunpol"] = wgt_out + +@@ -298,19 +312,19 @@ + ) + + if k in ["Wpol", "Wunpol"]: +- if frame[k].TT.pol_type == maps.MapPolType.TT: ++ if frame[k].TT.pol_type == MapPolType.TT: + continue + # set weights polarization properties + w = frame.pop(k) + for wk in w.keys(): +- w[wk].pol_type = getattr(maps.MapPolType, wk) ++ w[wk].pol_type = getattr(MapPolType, wk) + if k == "Wpol": + w[wk].pol_conv = frame["U"].pol_conv + frame[k] = w + + if k in "TQU": + if k == "U": +- if isinstance(frame[k], maps.FlatSkyMap) and ( ++ if isinstance(frame[k], FlatSkyMap) and ( + frame[k].flat_pol != frame["Q"].flat_pol + ): + core.log_fatal( +@@ -357,7 +371,7 @@ + "Map frame %s: %s and U maps have different pol_conv" % (map_id, k), + unit="ValidateMaps", + ) +- if isinstance(frame[k].QQ, maps.FlatSkyMap): ++ if isinstance(frame[k].QQ, FlatSkyMap): + if frame[k].flat_pol != frame["Q"].flat_pol: + core.log_fatal( + "Map frame %s: %s and U maps have different flat_pol" % (map_id, k), +@@ -447,17 +461,17 @@ + polarized = map_stub.polarized + + T = map_stub.clone(False) +- T.pol_type = maps.MapPolType.T ++ T.pol_type = MapPolType.T + self.map_frame["T"] = T + if polarized: + Q = map_stub.clone(False) +- Q.pol_type = maps.MapPolType.Q ++ Q.pol_type = MapPolType.Q + self.map_frame["Q"] = Q + U = map_stub.clone(False) +- U.pol_type = maps.MapPolType.U ++ U.pol_type = MapPolType.U + self.map_frame["U"] = U + if weighted: +- W = maps.G3SkyMapWeights(map_stub) ++ W = G3SkyMapWeights(map_stub) + self.map_frame["Wpol" if polarized else "Wunpol"] = W + + def __call__(self, frame): +@@ -492,12 +506,12 @@ 
+ + if isinstance(maps_in, list): + for m in maps_in: +- if isinstance(m, maps.G3SkyMap): ++ if isinstance(m, G3SkyMap): + k = str(m.pol_type) + if k not in "TQU": + raise ValueError("Input map has invalid pol_type %s" % k) + self.map_frame[k] = m +- elif isinstance(m, maps.G3SkyMapWeights): ++ elif isinstance(m, G3SkyMapWeights): + self.map_frame["Wpol" if m.polarized else "Wunpol"] = m + else: + raise TypeError("maps_in must be G3SkyMap or G3SkyMapWeights") +@@ -923,14 +937,14 @@ + + if key in "TQUH": + mnew = self.stub.clone(False) +- maps.reproj_map( ++ reproj_map( + m, mnew, rebin=self.rebin, interp=self.interp, mask=self.mask + ) + + elif key in ["Wpol", "Wunpol"]: +- mnew = maps.G3SkyMapWeights(self.stub) ++ mnew = G3SkyMapWeights(self.stub) + for wkey in mnew.keys(): +- maps.reproj_map( ++ reproj_map( + m[wkey], + mnew[wkey], + rebin=self.rebin, +@@ -957,10 +971,10 @@ + if mask is None: + return + if self._mask is None and self.partial: +- if isinstance(mask, maps.G3SkyMapMask): ++ if isinstance(mask, G3SkyMapMask): + self._mask = mask +- elif isinstance(mask, maps.G3SkyMap): +- self._mask = maps.G3SkyMapMask( ++ elif isinstance(mask, G3SkyMap): ++ self._mask = G3SkyMapMask( + mask, use_data=True, zero_nans=True, zero_infs=True + ) + elif isinstance(mask, np.ndarray): +@@ -978,7 +992,7 @@ + ) + mask_copy[bad] = 0 + tmp[:] = mask_copy +- self._mask = maps.G3SkyMapMask(tmp, use_data=True) ++ self._mask = G3SkyMapMask(tmp, use_data=True) + else: + raise TypeError("Mask must be a G3SkyMapMask, G3SkyMap, or numpy array") + +diff -urN spt3g_software_orig/maps/python/skymapaddons.py spt3g_software/maps/python/skymapaddons.py +--- spt3g_software_orig/maps/python/skymapaddons.py 2025-01-14 23:48:47.012901456 -0800 ++++ spt3g_software/maps/python/skymapaddons.py 2025-01-14 23:49:10.244796782 -0800 +@@ -1,6 +1,6 @@ + import numpy + import warnings +-from . 
import G3SkyMapWeights, G3SkyMap, FlatSkyMap, G3SkyMapMask ++from .._libmaps import G3SkyMapWeights, G3SkyMap, FlatSkyMap, G3SkyMapMask + + # This file adds extra functionality to the python interface to G3SkyMap and + # G3SkyMapWeights. This is done in ways that exploit a large fraction of diff --git a/wheels/spt3g_disable_netcdf.patch b/wheels/spt3g_disable_netcdf.patch new file mode 100644 index 00000000..c529f69c --- /dev/null +++ b/wheels/spt3g_disable_netcdf.patch @@ -0,0 +1,15 @@ +diff -urN spt3g_software_orig/dfmux/CMakeLists.txt spt3g_software/dfmux/CMakeLists.txt +--- spt3g_software_orig/dfmux/CMakeLists.txt 2025-01-06 11:29:16.606854658 -0800 ++++ spt3g_software/dfmux/CMakeLists.txt 2025-01-07 10:15:20.506190895 -0800 +@@ -1,5 +1,9 @@ +-find_package(NetCDF MODULE) +-find_package(HDF5) ++if(NOT DISABLE_NETCDF) ++ find_package(NetCDF MODULE) ++endif() ++if(NOT DISABLE_HDF5) ++ find_package(HDF5) ++endif() + if (NetCDF_FOUND AND HDF5_FOUND) + set(DFMUX_LIB_EXTRA_SRC src/NetCDFDump.cxx) + else() diff --git a/wheels/temp_static_libs.patch b/wheels/temp_static_libs.patch new file mode 100644 index 00000000..c653f9d5 --- /dev/null +++ b/wheels/temp_static_libs.patch @@ -0,0 +1,51 @@ +diff -urN spt3g_software_orig/calibration/CMakeLists.txt spt3g_software/calibration/CMakeLists.txt +--- spt3g_software_orig/calibration/CMakeLists.txt 2025-01-14 08:30:26.736781581 -0800 ++++ spt3g_software/calibration/CMakeLists.txt 2025-01-14 09:42:28.714000945 -0800 +@@ -1,4 +1,4 @@ +-add_spt3g_library(calibration SHARED ++add_spt3g_library(calibration STATIC + src/BoloProperties.cxx src/PointingProperties.cxx + ) + target_link_libraries(calibration PUBLIC core) +diff -urN spt3g_software_orig/core/CMakeLists.txt spt3g_software/core/CMakeLists.txt +--- spt3g_software_orig/core/CMakeLists.txt 2025-01-14 08:30:26.738781571 -0800 ++++ spt3g_software/core/CMakeLists.txt 2025-01-14 09:42:37.038959548 -0800 +@@ -5,7 +5,7 @@ + set(CORE_EXTRA_SRCS "") + endif(APPLE) + 
+-add_spt3g_library(core SHARED ++add_spt3g_library(core STATIC + src/G3EventBuilder.cxx src/G3Frame.cxx src/G3TimeStamp.cxx + src/G3Pipeline.cxx src/G3Writer.cxx src/G3Reader.cxx + src/G3InfiniteSource.cxx src/G3Logging.cxx src/G3PrintfLogger.cxx +diff -urN spt3g_software_orig/dfmux/CMakeLists.txt spt3g_software/dfmux/CMakeLists.txt +--- spt3g_software_orig/dfmux/CMakeLists.txt 2025-01-14 08:30:26.829781120 -0800 ++++ spt3g_software/dfmux/CMakeLists.txt 2025-01-14 09:42:48.909900519 -0800 +@@ -10,7 +10,7 @@ + set(DFMUX_LIB_EXTRA_SRC "") + endif() + +-add_spt3g_library(dfmux SHARED ++add_spt3g_library(dfmux STATIC + src/DfMuxBuilder.cxx src/DfMuxCollector.cxx src/DfMuxSample.cxx + src/LegacyDfMuxCollector.cxx src/HardwareMap.cxx src/DfMuxCollator.cxx + src/Housekeeping.cxx +diff -urN spt3g_software_orig/gcp/CMakeLists.txt spt3g_software/gcp/CMakeLists.txt +--- spt3g_software_orig/gcp/CMakeLists.txt 2025-01-14 08:30:26.756781482 -0800 ++++ spt3g_software/gcp/CMakeLists.txt 2025-01-14 09:43:01.127839764 -0800 +@@ -1,4 +1,4 @@ +-add_spt3g_library(gcp SHARED ++add_spt3g_library(gcp STATIC + src/ARCFileReader.cxx + src/ACUStatus.cxx src/TrackerStatus.cxx src/TrackerPointing.cxx + src/GCPMuxDataDecoder.cxx src/GCPLogger.cxx +diff -urN spt3g_software_orig/maps/CMakeLists.txt spt3g_software/maps/CMakeLists.txt +--- spt3g_software_orig/maps/CMakeLists.txt 2025-01-14 08:30:26.758781472 -0800 ++++ spt3g_software/maps/CMakeLists.txt 2025-01-14 09:43:16.164764990 -0800 +@@ -1,4 +1,4 @@ +-add_spt3g_library(maps SHARED ++add_spt3g_library(maps STATIC + src/chealpix.c + src/G3SkyMap.cxx + src/G3SkyMapMask.cxx diff --git a/wheels/test_local_cibuildwheel.sh b/wheels/test_local_cibuildwheel.sh index 28932f89..2b879535 100755 --- a/wheels/test_local_cibuildwheel.sh +++ b/wheels/test_local_cibuildwheel.sh @@ -5,10 +5,10 @@ export CIBW_DEBUG_KEEP_CONTAINER=TRUE -export CIBW_BUILD="cp311-manylinux_x86_64" +export CIBW_BUILD="cp312-manylinux_x86_64" export 
CIBW_MANYLINUX_X86_64_IMAGE="manylinux2014" export CIBW_BUILD_VERBOSITY=3 -export CIBW_ENVIRONMENT_LINUX="CC=gcc CXX=g++ CFLAGS='-O3 -fPIC' CXXFLAGS='-O3 -fPIC -std=c++14'" +export CIBW_ENVIRONMENT_LINUX="CC=gcc CXX=g++ CFLAGS='-O3 -fPIC' CXXFLAGS='-O3 -fPIC -std=c++17'" export CIBW_BEFORE_BUILD_LINUX="./wheels/install_deps_linux.sh" export CIBW_REPAIR_WHEEL_COMMAND_LINUX="./wheels/repair_wheel_linux.sh {dest_dir} {wheel}" export CIBW_BEFORE_TEST="export OMP_NUM_THREADS=2" diff --git a/wheels/test_local_macos.sh b/wheels/test_local_macos.sh index 88b1f226..d00a8b8c 100755 --- a/wheels/test_local_macos.sh +++ b/wheels/test_local_macos.sh @@ -41,17 +41,17 @@ echo "Using homebrew installation in ${brew_root}" # Export compiler information use_gcc=yes -export CC=gcc-12 -export CXX=g++-12 -export FC=gfortran-12 +export CC=gcc-14 +export CXX=g++-14 +export FC=gfortran-14 # export CC=clang # export CXX=clang++ # export FC= export CFLAGS="-O3 -fPIC" export FCFLAGS="-O3 -fPIC" # Use the second when building with clang -CXXFLAGS="-O3 -fPIC -std=c++11" -#CXXFLAGS="-O3 -fPIC -std=c++11 -stdlib=libc++" +CXXFLAGS="-O3 -fPIC -std=c++17" +#CXXFLAGS="-O3 -fPIC -std=c++17 -stdlib=libc++" # Install most dependencies with homebrew, including python-3.9 eval ${brew_com} install flac @@ -103,39 +103,6 @@ export LD_LIBRARY_PATH="${brew_root}/lib" export DYLD_LIBRARY_PATH="${brew_root}/lib" export CPATH="${brew_root}/include" -# Install the qpoint package -have_qpoint=$(python -c " -try: - import qpoint - print('yes') -except ImportError: - print('no') -") -if [ ${have_qpoint} = "yes" ]; then - echo "qpoint package already installed" -else - qpoint_version=828126de9f195f88bfaf1996527f633382457461 - qpoint_dir="qpoint_temp" - echo "Installing qpoint version ${qpoint_version}" - - echo "Fetching qpoint..." - if [ ! -d ${qpoint_dir} ]; then - git clone https://github.com/arahlin/qpoint.git ${qpoint_dir} - fi - - echo "Building qpoint..." 
- pushd ${qpoint_dir} \ - && git checkout master \ - && if [ "x$(git branch -l | grep so3g)" != x ]; then \ - git branch -D so3g; fi \ - && git fetch \ - && git checkout -b so3g ${qpoint_version} \ - && python3 setup.py clean \ - && python3 setup.py build \ - && python3 setup.py install \ - && popd > /dev/null -fi - # Tell setup.py to look in the homebrew prefix for libraries when # building spt3g and so3g. export SPT3G_BUILD_CMAKE_INCLUDE_PATH="${brew_root}/include"