diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml index fb6378764..bbdecca59 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.yml +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -12,6 +12,14 @@ body: description: What is the problem? A clear and concise description of the bug. validations: required: true + - type: checkboxes + id: regression + attributes: + label: Regression Issue + description: What is a regression? If it worked in a previous version but doesn't in the latest version, it's considered a regression. In this case, please provide specific version number in the report. + options: + - label: Select this option if this issue appears to be a regression. + required: false - type: textarea id: expected attributes: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 69216b9e9..ea94c5bfd 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,16 +7,14 @@ on: - 'docs' env: - BUILDER_VERSION: v0.9.59 + BUILDER_VERSION: v0.9.67 BUILDER_SOURCE: releases BUILDER_HOST: https://d19elf31gohf1l.cloudfront.net PACKAGE_NAME: aws-crt-python LINUX_BASE_IMAGE: ubuntu-18-x64 RUN: ${{ github.run_id }}-${{ github.run_number }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }} - AWS_REGION: us-east-1 + CRT_CI_ROLE: ${{ secrets.CRT_CI_ROLE_ARN }} + AWS_DEFAULT_REGION: us-east-1 jobs: manylinux1: @@ -28,14 +26,20 @@ jobs: - x64 - x86 python: - - cp37-cp37m - cp38-cp38 - cp39-cp39 + permissions: + id-token: write # This is required for requesting the JWT steps: - - name: Build ${{ env.PACKAGE_NAME }} - run: | - aws s3 cp s3://aws-crt-test-stuff/ci/${{ env.BUILDER_VERSION }}/linux-container-ci.sh ./linux-container-ci.sh && chmod a+x ./linux-container-ci.sh - ./linux-container-ci.sh ${{ env.BUILDER_VERSION }} aws-crt-manylinux1-${{ matrix.image }} build -p ${{ env.PACKAGE_NAME }} 
--python /opt/python/${{ matrix.python }}/bin/python + - name: configure AWS credentials (containers) + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ env.CRT_CI_ROLE }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + - name: Build ${{ env.PACKAGE_NAME }} + run: | + aws s3 cp s3://aws-crt-test-stuff/ci/${{ env.BUILDER_VERSION }}/linux-container-ci.sh ./linux-container-ci.sh && chmod a+x ./linux-container-ci.sh + ./linux-container-ci.sh ${{ env.BUILDER_VERSION }} aws-crt-manylinux1-${{ matrix.image }} build -p ${{ env.PACKAGE_NAME }} --python /opt/python/${{ matrix.python }}/bin/python manylinux2014: runs-on: ubuntu-20.04 # latest @@ -47,21 +51,28 @@ jobs: - x86 - aarch64 python: - - cp37-cp37m - cp38-cp38 - cp39-cp39 - cp310-cp310 - cp311-cp311 - cp312-cp312 + - cp313-cp313 + permissions: + id-token: write # This is required for requesting the JWT steps: - # Only aarch64 needs this, but it doesn't hurt anything - - name: Install qemu/docker - run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes + - name: configure AWS credentials (containers) + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ env.CRT_CI_ROLE }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + # Only aarch64 needs this, but it doesn't hurt anything + - name: Install qemu/docker + run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes - - name: Build ${{ env.PACKAGE_NAME }} - run: | - aws s3 cp s3://aws-crt-test-stuff/ci/${{ env.BUILDER_VERSION }}/linux-container-ci.sh ./linux-container-ci.sh && chmod a+x ./linux-container-ci.sh - ./linux-container-ci.sh ${{ env.BUILDER_VERSION }} aws-crt-manylinux2014-${{ matrix.image }} build -p ${{ env.PACKAGE_NAME }} --python /opt/python/${{ matrix.python }}/bin/python + - name: Build ${{ env.PACKAGE_NAME }} + run: | + aws s3 cp s3://aws-crt-test-stuff/ci/${{ env.BUILDER_VERSION }}/linux-container-ci.sh ./linux-container-ci.sh && chmod a+x ./linux-container-ci.sh + 
./linux-container-ci.sh ${{ env.BUILDER_VERSION }} aws-crt-manylinux2014-${{ matrix.image }} build -p ${{ env.PACKAGE_NAME }} --python /opt/python/${{ matrix.python }}/bin/python musllinux-1-1: runs-on: ubuntu-22.04 # latest @@ -72,21 +83,29 @@ jobs: - x64 - aarch64 python: - - cp37-cp37m - cp38-cp38 - cp39-cp39 - cp310-cp310 - cp311-cp311 - cp312-cp312 + - cp313-cp313 + permissions: + id-token: write # This is required for requesting the JWT steps: - # Only aarch64 needs this, but it doesn't hurt anything - - name: Install qemu/docker - run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes + - name: configure AWS credentials (containers) + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ env.CRT_CI_ROLE }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} - - name: Build ${{ env.PACKAGE_NAME }} - run: | - aws s3 cp s3://aws-crt-test-stuff/ci/${{ env.BUILDER_VERSION }}/linux-container-ci.sh ./linux-container-ci.sh && chmod a+x ./linux-container-ci.sh - ./linux-container-ci.sh ${{ env.BUILDER_VERSION }} aws-crt-musllinux-1-1-${{ matrix.image }} build -p ${{ env.PACKAGE_NAME }} --python /opt/python/${{ matrix.python }}/bin/python + # Only aarch64 needs this, but it doesn't hurt anything + - name: Install qemu/docker + run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes + + - name: Build ${{ env.PACKAGE_NAME }} + run: | + aws s3 cp s3://aws-crt-test-stuff/ci/${{ env.BUILDER_VERSION }}/linux-container-ci.sh ./linux-container-ci.sh && chmod a+x ./linux-container-ci.sh + ./linux-container-ci.sh ${{ env.BUILDER_VERSION }} aws-crt-musllinux-1-1-${{ matrix.image }} build -p ${{ env.PACKAGE_NAME }} --python /opt/python/${{ matrix.python }}/bin/python raspberry: runs-on: ubuntu-20.04 # latest @@ -95,16 +114,23 @@ jobs: matrix: image: - raspbian-bullseye + permissions: + id-token: write # This is required for requesting the JWT steps: - # set arm arch - - name: Install qemu/docker - run: docker run --rm 
--privileged multiarch/qemu-user-static --reset -p yes + - name: configure AWS credentials (containers) + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ env.CRT_CI_ROLE }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} - - name: Build ${{ env.PACKAGE_NAME }} - run: | - aws s3 cp s3://aws-crt-test-stuff/ci/${{ env.BUILDER_VERSION }}/linux-container-ci.sh ./linux-container-ci.sh && chmod a+x ./linux-container-ci.sh - ./linux-container-ci.sh ${{ env.BUILDER_VERSION }} aws-crt-${{ matrix.image }} build -p ${{ env.PACKAGE_NAME }} + # set arm arch + - name: Install qemu/docker + run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes + - name: Build ${{ env.PACKAGE_NAME }} + run: | + aws s3 cp s3://aws-crt-test-stuff/ci/${{ env.BUILDER_VERSION }}/linux-container-ci.sh ./linux-container-ci.sh && chmod a+x ./linux-container-ci.sh + ./linux-container-ci.sh ${{ env.BUILDER_VERSION }} aws-crt-${{ matrix.image }} build -p ${{ env.PACKAGE_NAME }} linux-compat: runs-on: ubuntu-22.04 # latest @@ -115,13 +141,18 @@ jobs: - fedora-34-x64 - opensuse-leap - rhel8-x64 + permissions: + id-token: write # This is required for requesting the JWT steps: - # We can't use the `uses: docker://image` version yet, GitHub lacks authentication for actions -> packages - - name: Build ${{ env.PACKAGE_NAME }} - run: | - aws s3 cp s3://aws-crt-test-stuff/ci/${{ env.BUILDER_VERSION }}/linux-container-ci.sh ./linux-container-ci.sh && chmod a+x ./linux-container-ci.sh - ./linux-container-ci.sh ${{ env.BUILDER_VERSION }} aws-crt-${{ matrix.image }} build -p ${{ env.PACKAGE_NAME }} - + - name: configure AWS credentials (containers) + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ env.CRT_CI_ROLE }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + - name: Build ${{ env.PACKAGE_NAME }} + run: | + aws s3 cp s3://aws-crt-test-stuff/ci/${{ env.BUILDER_VERSION }}/linux-container-ci.sh ./linux-container-ci.sh && chmod a+x 
./linux-container-ci.sh + ./linux-container-ci.sh ${{ env.BUILDER_VERSION }} aws-crt-${{ matrix.image }} build -p ${{ env.PACKAGE_NAME }} linux-compiler-compat: runs-on: ubuntu-22.04 # latest @@ -138,8 +169,17 @@ jobs: - gcc-6 - gcc-7 - gcc-8 + permissions: + id-token: write # This is required for requesting the JWT steps: - # We can't use the `uses: docker://image` version yet, GitHub lacks authentication for actions -> packages + - name: configure AWS credentials (containers) + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ env.CRT_CI_ROLE }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + - name: Caller Identity + run: | + aws sts get-caller-identity - name: Build ${{ env.PACKAGE_NAME }} run: | aws s3 cp s3://aws-crt-test-stuff/ci/${{ env.BUILDER_VERSION }}/linux-container-ci.sh ./linux-container-ci.sh && chmod a+x ./linux-container-ci.sh @@ -147,7 +187,14 @@ jobs: use-system-libcrypto: runs-on: ubuntu-20.04 # latest + permissions: + id-token: write # This is required for requesting the JWT steps: + - name: configure AWS credentials (containers) + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ env.CRT_CI_ROLE }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Build ${{ env.PACKAGE_NAME }} env: AWS_CRT_BUILD_USE_SYSTEM_LIBCRYPTO: '1' @@ -173,21 +220,51 @@ jobs: strategy: matrix: arch: [x86, x64] + permissions: + id-token: write # This is required for requesting the JWT steps: - - name: Build ${{ env.PACKAGE_NAME }} + consumers - run: | - python -c "from urllib.request import urlretrieve; urlretrieve('${{ env.BUILDER_HOST }}/${{ env.BUILDER_SOURCE }}/${{ env.BUILDER_VERSION }}/builder.pyz?run=${{ env.RUN }}', 'builder.pyz')" - python builder.pyz build -p ${{ env.PACKAGE_NAME }} --python "C:\\hostedtoolcache\\windows\\Python\\3.7.9\\${{ matrix.arch }}\\python.exe" + - name: configure AWS credentials (containers) + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ 
env.CRT_CI_ROLE }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + - name: Build ${{ env.PACKAGE_NAME }} + consumers + run: | + python -c "from urllib.request import urlretrieve; urlretrieve('${{ env.BUILDER_HOST }}/${{ env.BUILDER_SOURCE }}/${{ env.BUILDER_VERSION }}/builder.pyz?run=${{ env.RUN }}', 'builder.pyz')" + python builder.pyz build -p ${{ env.PACKAGE_NAME }} --python "C:\\hostedtoolcache\\windows\\Python\\3.8.10\\${{ matrix.arch }}\\python.exe" - osx: - runs-on: macos-13 # latest + macos: + runs-on: macos-14 # latest + permissions: + id-token: write # This is required for requesting the JWT steps: - - name: Build ${{ env.PACKAGE_NAME }} + consumers - run: | - python3 -c "from urllib.request import urlretrieve; urlretrieve('${{ env.BUILDER_HOST }}/${{ env.BUILDER_SOURCE }}/${{ env.BUILDER_VERSION }}/builder.pyz?run=${{ env.RUN }}', 'builder')" - chmod a+x builder - ./builder build -p ${{ env.PACKAGE_NAME }} + - name: configure AWS credentials (containers) + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ env.CRT_CI_ROLE }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + - name: Build ${{ env.PACKAGE_NAME }} + consumers + run: | + python3 -c "from urllib.request import urlretrieve; urlretrieve('${{ env.BUILDER_HOST }}/${{ env.BUILDER_SOURCE }}/${{ env.BUILDER_VERSION }}/builder.pyz?run=${{ env.RUN }}', 'builder')" + chmod a+x builder + ./builder build -p ${{ env.PACKAGE_NAME }} + + macos-x64: + runs-on: macos-14-large # latest + permissions: + id-token: write # This is required for requesting the JWT + steps: + - name: configure AWS credentials (containers) + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ env.CRT_CI_ROLE }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + - name: Build ${{ env.PACKAGE_NAME }} + consumers + run: | + python3 -c "from urllib.request import urlretrieve; urlretrieve('${{ env.BUILDER_HOST }}/${{ env.BUILDER_SOURCE }}/${{ env.BUILDER_VERSION }}/builder.pyz?run=${{ env.RUN }}', 
'builder')" + chmod a+x builder + ./builder build -p ${{ env.PACKAGE_NAME }} openbsd: @@ -197,9 +274,16 @@ jobs: matrix: # OpenBSD only supports the two most recent releases version: ['7.4', '7.5'] + permissions: + id-token: write # This is required for requesting the JWT steps: + - name: configure AWS credentials (containers) + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ env.CRT_CI_ROLE }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} # Cannot use builder to checkout as OpenBSD doesn't ship git in the base install - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: true - name: Build ${{ env.PACKAGE_NAME }} + consumers @@ -209,7 +293,7 @@ jobs: version: ${{ matrix.version }} cpu_count: 4 shell: bash - environment_variables: AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_DEFAULT_REGION AWS_REGION + environment_variables: AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN AWS_DEFAULT_REGION run: | sudo pkg_add awscli py3-pip py3-urllib3 python3 -c "from urllib.request import urlretrieve; urlretrieve('${{ env.BUILDER_HOST }}/${{ env.BUILDER_SOURCE }}/${{ env.BUILDER_VERSION }}/builder.pyz', 'builder')" @@ -218,8 +302,15 @@ jobs: freebsd: runs-on: ubuntu-22.04 # latest + permissions: + id-token: write # This is required for requesting the JWT steps: - - uses: actions/checkout@v3 + - name: configure AWS credentials (containers) + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ env.CRT_CI_ROLE }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + - uses: actions/checkout@v4 with: submodules: true @@ -230,26 +321,21 @@ jobs: version: '14.0' cpu_count: 4 shell: bash - environment_variables: AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_DEFAULT_REGION AWS_REGION + environment_variables: AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN AWS_DEFAULT_REGION run: | - sudo pkg install -y python3 py39-urllib3 py39-pip py39-awscli cmake + sudo pkg install -y python3 devel/py-pip 
net/py-urllib3 devel/py-awscli cmake python3 -c "from urllib.request import urlretrieve; urlretrieve('${{ env.BUILDER_HOST }}/${{ env.BUILDER_SOURCE }}/${{ env.BUILDER_VERSION }}/builder.pyz', 'builder')" chmod a+x builder ./builder build -p ${{ env.PACKAGE_NAME }} # check that tests requiring custom env-vars or AWS credentials are simply skipped - tests-ok-without-env-vars: + tests-ok-without-creds: runs-on: ubuntu-22.04 # latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: true - - name: Run tests without env-vars or AWS creds - env: - # unset env-vars that provide AWS credentials - AWS_ACCESS_KEY_ID: - AWS_SECRET_ACCESS_KEY: - AWS_DEFAULT_REGION: + - name: Run tests run: | python3 -m pip install --upgrade --requirement requirements-dev.txt python3 -m pip install . --verbose @@ -257,8 +343,15 @@ jobs: package-source: runs-on: ubuntu-22.04 # latest + permissions: + id-token: write # This is required for requesting the JWT steps: - - uses: actions/checkout@v3 + - name: configure AWS credentials (containers) + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ env.CRT_CI_ROLE }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + - uses: actions/checkout@v4 with: submodules: true - name: Package source + install @@ -272,7 +365,7 @@ jobs: check-docs: runs-on: ubuntu-22.04 # latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: true - name: Check docs @@ -285,7 +378,7 @@ jobs: runs-on: ubuntu-22.04 # latest steps: - name: Checkout Source - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: true fetch-depth: 0 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index b32b191fa..3feac4bf8 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -14,7 +14,7 @@ jobs: contents: write # allow push steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: true diff --git 
a/.github/workflows/issue-regression-labeler.yml b/.github/workflows/issue-regression-labeler.yml new file mode 100644 index 000000000..bd000719d --- /dev/null +++ b/.github/workflows/issue-regression-labeler.yml @@ -0,0 +1,32 @@ +# Apply potential regression label on issues +name: issue-regression-label +on: + issues: + types: [opened, edited] +jobs: + add-regression-label: + runs-on: ubuntu-latest + permissions: + issues: write + steps: + - name: Fetch template body + id: check_regression + uses: actions/github-script@v7 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + TEMPLATE_BODY: ${{ github.event.issue.body }} + with: + script: | + const regressionPattern = /\[x\] Select this option if this issue appears to be a regression\./i; + const template = `${process.env.TEMPLATE_BODY}` + const match = regressionPattern.test(template); + core.setOutput('is_regression', match); + - name: Manage regression label + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + if [ "${{ steps.check_regression.outputs.is_regression }}" == "true" ]; then + gh issue edit ${{ github.event.issue.number }} --add-label "potential-regression" -R ${{ github.repository }} + else + gh issue edit ${{ github.event.issue.number }} --remove-label "potential-regression" -R ${{ github.repository }} + fi diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 927417f98..7d152222e 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -24,7 +24,7 @@ jobs: steps: - name: Checkout Source - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Build and Test run: | diff --git a/.gitignore b/.gitignore index d6360bbc0..2b72fd2b6 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,5 @@ -# Created by https://www.toptal.com/developers/gitignore/api/git,c++,cmake,python,visualstudio,visualstudiocode,macos -# Edit at https://www.toptal.com/developers/gitignore?templates=git,c++,cmake,python,visualstudio,visualstudiocode,macos +# Created by 
https://www.toptal.com/developers/gitignore/api/c++,git,cmake,macos,python,visualstudio,visualstudiocode,pycharm +# Edit at https://www.toptal.com/developers/gitignore?templates=c++,git,cmake,macos,python,visualstudio,visualstudiocode,pycharm ### C++ ### # Prerequisites @@ -100,6 +100,120 @@ Temporary Items # iCloud generated files *.icloud +### PyCharm ### +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/ +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# AWS User-specific +.idea/**/aws.xml + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. 
+# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# SonarLint plugin +.idea/sonarlint/ + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +### PyCharm Patch ### +# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 + +# *.iml +# modules.xml +# .idea/misc.xml +# *.ipr + +# Sonarlint plugin +# https://plugins.jetbrains.com/plugin/7973-sonarlint +.idea/**/sonarlint/ + +# SonarQube Plugin +# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin +.idea/**/sonarIssues.xml + +# Markdown Navigator plugin +# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced +.idea/**/markdown-navigator.xml +.idea/**/markdown-navigator-enh.xml +.idea/**/markdown-navigator/ + +# Cache file creation bug +# See https://youtrack.jetbrains.com/issue/JBR-2257 +.idea/$CACHE_FILE$ + +# CodeStream plugin +# https://plugins.jetbrains.com/plugin/12206-codestream +.idea/codestream.xml + +# Azure Toolkit for IntelliJ plugin +# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij +.idea/**/azureSettings.xml + ### Python ### # Byte-compiled / optimized / DLL files __pycache__/ @@ -261,84 +375,6 @@ cython_debug/ # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
#.idea/ -# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider -# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 - -# User-specific stuff -.idea/**/workspace.xml -.idea/**/tasks.xml -.idea/**/usage.statistics.xml -.idea/**/dictionaries -.idea/**/shelf - -# AWS User-specific -.idea/**/aws.xml - -# Generated files -.idea/**/contentModel.xml - -# Sensitive or high-churn files -.idea/**/dataSources/ -.idea/**/dataSources.ids -.idea/**/dataSources.local.xml -.idea/**/sqlDataSources.xml -.idea/**/dynamic.xml -.idea/**/uiDesigner.xml -.idea/**/dbnavigator.xml - -# Gradle -.idea/**/gradle.xml -.idea/**/libraries - -# Gradle and Maven with auto-import -# When using Gradle or Maven with auto-import, you should exclude module files, -# since they will be recreated, and may cause churn. Uncomment if using -# auto-import. -# .idea/artifacts -# .idea/compiler.xml -# .idea/jarRepositories.xml -# .idea/modules.xml -# .idea/*.iml -# .idea/modules -# *.iml -# *.ipr - -# CMake -cmake-build-*/ - -# Mongo Explorer plugin -.idea/**/mongoSettings.xml - -# File-based project format -*.iws - -# IntelliJ -out/ - -# mpeltonen/sbt-idea plugin -.idea_modules/ - -# JIRA plugin -atlassian-ide-plugin.xml - -# Cursive Clojure plugin -.idea/replstate.xml - -# SonarLint plugin -.idea/sonarlint/ - -# Crashlytics plugin (for Android Studio and IntelliJ) -com_crashlytics_export_strings.xml -crashlytics.properties -crashlytics-build.properties -fabric.properties - -# Editor-based Rest Client -.idea/httpRequests - -# Android studio 3.1+ serialized cache file -.idea/caches/build_file_checksums.ser - ### Python Patch ### # Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration poetry.toml @@ -754,8 +790,7 @@ FodyWeavers.xsd ### VisualStudio Patch ### # Additional files built by Visual Studio -# End of -# 
https://www.toptal.com/developers/gitignore/api/git,c++,cmake,python,visualstudio,visualstudiocode,macos +# End of https://www.toptal.com/developers/gitignore/api/c++,git,cmake,macos,python,visualstudio,visualstudiocode,pycharm # credentials .key diff --git a/README.md b/README.md index 1d64c50f1..bd2359c7a 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ This library is licensed under the Apache 2.0 License. ## Minimum Requirements: -* Python 3.7+ +* Python 3.8+ ## Installation @@ -45,10 +45,10 @@ To simplify installation, aws-crt-python has its own copy of libcrypto. This lets you install a wheel from PyPI without having OpenSSL installed. Unix wheels on PyPI come with libcrypto statically compiled in. Code to build libcrypto comes from [AWS-LC](https://github.com/aws/aws-lc). -AWS-LC's code is included in the PyPI source package, +AWS-LC's code is included in the PyPI source package, and the git repository includes it as a submodule. -If you need aws-crt-python to use the libcrypto included on your system, +If you need aws-crt-python to use the libcrypto included on your system, set environment variable `AWS_CRT_BUILD_USE_SYSTEM_LIBCRYPTO=1` while building from source: ```sh @@ -59,6 +59,21 @@ AWS_CRT_BUILD_USE_SYSTEM_LIBCRYPTO=1 python3 -m pip install --no-binary :all: -- You can ignore all this on Windows and Apple platforms, where aws-crt-python uses the OS's default libraries for TLS and cryptography math. +### AWS_CRT_BUILD_USE_SYSTEM_LIBS ### + +aws-crt-python depends on several C libraries that make up the AWS Common Runtime (libaws-c-common, libaws-c-s3, etc). +By default, these libraries are built along with aws-crt-python and statically compiled in +(their source code is under [crt/](crt/)). + +To skip building these dependencies, because they're already available on your system, +set environment variable `AWS_CRT_BUILD_USE_SYSTEM_LIBS=1` while building from source: + +```sh +AWS_CRT_BUILD_USE_SYSTEM_LIBS=1 python3 -m pip install . 
+``` + +If these dependencies are available as both static and shared libs, you can force the static ones to be used by setting: `AWS_CRT_BUILD_FORCE_STATIC_LIBS=1` + ## Mac-Only TLS Behavior Please note that on Mac, once a private key is used with a certificate, that certificate-key pair is imported into the Mac Keychain. All subsequent uses of that certificate will use the stored private key and ignore anything passed in programmatically. Beginning in v0.6.2, when a stored private key from the Keychain is used, the following will be logged at the "info" log level: diff --git a/awscrt/auth.py b/awscrt/auth.py index a41648854..9afa78c36 100644 --- a/awscrt/auth.py +++ b/awscrt/auth.py @@ -507,7 +507,7 @@ class AwsSignedBodyHeaderType(IntEnum): """Do not add a header.""" X_AMZ_CONTENT_SHA_256 = 1 - """Add the "x-amz-content-sha-256" header with the canonical request's signed body value""" + """Add the "x-amz-content-sha256" header with the canonical request's signed body value""" class AwsSigningConfig(NativeResource): diff --git a/awscrt/checksums.py b/awscrt/checksums.py index 2ea6a3f64..06a0005b1 100644 --- a/awscrt/checksums.py +++ b/awscrt/checksums.py @@ -21,3 +21,12 @@ def crc32c(input: bytes, previous_crc32c: int = 0) -> int: Returns an unsigned 32-bit integer. """ return _awscrt.checksums_crc32c(input, previous_crc32c) + + +def crc64nvme(input: bytes, previous_crc64nvme: int = 0) -> int: + """ + Perform a CRC64 NVME computation. + If continuing to update a running CRC, pass its value into `previous_crc64nvme`. + Returns an unsigned 64-bit integer. 
+ """ + return _awscrt.checksums_crc64nvme(input, previous_crc64nvme) diff --git a/awscrt/crypto.py b/awscrt/crypto.py index 35b7027fa..dd344d927 100644 --- a/awscrt/crypto.py +++ b/awscrt/crypto.py @@ -91,7 +91,12 @@ class RSASignatureAlgorithm(IntEnum): PKCSv1.5 padding with sha256 hash function """ - PSS_SHA256 = 1 + PKCS1_5_SHA1 = 1 + """ + PKCSv1.5 padding with sha1 hash function + """ + + PSS_SHA256 = 2 """ PSS padding with sha256 hash function """ @@ -118,6 +123,24 @@ def new_public_key_from_pem_data(pem_data: Union[str, bytes, bytearray, memoryvi """ return RSA(binding=_awscrt.rsa_public_key_from_pem_data(pem_data)) + @staticmethod + def new_private_key_from_der_data(der_data: Union[bytes, bytearray, memoryview]) -> 'RSA': + """ + Creates a new instance of private RSA key pair from der data. + Expects key in PKCS1 format. + Raises ValueError if pem does not have private key object. + """ + return RSA(binding=_awscrt.rsa_private_key_from_der_data(der_data)) + + @staticmethod + def new_public_key_from_der_data(der_data: Union[bytes, bytearray, memoryview]) -> 'RSA': + """ + Creates a new instance of public RSA key pair from der data. + Expects key in PKCS1 format. + Raises ValueError if pem does not have public key object. + """ + return RSA(binding=_awscrt.rsa_public_key_from_der_data(der_data)) + def encrypt(self, encryption_algorithm: RSAEncryptionAlgorithm, plaintext: Union[bytes, bytearray, memoryview]) -> bytes: """ diff --git a/awscrt/eventstream/__init__.py b/awscrt/eventstream/__init__.py index 09e24ada4..945027527 100644 --- a/awscrt/eventstream/__init__.py +++ b/awscrt/eventstream/__init__.py @@ -5,9 +5,8 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. 
-from collections.abc import ByteString from enum import IntEnum -from typing import Any +from typing import Any, Union from uuid import UUID __all__ = ['HeaderType', 'Header'] @@ -135,7 +134,7 @@ def from_int64(cls, name: str, value: int) -> 'Header': return cls(name, value, HeaderType.INT64) @classmethod - def from_byte_buf(cls, name: str, value: ByteString) -> 'Header': + def from_byte_buf(cls, name: str, value: Union[bytes, bytearray]) -> 'Header': """Create a Header of type :attr:`~HeaderType.BYTE_BUF` The value must be a bytes-like object""" @@ -246,7 +245,7 @@ def value_as_int64(self) -> int: Raises an exception if type is not :attr:`~HeaderType.INT64`""" return self._value_as(HeaderType.INT64) - def value_as_byte_buf(self) -> ByteString: + def value_as_byte_buf(self) -> Union[bytes, bytearray]: """Return value of bytes Raises an exception if type is not :attr:`~HeaderType.BYTE_BUF`""" diff --git a/awscrt/eventstream/rpc.py b/awscrt/eventstream/rpc.py index b70352e02..ff0096e26 100644 --- a/awscrt/eventstream/rpc.py +++ b/awscrt/eventstream/rpc.py @@ -11,11 +11,11 @@ import awscrt.exceptions from awscrt.eventstream import Header from awscrt.io import ClientBootstrap, SocketOptions, TlsConnectionOptions -from collections.abc import ByteString, Callable +from collections.abc import Callable from concurrent.futures import Future from enum import IntEnum from functools import partial -from typing import Optional, Sequence +from typing import Optional, Sequence, Union __all__ = [ 'MessageType', @@ -381,7 +381,7 @@ def send_protocol_message( self, *, headers: Optional[Sequence[Header]] = None, - payload: Optional[ByteString] = None, + payload: Optional[Union[bytes, bytearray]] = None, message_type: MessageType, flags: Optional[int] = None, on_flush: Callable = None) -> 'concurrent.futures.Future': @@ -483,7 +483,7 @@ def activate( *, operation: str, headers: Sequence[Header] = None, - payload: ByteString = None, + payload: Union[bytes, bytearray] = None, 
message_type: MessageType, flags: int = None, on_flush: Callable = None): @@ -553,7 +553,7 @@ def send_message( self, *, headers: Sequence[Header] = None, - payload: ByteString = None, + payload: Union[bytes, bytearray] = None, message_type: MessageType, flags: int = None, on_flush: Callable = None) -> 'concurrent.futures.Future': diff --git a/awscrt/s3.py b/awscrt/s3.py index 9a0aab525..03999b640 100644 --- a/awscrt/s3.py +++ b/awscrt/s3.py @@ -10,13 +10,12 @@ from awscrt import NativeResource from awscrt.http import HttpRequest from awscrt.io import ClientBootstrap, TlsConnectionOptions -from awscrt.auth import AwsCredentials, AwsCredentialsProvider, AwsSignatureType, AwsSignedBodyHeaderType, AwsSignedBodyValue, AwsSigningAlgorithm, AwsSigningConfig from awscrt.auth import AwsCredentialsProvider, AwsSignatureType, AwsSignedBodyHeaderType, AwsSignedBodyValue, \ AwsSigningAlgorithm, AwsSigningConfig import awscrt.exceptions import threading from dataclasses import dataclass -from typing import List, Optional, Tuple +from typing import List, Optional, Tuple, Sequence from enum import IntEnum @@ -103,6 +102,9 @@ class S3ChecksumAlgorithm(IntEnum): SHA256 = 4 """SHA-256""" + CRC64NVME = 5 + """CRC64NVME""" + class S3ChecksumLocation(IntEnum): """Where to put the checksum.""" @@ -205,6 +207,15 @@ class S3Client(NativeResource): client can use for buffering data for requests. Default values scale with target throughput and are currently between 2GiB and 8GiB (may change in future) + + network_interface_names: (Optional[Sequence(str)]) + **THIS IS AN EXPERIMENTAL AND UNSTABLE API.** + A sequence of network interface names. The client will distribute the + connections across network interfaces. If any interface name is invalid, goes down, + or has any issues like network access, you will see connection failures. + This option is only supported on Linux, MacOS, and platforms that have either SO_BINDTODEVICE or IP_BOUND_IF. It + is not supported on Windows. 
`AWS_ERROR_PLATFORM_NOT_SUPPORTED` will be raised on unsupported platforms. On + Linux, SO_BINDTODEVICE is used and requires kernel version >= 5.7 or root privileges. """ __slots__ = ('shutdown_event', '_region') @@ -222,7 +233,8 @@ def __init__( multipart_upload_threshold=None, throughput_target_gbps=None, enable_s3express=False, - memory_limit=None): + memory_limit=None, + network_interface_names: Optional[Sequence[str]] = None): assert isinstance(bootstrap, ClientBootstrap) or bootstrap is None assert isinstance(region, str) assert isinstance(signing_config, AwsSigningConfig) or signing_config is None @@ -235,6 +247,7 @@ def __init__( throughput_target_gbps, float) or throughput_target_gbps is None assert isinstance(enable_s3express, bool) or enable_s3express is None + assert isinstance(network_interface_names, Sequence) or network_interface_names is None if credential_provider and signing_config: raise ValueError("'credential_provider' has been deprecated in favor of 'signing_config'. " @@ -270,6 +283,10 @@ def on_shutdown(): throughput_target_gbps = 0 if memory_limit is None: memory_limit = 0 + if network_interface_names is not None: + # ensure this is a list, so it's simpler to process in C + if not isinstance(network_interface_names, list): + network_interface_names = list(network_interface_names) self._binding = _awscrt.s3_client_new( bootstrap, @@ -284,6 +301,7 @@ def on_shutdown(): throughput_target_gbps, enable_s3express, memory_limit, + network_interface_names, s3_client_core) def make_request( @@ -314,10 +332,25 @@ def make_request( request (HttpRequest): The overall outgoing API request for S3 operation. If the request body is a file, set send_filepath for better performance. - operation_name(Optional[str]): Optional S3 operation name (e.g. "CreateBucket"). - This will only be used when `type` is :attr:`~S3RequestType.DEFAULT`; - it is automatically populated for other types. + operation_name(Optional[str]): S3 operation name (e.g. "CreateBucket"). 
+ This MUST be set when `type` is :attr:`~S3RequestType.DEFAULT`. + It is ignored for other types, since the operation is implicitly known. + See `S3 API documentation + `_ + for the canonical list of names. + This name is used to fill out details in metrics and error reports. + It also drives some operation-specific behavior. + If you pass the wrong name, you risk getting the wrong behavior. + + For example, every operation except "GetObject" has its response checked + for error, even if the HTTP status-code was 200 OK + (see `knowledge center `_). + If you used the :attr:`~S3RequestType.DEFAULT` type to do + `GetObject `_, + but mis-named it "Download", and the object looked like XML with an error code, + then the request would fail. You risk logging the full response body, + and leaking sensitive data. recv_filepath (Optional[str]): Optional file path. If set, the response body is written directly to a file and the @@ -499,6 +532,9 @@ def __init__( assert isinstance(part_size, int) or part_size is None assert isinstance(multipart_upload_threshold, int) or multipart_upload_threshold is None + if type == S3RequestType.DEFAULT and not operation_name: + raise ValueError("'operation_name' must be set when using S3RequestType.DEFAULT") + super().__init__() self._finished_future = Future() @@ -567,12 +603,11 @@ class S3ResponseError(awscrt.exceptions.AwsCrtError): headers (list[tuple[str, str]]): Headers from HTTP response. body (Optional[bytes]): Body of HTTP response (if any). This is usually XML. It may be None in the case of a HEAD response. - operation_name (Optional[str]): Name of the S3 operation that failed (if known). + operation_name: Name of the S3 operation that failed. For example, if a :attr:`~S3RequestType.PUT_OBJECT` fails this could be "PutObject", "CreateMultipartUpload", "UploadPart", "CompleteMultipartUpload", or others. For :attr:`~S3RequestType.DEFAULT`, this is the `operation_name` passed to :meth:`S3Client.make_request()`. 
- If the S3 operation name is unknown, this will be None. code (int): CRT error code. name (str): CRT error name. message (str): CRT error message. diff --git a/codebuild/cd/manylinux-x64-build.yml b/codebuild/cd/manylinux-x64-build.yml index eefd0a9b1..a3dd03af3 100644 --- a/codebuild/cd/manylinux-x64-build.yml +++ b/codebuild/cd/manylinux-x64-build.yml @@ -8,12 +8,10 @@ phases: commands: - export CC=gcc - cd aws-crt-python - - /opt/python/cp37-cp37m/bin/python ./continuous-delivery/update-version.py + - /opt/python/cp38-cp38/bin/python ./continuous-delivery/update-version.py build: commands: - echo Build started on `date` - - /opt/python/cp37-cp37m/bin/python setup.py sdist bdist_wheel - - auditwheel repair --plat manylinux1_x86_64 dist/awscrt-*cp37-cp37m-linux_x86_64.whl - /opt/python/cp38-cp38/bin/python setup.py sdist bdist_wheel - auditwheel repair --plat manylinux1_x86_64 dist/awscrt-*cp38-cp38-linux_x86_64.whl - /opt/python/cp39-cp39/bin/python setup.py sdist bdist_wheel diff --git a/codebuild/cd/manylinux-x86-build.yml b/codebuild/cd/manylinux-x86-build.yml index c255c8a24..4046bce48 100644 --- a/codebuild/cd/manylinux-x86-build.yml +++ b/codebuild/cd/manylinux-x86-build.yml @@ -8,12 +8,10 @@ phases: commands: - export CC=gcc - cd aws-crt-python - - /opt/python/cp37-cp37m/bin/python ./continuous-delivery/update-version.py + - /opt/python/cp38-cp38/bin/python ./continuous-delivery/update-version.py build: commands: - echo Build started on `date` - - /opt/python/cp37-cp37m/bin/python setup.py sdist bdist_wheel - - auditwheel repair --plat manylinux1_i686 dist/awscrt-*cp37-cp37m-linux_i686.whl - /opt/python/cp38-cp38/bin/python setup.py sdist bdist_wheel - auditwheel repair --plat manylinux1_i686 dist/awscrt-*cp38-cp38-linux_i686.whl - /opt/python/cp39-cp39/bin/python setup.py sdist bdist_wheel diff --git a/continuous-delivery/build-wheels-manylinux2014-aarch64.sh b/continuous-delivery/build-wheels-manylinux2014-aarch64.sh index a140b7d67..273de1cd7 100755 
--- a/continuous-delivery/build-wheels-manylinux2014-aarch64.sh +++ b/continuous-delivery/build-wheels-manylinux2014-aarch64.sh @@ -4,9 +4,6 @@ set -ex /opt/python/cp39-cp39/bin/python ./continuous-delivery/update-version.py -/opt/python/cp37-cp37m/bin/python setup.py sdist bdist_wheel -auditwheel repair --plat manylinux2014_aarch64 dist/awscrt-*cp37*.whl - /opt/python/cp38-cp38/bin/python setup.py sdist bdist_wheel auditwheel repair --plat manylinux2014_aarch64 dist/awscrt-*cp38*.whl @@ -19,9 +16,17 @@ auditwheel repair --plat manylinux2014_aarch64 dist/awscrt-*cp310*.whl /opt/python/cp311-cp311/bin/python setup.py sdist bdist_wheel auditwheel repair --plat manylinux2014_aarch64 dist/awscrt-*cp311*.whl -# Don't need to build wheels for Python 3.12 and later. +# Don't need to build wheels for Python 3.12. # The 3.11 wheel uses the stable ABI, so it works with newer versions too. +# We are using the Python 3.13 stable ABI from Python 3.13 onwards because of deprecated functions. +# Manylinux images don't contain setuptools from Python 3.13, so we need to install it. +# Install in a custom location due to access issues. 
+/opt/python/cp313-cp313/bin/python -m pip install --target ./local -r requirements-dev.txt +export PYTHONPATH=./local:$PYTHONPATH +/opt/python/cp313-cp313/bin/python setup.py sdist bdist_wheel +auditwheel repair --plat manylinux2014_aarch64 dist/awscrt-*cp313*.whl + rm dist/*.whl cp -rv wheelhouse/* dist/ diff --git a/continuous-delivery/build-wheels-manylinux2014-x86_64.sh b/continuous-delivery/build-wheels-manylinux2014-x86_64.sh index 41ce9fdef..cf8d31d61 100755 --- a/continuous-delivery/build-wheels-manylinux2014-x86_64.sh +++ b/continuous-delivery/build-wheels-manylinux2014-x86_64.sh @@ -4,9 +4,6 @@ set -ex /opt/python/cp39-cp39/bin/python ./continuous-delivery/update-version.py -/opt/python/cp37-cp37m/bin/python setup.py sdist bdist_wheel -auditwheel repair --plat manylinux2014_x86_64 dist/awscrt-*cp37*.whl - /opt/python/cp38-cp38/bin/python setup.py sdist bdist_wheel auditwheel repair --plat manylinux2014_x86_64 dist/awscrt-*cp38*.whl @@ -19,9 +16,17 @@ auditwheel repair --plat manylinux2014_x86_64 dist/awscrt-*cp310*.whl /opt/python/cp311-cp311/bin/python setup.py sdist bdist_wheel auditwheel repair --plat manylinux2014_x86_64 dist/awscrt-*cp311*.whl -# Don't need to build wheels for Python 3.12 and later. +# Don't need to build wheels for Python 3.12. # The 3.11 wheel uses the stable ABI, so it works with newer versions too. +# We are using the Python 3.13 stable ABI from Python 3.13 onwards because of deprecated functions. +# Manylinux images don't contain setuptools from Python 3.13, so we need to install it. +# Install in a custom location due to access issues. 
+/opt/python/cp313-cp313/bin/python -m pip install --target ./local -r requirements-dev.txt +export PYTHONPATH=./local:$PYTHONPATH +/opt/python/cp313-cp313/bin/python setup.py sdist bdist_wheel +auditwheel repair --plat manylinux2014_x86_64 dist/awscrt-*cp313*.whl + rm dist/*.whl cp -rv wheelhouse/* dist/ diff --git a/continuous-delivery/build-wheels-musllinux-1-1-aarch64.sh b/continuous-delivery/build-wheels-musllinux-1-1-aarch64.sh index 0cbc1a196..d34d54563 100755 --- a/continuous-delivery/build-wheels-musllinux-1-1-aarch64.sh +++ b/continuous-delivery/build-wheels-musllinux-1-1-aarch64.sh @@ -4,9 +4,6 @@ set -ex /opt/python/cp39-cp39/bin/python ./continuous-delivery/update-version.py -/opt/python/cp37-cp37m/bin/python setup.py sdist bdist_wheel -auditwheel repair --plat musllinux_1_1_aarch64 dist/awscrt-*cp37*.whl - /opt/python/cp38-cp38/bin/python setup.py sdist bdist_wheel auditwheel repair --plat musllinux_1_1_aarch64 dist/awscrt-*cp38*.whl @@ -19,9 +16,17 @@ auditwheel repair --plat musllinux_1_1_aarch64 dist/awscrt-*cp310*.whl /opt/python/cp311-cp311/bin/python setup.py sdist bdist_wheel auditwheel repair --plat musllinux_1_1_aarch64 dist/awscrt-*cp311*.whl -# Don't need to build wheels for Python 3.12 and later. +# Don't need to build wheels for Python 3.12. # The 3.11 wheel uses the stable ABI, so it works with newer versions too. +# We are using the Python 3.13 stable ABI from Python 3.13 onwards because of deprecated functions. +# Manylinux images don't contain setuptools from Python 3.13, so we need to install it. +# Install in a custom location due to access issues. 
+/opt/python/cp313-cp313/bin/python -m pip install --target ./local -r requirements-dev.txt +export PYTHONPATH=./local:$PYTHONPATH +/opt/python/cp313-cp313/bin/python setup.py sdist bdist_wheel +auditwheel repair --plat musllinux_1_1_aarch64 dist/awscrt-*cp313*.whl + rm dist/*.whl cp -rv wheelhouse/* dist/ diff --git a/continuous-delivery/build-wheels-musllinux-1-1-x86_64.sh b/continuous-delivery/build-wheels-musllinux-1-1-x86_64.sh index 6637d95ac..503af47b3 100755 --- a/continuous-delivery/build-wheels-musllinux-1-1-x86_64.sh +++ b/continuous-delivery/build-wheels-musllinux-1-1-x86_64.sh @@ -4,9 +4,6 @@ set -ex /opt/python/cp39-cp39/bin/python ./continuous-delivery/update-version.py -/opt/python/cp37-cp37m/bin/python setup.py sdist bdist_wheel -auditwheel repair --plat musllinux_1_1_x86_64 dist/awscrt-*cp37*.whl - /opt/python/cp38-cp38/bin/python setup.py sdist bdist_wheel auditwheel repair --plat musllinux_1_1_x86_64 dist/awscrt-*cp38*.whl @@ -19,9 +16,17 @@ auditwheel repair --plat musllinux_1_1_x86_64 dist/awscrt-*cp310*.whl /opt/python/cp311-cp311/bin/python setup.py sdist bdist_wheel auditwheel repair --plat musllinux_1_1_x86_64 dist/awscrt-*cp311*.whl -# Don't need to build wheels for Python 3.12 and later. +# Don't need to build wheels for Python 3.12. # The 3.11 wheel uses the stable ABI, so it works with newer versions too. +# We are using the Python 3.13 stable ABI from Python 3.13 onwards because of deprecated functions. +# Manylinux images don't contain setuptools from Python 3.13, so we need to install it. +# Install in a custom location due to access issues. 
+/opt/python/cp313-cp313/bin/python -m pip install --target ./local -r requirements-dev.txt +export PYTHONPATH=./local:$PYTHONPATH +/opt/python/cp313-cp313/bin/python setup.py sdist bdist_wheel +auditwheel repair --plat musllinux_1_1_x86_64 dist/awscrt-*cp313*.whl + rm dist/*.whl cp -rv wheelhouse/* dist/ diff --git a/continuous-delivery/build-wheels-osx.sh b/continuous-delivery/build-wheels-osx.sh index a3a92bc63..6804018a6 100755 --- a/continuous-delivery/build-wheels-osx.sh +++ b/continuous-delivery/build-wheels-osx.sh @@ -5,13 +5,15 @@ set -ex /Library/Frameworks/Python.framework/Versions/3.9/bin/python3 ./continuous-delivery/update-version.py -/Library/Frameworks/Python.framework/Versions/3.7/bin/python3 setup.py sdist bdist_wheel /Library/Frameworks/Python.framework/Versions/3.8/bin/python3 setup.py sdist bdist_wheel /Library/Frameworks/Python.framework/Versions/3.9/bin/python3 setup.py sdist bdist_wheel /Library/Frameworks/Python.framework/Versions/3.10/bin/python3 setup.py sdist bdist_wheel /Library/Frameworks/Python.framework/Versions/3.11/bin/python3 setup.py sdist bdist_wheel -# Don't need to build wheels for Python 3.12 and later. +# Don't need to build wheels for Python 3.12. # The 3.11 wheel uses the stable ABI, so it works with newer versions too. +# We are using the Python 3.13 stable ABI from Python 3.13 onwards because of deprecated functions. 
+/Library/Frameworks/Python.framework/Versions/3.13/bin/python3 setup.py sdist bdist_wheel + #now you just need to run twine (that's in a different script) diff --git a/continuous-delivery/build-wheels-win32.bat b/continuous-delivery/build-wheels-win32.bat index 6a3e53977..16df4c870 100644 --- a/continuous-delivery/build-wheels-win32.bat +++ b/continuous-delivery/build-wheels-win32.bat @@ -1,12 +1,17 @@ "C:\Program Files (x86)\Python39-32\python.exe" .\continuous-delivery\update-version.py || goto error -"C:\Program Files (x86)\Python37-32\python.exe" setup.py sdist bdist_wheel || goto error "C:\Program Files (x86)\Python38-32\python.exe" setup.py sdist bdist_wheel || goto error "C:\Program Files (x86)\Python39-32\python.exe" setup.py sdist bdist_wheel || goto error "C:\Program Files (x86)\Python310-32\python.exe" setup.py sdist bdist_wheel || goto error "C:\Program Files (x86)\Python311-32\python.exe" setup.py sdist bdist_wheel || goto error +:: Don't need to build wheels for Python 3.12. +:: The 3.11 wheel uses the stable ABI, so it works with newer versions too. + +:: We are using the 3.13 stable ABI from 3.13 onwards because of deprecated functions. 
+"C:\Program Files (x86)\Python313-32\python.exe" setup.py sdist bdist_wheel || goto error + goto :EOF :error diff --git a/continuous-delivery/build-wheels-win64.bat b/continuous-delivery/build-wheels-win64.bat index 5a862e6e1..9b4971a2f 100644 --- a/continuous-delivery/build-wheels-win64.bat +++ b/continuous-delivery/build-wheels-win64.bat @@ -1,11 +1,16 @@ "C:\Program Files\Python39\python.exe" continuous-delivery\update-version.py || goto error -"C:\Program Files\Python37\python.exe" setup.py sdist bdist_wheel || goto error "C:\Program Files\Python38\python.exe" setup.py sdist bdist_wheel || goto error "C:\Program Files\Python39\python.exe" setup.py sdist bdist_wheel || goto error "C:\Program Files\Python310\python.exe" setup.py sdist bdist_wheel || goto error "C:\Program Files\Python311\python.exe" setup.py sdist bdist_wheel || goto error +:: Don't need to build wheels for Python 3.12. +:: The 3.11 wheel uses the stable ABI, so it works with newer versions too. + +:: We are using the 3.13 stable ABI from 3.13 onwards because of deprecated functions. 
+"C:\Program Files\Python313\python.exe" setup.py sdist bdist_wheel || goto error + goto :EOF :error diff --git a/continuous-delivery/sanity-check-test-pypi.bat b/continuous-delivery/sanity-check-test-pypi.bat index 949911612..8cc2d0166 100644 --- a/continuous-delivery/sanity-check-test-pypi.bat +++ b/continuous-delivery/sanity-check-test-pypi.bat @@ -1,8 +1,8 @@ FOR /F "delims=" %%A in ('git describe --tags') do ( set TAG_VERSION=%%A ) set CURRENT_VERSION=%TAG_VERSION:v=% -"C:\Program Files\Python37\python.exe" continuous-delivery\pip-install-with-retry.py --no-cache-dir -i https://testpypi.python.org/simple --user awscrt==%CURRENT_VERSION% || goto error -"C:\Program Files\Python37\python.exe" continuous-delivery\test-pip-install.py || goto error +"C:\Program Files\Python38\python.exe" continuous-delivery\pip-install-with-retry.py --no-cache-dir -i https://testpypi.python.org/simple --user awscrt==%CURRENT_VERSION% || goto error +"C:\Program Files\Python38\python.exe" continuous-delivery\test-pip-install.py || goto error goto :EOF diff --git a/crt/CMakeLists.txt b/crt/CMakeLists.txt index 05c1df8a6..afda9e150 100644 --- a/crt/CMakeLists.txt +++ b/crt/CMakeLists.txt @@ -1,6 +1,6 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. -cmake_minimum_required(VERSION 3.1) +cmake_minimum_required(VERSION 3.9) # This CMakeLists.txt exists so we can build all the C libraries we depend on # simultaneously. This is much faster than building dependencies one at a time. 
@@ -30,23 +30,38 @@ include(CTest) # (On Windows and Apple we use the default OS libraries) if(UNIX AND NOT APPLE) option(USE_OPENSSL "Set this if you want to use your system's OpenSSL compatible libcrypto" OFF) + include(AwsPrebuildDependency) if(NOT USE_OPENSSL) - set(DISABLE_GO ON CACHE BOOL "Build without using Go, we don't want the extra dependency") - set(BUILD_LIBSSL OFF CACHE BOOL "Don't need libssl, only need libcrypto") + + set(AWSLC_CMAKE_ARGUMENTS + -DDISABLE_GO=ON # Build without using Go, we don't want the extra dependency + -DDISABLE_PERL=ON # Build without using Perl, we don't want the extra dependency + -DBUILD_LIBSSL=OFF # Don't need libssl, only need libcrypto + -DBUILD_TESTING=OFF + ) if(CMAKE_C_COMPILER_ID MATCHES "GNU" AND CMAKE_C_COMPILER_VERSION VERSION_LESS "5.0") - set(DISABLE_PERL OFF CACHE BOOL "Build with Perl to avoid using pre-compiled binary with AVX512") - set(MY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX ON CACHE BOOL "Disable AVX512 on old GCC that not supports it") - else() - set(DISABLE_PERL ON CACHE BOOL "Build without using Perl, we don't want the extra dependency") + # Disable AVX512 on old GCC that does not support it. + list(APPEND AWSLC_CMAKE_ARGUMENTS -DMY_ASSEMBLER_IS_TOO_OLD_FOR_512AVX=ON) endif() - add_subdirectory(aws-lc) + # s2n-tls uses libcrypto during its configuration, so we need to prebuild aws-lc. + aws_prebuild_dependency( + DEPENDENCY_NAME AWSLC + SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/aws-lc + CMAKE_ARGUMENTS ${AWSLC_CMAKE_ARGUMENTS} + ) endif() - set(UNSAFE_TREAT_WARNINGS_AS_ERRORS OFF CACHE BOOL "") - add_subdirectory(s2n) + # prebuild s2n-tls.
+ aws_prebuild_dependency( + DEPENDENCY_NAME S2N + SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/s2n + CMAKE_ARGUMENTS + -DUNSAFE_TREAT_WARNINGS_AS_ERRORS=OFF + -DBUILD_TESTING=OFF + ) endif() add_subdirectory(aws-c-common) diff --git a/crt/aws-c-auth b/crt/aws-c-auth index 53a31bacf..3982bd75f 160000 --- a/crt/aws-c-auth +++ b/crt/aws-c-auth @@ -1 +1 @@ -Subproject commit 53a31bacf2918e848e00b052d2e25cba0be069d9 +Subproject commit 3982bd75fea74efd8f9b462b27fedd4599db4f53 diff --git a/crt/aws-c-cal b/crt/aws-c-cal index 96c47e339..fbbe2612a 160000 --- a/crt/aws-c-cal +++ b/crt/aws-c-cal @@ -1 +1 @@ -Subproject commit 96c47e339d030d1fa4eaca201be948bc4442510d +Subproject commit fbbe2612a3385d1ded02a52d20ad7fd2da4501f4 diff --git a/crt/aws-c-common b/crt/aws-c-common index 6d974f92c..7a6f5df20 160000 --- a/crt/aws-c-common +++ b/crt/aws-c-common @@ -1 +1 @@ -Subproject commit 6d974f92c1d86391c1dcb1173239adf757c52b2d +Subproject commit 7a6f5df201cb4b1910932ea3221de83edaa39880 diff --git a/crt/aws-c-compression b/crt/aws-c-compression index ea1d421a4..c6c1191e5 160000 --- a/crt/aws-c-compression +++ b/crt/aws-c-compression @@ -1 +1 @@ -Subproject commit ea1d421a421ad83a540309a94c38d50b6a5d836b +Subproject commit c6c1191e525e5aa6ead9e1afc392e35d3b50331e diff --git a/crt/aws-c-event-stream b/crt/aws-c-event-stream index 1a70c50f7..d2dcc9344 160000 --- a/crt/aws-c-event-stream +++ b/crt/aws-c-event-stream @@ -1 +1 @@ -Subproject commit 1a70c50f78a6e706f1f91a4ed138478271b6d9d3 +Subproject commit d2dcc9344dae24de320866045d85166d8a91a0d1 diff --git a/crt/aws-c-http b/crt/aws-c-http index d83f8d701..fc3eded24 160000 --- a/crt/aws-c-http +++ b/crt/aws-c-http @@ -1 +1 @@ -Subproject commit d83f8d70143ddce5ab4e479175fbd44ba994211b +Subproject commit fc3eded2465c37d07fd9cc15e9b5b011224c9c9a diff --git a/crt/aws-c-io b/crt/aws-c-io index 878b4fa02..fcb38c804 160000 --- a/crt/aws-c-io +++ b/crt/aws-c-io @@ -1 +1 @@ -Subproject commit 878b4fa027bda4041493f06e0562d5e98bb3deb8 +Subproject 
commit fcb38c804364dd627c335da752a99a125a88f6e9 diff --git a/crt/aws-c-mqtt b/crt/aws-c-mqtt index ed7bbd68c..627c3334e 160000 --- a/crt/aws-c-mqtt +++ b/crt/aws-c-mqtt @@ -1 +1 @@ -Subproject commit ed7bbd68c03d7022c915a2924740ab7992ad2311 +Subproject commit 627c3334e52021aa8d5772b6ca076884610f3219 diff --git a/crt/aws-c-s3 b/crt/aws-c-s3 index 6588f9a71..21a4ab4da 160000 --- a/crt/aws-c-s3 +++ b/crt/aws-c-s3 @@ -1 +1 @@ -Subproject commit 6588f9a714ee7a8be1bddd63ea5ea1ea224d00b4 +Subproject commit 21a4ab4dacd1eaf61e6c044c7a299d68fd4c1876 diff --git a/crt/aws-c-sdkutils b/crt/aws-c-sdkutils index 8c7af71f9..ce09f7976 160000 --- a/crt/aws-c-sdkutils +++ b/crt/aws-c-sdkutils @@ -1 +1 @@ -Subproject commit 8c7af71f91ed5b9d2a043d51f120495f43723f80 +Subproject commit ce09f79768653dbdc810fc14cad8685dd90acba1 diff --git a/crt/aws-checksums b/crt/aws-checksums index aac442a2d..3e4101b9f 160000 --- a/crt/aws-checksums +++ b/crt/aws-checksums @@ -1 +1 @@ -Subproject commit aac442a2dbbb5e72d0a3eca8313cf65e7e1cac2f +Subproject commit 3e4101b9f85a2c090774d27ae2131fca1082f522 diff --git a/crt/aws-lc b/crt/aws-lc index 4e54dd836..1be42a3e1 160000 --- a/crt/aws-lc +++ b/crt/aws-lc @@ -1 +1 @@ -Subproject commit 4e54dd8363396f257d7a2317c48101e18170e6fb +Subproject commit 1be42a3e16a53c229690ae8215f0de8e2a1a54e7 diff --git a/crt/s2n b/crt/s2n index 114ccab0f..2e79e7efe 160000 --- a/crt/s2n +++ b/crt/s2n @@ -1 +1 @@ -Subproject commit 114ccab0ff2cde491203ac841837d0d39b767412 +Subproject commit 2e79e7efeb26f06eb59a1d4f3444ea63fc3e20c3 diff --git a/docsrc/source/api/checksums.rst b/docsrc/source/api/checksums.rst new file mode 100644 index 000000000..17b6fd75a --- /dev/null +++ b/docsrc/source/api/checksums.rst @@ -0,0 +1,5 @@ +awscrt.checksums +================ + +.. 
automodule:: awscrt.checksums + :members: diff --git a/docsrc/source/index.rst b/docsrc/source/index.rst index 0558f3f54..c2b65240a 100644 --- a/docsrc/source/index.rst +++ b/docsrc/source/index.rst @@ -12,6 +12,7 @@ API Reference :maxdepth: 2 api/auth + api/checksums api/common api/crypto api/exceptions diff --git a/setup.py b/setup.py index 43b053b4d..2eb888a4e 100644 --- a/setup.py +++ b/setup.py @@ -1,8 +1,6 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. - import codecs -import distutils.ccompiler import glob import os import os.path @@ -15,6 +13,12 @@ import sys import sysconfig from wheel.bdist_wheel import bdist_wheel +if sys.platform == 'win32': + # distutils is deprecated in Python 3.10 and removed in 3.12. However, it still works because Python defines a compatibility interface as long as setuptools is installed. + # We don't have an official alternative for distutils.ccompiler as of September 2024. See: https://github.com/pypa/setuptools/issues/2806 + # Once that issue is resolved, we can migrate to the official solution. + # For now, restrict distutils to Windows only, where it's needed. 
+ import distutils.ccompiler def is_64bit(): @@ -33,6 +37,18 @@ def is_development_mode(): return 'develop' in sys.argv +def get_xcode_major_version(): + """Return major version of xcode present on the system""" + try: + output = subprocess.check_output( + ['xcodebuild', '-version'], text=True) + version_line = output.split('\n')[0] + version = version_line.split(' ')[-1] + return int(version.split('.')[0]) + except BaseException: + return 0 + + def run_cmd(args): print('>', subprocess.list2cmdline(args)) subprocess.check_call(args) @@ -52,7 +68,7 @@ def is_macos_universal2(): return False cflags = sysconfig.get_config_var('CFLAGS') - return '-arch x86_64' in cflags and '-arch x86_64' in cflags + return '-arch x86_64' in cflags and '-arch arm64' in cflags def determine_cross_compile_args(): @@ -122,8 +138,20 @@ def get_cmake_path(): raise Exception("CMake must be installed to build from source.") +def using_system_libs(): + """If true, don't build any dependencies. Use the libs that are already on the system.""" + return (os.getenv('AWS_CRT_BUILD_USE_SYSTEM_LIBS') == '1' + or not os.path.exists(os.path.join(PROJECT_DIR, 'crt', 'aws-c-common', 'CMakeLists.txt'))) + + def using_system_libcrypto(): - return os.getenv('AWS_CRT_BUILD_USE_SYSTEM_LIBCRYPTO') == '1' + """If true, don't build AWS-LC. Use the libcrypto that's already on the system.""" + return using_system_libs() or os.getenv('AWS_CRT_BUILD_USE_SYSTEM_LIBCRYPTO') == '1' + + +def forcing_static_libs(): + """If true, force libs to be linked statically.""" + return os.getenv('AWS_CRT_BUILD_FORCE_STATIC_LIBS') == '1' class AwsLib: @@ -134,12 +162,11 @@ def __init__(self, name, extra_cmake_args=[], libname=None): # The extension depends on these libs. -# They're built along with the extension. 
+# They're built along with the extension (unless using_system_libs() is True) AWS_LIBS = [] if sys.platform != 'darwin' and sys.platform != 'win32': - if not using_system_libcrypto(): - # aws-lc produces libcrypto.a - AWS_LIBS.append(AwsLib('aws-lc', libname='crypto')) + # aws-lc produces libcrypto.a + AWS_LIBS.append(AwsLib('aws-lc', libname='crypto')) AWS_LIBS.append(AwsLib('s2n')) AWS_LIBS.append(AwsLib('aws-c-common')) AWS_LIBS.append(AwsLib('aws-c-sdkutils')) @@ -211,7 +238,10 @@ def _build_dependencies_impl(self, build_dir, install_path, osx_arch=None): ] run_cmd(build_cmd) - def _build_dependencies(self, build_dir, install_path): + def _build_dependencies(self): + build_dir = os.path.join(self.build_temp, 'deps') + install_path = os.path.join(self.build_temp, 'deps', 'install') + if is_macos_universal2() and not is_development_mode(): # create macOS universal binary by compiling for x86_64 and arm64, # each in its own subfolder, and then creating a universal binary @@ -254,30 +284,26 @@ def _build_dependencies(self, build_dir, install_path): # normal build for a single architecture self._build_dependencies_impl(build_dir, install_path) - def run(self): - # build dependencies - dep_build_dir = os.path.join(self.build_temp, 'deps') - dep_install_path = os.path.join(self.build_temp, 'deps', 'install') - - if os.path.exists(os.path.join(PROJECT_DIR, 'crt', 'aws-c-common', 'CMakeLists.txt')): - self._build_dependencies(dep_build_dir, dep_install_path) - else: - print("Skip building dependencies, source not found.") - # update paths so awscrt_ext can access dependencies. # add to the front of any list so that our dependencies are preferred # over anything that might already be on the system (i.e. 
libcrypto.a) - self.include_dirs.insert(0, os.path.join(dep_install_path, 'include')) + self.include_dirs.insert(0, os.path.join(install_path, 'include')) # some platforms (ex: fedora) use /lib64 instead of just /lib lib_dir = 'lib' - if is_64bit() and os.path.exists(os.path.join(dep_install_path, 'lib64')): + if is_64bit() and os.path.exists(os.path.join(install_path, 'lib64')): lib_dir = 'lib64' - if is_32bit() and os.path.exists(os.path.join(dep_install_path, 'lib32')): + if is_32bit() and os.path.exists(os.path.join(install_path, 'lib32')): lib_dir = 'lib32' - self.library_dirs.insert(0, os.path.join(dep_install_path, lib_dir)) + self.library_dirs.insert(0, os.path.join(install_path, lib_dir)) + + def run(self): + if using_system_libs(): + print("Skip building dependencies") + else: + self._build_dependencies() # continue with normal build_ext.run() super().run() @@ -286,8 +312,12 @@ def run(self): class bdist_wheel_abi3(bdist_wheel): def get_tag(self): python, abi, plat = super().get_tag() - if python.startswith("cp") and sys.version_info >= (3, 11): - # on CPython, our wheels are abi3 and compatible back to 3.11 + # on CPython, our wheels are abi3 and compatible back to 3.11 + if python.startswith("cp") and sys.version_info >= (3, 13): + # 3.13 deprecates PyWeakref_GetObject(), adds alternative + return "cp313", "abi3", plat + elif python.startswith("cp") and sys.version_info >= (3, 11): + # 3.11 is the first stable ABI that has everything we need return "cp311", "abi3", plat return python, abi, plat @@ -318,44 +348,46 @@ def awscrt_ext(): extra_link_args += ['-framework', 'Security'] else: # unix - # linker will prefer shared libraries over static if it can find both. - # force linker to choose static variant by using using - # "-l:libaws-c-common.a" syntax instead of just "-laws-c-common". - # - # This helps AWS developers creating Lambda applications from Brazil. - # In Brazil, both shared and static libs are available. 
- # But Lambda requires all shared libs to be explicitly packaged up. - # So it's simpler to link them in statically and have less runtime dependencies. - libraries = [':lib{}.a'.format(x) for x in libraries] + if forcing_static_libs(): + # linker will prefer shared libraries over static if it can find both. + # force linker to choose static variant by using + # "-l:libaws-c-common.a" syntax instead of just "-laws-c-common". + # + # This helps AWS developers creating Lambda applications from Brazil. + # In Brazil, both shared and static libs are available. + # But Lambda requires all shared libs to be explicitly packaged up. + # So it's simpler to link them in statically and have less runtime dependencies. + # + # Don't apply this trick to dependencies that are always on the OS (e.g. librt) + libraries = [':lib{}.a'.format(x) for x in libraries] # OpenBSD doesn't have librt; functions are found in libc instead. if not sys.platform.startswith('openbsd'): libraries += ['rt'] - if using_system_libcrypto(): - libraries += ['crypto'] - else: - # hide the symbols from libcrypto.a - # this prevents weird crashes if an application also ends up using - # libcrypto.so from the system's OpenSSL installation. - extra_link_args += ['-Wl,--exclude-libs,libcrypto.a'] - - # OpenBSD 7.4+ defaults to linking with --execute-only, which is bad for AWS-LC. - # See: https://github.com/aws/aws-lc/blob/4b07805bddc55f68e5ce8c42f215da51c7a4e099/CMakeLists.txt#L44-L53 - # (If AWS-LC's CMakeLists.txt removes these lines in the future, we can remove this hack here as well) - if sys.platform.startswith('openbsd'): + # hide the symbols from libcrypto.a + # this prevents weird crashes if an application also ends up using + # libcrypto.so from the system's OpenSSL installation. + # Do this even if using system libcrypto, since it could still be a static lib. + extra_link_args += ['-Wl,--exclude-libs,libcrypto.a'] + + # OpenBSD 7.4+ defaults to linking with --execute-only, which is bad for AWS-LC. 
+ # See: https://github.com/aws/aws-lc/blob/4b07805bddc55f68e5ce8c42f215da51c7a4e099/CMakeLists.txt#L44-L53 + # (If AWS-LC's CMakeLists.txt removes these lines in the future, we can remove this hack here as well) + if sys.platform.startswith('openbsd'): + if not using_system_libcrypto(): extra_link_args += ['-Wl,--no-execute-only'] # FreeBSD doesn't have execinfo as a part of libc like other Unix variant. # Passing linker flag to link execinfo properly if sys.platform.startswith('freebsd'): - extra_link_args += ['-lexecinfo'] + libraries += ['execinfo'] # python usually adds -pthread automatically, but we've observed # rare cases where that didn't happen, so let's be explicit. extra_link_args += ['-pthread'] - if distutils.ccompiler.get_default_compiler() != 'msvc': + if sys.platform != 'win32' or distutils.ccompiler.get_default_compiler() != 'msvc': extra_compile_args += ['-Wno-strict-aliasing', '-std=gnu99'] # treat warnings as errors in development mode @@ -368,12 +400,29 @@ def awscrt_ext(): if not is_macos_universal2(): if sys.platform == 'darwin': extra_link_args += ['-Wl,-fatal_warnings'] + # xcode 15 introduced a new linker that generates a warning + # when it sees duplicate libs or rpath during bundling. + # pyenv installed from homebrew put duplicate rpath entries + # into sysconfig, and setuptools happily passes them along + # to xcode, resulting in a warning + # (which is fatal in this branch). + # ex. https://github.com/pyenv/pyenv/issues/2890 + # lets revert back to old linker on xcode >= 15 until one of + # the involved parties fixes the issue. 
+ if get_xcode_major_version() >= 15: + extra_link_args += ['-Wl,-ld_classic'] elif 'bsd' in sys.platform: extra_link_args += ['-Wl,-fatal-warnings'] else: extra_link_args += ['-Wl,--fatal-warnings'] - if sys.version_info >= (3, 11): + # prefer building with stable ABI, so a wheel can work with multiple major versions + if sys.version_info >= (3, 13): + # 3.13 deprecates PyWeakref_GetObject(), adds alternative + define_macros.append(('Py_LIMITED_API', '0x030D0000')) + py_limited_api = True + elif sys.version_info >= (3, 11): + # 3.11 is the first stable ABI that has everything we need define_macros.append(('Py_LIMITED_API', '0x030B0000')) py_limited_api = True @@ -422,7 +471,7 @@ def _load_version(): "Operating System :: Unix", "Operating System :: MacOS", ], - python_requires='>=3.7', + python_requires='>=3.8', ext_modules=[awscrt_ext()], cmdclass={'build_ext': awscrt_build_ext, "bdist_wheel": bdist_wheel_abi3}, test_suite='test', diff --git a/source/checksums.h b/source/checksums.h index 9ee27297d..824d743b2 100644 --- a/source/checksums.h +++ b/source/checksums.h @@ -8,5 +8,6 @@ PyObject *aws_py_checksums_crc32(PyObject *self, PyObject *args); PyObject *aws_py_checksums_crc32c(PyObject *self, PyObject *args); +PyObject *aws_py_checksums_crc64nvme(PyObject *self, PyObject *args); #endif /* AWS_CRT_PYTHON_CHECKSUMS_H */ diff --git a/source/crc.c b/source/crc.c index 67eee879e..b02fc4759 100644 --- a/source/crc.c +++ b/source/crc.c @@ -7,7 +7,7 @@ #include "aws/checksums/crc.h" #include "aws/common/byte_buf.h" -PyObject *checksums_crc_common(PyObject *args, uint32_t (*checksum_fn)(const uint8_t *, int, uint32_t)) { +PyObject *checksums_crc32_common(PyObject *args, uint32_t (*checksum_fn)(const uint8_t *, size_t, uint32_t)) { Py_buffer input; PyObject *py_previousCrc; PyObject *py_result = NULL; @@ -39,18 +39,11 @@ PyObject *checksums_crc_common(PyObject *args, uint32_t (*checksum_fn)(const uin /* clang-format off */ Py_BEGIN_ALLOW_THREADS - /* Avoid truncation of 
length for very large buffers. crc() takes - length as an int, which may be narrower than Py_ssize_t. */ - while ((size_t)len > INT_MAX) { - val = checksum_fn(buf, INT_MAX, val); - buf += (size_t)INT_MAX; - len -= (size_t)INT_MAX; - } - val = checksum_fn(buf, (int)len, val); + val = checksum_fn(buf, (size_t)len, val); Py_END_ALLOW_THREADS /* clang-format on */ } else { - val = checksum_fn(input.buf, (int)input.len, val); + val = checksum_fn(input.buf, (size_t)input.len, val); } py_result = PyLong_FromUnsignedLong(val); done: @@ -62,10 +55,52 @@ PyObject *checksums_crc_common(PyObject *args, uint32_t (*checksum_fn)(const uin PyObject *aws_py_checksums_crc32(PyObject *self, PyObject *args) { (void)self; - return checksums_crc_common(args, aws_checksums_crc32); + return checksums_crc32_common(args, aws_checksums_crc32_ex); } PyObject *aws_py_checksums_crc32c(PyObject *self, PyObject *args) { (void)self; - return checksums_crc_common(args, aws_checksums_crc32c); + return checksums_crc32_common(args, aws_checksums_crc32c_ex); +} + +PyObject *aws_py_checksums_crc64nvme(PyObject *self, PyObject *args) { + (void)self; + Py_buffer input; + PyObject *py_previousCrc64; + PyObject *py_result = NULL; + + if (!PyArg_ParseTuple(args, "s*O", &input, &py_previousCrc64)) { + return NULL; + } + + /* Note: PyArg_ParseTuple() doesn't do overflow checking on unsigned values + * so use PyLong_AsUnsignedLongLong() to get the value of the previousCrc arg */ + uint64_t previousCrc = PyLong_AsUnsignedLongLong(py_previousCrc64); + + if (previousCrc == (uint64_t)-1 && PyErr_Occurred()) { + goto done; + } + + if (!PyBuffer_IsContiguous(&input, 'C')) { + PyErr_SetString(PyExc_ValueError, "input must be contiguous buffer"); + goto done; + } + + /* Releasing the GIL for very small buffers is inefficient + and may lower performance */ + if (input.len > 1024 * 5) { + /* clang-format off */ + Py_BEGIN_ALLOW_THREADS + previousCrc = aws_checksums_crc64nvme_ex(input.buf, (size_t)input.len, previousCrc); 
+ Py_END_ALLOW_THREADS + /* clang-format on */ + } else { + previousCrc = aws_checksums_crc64nvme_ex(input.buf, (size_t)input.len, previousCrc); + } + py_result = PyLong_FromUnsignedLongLong(previousCrc); +done: + if (input.obj) { + PyBuffer_Release(&input); + } + return py_result; } diff --git a/source/crypto.c b/source/crypto.c index 249e9276f..b996ecbd3 100644 --- a/source/crypto.c +++ b/source/crypto.c @@ -8,6 +8,7 @@ #include "aws/cal/hash.h" #include "aws/cal/hmac.h" #include "aws/cal/rsa.h" +#include "aws/common/encoding.h" #include "aws/io/pem.h" const char *s_capsule_name_hash = "aws_hash"; @@ -350,6 +351,62 @@ PyObject *aws_py_rsa_public_key_from_pem_data(PyObject *self, PyObject *args) { return capsule; } +PyObject *aws_py_rsa_private_key_from_der_data(PyObject *self, PyObject *args) { + (void)self; + + struct aws_byte_cursor der_data_cur; + if (!PyArg_ParseTuple(args, "y#", &der_data_cur.ptr, &der_data_cur.len)) { + return NULL; + } + + PyObject *capsule = NULL; + struct aws_allocator *allocator = aws_py_get_allocator(); + + struct aws_rsa_key_pair *key_pair = aws_rsa_key_pair_new_from_private_key_pkcs1(allocator, der_data_cur); + + if (key_pair == NULL) { + PyErr_AwsLastError(); + goto on_done; + } + + capsule = PyCapsule_New(key_pair, s_capsule_name_rsa, s_rsa_destructor); + + if (capsule == NULL) { + aws_rsa_key_pair_release(key_pair); + } + +on_done: + return capsule; +} + +PyObject *aws_py_rsa_public_key_from_der_data(PyObject *self, PyObject *args) { + (void)self; + + struct aws_byte_cursor der_data_cur; + if (!PyArg_ParseTuple(args, "y#", &der_data_cur.ptr, &der_data_cur.len)) { + return NULL; + } + + PyObject *capsule = NULL; + struct aws_allocator *allocator = aws_py_get_allocator(); + + struct aws_rsa_key_pair *key_pair = aws_rsa_key_pair_new_from_public_key_pkcs1(allocator, der_data_cur); + + if (key_pair == NULL) { + PyErr_AwsLastError(); + goto on_done; + } + + capsule = PyCapsule_New(key_pair, s_capsule_name_rsa, s_rsa_destructor); + + if 
(capsule == NULL) { + aws_rsa_key_pair_release(key_pair); + } + +on_done: + return capsule; +} + PyObject *aws_py_rsa_encrypt(PyObject *self, PyObject *args) { (void)self; diff --git a/source/crypto.h b/source/crypto.h index 4c03e65a4..3e8db5f74 100644 --- a/source/crypto.h +++ b/source/crypto.h @@ -32,6 +32,9 @@ PyObject *aws_py_sha256_hmac_compute(PyObject *self, PyObject *args); PyObject *aws_py_rsa_private_key_from_pem_data(PyObject *self, PyObject *args); PyObject *aws_py_rsa_public_key_from_pem_data(PyObject *self, PyObject *args); +PyObject *aws_py_rsa_private_key_from_der_data(PyObject *self, PyObject *args); +PyObject *aws_py_rsa_public_key_from_der_data(PyObject *self, PyObject *args); + PyObject *aws_py_rsa_encrypt(PyObject *self, PyObject *args); PyObject *aws_py_rsa_decrypt(PyObject *self, PyObject *args); PyObject *aws_py_rsa_sign(PyObject *self, PyObject *args); diff --git a/source/http_stream.c b/source/http_stream.c index bf7021862..6843e0ea8 100644 --- a/source/http_stream.c +++ b/source/http_stream.c @@ -203,7 +203,10 @@ static void s_on_stream_complete(struct aws_http_stream *native_stream, int erro } /* DECREF python self, we don't need to force it to stay alive any longer. 
*/ - Py_DECREF(PyWeakref_GetObject(stream->self_proxy)); + PyObject *self = aws_py_weakref_get_ref(stream->self_proxy); + /* DECREF twice because `aws_py_weakref_get_ref` returns a strong reference */ + Py_XDECREF(self); + Py_XDECREF(self); PyGILState_Release(state); /*************** GIL RELEASE ***************/ diff --git a/source/module.c b/source/module.c index f5bda5067..0bb9329cb 100644 --- a/source/module.c +++ b/source/module.c @@ -525,6 +525,39 @@ PyObject *aws_py_memory_view_from_byte_buffer(struct aws_byte_buf *buf) { return PyMemoryView_FromMemory(mem_start, mem_size, PyBUF_WRITE); } +PyObject *aws_py_weakref_get_ref(PyObject *ref) { + /* If Python >= 3.13 */ +#if PY_VERSION_HEX >= 0x030D0000 + /* Use PyWeakref_GetRef() (new in Python 3.13), which gets you: + * a new strong reference, + * or NULL because ref is dead, + * or -1 because you called it wrong */ + PyObject *obj = NULL; + if (PyWeakref_GetRef(ref, &obj) == -1) { + PyErr_WriteUnraisable(PyErr_Occurred()); + AWS_ASSERT(0 && "expected a weakref"); + } + return obj; + +#else + /* Use PyWeakref_GetObject() (deprecated as of Python 3.13), which gets you: + * a borrowed reference, + * or Py_None because ref is dead, + * or NULL because you called it wrong */ + PyObject *obj = PyWeakref_GetObject(ref); /* borrowed reference */ + if (obj == NULL) { + PyErr_WriteUnraisable(PyErr_Occurred()); + AWS_ASSERT(0 && "expected a weakref"); + } else if (obj == Py_None) { + obj = NULL; + } else { + /* Be like PyWeakref_GetRef() and make it new strong reference */ + Py_INCREF(obj); + } + return obj; +#endif +} + int aws_py_gilstate_ensure(PyGILState_STATE *out_state) { if (AWS_LIKELY(Py_IsInitialized())) { *out_state = PyGILState_Ensure(); @@ -731,6 +764,8 @@ static PyMethodDef s_module_methods[] = { /* RSA crypto primitives */ AWS_PY_METHOD_DEF(rsa_private_key_from_pem_data, METH_VARARGS), AWS_PY_METHOD_DEF(rsa_public_key_from_pem_data, METH_VARARGS), + AWS_PY_METHOD_DEF(rsa_private_key_from_der_data, 
METH_VARARGS), + AWS_PY_METHOD_DEF(rsa_public_key_from_der_data, METH_VARARGS), AWS_PY_METHOD_DEF(rsa_encrypt, METH_VARARGS), AWS_PY_METHOD_DEF(rsa_decrypt, METH_VARARGS), AWS_PY_METHOD_DEF(rsa_sign, METH_VARARGS), @@ -739,6 +774,7 @@ static PyMethodDef s_module_methods[] = { /* Checksum primitives */ AWS_PY_METHOD_DEF(checksums_crc32, METH_VARARGS), AWS_PY_METHOD_DEF(checksums_crc32c, METH_VARARGS), + AWS_PY_METHOD_DEF(checksums_crc64nvme, METH_VARARGS), /* HTTP */ AWS_PY_METHOD_DEF(http_connection_close, METH_VARARGS), diff --git a/source/module.h b/source/module.h index 49d72346d..2f9dd217e 100644 --- a/source/module.h +++ b/source/module.h @@ -107,6 +107,27 @@ PyObject *aws_py_get_error_message(PyObject *self, PyObject *args); /* Create a write-only memoryview from the remaining free space in an aws_byte_buf */ PyObject *aws_py_memory_view_from_byte_buffer(struct aws_byte_buf *buf); +/* Python 3.13+ changed the function to get a reference from WeakRef. This function is an abstraction over two different + * APIs since we support Python versions before 3.13. Returns a strong reference if non-null, which you must release. */ + +/** + * Given a weak reference, returns a NEW strong reference to the referenced object, + * or NULL if the reference is dead (this function NEVER raises a python exception or AWS Error). + * + * You MUST NOT call this if ref came from a user, or ref is NULL. + * + * This is a simplified version of PyWeakref_GetRef() / PyWeakref_GetObject(). + * Simpler because: + * - Python 3.13 adds PyWeakref_GetRef() and deprecates PyWeakref_GetObject(). + * This function calls the appropriate one. + * + * - This functions has 2 outcomes instead of 3: + * The 3rd being a Python exception for calling it incorrectly. + * If that happens, this function calls PyErr_WriteUnraisable() to clear the exception, + * which is what you would have done anyway. 
+ */ +PyObject *aws_py_weakref_get_ref(PyObject *ref); + /* Allocator that calls into PyObject_[Malloc|Free|Realloc] */ struct aws_allocator *aws_py_get_allocator(void); diff --git a/source/mqtt_client_connection.c b/source/mqtt_client_connection.c index 9eb73a950..78a26057e 100644 --- a/source/mqtt_client_connection.c +++ b/source/mqtt_client_connection.c @@ -140,8 +140,8 @@ static void s_on_connection_success( return; /* Python has shut down. Nothing matters anymore, but don't crash */ } - PyObject *self = PyWeakref_GetObject(py_connection->self_proxy); /* borrowed reference */ - if (self != Py_None) { + PyObject *self = aws_py_weakref_get_ref(py_connection->self_proxy); /* new reference */ + if (self != NULL) { PyObject *success_result = PyObject_CallMethod(self, "_on_connection_success", "(iN)", return_code, PyBool_FromLong(session_present)); if (success_result) { @@ -149,6 +149,7 @@ static void s_on_connection_success( } else { PyErr_WriteUnraisable(PyErr_Occurred()); } + Py_DECREF(self); } PyGILState_Release(state); @@ -167,14 +168,15 @@ static void s_on_connection_failure(struct aws_mqtt_client_connection *connectio return; /* Python has shut down. 
Nothing matters anymore, but don't crash */ } - PyObject *self = PyWeakref_GetObject(py_connection->self_proxy); /* borrowed reference */ - if (self != Py_None) { + PyObject *self = aws_py_weakref_get_ref(py_connection->self_proxy); /* new reference */ + if (self != NULL) { PyObject *success_result = PyObject_CallMethod(self, "_on_connection_failure", "(i)", error_code); if (success_result) { Py_DECREF(success_result); } else { PyErr_WriteUnraisable(PyErr_Occurred()); } + Py_DECREF(self); } PyGILState_Release(state); @@ -194,14 +196,15 @@ static void s_on_connection_interrupted(struct aws_mqtt_client_connection *conne } /* Ensure that python class is still alive */ - PyObject *self = PyWeakref_GetObject(py_connection->self_proxy); /* borrowed reference */ - if (self != Py_None) { + PyObject *self = aws_py_weakref_get_ref(py_connection->self_proxy); /* new reference */ + if (self != NULL) { PyObject *result = PyObject_CallMethod(self, "_on_connection_interrupted", "(i)", error_code); if (result) { Py_DECREF(result); } else { PyErr_WriteUnraisable(PyErr_Occurred()); } + Py_DECREF(self); } PyGILState_Release(state); @@ -227,8 +230,8 @@ static void s_on_connection_resumed( } /* Ensure that python class is still alive */ - PyObject *self = PyWeakref_GetObject(py_connection->self_proxy); /* borrowed reference */ - if (self != Py_None) { + PyObject *self = aws_py_weakref_get_ref(py_connection->self_proxy); /* new reference */ + if (self != NULL) { PyObject *result = PyObject_CallMethod(self, "_on_connection_resumed", "(iN)", return_code, PyBool_FromLong(session_present)); if (result) { @@ -236,6 +239,7 @@ static void s_on_connection_resumed( } else { PyErr_WriteUnraisable(PyErr_Occurred()); } + Py_DECREF(self); } PyGILState_Release(state); @@ -258,14 +262,15 @@ static void s_on_connection_closed( struct mqtt_connection_binding *py_connection = userdata; /* Ensure that python class is still alive */ - PyObject *self = PyWeakref_GetObject(py_connection->self_proxy); /* 
borrowed reference */ - if (self != Py_None) { + PyObject *self = aws_py_weakref_get_ref(py_connection->self_proxy); /* new reference */ + if (self != NULL) { PyObject *result = PyObject_CallMethod(self, "_on_connection_closed", "()"); if (result) { Py_DECREF(result); } else { PyErr_WriteUnraisable(PyErr_Occurred()); } + Py_DECREF(self); } PyGILState_Release(state); @@ -535,8 +540,9 @@ static void s_ws_handshake_transform( } /* Ensure python mqtt connection object is still alive */ - PyObject *connection_py = PyWeakref_GetObject(connection_binding->self_proxy); /* borrowed reference */ - if (connection_py == Py_None) { + + PyObject *connection_py = aws_py_weakref_get_ref(connection_binding->self_proxy); /* new reference */ + if (connection_py == NULL) { aws_raise_error(AWS_ERROR_INVALID_STATE); goto done; } @@ -593,6 +599,7 @@ static void s_ws_handshake_transform( done:; /* Save off error code, so it doesn't got stomped before we pass it to callback*/ int error_code = aws_last_error(); + Py_XDECREF(connection_py); if (ws_transform_capsule) { Py_DECREF(ws_transform_capsule); diff --git a/source/s3_client.c b/source/s3_client.c index 47d785698..8eafd03b5 100644 --- a/source/s3_client.c +++ b/source/s3_client.c @@ -245,22 +245,24 @@ PyObject *aws_py_s3_client_new(PyObject *self, PyObject *args) { struct aws_allocator *allocator = aws_py_get_allocator(); - PyObject *bootstrap_py; /* O */ - PyObject *signing_config_py; /* O */ - PyObject *credential_provider_py; /* O */ - PyObject *tls_options_py; /* O */ - PyObject *on_shutdown_py; /* O */ - struct aws_byte_cursor region; /* s# */ - int tls_mode; /* i */ - uint64_t part_size; /* K */ - uint64_t multipart_upload_threshold; /* K */ - double throughput_target_gbps; /* d */ - int enable_s3express; /* p */ - uint64_t mem_limit; /* K */ - PyObject *py_core; /* O */ + PyObject *bootstrap_py; /* O */ + PyObject *signing_config_py; /* O */ + PyObject *credential_provider_py; /* O */ + PyObject *tls_options_py; /* O */ + 
PyObject *on_shutdown_py; /* O */ + struct aws_byte_cursor region; /* s# */ + int tls_mode; /* i */ + uint64_t part_size; /* K */ + uint64_t multipart_upload_threshold; /* K */ + double throughput_target_gbps; /* d */ + int enable_s3express; /* p */ + uint64_t mem_limit; /* K */ + PyObject *network_interface_names_py; /* O */ + PyObject *py_core; /* O */ + if (!PyArg_ParseTuple( args, - "OOOOOs#iKKdpKO", + "OOOOOs#iKKdpKOO", &bootstrap_py, &signing_config_py, &credential_provider_py, @@ -274,6 +276,7 @@ PyObject *aws_py_s3_client_new(PyObject *self, PyObject *args) { &throughput_target_gbps, &enable_s3express, &mem_limit, + &network_interface_names_py, &py_core)) { return NULL; } @@ -304,10 +307,16 @@ PyObject *aws_py_s3_client_new(PyObject *self, PyObject *args) { struct aws_signing_config_aws *signing_config = NULL; struct aws_credentials *anonymous_credentials = NULL; + struct aws_byte_cursor *network_interface_names = NULL; + size_t num_network_interface_names = 0; + PyObject *capsule = NULL; + /* From hereon, we need to clean up if errors occur */ + bool success = false; + if (signing_config_py != Py_None) { signing_config = aws_py_get_signing_config(signing_config_py); if (!signing_config) { - return NULL; + goto cleanup; } } else if (credential_provider) { aws_s3_init_default_signing_config(&default_signing_config, region, credential_provider); @@ -321,13 +330,10 @@ PyObject *aws_py_s3_client_new(PyObject *self, PyObject *args) { struct s3_client_binding *s3_client = aws_mem_calloc(allocator, 1, sizeof(struct s3_client_binding)); - /* From hereon, we need to clean up if errors occur */ - - PyObject *capsule = PyCapsule_New(s3_client, s_capsule_name_s3_client, s_s3_client_capsule_destructor); + capsule = PyCapsule_New(s3_client, s_capsule_name_s3_client, s_s3_client_capsule_destructor); if (!capsule) { - aws_credentials_release(anonymous_credentials); aws_mem_release(allocator, s3_client); - return NULL; + goto cleanup; } s3_client->on_shutdown = 
on_shutdown_py; @@ -336,6 +342,31 @@ PyObject *aws_py_s3_client_new(PyObject *self, PyObject *args) { s3_client->py_core = py_core; Py_INCREF(s3_client->py_core); + if (network_interface_names_py != Py_None) { + if (!PyList_Check(network_interface_names_py)) { + PyErr_SetString(PyExc_TypeError, "Expected network_interface_names to be a sequence."); + goto cleanup; + } + Py_ssize_t list_size = PyList_Size(network_interface_names_py); + if (list_size < 0) { + goto cleanup; + } + num_network_interface_names = (size_t)list_size; + network_interface_names = + aws_mem_calloc(allocator, num_network_interface_names, sizeof(struct aws_byte_cursor)); + for (size_t i = 0; i < num_network_interface_names; ++i) { + PyObject *str_obj = PyList_GetItem(network_interface_names_py, i); /* Borrowed reference */ + if (!str_obj) { + goto cleanup; + } + network_interface_names[i] = aws_byte_cursor_from_pyunicode(str_obj); + if (network_interface_names[i].ptr == NULL) { + PyErr_SetString(PyExc_TypeError, "Expected all network_interface_names elements to be strings."); + goto cleanup; + } + } + } + struct aws_s3_client_config s3_config = { .region = region, .client_bootstrap = bootstrap, @@ -349,18 +380,23 @@ PyObject *aws_py_s3_client_new(PyObject *self, PyObject *args) { .shutdown_callback = s_s3_client_shutdown, .shutdown_callback_user_data = s3_client, .enable_s3express = enable_s3express, + .network_interface_names_array = network_interface_names, + .num_network_interface_names = num_network_interface_names, }; s3_client->native = aws_s3_client_new(allocator, &s3_config); if (s3_client->native == NULL) { PyErr_SetAwsLastError(); - goto error; + goto cleanup; } - aws_credentials_release(anonymous_credentials); - return capsule; + success = true; -error: +cleanup: aws_credentials_release(anonymous_credentials); - Py_DECREF(capsule); - return NULL; + aws_mem_release(allocator, network_interface_names); + if (!success) { + Py_XDECREF(capsule); + return NULL; + } + return capsule; } diff 
--git a/source/s3_meta_request.c b/source/s3_meta_request.c index 7586537e5..56ac5654f 100644 --- a/source/s3_meta_request.c +++ b/source/s3_meta_request.c @@ -24,12 +24,6 @@ struct s3_meta_request_binding { /* Reference to python object that reference to other related python object to keep it alive */ PyObject *py_core; - /** - * file path if set, it handles file operation from C land to reduce the cost - * passing chunks from C into python. One for recv/writing, the other for send/reading - **/ - FILE *recv_file; - /* Batch up the transferred size in one sec. */ uint64_t size_transferred; /* The time stamp when the progress reported */ @@ -42,9 +36,6 @@ struct aws_s3_meta_request *aws_py_get_s3_meta_request(PyObject *meta_request) { } static void s_destroy(struct s3_meta_request_binding *meta_request) { - if (meta_request->recv_file) { - fclose(meta_request->recv_file); - } Py_XDECREF(meta_request->py_core); aws_mem_release(aws_py_get_allocator(), meta_request); } @@ -148,22 +139,6 @@ static int s_s3_request_on_body( void *user_data) { (void)meta_request; struct s3_meta_request_binding *request_binding = user_data; - - if (request_binding->recv_file) { - /* The callback will be invoked with the right order, so we don't need to seek first. */ - if (fwrite((void *)body->ptr, body->len, 1, request_binding->recv_file) < 1) { - int errno_value = ferror(request_binding->recv_file) ? errno : 0; /* Always cache errno */ - aws_translate_and_raise_io_error_or(errno_value, AWS_ERROR_FILE_WRITE_FAILURE); - AWS_LOGF_ERROR( - AWS_LS_S3_META_REQUEST, - "id=%p Failed writing to file. errno:%d. 
aws-error:%s", - (void *)meta_request, - errno_value, - aws_error_name(aws_last_error())); - return AWS_OP_ERR; - } - return AWS_OP_SUCCESS; - } bool error = true; /*************** GIL ACQUIRE ***************/ PyGILState_STATE state; @@ -201,25 +176,6 @@ static void s_s3_request_on_finish( struct s3_meta_request_binding *request_binding = user_data; int error_code = meta_request_result->error_code; - - if (request_binding->recv_file) { - if (fclose(request_binding->recv_file) != 0) { - /* Failed to close file, so we can't guarantee it flushed to disk. - * If the meta-request's error_code was 0, change it to failure */ - if (error_code == 0) { - int errno_value = errno; /* Always cache errno before potential side-effect */ - aws_translate_and_raise_io_error_or(errno_value, AWS_ERROR_FILE_WRITE_FAILURE); - error_code = aws_last_error(); - AWS_LOGF_ERROR( - AWS_LS_S3_META_REQUEST, - "id=%p Failed closing file. errno:%d. aws-error:%s", - (void *)meta_request, - errno_value, - aws_error_name(error_code)); - } - } - request_binding->recv_file = NULL; - } /*************** GIL ACQUIRE ***************/ PyGILState_STATE state; if (aws_py_gilstate_ensure(&state)) { @@ -455,15 +411,6 @@ PyObject *aws_py_s3_client_make_meta_request(PyObject *self, PyObject *args) { meta_request->py_core = py_core; Py_INCREF(meta_request->py_core); - if (recv_filepath) { - meta_request->recv_file = aws_fopen(recv_filepath, "wb"); - if (!meta_request->recv_file) { - aws_translate_and_raise_io_error(errno); - PyErr_SetAwsLastError(); - goto error; - } - } - struct aws_s3_meta_request_options s3_meta_request_opt = { .type = type, .operation_name = aws_byte_cursor_from_c_str(operation_name), @@ -471,6 +418,7 @@ PyObject *aws_py_s3_client_make_meta_request(PyObject *self, PyObject *args) { .signing_config = signing_config, .checksum_config = &checksum_config, .send_filepath = aws_byte_cursor_from_c_str(send_filepath), + .recv_filepath = aws_byte_cursor_from_c_str(recv_filepath), .headers_callback = 
s_s3_request_on_headers, .body_callback = s_s3_request_on_body, .finish_callback = s_s3_request_on_finish, diff --git a/test/test_checksums.py b/test/test_checksums.py index 0f4e982d7..5d46581b8 100644 --- a/test/test_checksums.py +++ b/test/test_checksums.py @@ -95,6 +95,48 @@ def test_crc32c_huge_buffer(self): val = checksums.crc32c(huge_buffer) self.assertEqual(0x572a7c8a, val) + def test_crc64nvme_zeros_one_shot(self): + output = checksums.crc64nvme(bytes(32)) + expected = 0xcf3473434d4ecf3b + self.assertEqual(expected, output) + + def test_crc64nvme_zeros_iterated(self): + output = 0 + for i in range(32): + output = checksums.crc64nvme(bytes(1), output) + expected = 0xcf3473434d4ecf3b + self.assertEqual(expected, output) + + def test_crc64nvme_values_one_shot(self): + output = checksums.crc64nvme(''.join(chr(i) for i in range(32))) + expected = 0xb9d9d4a8492cbd7f + self.assertEqual(expected, output) + + def test_crc64nvme_values_iterated(self): + output = 0 + for i in range(32): + output = checksums.crc64nvme(chr(i), output) + expected = 0xb9d9d4a8492cbd7f + self.assertEqual(expected, output) + + def test_crc64nvme_large_buffer(self): + # stress test gil optimization for 32 bit architecture which cannot handle huge buffer + large_buffer = bytes(25 * 2**20) + val = checksums.crc64nvme(large_buffer) + self.assertEqual(0x5b6f5045463ca45e, val) + + def test_crc64nvme_huge_buffer(self): + if sys.platform.startswith('freebsd'): + # Skip this test for freebsd, as it simply crashes instead of raising exception in this case + raise unittest.SkipTest('Skip this test for freebsd') + try: + INT_MAX = 2**32 - 1 + huge_buffer = bytes(INT_MAX + 5) + except BaseException: + raise unittest.SkipTest('Machine cant allocate giant buffer for giant buffer test') + val = checksums.crc64nvme(huge_buffer) + self.assertEqual(0x2645c28052b1fbb0, val) + if __name__ == '__main__': unittest.main() diff --git a/test/test_crypto.py b/test/test_crypto.py index 7c74a6335..008b1645b 100644 --- 
a/test/test_crypto.py +++ b/test/test_crypto.py @@ -4,6 +4,7 @@ from test import NativeResourceTest from awscrt.crypto import Hash, RSA, RSAEncryptionAlgorithm, RSASignatureAlgorithm +import base64 import unittest RSA_PRIVATE_KEY_PEM = """ @@ -47,6 +48,41 @@ -----END RSA PUBLIC KEY----- """ +RSA_PUBLIC_KEY_DER_BASE64 = ( + 'MIIBCgKCAQEAxaEsLWE2t3kJqsF1sFHYk7rSCGfGTSDa+3r5typT0cb/TtJ989C8' + 'dLcfInx4Dxq0ewo6NOxQ/TD8JevUda86jSh1UKEQUOl7qy+QwOhFMpwHq/uOgMy5' + 'khDDLlkxD5U32RrDfqLK+4WUDapHlQ6g+E6wS1j1yDRoTZJk3WnTpR0sJHsttLWV' + '+mb2wPC7TkhGMbFMzbt6v0ahF7abVOOGiHVZ77uhS66hgP9nfgMHug8EN/xmVc/T' + 'xgMJci1Irh66xVZQ9aT2OZwb0TXglULm+b8HM+GKHgoTMwr9gAGpFDoYi22PvxC/' + 'cqKHKIaYw7KNOPwImzQ6cp5oQJTAPQKRUwIDAQAB') + +RSA_PRIVATE_KEY_DER_BASE64 = ( + 'MIIEowIBAAKCAQEAxaEsLWE2t3kJqsF1sFHYk7rSCGfGTSDa+3r5typT0cb/TtJ9' + '89C8dLcfInx4Dxq0ewo6NOxQ/TD8JevUda86jSh1UKEQUOl7qy+QwOhFMpwHq/uO' + 'gMy5khDDLlkxD5U32RrDfqLK+4WUDapHlQ6g+E6wS1j1yDRoTZJk3WnTpR0sJHst' + 'tLWV+mb2wPC7TkhGMbFMzbt6v0ahF7abVOOGiHVZ77uhS66hgP9nfgMHug8EN/xm' + 'Vc/TxgMJci1Irh66xVZQ9aT2OZwb0TXglULm+b8HM+GKHgoTMwr9gAGpFDoYi22P' + 'vxC/cqKHKIaYw7KNOPwImzQ6cp5oQJTAPQKRUwIDAQABAoIBACcuUfTZPiDX1UvO' + 'OQfw4hA/zJ4v/MeTyPZspg9jS+TeIAW/g4sQChzVpU2QAbl04O031NxjMZdQ29yk' + 'yaVfTStpJwEKPZLdB1CkCH3GTtm+x2KYZ+MvM2c6/Yc11Z0yRzU6siFsIvQEwpqG' + '9NQfZ1hzOU5m36uGgFtIt8iRz4z/RxpZUOXpaEosb0uMK3VPBuZBu8uVQBFdyAA7' + 'xAGtJphxQ5u0Ct9aidPjD7MhCVzcb2XbgCgxb2hbCmDMOgeNVYrTo2fdBzNxLcXv' + 'j4sUNmO+mLbUMFOePuP8JZaGNTTmznZkavskozfdbubuS3/4/0HH1goytFheVt1B' + 'vfxzpgkCgYEA9QgEMKny0knDHV7BC2uAd7Vvd+5iikA3WdJ9i11zas9AbMMmf9cX' + 'E3xNt6DO42hnVCNN4uAWH5uGWltWZ8pmGKk6mesqZfYPsyTz1cK6fP6KyQrkWRNT' + 'V3nRMEMbziAWxFD5hxP9p1KlqI2Py+W4fJ0LGZ4Mwvn3dKYOilxK+50CgYEAznny' + 'ZxQiJGt8/FtH9f/GDIY24Cz53Cuj+BWG2EH4kLo24ET2QTVvohFJVCm3Hf8Qe4cA' + 'ASabRUg1vS4Tr2FmIqD2Iw/ogSmDcJdYuwhdtWKa8fDbehCN5hmXjn2WKYvjvZNv' + 'Gcx6gfqULD9SaQv+N7lL8eJxKiLLBeVYD7qoha8CgYA8udnf/Z5yQ1mZw8vv+pqC' + 'EHMps+iz/qo5FpOKoIRkKiz7R3oZIMNVTu8r3Syo600Aayd4XLTe7HplllFZs62N' + 
'2xLs5n1Be7P0X+oWRgZVx/e5T3u8H6/98/DGFzui4A0EZlURBwFMII1xsnO6wpnw' + 'ODNyC9t5zt1nCWh9HdZveQKBgAm4+E8eRZVNcm83pSXSS3Mfhsn7lDBn5aqy6Mya' + 'HqhB/H+G/8mGSKFrCvbpl/PTpOUMMFXdiYYzpkQoPUkO3w5WYgC4qQwb9lKA7e6w' + 'sCjwYbduzgbrbKMfJWHSTBXcvnaY0Kx4UnR4Zi3HNYw4wlnBYfAb55RCWykF6aWj' + '9neFAoGBAMqQA2YWCHhnRtjn4iGMrTk8iOHBd8AGBBzX9rPKXDqWlOr/iQq90qX0' + '59309stR/bAhMzxOx31777XEPO1md854iXXr0XDMQlwCYkWyWb6hp4JlsqFBPMjn' + 'nGXWA0Gp6UWgpg4Hvjdsu+0FQ3AhDMBKZZ8fBFb4EW+HRQIHPnbH') + class TestCredentials(NativeResourceTest): @@ -134,21 +170,68 @@ def test_rsa_encryption_roundtrip(self): pt_pub = rsa.decrypt(p, ct_pub) self.assertEqual(test_pt, pt_pub) - def test_rsa_signing_roundtrip(self): - h = Hash.sha256_new() - h.update(b'totally original test string') - digest = h.digest() + def test_rsa_encryption_roundtrip_der(self): + param_list = [RSAEncryptionAlgorithm.PKCS1_5, + RSAEncryptionAlgorithm.OAEP_SHA256, + RSAEncryptionAlgorithm.OAEP_SHA512] + for p in param_list: + with self.subTest(msg="RSA Encryption Roundtrip using algo p", p=p): + test_pt = b'totally original test string' + private_key_der_bytes = base64.b64decode(RSA_PRIVATE_KEY_DER_BASE64) + rsa = RSA.new_private_key_from_der_data(private_key_der_bytes) + ct = rsa.encrypt(p, test_pt) + pt = rsa.decrypt(p, ct) + self.assertEqual(test_pt, pt) + + public_key_der_bytes = base64.b64decode(RSA_PUBLIC_KEY_DER_BASE64) + rsa_pub = RSA.new_public_key_from_der_data(public_key_der_bytes) + ct_pub = rsa_pub.encrypt(p, test_pt) + pt_pub = rsa.decrypt(p, ct_pub) + self.assertEqual(test_pt, pt_pub) + + def test_rsa_signing_roundtrip(self): param_list = [RSASignatureAlgorithm.PKCS1_5_SHA256, - RSASignatureAlgorithm.PSS_SHA256] + RSASignatureAlgorithm.PSS_SHA256, + RSASignatureAlgorithm.PKCS1_5_SHA1] for p in param_list: with self.subTest(msg="RSA Signing Roundtrip using algo p", p=p): + if (p == RSASignatureAlgorithm.PKCS1_5_SHA1): + h = Hash.sha1_new() + else: + h = Hash.sha256_new() + h.update(b'totally original test string') + 
digest = h.digest() + rsa = RSA.new_private_key_from_pem_data(RSA_PRIVATE_KEY_PEM) signature = rsa.sign(p, digest) self.assertTrue(rsa.verify(p, digest, signature)) - rsa_pub = RSA.new_private_key_from_pem_data(RSA_PRIVATE_KEY_PEM) + rsa_pub = RSA.new_public_key_from_pem_data(RSA_PUBLIC_KEY_PEM) + self.assertTrue(rsa_pub.verify(p, digest, signature)) + + def test_rsa_signing_roundtrip_der(self): + param_list = [RSASignatureAlgorithm.PKCS1_5_SHA256, + RSASignatureAlgorithm.PSS_SHA256, + RSASignatureAlgorithm.PKCS1_5_SHA1] + + for p in param_list: + with self.subTest(msg="RSA Signing Roundtrip using algo p", p=p): + if (p == RSASignatureAlgorithm.PKCS1_5_SHA1): + h = Hash.sha1_new() + else: + h = Hash.sha256_new() + h.update(b'totally original test string') + digest = h.digest() + + private_key_der_bytes = base64.b64decode(RSA_PRIVATE_KEY_DER_BASE64) + rsa = RSA.new_private_key_from_der_data(private_key_der_bytes) + signature = rsa.sign(p, digest) + self.assertTrue(rsa.verify(p, digest, signature)) + + public_key_der_bytes = base64.b64decode(RSA_PUBLIC_KEY_DER_BASE64) + rsa_pub = RSA.new_public_key_from_der_data(public_key_der_bytes) self.assertTrue(rsa_pub.verify(p, digest, signature)) def test_rsa_load_error(self): diff --git a/test/test_mqtt.py b/test/test_mqtt.py index c76bad7fa..547e1e65c 100644 --- a/test/test_mqtt.py +++ b/test/test_mqtt.py @@ -208,7 +208,16 @@ def on_message(**kwargs): ping_timeout_ms=10000, keep_alive_secs=30 ) - disconnecter.connect().result(TIMEOUT) + + # A race condition exists in IoT Core where the interrupter may get refused rather than the existing + # connection getting dropped. Loop until we successfully connect. 
+ continue_connecting = True + while continue_connecting: + try: + disconnecter.connect().result(TIMEOUT) + continue_connecting = False + except BaseException: + pass # Receive message rcv = received.result(TIMEOUT) diff --git a/test/test_mqtt5.py b/test/test_mqtt5.py index 143fd04cf..2e1399307 100644 --- a/test/test_mqtt5.py +++ b/test/test_mqtt5.py @@ -1009,8 +1009,6 @@ def test_operation_sub_unsub(self): client.stop() callbacks.future_stopped.result(TIMEOUT) - sub1_callbacks = False - sub2_callbacks = False total_callbacks = 0 all_packets_received = Future() mutex = Lock() @@ -1020,7 +1018,6 @@ def subscriber1_callback(self, publish_received_data: mqtt5.PublishReceivedData) self.mutex.acquire() var = publish_received_data.publish_packet.payload self.received_subscriptions[int(var)] = 1 - self.sub1_callbacks = True self.total_callbacks = self.total_callbacks + 1 if self.total_callbacks == 10: self.all_packets_received.set_result(None) @@ -1030,7 +1027,6 @@ def subscriber2_callback(self, publish_received_data: mqtt5.PublishReceivedData) self.mutex.acquire() var = publish_received_data.publish_packet.payload self.received_subscriptions[int(var)] = 1 - self.sub2_callbacks = True self.total_callbacks = self.total_callbacks + 1 if self.total_callbacks == 10: self.all_packets_received.set_result(None) @@ -1154,8 +1150,6 @@ def test_operation_shared_subscription(self): unsuback_packet = unsubscribe_future.result(TIMEOUT) self.assertIsInstance(unsuback_packet, mqtt5.UnsubackPacket) - self.assertEqual(self.sub1_callbacks, True) - self.assertEqual(self.sub2_callbacks, True) self.assertEqual(self.total_callbacks, 10) for e in self.received_subscriptions: @@ -1221,6 +1215,9 @@ def test_operation_will(self): suback_packet = subscribe_future.result(TIMEOUT) self.assertIsInstance(suback_packet, mqtt5.SubackPacket) + # wait a few seconds to minimize chance of eventual consistency race condition between subscribe and publish + time.sleep(2) + disconnect_packet = 
mqtt5.DisconnectPacket(reason_code=mqtt5.DisconnectReasonCode.DISCONNECT_WITH_WILL_MESSAGE) client1.stop(disconnect_packet=disconnect_packet) callbacks1.future_stopped.result(TIMEOUT) diff --git a/test/test_mqtt5_credentials.py b/test/test_mqtt5_credentials.py index 03e5c401e..8a5d24b09 100644 --- a/test/test_mqtt5_credentials.py +++ b/test/test_mqtt5_credentials.py @@ -197,40 +197,6 @@ def test_mqtt5_ws_cred_static(self): input_role_secret_access_key, input_role_session_token ) - credentials = auth.AwsCredentialsProvider.new_default_chain() - - def sign_function(transform_args, **kwargs): - signing_config = auth.AwsSigningConfig( - algorithm=auth.AwsSigningAlgorithm.V4, - signature_type=auth.AwsSignatureType.HTTP_REQUEST_QUERY_PARAMS, - credentials_provider=credentials, - region=input_region, - service="iotdevicegateway", - omit_session_token=True - ) - signing_future = auth.aws_sign_request( - http_request=transform_args.http_request, - signing_config=signing_config) - signing_future.add_done_callback(lambda x: transform_args.set_done(x.exception())) - client_options.websocket_handshake_transform = sign_function - client_options.tls_ctx = io.ClientTlsContext(io.TlsContextOptions()) - - callbacks = Mqtt5TestCallbacks() - client = self._create_client(client_options=client_options, callbacks=callbacks) - client.start() - callbacks.future_connection_success.result(TIMEOUT) - client.stop() - callbacks.future_stopped.result(TIMEOUT) - - def test_mqtt5_ws_cred_default(self): - input_host_name = _get_env_variable("AWS_TEST_MQTT5_IOT_CORE_HOST") - input_region = _get_env_variable("AWS_TEST_MQTT5_IOT_CORE_REGION") - - client_options = mqtt5.ClientOptions( - host_name=input_host_name, - port=443 - ) - credentials = auth.AwsCredentialsProvider.new_default_chain() def sign_function(transform_args, **kwargs): signing_config = auth.AwsSigningConfig( @@ -380,6 +346,12 @@ def sign_function(transform_args, **kwargs): callbacks.future_stopped.result(TIMEOUT) def 
test_mqtt5_ws_cred_environment(self): + self._test_mqtt5_ws_cred_environment(use_default_chain=False) + + def test_mqtt5_ws_cred_default_chain(self): + self._test_mqtt5_ws_cred_environment(use_default_chain=True) + + def _test_mqtt5_ws_cred_environment(self, use_default_chain): input_host_name = _get_env_variable("AWS_TEST_MQTT5_IOT_CORE_HOST") input_access_key = _get_env_variable("AWS_TEST_MQTT5_ROLE_CREDENTIAL_ACCESS_KEY") input_secret_access_key = _get_env_variable("AWS_TEST_MQTT5_ROLE_CREDENTIAL_SECRET_ACCESS_KEY") @@ -399,7 +371,10 @@ def test_mqtt5_ws_cred_environment(self): os.environ["AWS_SECRET_ACCESS_KEY"] = input_secret_access_key os.environ["AWS_SESSION_TOKEN"] = input_session_token # This should load the environment variables we just set - credentials = auth.AwsCredentialsProvider.new_environment() + if use_default_chain: + credentials = auth.AwsCredentialsProvider.new_default_chain() + else: + credentials = auth.AwsCredentialsProvider.new_environment() def sign_function(transform_args, **kwargs): signing_config = auth.AwsSigningConfig( diff --git a/test/test_mqtt_credentials.py b/test/test_mqtt_credentials.py index 15335e3e2..e1bd2c68d 100644 --- a/test/test_mqtt_credentials.py +++ b/test/test_mqtt_credentials.py @@ -136,40 +136,6 @@ def sign_function(transform_args, **kwargs): connection.connect().result(TIMEOUT) connection.disconnect().result(TIMEOUT) - def test_mqtt311_ws_cred_default(self): - input_host_name = _get_env_variable("AWS_TEST_MQTT311_IOT_CORE_HOST") - input_region = _get_env_variable("AWS_TEST_MQTT311_IOT_CORE_REGION") - - credentials = auth.AwsCredentialsProvider.new_default_chain() - - def sign_function(transform_args, **kwargs): - signing_config = auth.AwsSigningConfig( - algorithm=auth.AwsSigningAlgorithm.V4, - signature_type=auth.AwsSignatureType.HTTP_REQUEST_QUERY_PARAMS, - credentials_provider=credentials, - region=input_region, - service="iotdevicegateway", - omit_session_token=True - ) - signing_future = auth.aws_sign_request( 
- http_request=transform_args.http_request, - signing_config=signing_config) - signing_future.add_done_callback(lambda x: transform_args.set_done(x.exception())) - - elg = EventLoopGroup() - resolver = DefaultHostResolver(elg) - bootstrap = ClientBootstrap(elg, resolver) - client = Client(bootstrap, ClientTlsContext(TlsContextOptions())) - connection = Connection( - client=client, - client_id=create_client_id(), - host_name=input_host_name, - port=int(443), - use_websockets=True, - websocket_handshake_transform=sign_function) - connection.connect().result(TIMEOUT) - connection.disconnect().result(TIMEOUT) - def test_mqtt311_ws_cred_cognito(self): input_cognito_endpoint = _get_env_variable("AWS_TEST_MQTT311_COGNITO_ENDPOINT") input_cognito_identity = _get_env_variable("AWS_TEST_MQTT311_COGNITO_IDENTITY") @@ -300,6 +266,12 @@ def sign_function(transform_args, **kwargs): connection.disconnect().result(TIMEOUT) def test_mqtt311_ws_cred_environment(self): + self._test_mqtt311_ws_cred_environment(use_default_chain=False) + + def test_mqtt311_ws_cred_default(self): + self._test_mqtt311_ws_cred_environment(use_default_chain=True) + + def _test_mqtt311_ws_cred_environment(self, use_default_chain): input_access_key = _get_env_variable("AWS_TEST_MQTT311_ROLE_CREDENTIAL_ACCESS_KEY") input_secret_access_key = _get_env_variable("AWS_TEST_MQTT311_ROLE_CREDENTIAL_SECRET_ACCESS_KEY") input_session_token = _get_env_variable("AWS_TEST_MQTT311_ROLE_CREDENTIAL_SESSION_TOKEN") @@ -314,8 +286,11 @@ def test_mqtt311_ws_cred_environment(self): os.environ["AWS_ACCESS_KEY_ID"] = input_access_key os.environ["AWS_SECRET_ACCESS_KEY"] = input_secret_access_key os.environ["AWS_SESSION_TOKEN"] = input_session_token - # This should load the environment variables we just set - credentials = auth.AwsCredentialsProvider.new_environment() + if use_default_chain: + credentials = auth.AwsCredentialsProvider.new_default_chain() + else: + # This should load the environment variables we just set + 
credentials = auth.AwsCredentialsProvider.new_environment() signing_config = auth.AwsSigningConfig( algorithm=auth.AwsSigningAlgorithm.V4, signature_type=auth.AwsSignatureType.HTTP_REQUEST_QUERY_PARAMS, diff --git a/test/test_s3.py b/test/test_s3.py index 9d3168725..8da9b11aa 100644 --- a/test/test_s3.py +++ b/test/test_s3.py @@ -14,9 +14,7 @@ from multiprocessing import Process from awscrt.http import HttpHeaders, HttpRequest -from awscrt.s3 import S3Client, S3RequestType, create_default_s3_signing_config -from awscrt.io import ClientBootstrap, ClientTlsContext, DefaultHostResolver, EventLoopGroup, TlsConnectionOptions, TlsContextOptions -from awscrt.auth import AwsCredentials, AwsCredentialsProvider, AwsSignatureType, AwsSignedBodyHeaderType, AwsSignedBodyValue, AwsSigningAlgorithm, AwsSigningConfig +from awscrt.auth import AwsCredentials from awscrt.s3 import ( S3ChecksumAlgorithm, S3ChecksumConfig, @@ -158,7 +156,8 @@ def s3_client_new( part_size=0, is_cancel_test=False, enable_s3express=False, - mem_limit=None): + mem_limit=None, + network_interface_names=None): if is_cancel_test: # for cancellation tests, make things slow, so it's less likely that @@ -189,7 +188,8 @@ def s3_client_new( part_size=part_size, throughput_target_gbps=throughput_target_gbps, enable_s3express=enable_s3express, - memory_limit=mem_limit) + memory_limit=mem_limit, + network_interface_names=network_interface_names) return s3_client @@ -221,6 +221,11 @@ def test_sanity_secure(self): s3_client = s3_client_new(True, self.region) self.assertIsNotNone(s3_client) + def test_sanity_network_interface_names(self): + # This is just a sanity test to ensure that we are passing the parameter correctly. 
+ with self.assertRaises(Exception): + s3_client_new(True, self.region, network_interface_names=("eth0", "invalid-network-interface")) + def test_wait_shutdown(self): s3_client = s3_client_new(False, self.region) self.assertIsNotNone(s3_client) @@ -248,6 +253,7 @@ def setUp(self): self.num_threads = 0 self.special_path = "put_object_test_10MB@$%.txt" self.non_ascii_file_name = "ÉxÅmple.txt" + self.part_size = 5 * MB self.response_headers = None self.response_status_code = None @@ -358,13 +364,12 @@ def _test_s3_put_get_object( request_type, exception_name=None, enable_s3express=False, - region="us-west-2", mem_limit=None, **kwargs): s3_client = s3_client_new( False, - region, - 5 * MB, + self.region, + self.part_size, enable_s3express=enable_s3express, mem_limit=mem_limit) signing_config = None @@ -387,8 +392,14 @@ def _test_s3_put_get_object( self.assertTrue(shutdown_event.wait(self.timeout)) if exception_name is None: - finished_future.result() - self._validate_successful_response(request_type is S3RequestType.PUT_OBJECT) + try: + finished_future.result() + self._validate_successful_response(request_type is S3RequestType.PUT_OBJECT) + except S3ResponseError as e: + print(e.status_code) + print(e.headers) + print(e.body) + raise e else: e = finished_future.exception() self.assertEqual(e.name, exception_name) @@ -431,14 +442,16 @@ def test_put_object_unknown_content_length_single_part(self): put_body_stream.close() def test_get_object_s3express(self): + self.region = "us-east-1" request = self._get_object_request("/crt-download-10MB", enable_s3express=True) - self._test_s3_put_get_object(request, S3RequestType.GET_OBJECT, enable_s3express=True, region="us-east-1") + self._test_s3_put_get_object(request, S3RequestType.GET_OBJECT, enable_s3express=True) def test_put_object_s3express(self): + self.region = "us-east-1" put_body_stream = open(self.temp_put_obj_file_path, "rb") content_length = os.stat(self.temp_put_obj_file_path).st_size request = 
self._put_object_request(put_body_stream, content_length, enable_s3express=True) - self._test_s3_put_get_object(request, S3RequestType.PUT_OBJECT, enable_s3express=True, region="us-east-1") + self._test_s3_put_get_object(request, S3RequestType.PUT_OBJECT, enable_s3express=True) put_body_stream.close() def test_put_object_multiple_times(self): @@ -581,28 +594,45 @@ def on_done_remove_file(**kwargs): "the transferred length reported does not match body we sent") self._validate_successful_response(request_type is S3RequestType.PUT_OBJECT) - def test_put_get_with_checksum(self): - put_body = b'hello world' - put_body_stream = BytesIO(put_body) - content_length = len(put_body) - path = '/hello-world.txt' + def _round_trip_with_checksums_helper( + self, + algo=S3ChecksumAlgorithm.CRC32, + mpu=True, + provide_full_object_checksum=False): + if not mpu: + # increase the part size for the client to use single part upload + self.part_size = 20 * MB - # calculate expected CRC32 header value: - # a string containing the url-safe-base64-encoding of a big-endian-32-bit-CRC - crc32_int = zlib.crc32(put_body) - crc32_big_endian = crc32_int.to_bytes(4, 'big') - crc32_base64_bytes = base64.urlsafe_b64encode(crc32_big_endian) - crc32_base64_str = crc32_base64_bytes.decode() + put_body_stream = open(self.temp_put_obj_file_path, "rb") + content_length = os.stat(self.temp_put_obj_file_path).st_size + # construct different path to prevent race condition between tests + path = '/hello-world-' + algo.name + if mpu: + path += "-mpu" + if provide_full_object_checksum: + path += "-full-object" + + if algo == S3ChecksumAlgorithm.CRC32: + checksum_header_name = 'x-amz-checksum-crc32' + checksum_str = 'a9ccsg==' + elif algo == S3ChecksumAlgorithm.CRC64NVME: + checksum_header_name = 'x-amz-checksum-crc64nvme' + checksum_str = 'tPMvgM0jSDQ=' + else: + raise Exception("Checksum algo not supported by test helper") # upload, with client adding checksum upload_request = 
self._put_object_request(put_body_stream, content_length, path=path) upload_checksum_config = S3ChecksumConfig( - algorithm=S3ChecksumAlgorithm.CRC32, + algorithm=algo, location=S3ChecksumLocation.TRAILER) + if provide_full_object_checksum: + upload_request.headers.add(checksum_header_name, checksum_str) + # checksum will be provided from the header, don't set the checksum configs + upload_checksum_config = None + self._test_s3_put_get_object(upload_request, S3RequestType.PUT_OBJECT, checksum_config=upload_checksum_config) - self.assertEqual(HttpHeaders(self.response_headers).get('x-amz-checksum-crc32'), - crc32_base64_str) # download, with client validating checksum download_request = self._get_object_request(path) @@ -610,9 +640,31 @@ def test_put_get_with_checksum(self): self._test_s3_put_get_object(download_request, S3RequestType.GET_OBJECT, checksum_config=download_checksum_config) self.assertTrue(self.done_did_validate_checksum) - self.assertEqual(self.done_checksum_validation_algorithm, S3ChecksumAlgorithm.CRC32) - self.assertEqual(HttpHeaders(self.response_headers).get('x-amz-checksum-crc32'), - crc32_base64_str) + self.assertEqual(self.done_checksum_validation_algorithm, algo) + self.assertEqual(HttpHeaders(self.response_headers).get(checksum_header_name), + checksum_str) + put_body_stream.close() + + def test_round_trip_with_trailing_checksum(self): + self._round_trip_with_checksums_helper(S3ChecksumAlgorithm.CRC32, mpu=False) + + def test_round_trip_with_full_object_checksum_mpu(self): + self._round_trip_with_checksums_helper( + S3ChecksumAlgorithm.CRC64NVME, + mpu=True, + provide_full_object_checksum=True) + + def test_round_trip_with_full_object_checksum_single_part(self): + self._round_trip_with_checksums_helper( + S3ChecksumAlgorithm.CRC64NVME, + mpu=False, + provide_full_object_checksum=True) + + def test_round_trip_with_full_object_checksum_mpu_crc32(self): + self._round_trip_with_checksums_helper(S3ChecksumAlgorithm.CRC32, mpu=True, 
provide_full_object_checksum=True) + + def test_round_trip_with_full_object_checksum_single_part_crc32(self): + self._round_trip_with_checksums_helper(S3ChecksumAlgorithm.CRC32, mpu=False, provide_full_object_checksum=True) def _on_progress_cancel_after_first_chunk(self, progress): self.transferred_len += progress