diff --git a/.github/workflows/ci_examples_python.yml b/.github/workflows/ci_examples_python.yml new file mode 100644 index 000000000..83a79ff3a --- /dev/null +++ b/.github/workflows/ci_examples_python.yml @@ -0,0 +1,98 @@ +# This workflow performs tests in Python. +name: Python Examples + +on: + workflow_call: + inputs: + dafny: + description: "The Dafny version to run" + required: true + type: string + regenerate-code: + description: "Regenerate code using smithy-dafny" + required: false + default: false + type: boolean + mpl-version: + description: "MPL version to use" + required: false + type: string + mpl-head: + description: "Running on MPL HEAD" + required: false + default: false + type: boolean + +jobs: + testPython: + strategy: + max-parallel: 1 + matrix: + python-version: [3.11, 3.12, 3.13] + os: [macos-13] + runs-on: ${{ matrix.os }} + permissions: + id-token: write + contents: read + steps: + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-region: us-west-2 + role-to-assume: arn:aws:iam::370957321024:role/GitHub-CI-DDBEC-Dafny-Role-us-west-2 + role-session-name: DDBEC-Dafny-Python-Tests + + - uses: actions/checkout@v3 + with: + submodules: recursive + + - name: Setup Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Python ${{ matrix.python-version }} for running tests + run: | + python -m pip install --upgrade pip + pip install --upgrade tox + pip install poetry + + - name: Setup Dafny + uses: ./submodules/MaterialProviders/.github/actions/setup_dafny/ + with: + dafny-version: ${{ inputs.dafny }} + + - name: Update MPL submodule if using MPL HEAD + if: ${{ inputs.mpl-head == true }} + working-directory: submodules/MaterialProviders + run: | + git checkout main + git pull + git submodule update --init --recursive + git rev-parse HEAD + + - name: Install Smithy-Dafny codegen dependencies + uses: ./.github/actions/install_smithy_dafny_codegen_dependencies + + - name: Regenerate code using smithy-dafny if necessary + if: ${{ inputs.regenerate-code }} + uses: ./.github/actions/polymorph_codegen + with: + dafny: ${{ env.DAFNY_VERSION }} + library: DynamoDbEncryption + diff-generated-code: false + update-and-regenerate-mpl: true + + - name: Build and locally deploy dependencies for examples + shell: bash + working-directory: ./DynamoDbEncryption + run: | + make transpile_python + + - name: Test DynamoDbEncryption Examples + working-directory: ./Examples/runtimes/python + run: | + # Run simple examples + tox -e dynamodbencryption + # Run legacy migration examples + tox -e legacymigration diff --git a/.github/workflows/ci_static_analysis_python.yml b/.github/workflows/ci_static_analysis_python.yml new file mode 100644 index 000000000..2559227a6 --- /dev/null +++ b/.github/workflows/ci_static_analysis_python.yml @@ -0,0 +1,79 @@ +# This workflow performs static analysis in Python. 
+name: Python Static Analysis + +on: + workflow_call: + inputs: + regenerate-code: + description: "Regenerate code using smithy-dafny" + required: false + default: false + type: boolean + mpl-version: + description: "MPL version to use" + required: false + type: string + mpl-head: + description: "Running on MPL HEAD" + required: false + default: false + type: boolean + +jobs: + testPython: + strategy: + matrix: + python-version: [3.11] + os: [ubuntu-latest] + runs-on: ${{ matrix.os }} + permissions: + id-token: write + contents: read + steps: + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-region: us-west-2 + role-to-assume: arn:aws:iam::370957321024:role/GitHub-CI-DDBEC-Dafny-Role-us-west-2 + role-session-name: DDBEC-Dafny-Python-Tests + + - uses: actions/checkout@v3 + with: + submodules: recursive + + - name: Setup Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Python ${{ matrix.python-version }} for running tests + run: | + python -m pip install --upgrade pip + pip install --upgrade tox + pip install poetry + + - name: Update MPL submodule if using MPL HEAD + if: ${{ inputs.mpl-head == true }} + working-directory: submodules/MaterialProviders + run: | + git checkout main + git pull + git submodule update --init --recursive + git rev-parse HEAD + + - name: Install Smithy-Dafny codegen dependencies + uses: ./.github/actions/install_smithy_dafny_codegen_dependencies + + - name: Regenerate code using smithy-dafny if necessary + if: ${{ inputs.regenerate-code }} + uses: ./.github/actions/polymorph_codegen + with: + dafny: ${{ env.DAFNY_VERSION }} + library: DynamoDbEncryption + diff-generated-code: false + update-and-regenerate-mpl: true + + - name: Run static analysis + working-directory: ./DynamoDbEncryption/runtimes/python + run: | + tox -e lint-check diff --git a/.github/workflows/ci_test_python.yml b/.github/workflows/ci_test_python.yml new file mode 100644 index 000000000..3d43479e6 --- /dev/null +++ b/.github/workflows/ci_test_python.yml @@ -0,0 +1,132 @@ +# This workflow runs only Dafny-transpiled Python tests. +name: test python + +on: + workflow_call: + inputs: + dafny: + description: "The Dafny version to run" + required: true + type: string + regenerate-code: + description: "Regenerate code using smithy-dafny" + required: false + default: false + type: boolean + mpl-head: + description: "Running on MPL HEAD" + required: false + default: false + type: boolean + +jobs: + testPython: + strategy: + fail-fast: false + matrix: + library: [DynamoDbEncryption] + python-version: ["3.11", "3.12", "3.13"] + os: [ + macos-13, + ubuntu-22.04, + # Dafny-transpiled Python tests use a PYTHONPATH hack that doesn't work on Windows. + # Windows is tested with non-Dafny-transpiled Python tests. 
+ # windows-latest + ] + runs-on: ${{ matrix.os }} + permissions: + id-token: write + contents: read + steps: + - name: Support longpaths on Git checkout + run: | + git config --global core.longpaths true + - uses: actions/checkout@v3 + with: + submodules: recursive + + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Python ${{ matrix.python-version }} for running tests + run: | + python -m pip install --upgrade pip + pip install --upgrade tox + pip install poetry + + - name: Setup Dafny + uses: ./submodules/MaterialProviders/.github/actions/setup_dafny/ + with: + dafny-version: ${{ inputs.dafny }} + + - name: Update MPL submodule if using MPL HEAD + if: ${{ inputs.mpl-head == true }} + working-directory: submodules/MaterialProviders + run: | + git checkout main + git pull + git submodule update --init --recursive + git rev-parse HEAD + + - name: Install Smithy-Dafny codegen dependencies + uses: ./.github/actions/install_smithy_dafny_codegen_dependencies + + - name: Regenerate code using smithy-dafny if necessary + if: ${{ inputs.regenerate-code }} + uses: ./.github/actions/polymorph_codegen + with: + dafny: ${{ env.DAFNY_VERSION }} + library: ${{ matrix.library }} + diff-generated-code: false + update-and-regenerate-mpl: true + + - name: Download Dependencies + working-directory: ./${{ matrix.library }} + run: make setup_python + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-region: us-west-2 + role-to-assume: arn:aws:iam::370957321024:role/GitHub-CI-DDBEC-Dafny-Role-us-west-2 + role-session-name: DDBEC-Dafny-Net-Tests + + - name: Compile ${{ matrix.library }} implementation + shell: bash + working-directory: ./${{ matrix.library }} + run: | + # This works because `node` is installed by default on GHA runners + CORES=$(node -e 'console.log(os.cpus().length)') + make transpile_python CORES=$CORES + + - name: Test ${{ matrix.library }} Dafny-transpiled Python tests + # Dafny-transpiled Python tests use a PYTHONPATH hack that doesn't work on Windows. + # Windows is tested with non-Dafny-transpiled Python tests. + if: ${{ matrix.os != 'windows-latest' }} + working-directory: ./${{ matrix.library }}/runtimes/python + shell: bash + run: | + tox -e dafnytests + + - name: Test ${{ matrix.library }} Python unit tests + working-directory: ./${{ matrix.library }}/runtimes/python + shell: bash + run: | + tox -e unit + + - name: Test ${{ matrix.library }} Python integration tests + working-directory: ./${{ matrix.library }}/runtimes/python + shell: bash + run: | + tox -e integ + tox -e legacyinteg + + - name: Test ${{ matrix.library }} Python coverage + working-directory: ./${{ matrix.library }}/runtimes/python + shell: bash + run: | + tox -e encrypted-interface-coverage + tox -e client-to-resource-conversions-coverage + tox -e resource-to-client-conversions-coverage diff --git a/.github/workflows/ci_test_vector_python.yml b/.github/workflows/ci_test_vector_python.yml new file mode 100644 index 000000000..f525fdaec --- /dev/null +++ b/.github/workflows/ci_test_vector_python.yml @@ -0,0 +1,111 @@ +# This workflow performs test vectors in Python. 
+name: Library Python Test Vectors + +on: + workflow_call: + inputs: + dafny: + description: "The Dafny version to run" + required: true + type: string + regenerate-code: + description: "Regenerate code using smithy-dafny" + required: false + default: false + type: boolean + mpl-version: + description: "MPL version to use" + required: false + type: string + mpl-head: + description: "Running on MPL HEAD" + required: false + default: false + type: boolean + +jobs: + testPython: + strategy: + fail-fast: false + matrix: + library: [TestVectors] + python-version: [3.11, 3.12, 3.13] + # Only ubuntu for now; + # As of 4.10.1, Dafny's Python JSON processing is still very slow (1 hour to run test vectors on ubuntu) + # and Github's macOS runners are also very slow (~2x slower than ubuntu). + # If Dafny's JSON processing speed is improved, we can add macOS back. + os: [ubuntu-22.04] + interface: [client, resource, table] + runs-on: ${{ matrix.os }} + permissions: + id-token: write + contents: read + steps: + - name: Setup DynamoDB Local + uses: rrainn/dynamodb-action@v4.0.0 + with: + port: 8000 + cors: "*" + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-region: us-west-2 + role-to-assume: arn:aws:iam::370957321024:role/GitHub-CI-DDBEC-Dafny-Role-us-west-2 + role-session-name: DDBEC-Dafny-Python-Tests + role-duration-seconds: 7200 + + - uses: actions/checkout@v3 + with: + submodules: recursive + + - name: Setup Dafny + uses: ./submodules/MaterialProviders/.github/actions/setup_dafny/ + with: + dafny-version: ${{ inputs.dafny }} + + - name: Update MPL submodule if using MPL HEAD + if: ${{ inputs.mpl-head == true }} + working-directory: submodules/MaterialProviders + run: | + git checkout main + git pull + git submodule update --init --recursive + git rev-parse HEAD + + - name: Install Smithy-Dafny codegen dependencies + uses: ./.github/actions/install_smithy_dafny_codegen_dependencies + + - name: Regenerate code using smithy-dafny if necessary + if: ${{ inputs.regenerate-code }} + uses: ./.github/actions/polymorph_codegen + with: + dafny: ${{ env.DAFNY_VERSION }} + library: ${{ matrix.library }} + diff-generated-code: false + update-and-regenerate-mpl: true + + - name: Setup Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Python ${{ matrix.python-version }} for running tests + run: | + python -m pip install --upgrade pip + pip install --upgrade tox + pip install poetry + + - name: Build Python TestVectors implementation + shell: bash + working-directory: ${{matrix.library}} + run: | + # This works because `node` is installed by default on GHA runners + CORES=$(node -e 'console.log(os.cpus().length)') + make transpile_python CORES=$CORES + + - name: Test Python TestVectors with ${{matrix.interface}} interface + working-directory: ${{matrix.library}} + run: | + cp runtimes/java/*.json runtimes/python + make test_python_${{matrix.interface}}_interface diff --git a/.github/workflows/daily_ci.yml b/.github/workflows/daily_ci.yml index ed5cf7e78..a00038ac5 100644 --- a/.github/workflows/daily_ci.yml +++ b/.github/workflows/daily_ci.yml @@ -61,6 +61,26 @@ jobs: uses: ./.github/workflows/library_rust_tests.yml with: dafny: ${{needs.getVersion.outputs.version}} + daily-ci-python: + needs: getVersion + uses: ./.github/workflows/ci_test_python.yml + with: + dafny: ${{needs.getVersion.outputs.version}} + daily-ci-python-examples: + needs: getVersion + uses: 
./.github/workflows/ci_examples_python.yml + with: + dafny: ${{needs.getVersion.outputs.version}} + daily-ci-python-test-vectors: + needs: getVersion + uses: ./.github/workflows/ci_test_vector_python.yml + with: + dafny: ${{needs.getVersion.outputs.version}} + daily-ci-python-static-analysis: + needs: getVersion + uses: ./.github/workflows/ci_static_analysis_python.yml + with: + dafny: ${{needs.getVersion.outputs.version}} daily-ci-net-test-vectors: needs: getVersion uses: ./.github/workflows/ci_test_vector_net.yml diff --git a/.github/workflows/manual.yml b/.github/workflows/manual.yml index 8679b4f30..ca703d850 100644 --- a/.github/workflows/manual.yml +++ b/.github/workflows/manual.yml @@ -57,6 +57,26 @@ jobs: with: dafny: ${{ inputs.dafny }} regenerate-code: ${{ inputs.regenerate-code }} + manual-ci-python: + uses: ./.github/workflows/ci_test_python.yml + with: + dafny: ${{ inputs.dafny }} + regenerate-code: ${{ inputs.regenerate-code }} + manual-ci-python-examples: + uses: ./.github/workflows/ci_examples_python.yml + with: + dafny: ${{ inputs.dafny }} + regenerate-code: ${{ inputs.regenerate-code }} + manual-ci-python-test-vectors: + uses: ./.github/workflows/ci_test_vector_python.yml + with: + dafny: ${{ inputs.dafny }} + regenerate-code: ${{ inputs.regenerate-code }} + manual-ci-python-static-analysis: + uses: ./.github/workflows/ci_static_analysis_python.yml + with: + dafny: ${{ inputs.dafny }} + regenerate-code: ${{ inputs.regenerate-code }} manual-ci-net-test-vectors: uses: ./.github/workflows/ci_test_vector_net.yml with: diff --git a/.github/workflows/mpl-head.yml b/.github/workflows/mpl-head.yml index 6e2e06234..fffa31e0d 100644 --- a/.github/workflows/mpl-head.yml +++ b/.github/workflows/mpl-head.yml @@ -73,6 +73,18 @@ jobs: with: dafny: ${{needs.getVersion.outputs.version}} mpl-head: true + mpl-head-ci-python: + needs: getVersion + uses: ./.github/workflows/ci_test_python.yml + with: + dafny: ${{needs.getVersion.outputs.version}} + mpl-head: true + mpl-head-ci-python-examples: + needs: getVersion + uses: ./.github/workflows/ci_examples_python.yml + with: + dafny: ${{needs.getVersion.outputs.version}} + mpl-head: true mpl-head-ci-net-test-vectors: needs: getVersion uses: ./.github/workflows/ci_test_vector_net.yml diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index eb4bc6e20..ba72181f4 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -58,6 +58,18 @@ jobs: with: dafny: "nightly-latest" regenerate-code: true + dafny-nightly-python: + if: github.event_name != 'schedule' || github.repository_owner == 'aws' + uses: ./.github/workflows/ci_test_python.yml + with: + dafny: "nightly-latest" + regenerate-code: true + dafny-nightly-python-test-vectors: + if: github.event_name != 'schedule' || github.repository_owner == 'aws' + uses: ./.github/workflows/ci_test_vector_python.yml + with: + dafny: "nightly-latest" + regenerate-code: true dafny-nightly-test-vectors-net: if: github.event_name != 'schedule' || github.repository_owner == 'aws' uses: ./.github/workflows/ci_test_vector_net.yml diff --git a/.github/workflows/pull.yml b/.github/workflows/pull.yml index 3f237ee0d..e9e237653 100644 --- a/.github/workflows/pull.yml +++ b/.github/workflows/pull.yml @@ -54,6 +54,24 @@ jobs: uses: ./.github/workflows/library_rust_tests.yml with: dafny: ${{needs.getVersion.outputs.version}} + pr-ci-python: + needs: getVersion + uses: ./.github/workflows/ci_test_python.yml + with: + dafny: ${{needs.getVersion.outputs.version}} + 
pr-ci-python-testvectors: + needs: getVersion + uses: ./.github/workflows/ci_test_vector_python.yml + with: + dafny: ${{needs.getVersion.outputs.version}} + pr-ci-python-examples: + needs: getVersion + uses: ./.github/workflows/ci_examples_python.yml + with: + dafny: ${{needs.getVersion.outputs.version}} + pr-ci-python-static-analysis: + needs: getVersion + uses: ./.github/workflows/ci_static_analysis_python.yml pr-ci-net-test-vectors: needs: getVersion uses: ./.github/workflows/ci_test_vector_net.yml @@ -80,6 +98,10 @@ jobs: - pr-ci-rust - pr-ci-net-test-vectors - pr-ci-net-examples + - pr-ci-python + - pr-ci-python-testvectors + - pr-ci-python-examples + - pr-ci-python-static-analysis runs-on: ubuntu-22.04 steps: - name: Verify all required jobs passed diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 9e49cf133..b3f66e2d2 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -56,6 +56,24 @@ jobs: uses: ./.github/workflows/library_rust_tests.yml with: dafny: ${{needs.getVersion.outputs.version}} + pr-ci-python: + needs: getVersion + uses: ./.github/workflows/ci_test_python.yml + with: + dafny: ${{needs.getVersion.outputs.version}} + pr-ci-python-examples: + needs: getVersion + uses: ./.github/workflows/ci_examples_python.yml + with: + dafny: ${{needs.getVersion.outputs.version}} + pr-ci-python-static-analysis: + needs: getVersion + uses: ./.github/workflows/ci_static_analysis_python.yml + pr-ci-python-test-vectors: + needs: getVersion + uses: ./.github/workflows/ci_test_vector_python.yml + with: + dafny: ${{needs.getVersion.outputs.version}} pr-ci-net-test-vectors: needs: getVersion uses: ./.github/workflows/ci_test_vector_net.yml diff --git a/.gitignore b/.gitignore index 4c01b8aed..7a03a225a 100644 --- a/.gitignore +++ b/.gitignore @@ -18,4 +18,9 @@ specification_compliance_report.html /.smithy.lsp.log # logs -*.log \ No newline at end of file +*.log + +# Performance testing artifacts +*.png +*.dot +*.prof \ No newline at end of file diff --git a/DynamoDbEncryption/Makefile b/DynamoDbEncryption/Makefile index ec52ca43d..65cc8292d 100644 --- a/DynamoDbEncryption/Makefile +++ b/DynamoDbEncryption/Makefile @@ -3,6 +3,7 @@ CORES=2 +ENABLE_EXTERN_PROCESSING=1 TRANSPILE_TESTS_IN_RUST=1 include ../SharedMakefile.mk @@ -99,3 +100,94 @@ SERVICE_DEPS_DynamoDbEncryptionTransforms := \ DynamoDbEncryption/dafny/DynamoDbEncryption \ DynamoDbEncryption/dafny/StructuredEncryption \ DynamoDbEncryption/dafny/DynamoDbItemEncryptor + +# Python + +PYTHON_MODULE_NAME=aws_dbesdk_dynamodb + +TRANSLATION_RECORD_PYTHON := \ + --translation-record ../submodules/MaterialProviders/StandardLibrary/runtimes/python/src/smithy_dafny_standard_library/internaldafny/generated/dafny_src-py.dtr \ + --translation-record ../submodules/MaterialProviders/ComAmazonawsKms/runtimes/python/src/aws_cryptography_internal_kms/internaldafny/generated/dafny_src-py.dtr \ + --translation-record ../submodules/MaterialProviders/ComAmazonawsDynamodb/runtimes/python/src/aws_cryptography_internal_dynamodb/internaldafny/generated/dafny_src-py.dtr \ + --translation-record ../submodules/MaterialProviders/AwsCryptographyPrimitives/runtimes/python/src/aws_cryptography_primitives/internaldafny/generated/dafny_src-py.dtr \ + --translation-record ../submodules/MaterialProviders/AwsCryptographicMaterialProviders/runtimes/python/src/aws_cryptographic_material_providers/internaldafny/generated/dafny_src-py.dtr + +PYTHON_DEPENDENCY_MODULE_NAMES := \ + 
--dependency-library-name=aws.cryptography.primitives=aws_cryptography_primitives \ + --dependency-library-name=com.amazonaws.kms=aws_cryptography_internal_kms \ + --dependency-library-name=com.amazonaws.dynamodb=aws_cryptography_internal_dynamodb \ + --dependency-library-name=aws.cryptography.materialProviders=aws_cryptographic_material_providers \ + --dependency-library-name=aws.cryptography.keyStore=aws_cryptographic_material_providers \ + --dependency-library-name=aws.cryptography.dbEncryptionSdk.structuredEncryption=aws_dbesdk_dynamodb \ + --dependency-library-name=aws.cryptography.dbEncryptionSdk.dynamoDb=aws_dbesdk_dynamodb \ + --dependency-library-name=aws.cryptography.dbEncryptionSdk.dynamoDb.itemEncryptor=aws_dbesdk_dynamodb \ + --dependency-library-name=aws.cryptography.dbEncryptionSdk.dynamoDb.transforms=aws_dbesdk_dynamodb \ + +# Override default test_python to run tox environment for Dafny tests +test_python: + rm -rf runtimes/python/.tox + python3 -m tox -c runtimes/python -e dafnytests --verbose + +# Constants for languages that drop extern names (Python, Go) + +DYNAMODB_TYPES_FILE_PATH=dafny/DynamoDbEncryption/Model/AwsCryptographyDbEncryptionSdkDynamoDbTypes.dfy +DYNAMODB_TYPES_FILE_WITH_EXTERN_STRING="module {:extern \"software.amazon.cryptography.dbencryptionsdk.dynamodb.internaldafny.types\" } AwsCryptographyDbEncryptionSdkDynamoDbTypes" +DYNAMODB_TYPES_FILE_WITHOUT_EXTERN_STRING="module AwsCryptographyDbEncryptionSdkDynamoDbTypes" + +DYNAMODB_INDEX_FILE_PATH=dafny/DynamoDbEncryption/src/Index.dfy +DYNAMODB_INDEX_FILE_WITH_EXTERN_STRING="module {:extern \"software.amazon.cryptography.dbencryptionsdk.dynamodb.internaldafny\" } DynamoDbEncryption" +DYNAMODB_INDEX_FILE_WITHOUT_EXTERN_STRING="module DynamoDbEncryption" + +ITEMENCRYPTOR_TYPES_FILE_PATH=dafny/DynamoDbItemEncryptor/Model/AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes.dfy +ITEMENCRYPTOR_TYPES_FILE_WITH_EXTERN_STRING="module {:extern \"software.amazon.cryptography.dbencryptionsdk.dynamodb.itemencryptor.internaldafny.types\" } AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes" +ITEMENCRYPTOR_TYPES_FILE_WITHOUT_EXTERN_STRING="module AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes" + +ITEMENCRYPTOR_INDEX_FILE_PATH=dafny/DynamoDbItemEncryptor/src/Index.dfy +ITEMENCRYPTOR_INDEX_FILE_WITH_EXTERN_STRING="module {:extern \"software.amazon.cryptography.dbencryptionsdk.dynamodb.itemencryptor.internaldafny\" } DynamoDbItemEncryptor" +ITEMENCRYPTOR_INDEX_FILE_WITHOUT_EXTERN_STRING="module DynamoDbItemEncryptor" + +ITEMENCRYPTOR_LEGACY_FILE_PATH=dafny/DynamoDbItemEncryptor/src/InternalLegacyOverride.dfy +ITEMENCRYPTOR_LEGACY_FILE_WITH_EXTERN_STRING="module {:extern \"software.amazon.cryptography.dbencryptionsdk.dynamodb.itemencryptor.internaldafny.legacy\"} InternalLegacyOverride {" +ITEMENCRYPTOR_LEGACY_FILE_WITHOUT_EXTERN_STRING="module InternalLegacyOverride {" + +TRANSFORMS_TYPES_FILE_PATH=dafny/DynamoDbEncryptionTransforms/Model/AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes.dfy +TRANSFORMS_TYPES_FILE_WITH_EXTERN_STRING="module {:extern \"software.amazon.cryptography.dbencryptionsdk.dynamodb.transforms.internaldafny.types\" } AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes" +TRANSFORMS_TYPES_FILE_WITHOUT_EXTERN_STRING="module AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes" + +TRANSFORMS_INDEX_FILE_PATH=dafny/DynamoDbEncryptionTransforms/src/Index.dfy +TRANSFORMS_INDEX_FILE_WITH_EXTERN_STRING="module {:extern 
\"software.amazon.cryptography.dbencryptionsdk.dynamodb.transforms.internaldafny\" } DynamoDbEncryptionTransforms" +TRANSFORMS_INDEX_FILE_WITHOUT_EXTERN_STRING="module DynamoDbEncryptionTransforms" + +STRUCTUREDENCRYPTION_TYPES_FILE_PATH=dafny/StructuredEncryption/Model/AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes.dfy +STRUCTUREDENCRYPTION_TYPES_FILE_WITH_EXTERN_STRING="module {:extern \"software.amazon.cryptography.dbencryptionsdk.structuredencryption.internaldafny.types\" } AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes" +STRUCTUREDENCRYPTION_TYPES_FILE_WITHOUT_EXTERN_STRING="module AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes" + +STRUCTUREDENCRYPTION_INDEX_FILE_PATH=dafny/StructuredEncryption/src/Index.dfy +STRUCTUREDENCRYPTION_INDEX_FILE_WITH_EXTERN_STRING="module {:extern \"software.amazon.cryptography.dbencryptionsdk.structuredencryption.internaldafny\" } StructuredEncryption" +STRUCTUREDENCRYPTION_INDEX_FILE_WITHOUT_EXTERN_STRING="module StructuredEncryption" + +_sed_types_file_remove_extern: + $(MAKE) _sed_file SED_FILE_PATH=$(DYNAMODB_TYPES_FILE_PATH) SED_BEFORE_STRING=$(DYNAMODB_TYPES_FILE_WITH_EXTERN_STRING) SED_AFTER_STRING=$(DYNAMODB_TYPES_FILE_WITHOUT_EXTERN_STRING) + $(MAKE) _sed_file SED_FILE_PATH=$(ITEMENCRYPTOR_TYPES_FILE_PATH) SED_BEFORE_STRING=$(ITEMENCRYPTOR_TYPES_FILE_WITH_EXTERN_STRING) SED_AFTER_STRING=$(ITEMENCRYPTOR_TYPES_FILE_WITHOUT_EXTERN_STRING) + $(MAKE) _sed_file SED_FILE_PATH=$(TRANSFORMS_TYPES_FILE_PATH) SED_BEFORE_STRING=$(TRANSFORMS_TYPES_FILE_WITH_EXTERN_STRING) SED_AFTER_STRING=$(TRANSFORMS_TYPES_FILE_WITHOUT_EXTERN_STRING) + $(MAKE) _sed_file SED_FILE_PATH=$(STRUCTUREDENCRYPTION_TYPES_FILE_PATH) SED_BEFORE_STRING=$(STRUCTUREDENCRYPTION_TYPES_FILE_WITH_EXTERN_STRING) SED_AFTER_STRING=$(STRUCTUREDENCRYPTION_TYPES_FILE_WITHOUT_EXTERN_STRING) + +_sed_index_file_remove_extern: + $(MAKE) _sed_file SED_FILE_PATH=$(DYNAMODB_INDEX_FILE_PATH) SED_BEFORE_STRING=$(DYNAMODB_INDEX_FILE_WITH_EXTERN_STRING) SED_AFTER_STRING=$(DYNAMODB_INDEX_FILE_WITHOUT_EXTERN_STRING) + $(MAKE) _sed_file SED_FILE_PATH=$(ITEMENCRYPTOR_INDEX_FILE_PATH) SED_BEFORE_STRING=$(ITEMENCRYPTOR_INDEX_FILE_WITH_EXTERN_STRING) SED_AFTER_STRING=$(ITEMENCRYPTOR_INDEX_FILE_WITHOUT_EXTERN_STRING) + $(MAKE) _sed_file SED_FILE_PATH=$(ITEMENCRYPTOR_LEGACY_FILE_PATH) SED_BEFORE_STRING=$(ITEMENCRYPTOR_LEGACY_FILE_WITH_EXTERN_STRING) SED_AFTER_STRING=$(ITEMENCRYPTOR_LEGACY_FILE_WITHOUT_EXTERN_STRING) + $(MAKE) _sed_file SED_FILE_PATH=$(TRANSFORMS_INDEX_FILE_PATH) SED_BEFORE_STRING=$(TRANSFORMS_INDEX_FILE_WITH_EXTERN_STRING) SED_AFTER_STRING=$(TRANSFORMS_INDEX_FILE_WITHOUT_EXTERN_STRING) + $(MAKE) _sed_file SED_FILE_PATH=$(STRUCTUREDENCRYPTION_INDEX_FILE_PATH) SED_BEFORE_STRING=$(STRUCTUREDENCRYPTION_INDEX_FILE_WITH_EXTERN_STRING) SED_AFTER_STRING=$(STRUCTUREDENCRYPTION_INDEX_FILE_WITHOUT_EXTERN_STRING) + +_sed_types_file_add_extern: + $(MAKE) _sed_file SED_FILE_PATH=$(DYNAMODB_TYPES_FILE_PATH) SED_BEFORE_STRING=$(DYNAMODB_TYPES_FILE_WITHOUT_EXTERN_STRING) SED_AFTER_STRING=$(DYNAMODB_TYPES_FILE_WITH_EXTERN_STRING) + $(MAKE) _sed_file SED_FILE_PATH=$(ITEMENCRYPTOR_TYPES_FILE_PATH) SED_BEFORE_STRING=$(ITEMENCRYPTOR_TYPES_FILE_WITHOUT_EXTERN_STRING) SED_AFTER_STRING=$(ITEMENCRYPTOR_TYPES_FILE_WITH_EXTERN_STRING) + $(MAKE) _sed_file SED_FILE_PATH=$(TRANSFORMS_TYPES_FILE_PATH) SED_BEFORE_STRING=$(TRANSFORMS_TYPES_FILE_WITHOUT_EXTERN_STRING) SED_AFTER_STRING=$(TRANSFORMS_TYPES_FILE_WITH_EXTERN_STRING) + $(MAKE) _sed_file SED_FILE_PATH=$(STRUCTUREDENCRYPTION_TYPES_FILE_PATH) 
SED_BEFORE_STRING=$(STRUCTUREDENCRYPTION_TYPES_FILE_WITHOUT_EXTERN_STRING) SED_AFTER_STRING=$(STRUCTUREDENCRYPTION_TYPES_FILE_WITH_EXTERN_STRING) + +_sed_index_file_add_extern: + $(MAKE) _sed_file SED_FILE_PATH=$(DYNAMODB_INDEX_FILE_PATH) SED_BEFORE_STRING=$(DYNAMODB_INDEX_FILE_WITHOUT_EXTERN_STRING) SED_AFTER_STRING=$(DYNAMODB_INDEX_FILE_WITH_EXTERN_STRING) + $(MAKE) _sed_file SED_FILE_PATH=$(ITEMENCRYPTOR_INDEX_FILE_PATH) SED_BEFORE_STRING=$(ITEMENCRYPTOR_INDEX_FILE_WITHOUT_EXTERN_STRING) SED_AFTER_STRING=$(ITEMENCRYPTOR_INDEX_FILE_WITH_EXTERN_STRING) + $(MAKE) _sed_file SED_FILE_PATH=$(ITEMENCRYPTOR_LEGACY_FILE_PATH) SED_BEFORE_STRING=$(ITEMENCRYPTOR_LEGACY_FILE_WITHOUT_EXTERN_STRING) SED_AFTER_STRING=$(ITEMENCRYPTOR_LEGACY_FILE_WITH_EXTERN_STRING) + $(MAKE) _sed_file SED_FILE_PATH=$(TRANSFORMS_INDEX_FILE_PATH) SED_BEFORE_STRING=$(TRANSFORMS_INDEX_FILE_WITHOUT_EXTERN_STRING) SED_AFTER_STRING=$(TRANSFORMS_INDEX_FILE_WITH_EXTERN_STRING) + $(MAKE) _sed_file SED_FILE_PATH=$(STRUCTUREDENCRYPTION_INDEX_FILE_PATH) SED_BEFORE_STRING=$(STRUCTUREDENCRYPTION_INDEX_FILE_WITHOUT_EXTERN_STRING) SED_AFTER_STRING=$(STRUCTUREDENCRYPTION_INDEX_FILE_WITH_EXTERN_STRING) diff --git a/DynamoDbEncryption/dafny/DynamoDbEncryption/src/Index.dfy b/DynamoDbEncryption/dafny/DynamoDbEncryption/src/Index.dfy index 8faa05326..13624c005 100644 --- a/DynamoDbEncryption/dafny/DynamoDbEncryption/src/Index.dfy +++ b/DynamoDbEncryption/dafny/DynamoDbEncryption/src/Index.dfy @@ -16,9 +16,8 @@ include "UpdateExpr.dfy" include "Util.dfy" include "Virtual.dfy" -module - {:extern "software.amazon.cryptography.dbencryptionsdk.dynamodb.internaldafny" } - DynamoDbEncryption refines AbstractAwsCryptographyDbEncryptionSdkDynamoDbService +module {:extern "software.amazon.cryptography.dbencryptionsdk.dynamodb.internaldafny" } DynamoDbEncryption + refines AbstractAwsCryptographyDbEncryptionSdkDynamoDbService { import Operations = AwsCryptographyDbEncryptionSdkDynamoDbOperations diff --git a/DynamoDbEncryption/dafny/DynamoDbEncryption/test/DynamoToStruct.dfy b/DynamoDbEncryption/dafny/DynamoDbEncryption/test/DynamoToStruct.dfy index 223678b96..ec8ce9dcc 100644 --- a/DynamoDbEncryption/dafny/DynamoDbEncryption/test/DynamoToStruct.dfy +++ b/DynamoDbEncryption/dafny/DynamoDbEncryption/test/DynamoToStruct.dfy @@ -376,7 +376,8 @@ module DynamoToStructTest { //= type=test //# Entries in a String Set MUST be ordered in ascending [UTF-16 binary order](./string-ordering.md#utf-16-binary-order). method {:test} TestSortSSAttr() { - var stringSetValue := AttributeValue.SS(["&","。","𐀂"]); + // "\ud800\udc02" <-> "𐀂" + var stringSetValue := AttributeValue.SS(["&","。","\ud800\udc02"]); // Note that string values are UTF-8 encoded, but sorted by UTF-16 encoding. 
var encodedStringSetData := StructuredDataTerminal(value := [ 0,0,0,3, // 3 entries in set @@ -395,7 +396,8 @@ module DynamoToStructTest { var newStringSetValue := StructuredToAttr(encodedStringSetData); expect newStringSetValue.Success?; - expect newStringSetValue.value == AttributeValue.SS(["&","𐀂","。"]); + // "\ud800\udc02" <-> "𐀂" + expect newStringSetValue.value == AttributeValue.SS(["&","\ud800\udc02","。"]); } //= specification/dynamodb-encryption-client/ddb-attribute-serialization.md#set-entries @@ -415,11 +417,13 @@ module DynamoToStructTest { method {:test} TestSetsInListAreSorted() { var nSetValue := AttributeValue.NS(["2","1","10"]); - var sSetValue := AttributeValue.SS(["&","。","𐀂"]); + // "\ud800\udc02" <-> "𐀂" + var sSetValue := AttributeValue.SS(["&","。","\ud800\udc02"]); var bSetValue := AttributeValue.BS([[1,0],[1],[2]]); var sortedNSetValue := AttributeValue.NS(["1","10","2"]); - var sortedSSetValue := AttributeValue.SS(["&","𐀂","。"]); + // "\ud800\udc02" <-> "𐀂" + var sortedSSetValue := AttributeValue.SS(["&","\ud800\udc02","。"]); var sortedBSetValue := AttributeValue.BS([[1],[1,0],[2]]); var listValue := AttributeValue.L([nSetValue, sSetValue, bSetValue]); @@ -444,11 +448,13 @@ module DynamoToStructTest { method {:test} TestSetsInMapAreSorted() { var nSetValue := AttributeValue.NS(["2","1","10"]); - var sSetValue := AttributeValue.SS(["&","。","𐀂"]); + // "\ud800\udc02" <-> "𐀂" + var sSetValue := AttributeValue.SS(["&","。","\ud800\udc02"]); var bSetValue := AttributeValue.BS([[1,0],[1],[2]]); var sortedNSetValue := AttributeValue.NS(["1","10","2"]); - var sortedSSetValue := AttributeValue.SS(["&","𐀂","。"]); + // "\ud800\udc02" <-> "𐀂" + var sortedSSetValue := AttributeValue.SS(["&","\ud800\udc02","。"]); var sortedBSetValue := AttributeValue.BS([[1],[1,0],[2]]); var mapValue := AttributeValue.M(map["keyA" := sSetValue, "keyB" := nSetValue, "keyC" := bSetValue]); @@ -490,7 +496,8 @@ module DynamoToStructTest { method {:test} TestSortMapKeys() { var nullValue := AttributeValue.NULL(true); - var mapValue := AttributeValue.M(map["&" := nullValue, "。" := nullValue, "𐀂" := nullValue]); + // "\ud800\udc02" <-> "𐀂" + var mapValue := AttributeValue.M(map["&" := nullValue, "。" := nullValue, "\ud800\udc02" := nullValue]); // Note that the string values are encoded as UTF-8, but are sorted according to UTF-16 encoding. 
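To make the note above concrete, the character written as the surrogate pair "\ud800\udc02" in these tests is U+10002, which has different byte representations under the two encodings the comment distinguishes. A small illustrative Python sketch (not part of the test suite):

```python
# U+10002 -- the character the Dafny tests spell as the surrogate pair "\ud800\udc02".
ch = "\U00010002"

# Serialized form: attribute values are stored as UTF-8 bytes.
print(ch.encode("utf-8").hex())      # f0908082

# Sort key: set entries and map keys are ordered by their UTF-16 code units,
# i.e. the surrogate pair 0xD800 0xDC02 for this character.
print(ch.encode("utf-16-be").hex())  # d800dc02
```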
var encodedMapData := StructuredDataTerminal( diff --git a/DynamoDbEncryption/dafny/DynamoDbEncryptionTransforms/src/Index.dfy b/DynamoDbEncryption/dafny/DynamoDbEncryptionTransforms/src/Index.dfy index 83e19fb0a..b82f33140 100644 --- a/DynamoDbEncryption/dafny/DynamoDbEncryptionTransforms/src/Index.dfy +++ b/DynamoDbEncryption/dafny/DynamoDbEncryptionTransforms/src/Index.dfy @@ -5,9 +5,8 @@ include "DdbMiddlewareConfig.dfy" include "AwsCryptographyDbEncryptionSdkDynamoDbTransformsOperations.dfy" include "../../DynamoDbEncryption/src/ConfigToInfo.dfy" -module - {:extern "software.amazon.cryptography.dbencryptionsdk.dynamodb.transforms.internaldafny" } - DynamoDbEncryptionTransforms refines AbstractAwsCryptographyDbEncryptionSdkDynamoDbTransformsService +module {:extern "software.amazon.cryptography.dbencryptionsdk.dynamodb.transforms.internaldafny" } DynamoDbEncryptionTransforms + refines AbstractAwsCryptographyDbEncryptionSdkDynamoDbTransformsService { import opened DdbMiddlewareConfig import opened StandardLibrary diff --git a/DynamoDbEncryption/dafny/DynamoDbItemEncryptor/src/Index.dfy b/DynamoDbEncryption/dafny/DynamoDbItemEncryptor/src/Index.dfy index 1ee469bab..4c207afad 100644 --- a/DynamoDbEncryption/dafny/DynamoDbItemEncryptor/src/Index.dfy +++ b/DynamoDbEncryption/dafny/DynamoDbItemEncryptor/src/Index.dfy @@ -4,9 +4,8 @@ include "AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorOperations.dfy" include "Util.dfy" -module - {:extern "software.amazon.cryptography.dbencryptionsdk.dynamodb.itemencryptor.internaldafny" } - DynamoDbItemEncryptor refines AbstractAwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorService +module {:extern "software.amazon.cryptography.dbencryptionsdk.dynamodb.itemencryptor.internaldafny" } DynamoDbItemEncryptor + refines AbstractAwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorService { import opened DynamoDbItemEncryptorUtil import StructuredEncryption diff --git a/DynamoDbEncryption/dafny/StructuredEncryption/src/Index.dfy b/DynamoDbEncryption/dafny/StructuredEncryption/src/Index.dfy index 19533014a..5b6d2271b 100644 --- a/DynamoDbEncryption/dafny/StructuredEncryption/src/Index.dfy +++ b/DynamoDbEncryption/dafny/StructuredEncryption/src/Index.dfy @@ -3,9 +3,8 @@ include "AwsCryptographyDbEncryptionSdkStructuredEncryptionOperations.dfy" -module - {:extern "software.amazon.cryptography.dbencryptionsdk.structuredencryption.internaldafny" } - StructuredEncryption refines AbstractAwsCryptographyDbEncryptionSdkStructuredEncryptionService +module {:extern "software.amazon.cryptography.dbencryptionsdk.structuredencryption.internaldafny" } StructuredEncryption + refines AbstractAwsCryptographyDbEncryptionSdkStructuredEncryptionService { import Operations = AwsCryptographyDbEncryptionSdkStructuredEncryptionOperations diff --git a/DynamoDbEncryption/runtimes/python/.gitignore b/DynamoDbEncryption/runtimes/python/.gitignore new file mode 100644 index 000000000..aaf44d4cd --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/.gitignore @@ -0,0 +1,17 @@ +# Python build artifacts +__pycache__ +**/__pycache__ +*.pyc +src/**.egg-info/ +build +poetry.lock +**/poetry.lock +dist + +# Dafny-generated Python +**/internaldafny/generated + +# Python test artifacts +.tox +.pytest_cache + diff --git a/DynamoDbEncryption/runtimes/python/.readthedocs.yaml b/DynamoDbEncryption/runtimes/python/.readthedocs.yaml new file mode 100644 index 000000000..846d4d760 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/.readthedocs.yaml @@ -0,0 +1,38 @@ +# .readthedocs.yaml +# Read the 
Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the OS, Python version and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.11" + jobs: + post_create_environment: + # Install poetry + # https://python-poetry.org/docs/#installing-manually + - pip install poetry + # Get Dafny. + # readthedocs executes each command in a new shell process, so exported variables aren't persisted between commands. + # Any command that relies on exported variables needs to be executed in one command. + - export dafnyVersion=$(grep '^dafnyVersion=' project.properties | cut -d '=' -f 2) && curl https://github.com/dafny-lang/dafny/releases/download/v$dafnyVersion/dafny-$dafnyVersion-x64-ubuntu-20.04.zip -L -o dafny.zip + - unzip -qq dafny.zip && rm dafny.zip + - export PATH="$PWD/dafny:$PATH" && make transpile_python -C DynamoDbEncryption + post_install: + # Install project with 'docs' dependency group + # https://python-poetry.org/docs/managing-dependencies/#dependency-groups + # VIRTUAL_ENV needs to be set manually for now. + # See https://github.com/readthedocs/readthedocs.org/pull/11152/ + - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH poetry install --directory DynamoDbEncryption/runtimes/python --with docs + +# Build documentation in the doc/ directory with Sphinx +sphinx: + configuration: DynamoDbEncryption/runtimes/python/docs/conf.py + +# Need all submodules to transpile +submodules: + include: all + recursive: true diff --git a/DynamoDbEncryption/runtimes/python/README.md b/DynamoDbEncryption/runtimes/python/README.md new file mode 100644 index 000000000..6f6980e01 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/README.md @@ -0,0 +1,44 @@ +# Python AWS Database Encryption SDK for DynamoDB + +[![MPL-python-tests](https://github.com/aws/aws-database-encryption-sdk-dynamodb/actions/workflows/push.yml/badge.svg)](https://github.com/aws/aws-database-encryption-sdk-dynamodb/actions/workflows/push.yml) +[![Code style: black](https://img.shields.io/badge/code_style-black-000000.svg)](https://github.com/ambv/black) +[![Documentation Status](https://readthedocs.org/projects/aws-dbesdk-dynamodb-python/badge/)](https://aws-dbesdk-dynamodb-python.readthedocs.io/en/latest/) + +This is the official implementation of the AWS Database Encryption SDK for DynamoDB in Python. + +The latest documentation can be found at [Read the Docs](https://aws-dbesdk-dynamodb-python.readthedocs.io/en/latest/). + +Find the source code on [GitHub](https://github.com/aws/aws-database-encryption-sdk-dynamodb). + +## Security + +If you discover a potential security issue in this project +we ask that you notify AWS/Amazon Security via our +[vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). +Please **do not** create a public GitHub issue. + +## Getting Started + +### Required Prerequisites + +- Python 3.11+ +- aws-cryptographic-material-providers 1.10.0+ + +### Installation + +> **Note:** +> If you have not already installed [cryptography](https://cryptography.io/en/latest/), you might need to install additional prerequisites as +> detailed in the [cryptography installation guide](https://cryptography.io/en/latest/installation/) for your operating system. + +```bash +$ pip install aws-dbesdk-dynamodb +``` + +### Concepts + +The AWS Database Encryption SDK for DynamoDB (DBESDK-DynamoDB) is available in multiple languages. 
+The concepts in the Python implementation of the DBESDK-DynamoDB are the same as in other languages. +For more information on concepts in the DBESDK-DynamoDB, see the [README](https://github.com/aws/aws-database-encryption-sdk-dynamodb/blob/main/README.md) for all languages. + +DBESDK-DynamoDB uses cryptographic material providers from the AWS Cryptographic Material Providers Library (MPL). +For more information on the MPL, see its [README](https://github.com/aws/aws-database-encryption-sdk-dynamodb/blob/main/README.md) or [readthedocs](https://aws-cryptographic-material-providers-library.readthedocs.io/en/latest/) page. diff --git a/DynamoDbEncryption/runtimes/python/docs/conf.py b/DynamoDbEncryption/runtimes/python/docs/conf.py new file mode 100644 index 000000000..0a9c1490a --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/docs/conf.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +import os +import re +from datetime import datetime +import toml + +VERSION_RE = re.compile(r"""__version__ = ['"]([0-9.]+)['"]""") +HERE = os.path.abspath(os.path.dirname(__file__)) + + +def get_release(): + with open('../pyproject.toml', 'r') as toml_file: + data = toml.load(toml_file) + return data['tool']['poetry']['version'] + +def get_version(): + """Reads the version (MAJOR.MINOR) from this module.""" + release = get_release() + split_version = release.split(".") + if len(split_version) == 3: + return ".".join(split_version[:2]) + return release + +project = 'aws-dbesdk-dynamodb-python' +version = get_version() +release = get_release() + +# Add any Sphinx extension module names here, as strings. They can be extensions +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.doctest", + "sphinx.ext.intersphinx", + "sphinx.ext.todo", + "sphinx.ext.coverage", + "sphinx.ext.autosummary", + "sphinx.ext.napoleon", +] +napoleon_include_special_with_doc = False + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +source_suffix = ".rst" # The suffix of source filenames. +root_doc = "index" # The master toctree document. + +copyright = u"%s, Amazon" % datetime.now().year + +# List of directories, relative to source directory, that shouldn't be searched +# for source files. +exclude_trees = ["_build"] + +pygments_style = "sphinx" + +autoclass_content = "both" +autodoc_default_options = { + "show-inheritance": True, + "undoc-members": True, + 'special-members': '__init__', + "members": True +} +autodoc_member_order = "bysource" + +html_theme = "sphinx_rtd_theme" +html_static_path = ["_static"] +htmlhelp_basename = "%sdoc" % project + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = {"http://docs.python.org/": None} + +# autosummary +autosummary_generate = True \ No newline at end of file diff --git a/DynamoDbEncryption/runtimes/python/docs/index.rst b/DynamoDbEncryption/runtimes/python/docs/index.rst new file mode 100644 index 000000000..52b759209 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/docs/index.rst @@ -0,0 +1,31 @@ +.. include:: ../README.md + :parser: myst_parser.docutils_ + + +******* +Modules +******* + +.. 
autosummary:: + :toctree: generated + + aws_dbesdk_dynamodb.encrypted.client + aws_dbesdk_dynamodb.encrypted.table + aws_dbesdk_dynamodb.encrypted.item + aws_dbesdk_dynamodb.encrypted.resource + aws_dbesdk_dynamodb.encrypted.paginator + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.config + + +The content below applies to all languages of the AWS DBESDK for DynamoDB. + +---- + +.. include:: ../../../../README.md + :parser: myst_parser.docutils_ + +.. include:: ../../../../CHANGELOG.md + :parser: myst_parser.docutils_ diff --git a/DynamoDbEncryption/runtimes/python/pyproject.toml b/DynamoDbEncryption/runtimes/python/pyproject.toml new file mode 100644 index 000000000..e30d9a1e7 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/pyproject.toml @@ -0,0 +1,112 @@ +[tool.poetry] +name = "aws-dbesdk-dynamodb" +version = "0.1.0" +description = "" +authors = ["AWS Crypto Tools "] +packages = [ + { include = "aws_dbesdk_dynamodb", from = "src" }, +] +# Include all of the following .gitignored files in package distributions, +# even though it is not included in version control +include = ["**/internaldafny/generated/*.py"] + +[tool.poetry.dependencies] +python = "^3.11.0" +aws-cryptographic-material-providers = { path = "../../../submodules/MaterialProviders/AwsCryptographicMaterialProviders/runtimes/python", develop = false} +# Optional dependencies +# Should only include the legacy library if migrating from the legacy library +dynamodb_encryption_sdk = { version = "^3.3.0", optional = true } + +[tool.poetry.extras] +legacy-ddbec = ["dynamodb_encryption_sdk"] + +# Package testing + +[tool.poetry.group.test] +optional = true + +[tool.poetry.group.test.dependencies] +pytest = "^7.4.0" +pytest-cov = "^6" +mock = "^4.0.3" + +# Package release + +[tool.poetry.group.release] +optional = true + +[tool.poetry.group.release.dependencies] +poetry = "1.8.3" +twine = "5.1.1" +wheel = "0.38.4" + +# Package documentation + +[tool.poetry.group.docs] +optional = true + +[tool.poetry.group.docs.dependencies] +toml = "^0.10.2" +myst-parser = "^4" +sphinx = "^7" +sphinx_rtd_theme = "^2" + +# Package linting + +[tool.poetry.group.linting] +optional = true + +[tool.poetry.group.linting.dependencies] +ruff = "^0.11.5" +black = "^25.1.0" + +[tool.ruff] +exclude = [ + # Don't bother linting Dafny-generated code + "internaldafny", + # Don't re-lint Smithy-generated code + "smithygenerated", +] +line-length=120 +indent-width=4 +target-version = "py311" + +[tool.ruff.lint] +# Choose linting tools +select = [ + # pycodestyle: spacing, line length + "E", + # pyflakes: unused imports/variables + "F", + # isort: import sorting + "I", + # pydocstyle: docstring style + "D", +] +# Ignore incompatible linting options +ignore=[ + "D203", # `incorrect-blank-line-before-class`; incompatible with `no-blank-line-before-class` (D211) + "D212", # `multi-line-summary-first-line`; incompatible with `multi-line-summary-second-line` (D213) +] + +[tool.ruff.lint.per-file-ignores] +"src/aws_dbesdk_dynamodb/internal/*" = [ + # Ignore all "public"-related linting errors for internal modules + "D100", "D101", "D102", "D103", "D104", "D105", "D106", "D107", + # Ignore opinionated docstring linting errors for internal 
modules + "D205", "D400", "D401", "D403", "D404", "D415", +] +"test/*" = [ + # Ignore all "public"- and docstring-related linting errors for test modules + "D100", "D101", "D102", "D103", "D104", "D105", "D106", "D107", + # Ignore opinionated docstring linting errors for test modules + "D205", "D400", "D401", "D403", "D404", "D415", +] + +[tool.black] +# Don't bother linting Dafny-generated code; don't re-lint Smithy-generated code +exclude = "/(internaldafny|smithygenerated)(/|$)" + +[build-system] +requires = ["poetry-core<2.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/__init__.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/__init__.py new file mode 100644 index 000000000..df819fb0b --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/__init__.py @@ -0,0 +1,47 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Initialization code for AWS DBESDK for DynamoDB.""" + +# Disable sorting imports; this order initializes code in the required order: generated Dafny, then externs. +# ruff: noqa: I001, F401 +from .internaldafny.generated import module_ +from .internaldafny import extern + +""" +boto3 uses Python's decimal library to deserialize numbers retrieved by resources + (Tables, etc.) from strings to `decimal.Decimal`s. +boto3 deserializes strings to Decimals according to its DYNAMODB_CONTEXT: +https://github.com/boto/boto3/blob/develop/boto3/dynamodb/types.py#L37-L42 + +From the link above, boto3 is configured to raise an exception +if the deserialization is "Rounded": (`traps: [.. Rounded]`). +Documentation: https://docs.python.org/3/library/decimal.html#decimal.Rounded +From the link above, "Rounded" means some digits were discarded. +However, those digits may have been 0, and no information is lost. + +boto3 is also configured to raise an exception if the deserialization is "Inexact": +https://docs.python.org/3/library/decimal.html#decimal.Inexact +"Inexact" means non-zero digits are discarded, and the result is inexact. + +Other DBESDK DynamoDB runtimes treat "Rounded" as acceptable, but "Inexact" as unacceptable. +By default, boto3 will treat both "Rounded" and "Inexact" as unacceptable. + +For DBESDK DynamoDB, change the DynamoDB context to treat "Rounded" as acceptable. +""" +# Keep these imports down here for clarity +# ruff: noqa: E402 +from decimal import Rounded + +import boto3.dynamodb.types + +old_context = boto3.dynamodb.types.DYNAMODB_CONTEXT +try: + old_traps = old_context.__getattribute__("traps") +except AttributeError: + raise AttributeError( + "boto3.dynamodb.types.DYNAMODB_CONTEXT must have a 'traps' attribute to use DBESDK for DynamoDB." + ) + +# traps structure: {k (trap class) : v (True if trap should raise Exception; False otherwise)} +old_traps[Rounded] = False +boto3.dynamodb.types.DYNAMODB_CONTEXT.__setattr__("traps", old_traps) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/boto3_interface.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/boto3_interface.py new file mode 100644 index 000000000..fbb36174f --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/boto3_interface.py @@ -0,0 +1,53 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
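To make the Rounded/Inexact distinction described in `__init__.py` above concrete, here is an illustrative sketch (not part of the package) using Python's decimal module directly; the context below only mirrors the shape of boto3's DYNAMODB_CONTEXT, and its exact parameters are an assumption:

```python
from decimal import Context, Inexact, Rounded

# A 38-significant-digit context that traps both signals, roughly mirroring
# boto3's default DYNAMODB_CONTEXT (parameters here are assumptions).
strict = Context(prec=38, traps=[Inexact, Rounded])

# 39 significant digits where the digit that gets discarded is a zero:
# only Rounded is signaled, so this raises even though nothing is lost.
value = "1." + "0" * 38
try:
    strict.create_decimal(value)
except Rounded:
    print("Rounded trapped despite no information loss")

# Relaxing only the Rounded trap (what this package's __init__ does) accepts
# the value, while genuinely lossy inputs still raise Inexact.
relaxed = Context(prec=38, traps=[Inexact])
print(relaxed.create_decimal(value))  # 38 significant digits after rounding
```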
+# SPDX-License-Identifier: Apache-2.0 +"""Interface for encrypted boto3 interfaces.""" +import abc +from abc import abstractmethod +from typing import Any + + +class EncryptedBotoInterface(abc.ABC): + """Interface for encrypted boto3 interfaces.""" + + def _copy_sdk_response_to_dbesdk_response( + self, sdk_response: dict[str, Any], dbesdk_response: dict[str, Any] + ) -> dict[str, Any]: + """ + Copy any missing fields from the SDK response to the DBESDK response. + + Args: + sdk_response: The raw SDK response + dbesdk_response: The current DBESDK response + + Returns: + dict: The DBESDK response with any missing fields copied from SDK response + + """ + for sdk_response_key, sdk_response_value in sdk_response.items(): + if sdk_response_key not in dbesdk_response: + dbesdk_response[sdk_response_key] = sdk_response_value + return dbesdk_response + + @property + @abstractmethod + def _boto_client_attr_name(self) -> str: + """Name of the attribute containing the underlying boto3 client.""" + + def __getattr__(self, name: str) -> Any: + """ + Delegate unknown attributes to the underlying client. + + Args: + name: The name of the attribute to get + + Returns: + Any: The attribute value from the underlying client + + Raises: + AttributeError: If the attribute doesn't exist on the underlying client + + """ + client_attr = getattr(self, self._boto_client_attr_name) + if hasattr(client_attr, name): + return getattr(client_attr, name) + raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'") diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/client.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/client.py new file mode 100644 index 000000000..ece10c19f --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/client.py @@ -0,0 +1,654 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""High-level helper class to provide an encrypting wrapper for boto3 DynamoDB clients.""" +from copy import deepcopy +from typing import Any + +import botocore.client + +from aws_dbesdk_dynamodb.encrypted.boto3_interface import EncryptedBotoInterface +from aws_dbesdk_dynamodb.encrypted.paginator import EncryptedPaginator +from aws_dbesdk_dynamodb.internal.client_to_resource import ClientShapeToResourceShapeConverter +from aws_dbesdk_dynamodb.internal.resource_to_client import ResourceShapeToClientShapeConverter +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models import ( + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.client import ( + DynamoDbEncryptionTransforms, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models import ( + BatchExecuteStatementInputTransformInput, + BatchExecuteStatementOutputTransformInput, + BatchGetItemInputTransformInput, + BatchGetItemOutputTransformInput, + BatchWriteItemInputTransformInput, + BatchWriteItemOutputTransformInput, + DeleteItemInputTransformInput, + DeleteItemOutputTransformInput, + ExecuteStatementInputTransformInput, + ExecuteStatementOutputTransformInput, + ExecuteTransactionInputTransformInput, + ExecuteTransactionOutputTransformInput, + GetItemInputTransformInput, + GetItemOutputTransformInput, + PutItemInputTransformInput, + PutItemOutputTransformInput, + QueryInputTransformInput, + QueryOutputTransformInput, + ScanInputTransformInput, + ScanOutputTransformInput, + TransactGetItemsInputTransformInput, + TransactGetItemsOutputTransformInput, + TransactWriteItemsInputTransformInput, + TransactWriteItemsOutputTransformInput, + UpdateItemInputTransformInput, + UpdateItemOutputTransformInput, +) + + +class EncryptedClient(EncryptedBotoInterface): + """ + Wrapper for a boto3 DynamoDB client that transparently encrypts/decrypts items. + + This class implements the complete boto3 DynamoDB client API, allowing it to serve as a + drop-in replacement that transparently handles encryption and decryption of items. + + The API matches the standard boto3 DynamoDB client interface: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#client + + This class will encrypt/decrypt items for the following operations: + + * ``put_item`` + * ``get_item`` + * ``query`` + * ``scan`` + * ``batch_write_item`` + * ``batch_get_item`` + * ``transact_get_items`` + * ``transact_write_items`` + * ``delete_item`` + + Any calls to ``update_item`` can only update unsigned attributes. If an attribute to be updated is marked as signed, + this operation will raise a ``DynamoDbEncryptionTransformsException``. + + The following operations are not supported for encrypted tables: + + * ``execute_statement`` + * ``execute_transaction`` + * ``batch_execute_statement`` + + Calling these operations for encrypted tables will raise a ``DynamoDbEncryptionTransformsException``. + This client can still be used to call these operations on plaintext tables. + + Any other operations on this class will defer to the underlying boto3 DynamoDB client's implementation. + + ``EncryptedClient`` can also return an ``EncryptedPaginator`` for transparent decryption of paginated results. 
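An illustrative sketch of how supported operations are called (assuming ``encrypted_client`` is an ``EncryptedClient`` constructed as shown in ``__init__`` below, and that the hypothetical table is configured for encryption in the ``DynamoDbTablesEncryptionConfig``):

```python
# Table, key, and attribute names below are hypothetical.
encrypted_client.put_item(
    TableName="example_table",
    Item={
        "partition_key": {"S": "item-1"},
        "sensitive_attribute": {"S": "plaintext that is encrypted client-side before the request"},
    },
)

response = encrypted_client.get_item(
    TableName="example_table",
    Key={"partition_key": {"S": "item-1"}},
)
# "Item" in the response has already been decrypted locally.
print(response["Item"]["sensitive_attribute"])

# If the wrapped boto3 client came from a resource (e.g. table.meta.client),
# construct EncryptedClient with expect_standard_dictionaries=True so that
# plain Python values ({"partition_key": "item-1"}) are used instead of
# DynamoDB-JSON type descriptors.
```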
+ """ + + _client: botocore.client.BaseClient + _encryption_config: DynamoDbTablesEncryptionConfig + _transformer: DynamoDbEncryptionTransforms + _expect_standard_dictionaries: bool + + def __init__( + self, + *, + client: botocore.client.BaseClient, + encryption_config: DynamoDbTablesEncryptionConfig, + expect_standard_dictionaries: bool | None = False, + ): + """ + Create an ``EncryptedClient`` object. + + Args: + client (botocore.client.BaseClient): Initialized boto3 DynamoDB client + encryption_config (DynamoDbTablesEncryptionConfig): Initialized DynamoDbTablesEncryptionConfig + expect_standard_dictionaries (Optional[bool]): Does the underlying boto3 client expect items + to be standard Python dictionaries? This should only be set to True if you are using a + client obtained from a service resource or table resource (ex: ``table.meta.client``). + If this is True, ``EncryptedClient`` will expect item-like shapes to be + standard Python dictionaries (default: False). + + """ + self._client = client + self._encryption_config = encryption_config + self._transformer = DynamoDbEncryptionTransforms(config=encryption_config) + self._expect_standard_dictionaries = expect_standard_dictionaries + self._resource_to_client_shape_converter = ResourceShapeToClientShapeConverter() + self._client_to_resource_shape_converter = ClientShapeToResourceShapeConverter(delete_table_name=False) + + def put_item(self, **kwargs) -> dict[str, Any]: + """ + Put a single item to a table. Encrypts the item before writing to DynamoDB. + + The input and output syntaxes match those for the boto3 DynamoDB ``put_item`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/client/put_item.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 client ``put_item`` request + syntax. The value in ``"Item"`` will be encrypted locally before being written to DynamoDB. + + Returns: + dict: The response from DynamoDB. This matches the boto3 client ``put_item`` response syntax. + + """ + return self._client_operation_logic( + operation_input=kwargs, + input_item_to_ddb_transform_method=self._resource_to_client_shape_converter.put_item_request, + input_item_to_dict_transform_method=self._client_to_resource_shape_converter.put_item_request, + input_transform_method=self._transformer.put_item_input_transform, + input_transform_shape=PutItemInputTransformInput, + output_transform_method=self._transformer.put_item_output_transform, + output_transform_shape=PutItemOutputTransformInput, + output_item_to_ddb_transform_method=self._resource_to_client_shape_converter.put_item_response, + output_item_to_dict_transform_method=self._client_to_resource_shape_converter.put_item_response, + client_method=self._client.put_item, + ) + + def get_item(self, **kwargs) -> dict[str, Any]: + """ + Get a single item from a table. Decrypts the item after reading from DynamoDB. + + The input and output syntaxes match those for the boto3 DynamoDB ``get_item`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/client/get_item.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 client ``get_item`` request + syntax. + + Returns: + dict: The response from DynamoDB. This matches the boto3 client ``get_item`` response syntax. + The value in ``"Item"`` field be decrypted locally after being read from DynamoDB. 
+ + """ + return self._client_operation_logic( + operation_input=kwargs, + input_item_to_ddb_transform_method=self._resource_to_client_shape_converter.get_item_request, + input_item_to_dict_transform_method=self._client_to_resource_shape_converter.get_item_request, + input_transform_method=self._transformer.get_item_input_transform, + input_transform_shape=GetItemInputTransformInput, + output_transform_method=self._transformer.get_item_output_transform, + output_transform_shape=GetItemOutputTransformInput, + client_method=self._client.get_item, + output_item_to_ddb_transform_method=self._resource_to_client_shape_converter.get_item_response, + output_item_to_dict_transform_method=self._client_to_resource_shape_converter.get_item_response, + ) + + def query(self, **kwargs) -> dict[str, Any]: + """ + Query items from a table or index. Decrypts any returned items. + + The input and output syntaxes match those for the boto3 DynamoDB client ``query`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/client/query.html + + Args: + **kwargs: Keyword arguments to pass to the operation. These match the boto3 query API parameters. + + Returns: + dict: The response from DynamoDB. This matches the boto3 client ``query`` API response. + The ``"Items"`` field will be decrypted locally after being read from DynamoDB. + + """ + return self._client_operation_logic( + operation_input=kwargs, + input_item_to_ddb_transform_method=self._resource_to_client_shape_converter.query_request, + input_item_to_dict_transform_method=self._client_to_resource_shape_converter.query_request, + input_transform_method=self._transformer.query_input_transform, + input_transform_shape=QueryInputTransformInput, + output_transform_method=self._transformer.query_output_transform, + output_transform_shape=QueryOutputTransformInput, + client_method=self._client.query, + output_item_to_dict_transform_method=self._client_to_resource_shape_converter.query_response, + output_item_to_ddb_transform_method=self._resource_to_client_shape_converter.query_response, + ) + + def scan(self, **kwargs) -> dict[str, Any]: + """ + Scan an entire table or index. Decrypts any returned items. + + The input and output syntaxes match those for the boto3 DynamoDB ``scan`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/client/scan.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 client ``scan`` request syntax. + + Returns: + dict: The response from DynamoDB. This matches the boto3 client ``scan`` response syntax. + The values in ``"Items"`` will be decrypted locally after being read from DynamoDB. + + """ + return self._client_operation_logic( + operation_input=kwargs, + input_item_to_ddb_transform_method=self._resource_to_client_shape_converter.scan_request, + input_item_to_dict_transform_method=self._client_to_resource_shape_converter.scan_request, + input_transform_method=self._transformer.scan_input_transform, + input_transform_shape=ScanInputTransformInput, + output_transform_method=self._transformer.scan_output_transform, + output_transform_shape=ScanOutputTransformInput, + client_method=self._client.scan, + output_item_to_dict_transform_method=self._client_to_resource_shape_converter.scan_response, + output_item_to_ddb_transform_method=self._resource_to_client_shape_converter.scan_response, + ) + + def batch_write_item(self, **kwargs) -> dict[str, Any]: + """ + Put or delete multiple items in one or more tables. 
+ + For put operations, encrypts items before writing. + + The input and output syntaxes match those for the boto3 DynamoDB ``batch_write_item`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/client/batch_write_item.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 client ``batch_write_item`` + request syntax. Items in ``"PutRequest"`` values in the ``"RequestItems"`` argument will be encrypted + locally before being written to DynamoDB. + + Returns: + dict: The response from DynamoDB. This matches the boto3 client ``batch_write_item`` response syntax. + + """ + return self._client_operation_logic( + operation_input=kwargs, + input_item_to_ddb_transform_method=self._resource_to_client_shape_converter.batch_write_item_request, + input_item_to_dict_transform_method=self._client_to_resource_shape_converter.batch_write_item_request, + input_transform_method=self._transformer.batch_write_item_input_transform, + input_transform_shape=BatchWriteItemInputTransformInput, + output_transform_method=self._transformer.batch_write_item_output_transform, + output_transform_shape=BatchWriteItemOutputTransformInput, + client_method=self._client.batch_write_item, + output_item_to_dict_transform_method=self._client_to_resource_shape_converter.batch_write_item_response, + output_item_to_ddb_transform_method=self._resource_to_client_shape_converter.batch_write_item_response, + ) + + def batch_get_item(self, **kwargs) -> dict[str, Any]: + """ + Get multiple items from one or more tables. Decrypts any returned items. + + The input and output syntaxes match those for the boto3 DynamoDB ``batch_get_item`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/client/batch_get_item.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 ``batch_get_item`` + request syntax. + + Returns: + dict: The response from DynamoDB. This matches the boto3 client ``batch_get_item`` response syntax. + The values in ``"Responses"`` will be decrypted locally after being read from DynamoDB. + + """ + return self._client_operation_logic( + operation_input=kwargs, + input_item_to_ddb_transform_method=self._resource_to_client_shape_converter.batch_get_item_request, + input_item_to_dict_transform_method=self._client_to_resource_shape_converter.batch_get_item_request, + input_transform_method=self._transformer.batch_get_item_input_transform, + input_transform_shape=BatchGetItemInputTransformInput, + output_transform_method=self._transformer.batch_get_item_output_transform, + output_transform_shape=BatchGetItemOutputTransformInput, + client_method=self._client.batch_get_item, + output_item_to_dict_transform_method=self._client_to_resource_shape_converter.batch_get_item_response, + output_item_to_ddb_transform_method=self._resource_to_client_shape_converter.batch_get_item_response, + ) + + def transact_get_items(self, **kwargs) -> dict[str, Any]: + """ + Get multiple items in a single transaction. Decrypts any returned items. + + The input and output syntaxes match those for the boto3 DynamoDB ``transact_get_items`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/client/transact_get_items.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 client ``transact_get_items`` + request syntax. + + Returns: + dict: The response from DynamoDB. 
This matches the boto3 client ``transact_get_items`` response syntax. + + """ + return self._client_operation_logic( + operation_input=kwargs, + input_item_to_ddb_transform_method=self._resource_to_client_shape_converter.transact_get_items_request, + input_item_to_dict_transform_method=self._client_to_resource_shape_converter.transact_get_items_request, + input_transform_method=self._transformer.transact_get_items_input_transform, + input_transform_shape=TransactGetItemsInputTransformInput, + output_transform_method=self._transformer.transact_get_items_output_transform, + output_transform_shape=TransactGetItemsOutputTransformInput, + client_method=self._client.transact_get_items, + output_item_to_dict_transform_method=self._client_to_resource_shape_converter.transact_get_items_response, + output_item_to_ddb_transform_method=self._resource_to_client_shape_converter.transact_get_items_response, + ) + + def transact_write_items(self, **kwargs) -> dict[str, Any]: + """ + Perform multiple write operations in a single transaction. + + For put operations, encrypts items before writing. + + The input and output syntaxes match those for the boto3 DynamoDB client ``transact_write_items`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/client/transact_write_items.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 client ``transact_write_items`` + request syntax. Any ``"PutRequest"`` values in the ``"TransactItems"`` argument will be encrypted + locally before being written to DynamoDB. + + Returns: + dict: The response from DynamoDB. This matches the boto3 client ``transact_write_items`` response syntax. + + """ + return self._client_operation_logic( + operation_input=kwargs, + input_item_to_ddb_transform_method=self._resource_to_client_shape_converter.transact_write_items_request, + input_item_to_dict_transform_method=self._client_to_resource_shape_converter.transact_write_items_request, + input_transform_method=self._transformer.transact_write_items_input_transform, + input_transform_shape=TransactWriteItemsInputTransformInput, + output_transform_method=self._transformer.transact_write_items_output_transform, + output_transform_shape=TransactWriteItemsOutputTransformInput, + client_method=self._client.transact_write_items, + output_item_to_dict_transform_method=self._client_to_resource_shape_converter.transact_write_items_response, + output_item_to_ddb_transform_method=self._resource_to_client_shape_converter.transact_write_items_response, + ) + + def delete_item(self, **kwargs): + """ + Delete an item from a table by the specified key. + + The input and output syntaxes match those for the boto3 DynamoDB client ``delete_item`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/client/delete_item.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 client ``delete_item`` + request syntax. + + Returns: + dict: The response from DynamoDB. This matches the boto3 client ``delete_item`` response syntax. + Any values in the ``"Attributes"`` field will be decrypted locally after being read from DynamoDB. 
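+
+        Example (a sketch; the table and key names are hypothetical):
+
+            >>> response = encrypted_client.delete_item(
+            ...     TableName="my_table",
+            ...     Key={"partition_key": {"S": "id-1"}},
+            ...     ReturnValues="ALL_OLD",  # ask DynamoDB to return the deleted item
+            ... )
+            >>> deleted_item = response.get("Attributes")  # decrypted locally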
+ + """ + return self._client_operation_logic( + operation_input=kwargs, + input_item_to_ddb_transform_method=self._resource_to_client_shape_converter.delete_item_request, + input_item_to_dict_transform_method=self._client_to_resource_shape_converter.delete_item_request, + input_transform_method=self._transformer.delete_item_input_transform, + input_transform_shape=DeleteItemInputTransformInput, + output_transform_method=self._transformer.delete_item_output_transform, + output_transform_shape=DeleteItemOutputTransformInput, + client_method=self._client.delete_item, + output_item_to_dict_transform_method=self._client_to_resource_shape_converter.delete_item_response, + output_item_to_ddb_transform_method=self._resource_to_client_shape_converter.delete_item_response, + ) + + def update_item(self, **kwargs): + """ + Update an unsigned attribute in an item on a table. + + If the attribute is signed, this operation will raise DynamoDbEncryptionTransformsException. + + The input and output syntaxes match those for the boto3 DynamoDB client ``update_item`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/client/update_item.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 client ``update_item`` + request syntax. + + Returns: + dict: The response from DynamoDB. This matches the boto3 client ``update_item`` response syntax. + + Raises: + DynamoDbEncryptionTransformsException: If an attribute specified in the ``UpdateExpression`` is signed. + + """ + return self._client_operation_logic( + operation_input=kwargs, + input_item_to_ddb_transform_method=self._resource_to_client_shape_converter.update_item_request, + input_item_to_dict_transform_method=self._client_to_resource_shape_converter.update_item_request, + input_transform_method=self._transformer.update_item_input_transform, + input_transform_shape=UpdateItemInputTransformInput, + output_transform_method=self._transformer.update_item_output_transform, + output_transform_shape=UpdateItemOutputTransformInput, + client_method=self._client.update_item, + output_item_to_dict_transform_method=self._client_to_resource_shape_converter.update_item_response, + output_item_to_ddb_transform_method=self._resource_to_client_shape_converter.update_item_response, + ) + + def execute_statement(self, **kwargs): + """ + Call ``execute_statement`` on the underlying client if the table is not configured for encryption. + + If the table is configured for encryption, this operation will raise DynamoDbEncryptionTransformsException. + + The input and output syntaxes match those for the boto3 DynamoDB client ``execute_statement`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/client/execute_statement.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 client ``execute_statement`` + request syntax. + + Returns: + dict: The response from DynamoDB. This matches the boto3 client ``execute_statement`` response syntax. + + Raises: + DynamoDbEncryptionTransformsException: If this operation is called for an encrypted table. 
+ + """ + return self._client_operation_logic( + operation_input=kwargs, + input_item_to_ddb_transform_method=self._resource_to_client_shape_converter.execute_statement_request, + input_item_to_dict_transform_method=self._client_to_resource_shape_converter.execute_statement_request, + input_transform_method=self._transformer.execute_statement_input_transform, + input_transform_shape=ExecuteStatementInputTransformInput, + output_transform_method=self._transformer.execute_statement_output_transform, + output_transform_shape=ExecuteStatementOutputTransformInput, + client_method=self._client.execute_statement, + output_item_to_dict_transform_method=self._client_to_resource_shape_converter.execute_statement_response, + output_item_to_ddb_transform_method=self._resource_to_client_shape_converter.execute_statement_response, + ) + + def execute_transaction(self, **kwargs): + """ + Call ``execute_transaction`` on the underlying client if the table is not configured for encryption. + + If the table is configured for encryption, this operation will raise DynamoDbEncryptionTransformsException. + + The input and output syntaxes match those for the boto3 DynamoDB client ``execute_transaction`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/client/execute_transaction.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 client ``execute_transaction`` + request syntax. + + Returns: + dict: The response from DynamoDB. This matches the boto3 client ``execute_transaction`` response syntax. + + Raises: + DynamoDbEncryptionTransformsException: If this operation is called for an encrypted table. + + """ + return self._client_operation_logic( + operation_input=kwargs, + input_item_to_ddb_transform_method=self._resource_to_client_shape_converter.execute_transaction_request, + input_item_to_dict_transform_method=self._client_to_resource_shape_converter.execute_transaction_request, + input_transform_method=self._transformer.execute_transaction_input_transform, + input_transform_shape=ExecuteTransactionInputTransformInput, + output_transform_method=self._transformer.execute_transaction_output_transform, + output_transform_shape=ExecuteTransactionOutputTransformInput, + client_method=self._client.execute_transaction, + output_item_to_dict_transform_method=self._client_to_resource_shape_converter.execute_transaction_response, + output_item_to_ddb_transform_method=self._resource_to_client_shape_converter.execute_transaction_response, + ) + + def batch_execute_statement(self, **kwargs): + """ + Call ``batch_execute_statement`` on the underlying client if the table is not configured for encryption. + + If the table is configured for encryption, this operation will raise DynamoDbEncryptionTransformsException. + + The input and output syntaxes match those for the boto3 DynamoDB client ``batch_execute_statement`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/client/batch_execute_statement.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 client + ``batch_execute_statement`` request syntax. + + Returns: + dict: The response from DynamoDB. This matches the boto3 client ``batch_execute_statement`` response syntax. + + Raises: + DynamoDbEncryptionTransformsException: If this operation is called for an encrypted table. 
+ + """ + return self._client_operation_logic( + operation_input=kwargs, + input_item_to_ddb_transform_method=self._resource_to_client_shape_converter.batch_execute_statement_request, + input_item_to_dict_transform_method=self._client_to_resource_shape_converter.batch_execute_statement_request, + input_transform_method=self._transformer.batch_execute_statement_input_transform, + input_transform_shape=BatchExecuteStatementInputTransformInput, + output_transform_method=self._transformer.batch_execute_statement_output_transform, + output_transform_shape=BatchExecuteStatementOutputTransformInput, + client_method=self._client.batch_execute_statement, + output_item_to_dict_transform_method=self._client_to_resource_shape_converter.batch_execute_statement_response, + output_item_to_ddb_transform_method=self._resource_to_client_shape_converter.batch_execute_statement_response, + ) + + def get_paginator(self, operation_name: str) -> EncryptedPaginator | botocore.client.Paginator: + """ + Get a paginator from the underlying client. + + If the paginator requested is for "scan" or "query", the paginator returned will + transparently decrypt the returned items. + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#paginators + + Args: + operation_name (str): Name of operation for which to get paginator + + Returns: + EncryptedPaginator | botocore.client.Paginator: An EncryptedPaginator that will transparently decrypt items + for ``scan``/``query`` operations; for other operations, the standard paginator. + + """ + paginator = self._client.get_paginator(operation_name) + + if operation_name in ("scan", "query"): + return EncryptedPaginator( + paginator=paginator, + encryption_config=self._encryption_config, + expect_standard_dictionaries=self._expect_standard_dictionaries, + ) + else: + # The paginator can still be used for list_backups, list_tables, and list_tags_of_resource, + # but there is nothing to encrypt/decrypt in these operations. + return paginator + + def _client_operation_logic( + self, + *, + operation_input: dict[str, Any], + input_item_to_ddb_transform_method: callable, + input_item_to_dict_transform_method: callable, + input_transform_method: Any, + input_transform_shape: Any, + output_transform_method: Any, + output_transform_shape: Any, + client_method: Any, + output_item_to_ddb_transform_method: callable, + output_item_to_dict_transform_method: callable, + ) -> dict[str, Any]: + """ + Shared logic to interface between boto3 Client operation inputs and encryption transformers. + + This captures the shared pattern to call encryption/decryption transformer code + and boto3 Clients across all methods in this class. 
+ + Args: + operation_input: The input to the operation + input_item_to_ddb_transform_method: Method to transform input items from standard dictionaries + to DynamoDB JSON + input_item_to_dict_transform_method: Method to transform input items from DynamoDB JSON + to standard dictionaries + input_transform_method: The method to transform the input for encryption + input_transform_shape: The shape of the input transform + output_transform_method: The method to transform the output for decryption + output_transform_shape: The shape of the output transform + client_method: The underlying client method to call + output_item_to_ddb_transform_method: Method to transform output items from standard dictionaries + to DynamoDB JSON + output_item_to_dict_transform_method: Method to transform output items from DynamoDB JSON + to standard dictionaries + + Returns: + dict: The transformed response from DynamoDB + + """ + # If _expect_standard_dictionaries is true, input items are expected to be standard dictionaries, + # and need to be converted to DDB-JSON before encryption. + sdk_input = deepcopy(operation_input) + if self._expect_standard_dictionaries: + # Some operations do not require a table name. + # (e.g. execute_statement, execute_transaction, batch_execute_statement) + # If the table name is not provided, explicitly set it to None to remove any previously-set value. + if "TableName" in sdk_input: + self._resource_to_client_shape_converter.table_name = sdk_input["TableName"] + else: + self._resource_to_client_shape_converter.table_name = None + sdk_input = input_item_to_ddb_transform_method(sdk_input) + + # Apply DBESDK transformation to the input + transformed_request = input_transform_method(input_transform_shape(sdk_input=sdk_input)).transformed_input + + # If _expect_standard_dictionaries is true, the boto3 client expects items to be standard dictionaries, + # and need to be converted from DDB-JSON to a standard dictionary before being passed to the boto3 client. + if self._expect_standard_dictionaries: + transformed_request = input_item_to_dict_transform_method(transformed_request) + + sdk_response = client_method(**transformed_request) + + # If _expect_standard_dictionaries is true, the boto3 client returns items as standard dictionaries, + # and needs to convert the standard dictionary to DDB-JSON before passing the response to the DBESDK. + if self._expect_standard_dictionaries: + sdk_response = output_item_to_ddb_transform_method(sdk_response) + + # Apply DBESDK transformation to the boto3 output + dbesdk_response = output_transform_method( + output_transform_shape( + original_input=sdk_input, + sdk_output=sdk_response, + ) + ).transformed_output + + # Copy any missing fields from the SDK output to the response (e.g. ConsumedCapacity) + dbesdk_response = self._copy_sdk_response_to_dbesdk_response(sdk_response, dbesdk_response) + + # If _expect_standard_dictionaries is true, output items are expected to be standard dictionaries, + # and need to be converted from DDB-JSON to a standard dictionary before returning the response. + if self._expect_standard_dictionaries: + dbesdk_response = output_item_to_dict_transform_method(dbesdk_response) + # Clean up the expression builder for the next operation + self._resource_to_client_shape_converter.expression_builder.reset() + + return dbesdk_response + + @property + def _boto_client_attr_name(self) -> str: + """ + Name of the attribute containing the underlying boto3 client. 
+ + Returns: + str: '_client' + + """ + return "_client" diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/item.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/item.py new file mode 100644 index 000000000..046f04da4 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/item.py @@ -0,0 +1,291 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Class for encrypting and decrypting individual DynamoDB items.""" +from typing import Any + +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.client import ( + DynamoDbItemEncryptor, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.config import ( + DynamoDbItemEncryptorConfig, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models import ( + DecryptItemInput, + DecryptItemOutput, + EncryptItemInput, + EncryptItemOutput, +) +from aws_dbesdk_dynamodb.transform import ( + ddb_to_dict, + dict_to_ddb, +) + + +class ItemEncryptor: + """Class providing item-level encryption for DynamoDB items / Python dictionaries.""" + + _internal_client: DynamoDbItemEncryptor + + def __init__( + self, + item_encryptor_config: DynamoDbItemEncryptorConfig, + ): + """ + Create an ``ItemEncryptor``. + + Args: + item_encryptor_config (DynamoDbItemEncryptorConfig): Encryption configuration object. + + """ + self._internal_client = DynamoDbItemEncryptor(config=item_encryptor_config) + + def encrypt_python_item( + self, + plaintext_dict_item: dict[str, Any], + ) -> EncryptItemOutput: + """ + Encrypt a Python dictionary. + + This method will transform the Python dictionary into DynamoDB JSON, + encrypt the DynamoDB JSON, + transform the encrypted DynamoDB JSON into an encrypted Python dictionary, + then return the encrypted Python dictionary. + + See the boto3 documentation for details on Python/DynamoDB type transfomations: + + https://boto3.amazonaws.com/v1/documentation/api/latest/_modules/boto3/dynamodb/types.html + + boto3 DynamoDB Tables and Resources expect items formatted as native Python dictionaries. + Use this method to encrypt an item if you intend to pass the encrypted item + to a boto3 DynamoDB Table or Resource interface to store it. + (Alternatively, you can use this library's ``EncryptedTable`` or ``EncryptedResource`` interfaces + to transparently encrypt items without an intermediary ``ItemEncryptor``.) + + Args: + plaintext_dict_item (dict[str, Any]): A standard Python dictionary. + + Returns: + EncryptItemOutput: Structure containing the following fields: + + - **encrypted_item** (*dict[str, Any]*): The encrypted Python dictionary. + **Note:** The item was encrypted as DynamoDB JSON, then transformed to a Python dictionary. + - **parsed_header** (*Optional[ParsedHeader]*): The encrypted DynamoDB item's header + (parsed ``aws_dbe_head`` value). + + Example: + >>> plaintext_item = { + ... 'some': 'data', + ... 'more': 5 + ... 
} + >>> encrypt_output = item_encryptor.encrypt_python_item(plaintext_item) + >>> encrypted_item = encrypt_output.encrypted_item + >>> header = encrypt_output.parsed_header + + """ + plaintext_ddb_item = dict_to_ddb(plaintext_dict_item) + encrypted_ddb_item: EncryptItemOutput = self.encrypt_dynamodb_item(plaintext_ddb_item) + encrypted_dict_item = ddb_to_dict(encrypted_ddb_item.encrypted_item) + return EncryptItemOutput(encrypted_item=encrypted_dict_item, parsed_header=encrypted_ddb_item.parsed_header) + + def encrypt_dynamodb_item( + self, + plaintext_dynamodb_item: dict[str, dict[str, Any]], + ) -> EncryptItemOutput: + """ + Encrypt DynamoDB-formatted JSON. + + boto3 DynamoDB clients expect items formatted as DynamoDB JSON: + + https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Programming.LowLevelAPI.html + + Use this method to encrypt an item if you intend to pass the encrypted item + to a boto3 DynamoDB client to store it. + (Alternatively, you can use this library's ``EncryptedClient`` interface + to transparently encrypt items without an intermediary ``ItemEncryptor``.) + + Args: + plaintext_dynamodb_item (dict[str, dict[str, Any]]): The item to encrypt formatted as DynamoDB JSON. + + Returns: + EncryptItemOutput: Structure containing the following fields: + + - **encrypted_item** (*dict[str, Any]*): A dictionary containing the encrypted DynamoDB item + formatted as DynamoDB JSON. + - **parsed_header** (*Optional[ParsedHeader]*): The encrypted DynamoDB item's header + (``aws_dbe_head`` value). + + Example: + >>> plaintext_item = { + ... 'some': {'S': 'data'}, + ... 'more': {'N': '5'} + ... } + >>> encrypt_output = item_encryptor.encrypt_dynamodb_item(plaintext_item) + >>> encrypted_item = encrypt_output.encrypted_item + >>> header = encrypt_output.parsed_header + + """ + return self.encrypt_item(EncryptItemInput(plaintext_item=plaintext_dynamodb_item)) + + def encrypt_item( + self, + encrypt_item_input: EncryptItemInput, + ) -> EncryptItemOutput: + """ + Encrypt a DynamoDB item. + + The input item should contain a dictionary formatted as DynamoDB JSON: + + https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Programming.LowLevelAPI.html + + Args: + encrypt_item_input (EncryptItemInput): Structure containing the following field: + + - plaintext_item (dict[str, Any]): The item to encrypt formatted as DynamoDB JSON. + + Returns: + EncryptItemOutput: Structure containing the following fields: + + - **encrypted_item** (*dict[str, Any]*): The encrypted DynamoDB item formatted as DynamoDB JSON. + - **parsed_header** (*Optional[ParsedHeader]*): The encrypted DynamoDB item's header + (``aws_dbe_head`` value). + + Example: + >>> plaintext_item = { + ... 'some': {'S': 'data'}, + ... 'more': {'N': '5'} + ... } + >>> encrypt_output = item_encryptor.encrypt_item( + ... EncryptItemInput( + ... plaintext_item = plaintext_item + ... ) + ... ) + >>> encrypted_item = encrypt_output.encrypted_item + >>> header = encrypt_output.parsed_header + + """ + return self._internal_client.encrypt_item(encrypt_item_input) + + def decrypt_python_item( + self, + encrypted_dict_item: dict[str, Any], + ) -> DecryptItemOutput: + """ + Decrypt a Python dictionary. + + This method will transform the Python dictionary into DynamoDB JSON, + decrypt the DynamoDB JSON, + transform the plaintext DynamoDB JSON into a plaintext Python dictionary, + then return the plaintext Python dictionary.
+ + See the boto3 documentation for details on Python/DynamoDB type transfomations: + + https://boto3.amazonaws.com/v1/documentation/api/latest/_modules/boto3/dynamodb/types.html + + boto3 DynamoDB Tables and Resources return items formatted as native Python dictionaries. + Use this method to decrypt an item if you retrieve the encrypted item + from a boto3 DynamoDB Table or Resource interface. + (Alternatively, you can use this library's ``EncryptedTable`` or ``EncryptedResource`` interfaces + to transparently decrypt items without an intermediary ``ItemEncryptor``.) + + Args: + encrypted_dict_item (dict[str, Any]): A standard Python dictionary with encrypted values. + + Returns: + DecryptItemOutput: Structure containing the following fields: + + - **plaintext_item** (*dict[str, Any]*): The decrypted Python dictionary. + **Note:** The item was decrypted as DynamoDB JSON, then transformed to a Python dictionary. + - **parsed_header** (*Optional[ParsedHeader]*): The encrypted DynamoDB item's header + (parsed ``aws_dbe_head`` value). + + Example: + >>> encrypted_item = { + ... 'some': b'ENCRYPTED_DATA', + ... 'more': b'ENCRYPTED_DATA', + ... } + >>> decrypt_output = item_encryptor.decrypt_python_item(encrypted_item) + >>> plaintext_item = decrypt_output.plaintext_item + >>> header = decrypt_output.parsed_header + + """ + encrypted_ddb_item = dict_to_ddb(encrypted_dict_item) + plaintext_ddb_item: DecryptItemOutput = self.decrypt_dynamodb_item(encrypted_ddb_item) + plaintext_dict_item = ddb_to_dict(plaintext_ddb_item.plaintext_item) + return DecryptItemOutput(plaintext_item=plaintext_dict_item, parsed_header=plaintext_ddb_item.parsed_header) + + def decrypt_dynamodb_item( + self, + encrypted_dynamodb_item: dict[str, dict[str, Any]], + ) -> DecryptItemOutput: + """ + Decrypt DynamoDB-formatted JSON. + + boto3 DynamoDB clients return items formatted as DynamoDB JSON: + + https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Programming.LowLevelAPI.html + + Use this method to decrypt an item if you retrieved the encrypted item + from a boto3 DynamoDB client. + (Alternatively, you can use this library's ``EncryptedClient`` interface + to transparently decrypt items without an intermediary ``ItemEncryptor``.) + + Args: + encrypted_dynamodb_item (dict[str, dict[str, Any]]): The item to decrypt formatted as DynamoDB JSON. + + Returns: + DecryptItemOutput: Structure containing the following fields: + + - **plaintext_item** (*dict[str, Any]*): The plaintext DynamoDB item formatted as DynamoDB JSON. + - **parsed_header** (*Optional[ParsedHeader]*): The decrypted DynamoDB item's header + (``aws_dbe_head`` value). + + Example: + >>> encrypted_item = { + ... 'some': {'B': b'ENCRYPTED_DATA'}, + ... 'more': {'B': b'ENCRYPTED_DATA'} + ... } + >>> decrypt_output = item_encryptor.decrypt_dynamodb_item(encrypted_item) + >>> plaintext_item = decrypt_output.plaintext_item + >>> header = decrypt_output.parsed_header + + """ + return self.decrypt_item(DecryptItemInput(encrypted_item=encrypted_dynamodb_item)) + + def decrypt_item( + self, + decrypt_item_input: DecryptItemInput, + ) -> DecryptItemOutput: + """ + Decrypt a DynamoDB item. + + The input item should contain a dictionary formatted as DynamoDB JSON: + + https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Programming.LowLevelAPI.html + + Args: + decrypt_item_input (DecryptItemInput): Structure containing the following fields: + + - **encrypted_item** (*dict[str, Any]*): The item to decrypt formatted as DynamoDB JSON. 
+ + Returns: + DecryptItemOutput: Structure containing the following fields: + + - **plaintext_item** (*dict[str, Any]*): The decrypted DynamoDB item formatted as DynamoDB JSON. + - **parsed_header** (*Optional[ParsedHeader]*): The decrypted DynamoDB item's header + (``aws_dbe_head`` value). + + Example: + >>> encrypted_item = { + ... 'some': {'B': b'ENCRYPTED_DATA'}, + ... 'more': {'B': b'ENCRYPTED_DATA'} + ... } + >>> decrypted_item = item_encryptor.decrypt_item( + ... DecryptItemInput( + ... encrypted_item = encrypted_item, + ... ) + ... ) + >>> plaintext_item = decrypted_item.plaintext_item + >>> header = decrypted_item.parsed_header + + """ + return self._internal_client.decrypt_item(decrypt_item_input) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/paginator.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/paginator.py new file mode 100644 index 000000000..9c22db791 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/paginator.py @@ -0,0 +1,203 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""High-level helper class to provide an encrypting wrapper for boto3 DynamoDB paginators.""" +from collections.abc import Callable, Generator +from copy import deepcopy +from typing import Any + +from botocore.paginate import ( + Paginator, +) + +from aws_dbesdk_dynamodb.encrypted.boto3_interface import EncryptedBotoInterface +from aws_dbesdk_dynamodb.internal.client_to_resource import ClientShapeToResourceShapeConverter +from aws_dbesdk_dynamodb.internal.resource_to_client import ResourceShapeToClientShapeConverter +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models import ( + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.client import ( + DynamoDbEncryptionTransforms, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models import ( + QueryInputTransformInput, + QueryOutputTransformInput, + ScanInputTransformInput, + ScanOutputTransformInput, +) + + +class EncryptedPaginator(EncryptedBotoInterface): + """Wrapping class for boto3 Paginators that decrypts returned items before returning them.""" + + def __init__( + self, + *, + paginator: Paginator, + encryption_config: DynamoDbTablesEncryptionConfig, + expect_standard_dictionaries: bool | None = False, + ): + """ + Create an EncryptedPaginator. + + Args: + paginator (Paginator): A boto3 Paginator object for DynamoDB operations. + This can be either a "query" or "scan" Paginator. + encryption_config (DynamoDbTablesEncryptionConfig): Encryption configuration object. + expect_standard_dictionaries (Optional[bool]): Does the underlying boto3 client expect items + to be standard Python dictionaries? This should only be set to True if you are using a + client obtained from a service resource or table resource (ex: ``table.meta.client``). + If this is True, EncryptedClient will expect item-like shapes to be + standard Python dictionaries (default: False). 
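+
+        Example (a sketch; ``encrypted_client`` is an ``EncryptedClient`` and the table name is hypothetical):
+
+            >>> paginator = encrypted_client.get_paginator("scan")
+            >>> for page in paginator.paginate(TableName="my_table"):
+            ...     decrypted_items = page["Items"]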
+ + """ + self._paginator = paginator + self._encryption_config = encryption_config + self._transformer = DynamoDbEncryptionTransforms(config=encryption_config) + self._expect_standard_dictionaries = expect_standard_dictionaries + self._resource_to_client_shape_converter = ResourceShapeToClientShapeConverter() + self._client_to_resource_shape_converter = ClientShapeToResourceShapeConverter(delete_table_name=False) + + def paginate(self, **kwargs) -> Generator[dict, None, None]: + """ + Yield a generator that paginates through responses from DynamoDB, decrypting items. + + Note: + Calling ``botocore.paginate.Paginator``'s ``paginate`` method for Query or Scan + returns a ``PageIterator`` object, but this implementation returns a Python generator. + However, you can use this generator to iterate exactly as described in the + boto3 documentation: + + https://botocore.amazonaws.com/v1/documentation/api/latest/topics/paginators.html + + Any other operations on this class will defer to the underlying boto3 Paginator's implementation. + + Args: + **kwargs: Keyword arguments passed directly to the underlying DynamoDB paginator. + + For a Scan operation, structure these arguments according to: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/paginator/Scan.html + + For a Query operation, structure these arguments according to: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/paginator/Query.html + + Returns: + Generator[dict, None, None]: A generator yielding pages as dictionaries. + For "scan" or "query" operations, the items in the pages will be decrypted locally after being read from + DynamoDB. + + """ + if self._paginator._model.name == "Query": + yield from self._paginate_query(**kwargs) + elif self._paginator._model.name == "Scan": + yield from self._paginate_scan(**kwargs) + else: + yield from self._paginator.paginate(**kwargs) + + def _paginate_query(self, **paginate_query_kwargs): + return self._paginate_request( + paginate_kwargs=paginate_query_kwargs, + input_item_to_ddb_transform_method=self._resource_to_client_shape_converter.query_request, + input_item_to_dict_transform_method=self._client_to_resource_shape_converter.query_request, + input_transform_method=self._transformer.query_input_transform, + input_transform_shape=QueryInputTransformInput, + output_item_to_ddb_transform_method=self._resource_to_client_shape_converter.query_response, + output_item_to_dict_transform_method=self._client_to_resource_shape_converter.query_response, + output_transform_method=self._transformer.query_output_transform, + output_transform_shape=QueryOutputTransformInput, + ) + + def _paginate_scan(self, **paginate_scan_kwargs): + return self._paginate_request( + paginate_kwargs=paginate_scan_kwargs, + input_item_to_ddb_transform_method=self._resource_to_client_shape_converter.scan_request, + input_item_to_dict_transform_method=self._client_to_resource_shape_converter.scan_request, + input_transform_method=self._transformer.scan_input_transform, + input_transform_shape=ScanInputTransformInput, + output_item_to_ddb_transform_method=self._resource_to_client_shape_converter.scan_response, + output_item_to_dict_transform_method=self._client_to_resource_shape_converter.scan_response, + output_transform_method=self._transformer.scan_output_transform, + output_transform_shape=ScanOutputTransformInput, + ) + + def _paginate_request( + self, + *, + paginate_kwargs: dict[str, Any], + input_item_to_ddb_transform_method: Callable, + 
input_item_to_dict_transform_method: Callable, + input_transform_method: Callable, + input_transform_shape: Any, + output_item_to_ddb_transform_method: Callable, + output_item_to_dict_transform_method: Callable, + output_transform_method: Callable, + output_transform_shape: Any, + ): + client_kwargs = deepcopy(paginate_kwargs) + try: + # Remove PaginationConfig from the request if it exists. + # The input_transform_method does not expect it. + # It is added back to the request sent to the SDK. + pagination_config = client_kwargs["PaginationConfig"] + del client_kwargs["PaginationConfig"] + except KeyError: + pagination_config = None + + # If _expect_standard_dictionaries is true, input items are expected to be standard dictionaries, + # and need to be converted to DDB-JSON before encryption. + if self._expect_standard_dictionaries: + if "TableName" in client_kwargs: + self._resource_to_client_shape_converter.table_name = client_kwargs["TableName"] + client_kwargs = input_item_to_ddb_transform_method(client_kwargs) + + # Apply DBESDK transformations to the input + transformed_request = input_transform_method(input_transform_shape(sdk_input=client_kwargs)).transformed_input + + # If _expect_standard_dictionaries is true, the boto3 client expects items to be standard dictionaries, + # and need to be converted from DDB-JSON to a standard dictionary before being passed to the boto3 client. + if self._expect_standard_dictionaries: + transformed_request = input_item_to_dict_transform_method(transformed_request) + + if pagination_config is not None: + transformed_request["PaginationConfig"] = pagination_config + + sdk_page_response = self._paginator.paginate(**transformed_request) + + for page in sdk_page_response: + # If _expect_standard_dictionaries is true, the boto3 client returns items as standard dictionaries, + # and needs to convert the standard dictionary to DDB-JSON before passing the response to the DBESDK. + if self._expect_standard_dictionaries: + page = output_item_to_ddb_transform_method(page) + + # Apply DBESDK transformation to the boto3 output + dbesdk_response = output_transform_method( + output_transform_shape( + original_input=client_kwargs, + sdk_output=page, + ) + ).transformed_output + + # Copy any missing fields from the SDK output to the response (e.g. ConsumedCapacity) + dbesdk_response = self._copy_sdk_response_to_dbesdk_response(page, dbesdk_response) + + # If _expect_standard_dictionaries is true, the boto3 client expects items to be standard dictionaries, + # and need to be converted from DDB-JSON to a standard dictionary before returning the response. + if self._expect_standard_dictionaries: + dbesdk_response = output_item_to_dict_transform_method(dbesdk_response) + + yield dbesdk_response + + # Clean up the expression builder for the next operation + self._resource_to_client_shape_converter.expression_builder.reset() + + @property + def _boto_client_attr_name(self) -> str: + """ + Name of the attribute containing the underlying boto3 client. + + Returns: + str: '_paginator' + + """ + return "_paginator" diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/resource.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/resource.py new file mode 100644 index 000000000..dc268a615 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/resource.py @@ -0,0 +1,314 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""High-level helper classes to provide encrypting wrappers for boto3 DynamoDB resources.""" +from collections.abc import Callable, Generator +from copy import deepcopy +from typing import Any + +from boto3.resources.base import ServiceResource +from boto3.resources.collection import CollectionManager + +from aws_dbesdk_dynamodb.encrypted.boto3_interface import EncryptedBotoInterface +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable +from aws_dbesdk_dynamodb.internal.client_to_resource import ClientShapeToResourceShapeConverter +from aws_dbesdk_dynamodb.internal.resource_to_client import ResourceShapeToClientShapeConverter +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models import ( + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.client import ( + DynamoDbEncryptionTransforms, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models import ( + BatchGetItemInputTransformInput, + BatchGetItemOutputTransformInput, + BatchWriteItemInputTransformInput, + BatchWriteItemOutputTransformInput, +) + + +class EncryptedTablesCollectionManager(EncryptedBotoInterface): + """ + Collection manager that yields EncryptedTable objects. + + The API matches boto3's tables collection manager interface: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/service-resource/tables.html + + All operations on this class will yield ``EncryptedTable`` objects. + """ + + def __init__( + self, + *, + collection: CollectionManager, + encryption_config: DynamoDbTablesEncryptionConfig, + ): + """ + Create an ``EncryptedTablesCollectionManager`` object. + + Args: + collection (CollectionManager): Pre-configured boto3 DynamoDB table collection manager + encryption_config (DynamoDbTablesEncryptionConfig): Initialized DynamoDbTablesEncryptionConfig + + """ + self._collection = collection + self._encryption_config = encryption_config + + def all(self) -> Generator[EncryptedTable, None, None]: + """ + Create an iterable of all EncryptedTable resources in the collection. + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/service-resource/tables.html#DynamoDB.ServiceResource.all + + Returns: + Generator[EncryptedTable, None, None]: An iterable of EncryptedTable objects + + """ + yield from self._transform_table(self._collection.all) + + def filter(self, **kwargs) -> Generator[EncryptedTable, None, None]: + """ + Create an iterable of all EncryptedTable resources in the collection filtered by kwargs passed to method. + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/service-resource/tables.html#filter + + Returns: + Generator[EncryptedTable, None, None]: An iterable of EncryptedTable objects + + """ + yield from self._transform_table(self._collection.filter, **kwargs) + + def limit(self, **kwargs) -> Generator[EncryptedTable, None, None]: + """ + Create an iterable of all EncryptedTable resources in the collection filtered by kwargs passed to method. 
+ + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/service-resource/tables.html#limit + + Returns: + Generator[EncryptedTable, None, None]: An iterable of EncryptedTable objects + + """ + yield from self._transform_table(self._collection.limit, **kwargs) + + def page_size(self, **kwargs) -> Generator[EncryptedTable, None, None]: + """ + Create an iterable of all EncryptedTable resources in the collection. + + This limits the number of items returned by each service call by the specified amount. + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/service-resource/tables.html#page_size + + Returns: + Generator[EncryptedTable, None, None]: An iterable of EncryptedTable objects + + """ + yield from self._transform_table(self._collection.page_size, **kwargs) + + def _transform_table( + self, + method: Callable, + **kwargs, + ) -> Generator[EncryptedTable, None, None]: + for table in method(**kwargs): + yield EncryptedTable(table=table, encryption_config=self._encryption_config) + + @property + def _boto_client_attr_name(self) -> str: + """ + Name of the attribute containing the underlying boto3 client. + + Returns: + str: '_collection' + + """ + return "_collection" + + +class EncryptedResource(EncryptedBotoInterface): + """ + Wrapper for a boto3 DynamoDB resource. + + This class implements the complete boto3 DynamoDB resource API, allowing it to serve as a + drop-in replacement that transparently handles encryption and decryption of items. + + The API matches the standard boto3 DynamoDB resource interface: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/service-resource/index.html + + This class will encrypt/decrypt items for the following operations: + + * ``batch_get_item`` + * ``batch_write_item`` + + Calling ``Table()`` will return an ``EncryptedTable`` object. + + Any other operations on this class will defer to the underlying boto3 DynamoDB resource's implementation + and will not be encrypted/decrypted. + + """ + + def __init__( + self, + *, + resource: ServiceResource, + encryption_config: DynamoDbTablesEncryptionConfig, + ): + """ + Create an ``EncryptedResource`` object. + + Args: + resource (ServiceResource): Initialized boto3 DynamoDB resource + encryption_config (DynamoDbTablesEncryptionConfig): Initialized DynamoDbTablesEncryptionConfig + + """ + self._resource = resource + self._encryption_config = encryption_config + self._transformer = DynamoDbEncryptionTransforms(config=encryption_config) + self._client_shape_to_resource_shape_converter = ClientShapeToResourceShapeConverter() + self._resource_shape_to_client_shape_converter = ResourceShapeToClientShapeConverter() + self.tables = EncryptedTablesCollectionManager( + collection=self._resource.tables, encryption_config=self._encryption_config + ) + + def Table(self, name): + """ + Create an ``EncryptedTable`` resource. + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/service-resource/Table.html + + Args: + name (str): The EncryptedTable's name identifier. This must be set. + + Returns: + EncryptedTable: An ``EncryptedTable`` resource + + """ + return EncryptedTable(table=self._resource.Table(name), encryption_config=self._encryption_config) + + def batch_get_item(self, **kwargs): + """ + Get multiple items from one or more tables. Decrypts any returned items. 
+ + The input and output syntaxes match those for the boto3 DynamoDB resource ``batch_get_item`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/service-resource/batch_get_item.html + + Args: + **kwargs: Keyword arguments to pass to the operation. These match the boto3 resource ``batch_get_item`` + request syntax. + + Returns: + dict: The response from DynamoDB. This matches the boto3 resource ``batch_get_item`` response syntax. + The ``"Responses"`` field will be decrypted locally after being read from DynamoDB. + + """ + return self._resource_operation_logic( + operation_input=kwargs, + input_resource_to_client_shape_transform_method=self._resource_shape_to_client_shape_converter.batch_get_item_request, + input_client_to_resource_shape_transform_method=self._client_shape_to_resource_shape_converter.batch_get_item_request, + input_encryption_transform_method=self._transformer.batch_get_item_input_transform, + input_encryption_transform_shape=BatchGetItemInputTransformInput, + output_encryption_transform_method=self._transformer.batch_get_item_output_transform, + output_encryption_transform_shape=BatchGetItemOutputTransformInput, + output_resource_to_client_shape_transform_method=self._resource_shape_to_client_shape_converter.batch_get_item_response, + output_client_to_resource_shape_transform_method=self._client_shape_to_resource_shape_converter.batch_get_item_response, + resource_method=self._resource.batch_get_item, + ) + + def batch_write_item(self, **kwargs): + """ + Put or delete multiple items in one or more tables. + + For put operations, encrypts items before writing. + + The input and output syntaxes match those for the boto3 DynamoDB resource ``batch_write_item`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/service-resource/batch_write_item.html + + Args: + **kwargs: Keyword arguments to pass to the operation. These match the boto3 resource + ``batch_write_item`` request syntax. Any ``"PutRequest"`` values in the ``"RequestItems"`` + argument will be encrypted locally before being written to DynamoDB. + + Returns: + dict: The response from DynamoDB. This matches the boto3 resource ``batch_write_item`` response syntax. 
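+
+        Example (a sketch; the table name and item content are hypothetical, and
+        ``encryption_config`` is assumed to be configured elsewhere):
+
+            >>> import boto3
+            >>> encrypted_resource = EncryptedResource(
+            ...     resource=boto3.resource("dynamodb"),
+            ...     encryption_config=encryption_config,
+            ... )
+            >>> encrypted_resource.batch_write_item(
+            ...     RequestItems={
+            ...         "my_table": [
+            ...             {"PutRequest": {"Item": {"partition_key": "id-1", "attribute1": "encrypt me"}}},
+            ...         ]
+            ...     }
+            ... )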
+ + """ + return self._resource_operation_logic( + operation_input=kwargs, + input_resource_to_client_shape_transform_method=self._resource_shape_to_client_shape_converter.batch_write_item_request, + input_client_to_resource_shape_transform_method=self._client_shape_to_resource_shape_converter.batch_write_item_request, + input_encryption_transform_method=self._transformer.batch_write_item_input_transform, + input_encryption_transform_shape=BatchWriteItemInputTransformInput, + output_encryption_transform_method=self._transformer.batch_write_item_output_transform, + output_encryption_transform_shape=BatchWriteItemOutputTransformInput, + output_resource_to_client_shape_transform_method=self._resource_shape_to_client_shape_converter.batch_write_item_response, + output_client_to_resource_shape_transform_method=self._client_shape_to_resource_shape_converter.batch_write_item_response, + resource_method=self._resource.batch_write_item, + ) + + def _resource_operation_logic( + self, + *, + operation_input: dict[str, Any], + input_resource_to_client_shape_transform_method: Callable, + input_client_to_resource_shape_transform_method: Callable, + input_encryption_transform_method: Callable, + input_encryption_transform_shape: Any, + output_encryption_transform_method: Callable, + output_encryption_transform_shape: Any, + output_resource_to_client_shape_transform_method: Callable, + output_client_to_resource_shape_transform_method: Callable, + resource_method: Callable, + ): + operation_input = deepcopy(operation_input) + # Table inputs are formatted as Python dictionary JSON, but encryption transformers expect DynamoDB JSON. + # `input_resource_to_client_shape_transform_method` formats the supplied Python dictionary as DynamoDB JSON. + input_transform_input = input_resource_to_client_shape_transform_method(operation_input) + + # Apply encryption transformation to the user-supplied input + input_transform_output = input_encryption_transform_method( + input_encryption_transform_shape(sdk_input=input_transform_input) + ).transformed_input + + # The encryption transformation result is formatted in DynamoDB JSON, + # but the underlying boto3 table expects Python dictionary JSON. + # `input_client_to_resource_shape_transform_method` formats the transformation as Python dictionary JSON. + sdk_input = input_client_to_resource_shape_transform_method(input_transform_output) + + # Call boto3 Table method with Python-dictionary-JSON-formatted, encryption-transformed input, + # and receive Python-dictionary-JSON-formatted boto3 output. + sdk_output = resource_method(**sdk_input) + + # Format Python dictionary JSON-formatted SDK output as DynamoDB JSON for encryption transformer + output_transform_input = output_resource_to_client_shape_transform_method(sdk_output) + + # Apply encryption transformer to boto3 output + output_transform_output = output_encryption_transform_method( + output_encryption_transform_shape( + original_input=input_transform_input, + sdk_output=output_transform_input, + ) + ).transformed_output + + # Format DynamoDB JSON-formatted encryption transformation result as Python dictionary JSON + dbesdk_response = output_client_to_resource_shape_transform_method(output_transform_output) + # Copy any missing fields from the SDK output to the response + # (e.g. 
`ConsumedCapacity`) + dbesdk_response = self._copy_sdk_response_to_dbesdk_response(sdk_output, dbesdk_response) + + # Clean up the expression builder for the next operation + self._resource_shape_to_client_shape_converter.expression_builder.reset() + + return dbesdk_response + + @property + def _boto_client_attr_name(self) -> str: + """ + Name of the attribute containing the underlying boto3 client. + + Returns: + str: '_resource' + + """ + return "_resource" diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/table.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/table.py new file mode 100644 index 000000000..45ca61ceb --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/encrypted/table.py @@ -0,0 +1,382 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""High-level helper class to provide an encrypting wrapper for boto3 DynamoDB tables.""" +from collections.abc import Callable +from copy import deepcopy +from typing import Any + +from boto3.dynamodb.table import BatchWriter +from boto3.resources.base import ServiceResource + +from aws_dbesdk_dynamodb.encrypted.boto3_interface import EncryptedBotoInterface +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient +from aws_dbesdk_dynamodb.internal.client_to_resource import ClientShapeToResourceShapeConverter +from aws_dbesdk_dynamodb.internal.resource_to_client import ResourceShapeToClientShapeConverter +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models import ( + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.client import ( + DynamoDbEncryptionTransforms, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models import ( + DeleteItemInputTransformInput, + DeleteItemOutputTransformInput, + GetItemInputTransformInput, + GetItemOutputTransformInput, + PutItemInputTransformInput, + PutItemOutputTransformInput, + QueryInputTransformInput, + QueryOutputTransformInput, + ScanInputTransformInput, + ScanOutputTransformInput, + UpdateItemInputTransformInput, + UpdateItemOutputTransformInput, +) + + +class EncryptedTable(EncryptedBotoInterface): + """ + Wrapper for a boto3 DynamoDB table that transparently encrypts/decrypts items. + + This class implements the complete boto3 DynamoDB table API, allowing it to serve as a + drop-in replacement that transparently handles encryption and decryption of items. + + The API matches the standard boto3 DynamoDB table interface: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/table/index.html + + This class will encrypt/decrypt items for the following operations: + + * ``put_item`` + * ``get_item`` + * ``query`` + * ``scan`` + * ``delete_item`` + + Any calls to ``update_item`` can only update unsigned attributes. If an attribute to be updated is marked as signed, + this operation will raise a ``DynamoDbEncryptionTransformsException``. + + Calling ``batch_writer()`` will return a ``BatchWriter`` that transparently encrypts batch write requests. + + Any other operations on this class will defer to the underlying boto3 DynamoDB Table's implementation + and will not be encrypted/decrypted. + """ + + def __init__( + self, + *, + table: ServiceResource, + encryption_config: DynamoDbTablesEncryptionConfig, + ): + """ + Create an ``EncryptedTable`` object. 
+ + Args: + table (ServiceResource): Initialized boto3 DynamoDB table + encryption_config (DynamoDbTablesEncryptionConfig): Initialized DynamoDbTablesEncryptionConfig + + """ + self._table = table + self._encryption_config = encryption_config + self._transformer = DynamoDbEncryptionTransforms(config=encryption_config) + self._client_shape_to_resource_shape_converter = ClientShapeToResourceShapeConverter() + self._resource_shape_to_client_shape_converter = ResourceShapeToClientShapeConverter( + table_name=self._table.table_name + ) + + def put_item(self, **kwargs) -> dict[str, Any]: + """ + Put a single item to the table. Encrypts the item before writing to DynamoDB. + + The input and output syntaxes match those for the boto3 DynamoDB table ``put_item`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/table/put_item.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 Table ``put_item`` request + syntax. The value in ``"Item"`` will be encrypted locally before being written to DynamoDB. + + Returns: + dict: The response from DynamoDB. This matches the boto3 ``put_item`` response syntax. + + """ + return self._table_operation_logic( + operation_input=kwargs, + input_encryption_transform_method=self._transformer.put_item_input_transform, + input_encryption_transform_shape=PutItemInputTransformInput, + input_resource_to_client_shape_transform_method=self._resource_shape_to_client_shape_converter.put_item_request, + input_client_to_resource_shape_transform_method=self._client_shape_to_resource_shape_converter.put_item_request, + output_encryption_transform_method=self._transformer.put_item_output_transform, + output_encryption_transform_shape=PutItemOutputTransformInput, + output_resource_to_client_shape_transform_method=self._resource_shape_to_client_shape_converter.put_item_response, + output_client_to_resource_shape_transform_method=self._client_shape_to_resource_shape_converter.put_item_response, + table_method=self._table.put_item, + ) + + def get_item(self, **kwargs) -> dict[str, Any]: + """ + Get a single item from the table. Decrypts the item after reading from DynamoDB. + + The input and output syntaxes match those for the boto3 DynamoDB table ``get_item`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/table/get_item.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 Table ``get_item`` request + syntax. + + Returns: + dict: The response from DynamoDB. This matches the boto3 Table ``get_item`` response syntax. + The value in ``"Item"`` will be decrypted locally after being read from DynamoDB. 
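# Illustrative usage sketch (not part of this diff), continuing the construction sketch
# above: put_item and get_item take and return plain Python values, exactly like a
# boto3 Table, while encryption and decryption happen transparently. Attribute names
# are hypothetical.
encrypted_table.put_item(
    Item={
        "partition_key": "item-1",
        "sort_key": 42,
        "sensitive_attribute": "encrypt and sign me",
    }
)
response = encrypted_table.get_item(Key={"partition_key": "item-1", "sort_key": 42})
plaintext_item = response["Item"]  # decrypted locally before being returned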
+ + """ + return self._table_operation_logic( + operation_input=kwargs, + input_encryption_transform_method=self._transformer.get_item_input_transform, + input_encryption_transform_shape=GetItemInputTransformInput, + input_resource_to_client_shape_transform_method=self._resource_shape_to_client_shape_converter.get_item_request, + input_client_to_resource_shape_transform_method=self._client_shape_to_resource_shape_converter.get_item_request, + output_encryption_transform_method=self._transformer.get_item_output_transform, + output_encryption_transform_shape=GetItemOutputTransformInput, + output_resource_to_client_shape_transform_method=self._resource_shape_to_client_shape_converter.get_item_response, + output_client_to_resource_shape_transform_method=self._client_shape_to_resource_shape_converter.get_item_response, + table_method=self._table.get_item, + ) + + def query(self, **kwargs) -> dict[str, Any]: + """ + Query items from the table or index. Decrypts any returned items. + + The input and output syntaxes match those for the boto3 DynamoDB table ``query`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/table/query.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 Table ``query`` request + syntax. + + Returns: + dict: The response from DynamoDB. This matches the boto3 Table ``query`` response syntax. + The value in ``"Items"`` will be decrypted locally after being read from DynamoDB. + + """ + return self._table_operation_logic( + operation_input=kwargs, + input_encryption_transform_method=self._transformer.query_input_transform, + input_encryption_transform_shape=QueryInputTransformInput, + input_resource_to_client_shape_transform_method=self._resource_shape_to_client_shape_converter.query_request, + input_client_to_resource_shape_transform_method=self._client_shape_to_resource_shape_converter.query_request, + output_encryption_transform_method=self._transformer.query_output_transform, + output_encryption_transform_shape=QueryOutputTransformInput, + output_resource_to_client_shape_transform_method=self._resource_shape_to_client_shape_converter.query_response, + output_client_to_resource_shape_transform_method=self._client_shape_to_resource_shape_converter.query_response, + table_method=self._table.query, + ) + + def scan(self, **kwargs) -> dict[str, Any]: + """ + Scan the entire table or index. Decrypts any returned items. + + The input and output syntaxes match those for the boto3 DynamoDB table ``scan`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/table/scan.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 Table ``scan`` request + syntax. + + Returns: + dict: The response from DynamoDB. This matches the boto3 Table ``scan`` response syntax. + The value in ``"Items"`` will be decrypted locally after being read from DynamoDB. 
+ + """ + return self._table_operation_logic( + operation_input=kwargs, + input_encryption_transform_method=self._transformer.scan_input_transform, + input_encryption_transform_shape=ScanInputTransformInput, + input_resource_to_client_shape_transform_method=self._resource_shape_to_client_shape_converter.scan_request, + input_client_to_resource_shape_transform_method=self._client_shape_to_resource_shape_converter.scan_request, + output_encryption_transform_method=self._transformer.scan_output_transform, + output_encryption_transform_shape=ScanOutputTransformInput, + output_resource_to_client_shape_transform_method=self._resource_shape_to_client_shape_converter.scan_response, + output_client_to_resource_shape_transform_method=self._client_shape_to_resource_shape_converter.scan_response, + table_method=self._table.scan, + ) + + def delete_item(self, **kwargs) -> dict[str, Any]: + """ + Delete an item from the table. + + The input and output syntaxes match those for the boto3 DynamoDB table ``delete_item`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/table/delete_item.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 Table ``delete_item`` request + syntax. + + Returns: + dict: The response from DynamoDB. This matches the boto3 Table ``delete_item`` response syntax. + Any values in ``"Attributes"`` will be decrypted locally after being read from DynamoDB. + + """ + return self._table_operation_logic( + operation_input=kwargs, + input_encryption_transform_method=self._transformer.delete_item_input_transform, + input_encryption_transform_shape=DeleteItemInputTransformInput, + input_resource_to_client_shape_transform_method=self._resource_shape_to_client_shape_converter.delete_item_request, + input_client_to_resource_shape_transform_method=self._client_shape_to_resource_shape_converter.delete_item_request, + output_encryption_transform_method=self._transformer.delete_item_output_transform, + output_encryption_transform_shape=DeleteItemOutputTransformInput, + output_resource_to_client_shape_transform_method=self._resource_shape_to_client_shape_converter.delete_item_response, + output_client_to_resource_shape_transform_method=self._client_shape_to_resource_shape_converter.delete_item_response, + table_method=self._table.delete_item, + ) + + def update_item(self, **kwargs): + """ + Update an unsigned attribute in the table. + + If the attribute is signed, this operation will raise DynamoDbEncryptionTransformsException. + + The input and output syntaxes match those for the boto3 DynamoDB table ``update_item`` API: + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/table/update_item.html + + Args: + **kwargs: Keyword arguments to pass to the operation. This matches the boto3 Table ``update_item`` request + syntax. + + Returns: + dict: The response from DynamoDB. This matches the boto3 Table ``update_item`` response syntax. + + Raises: + DynamoDbEncryptionTransformsException: If an attribute specified in the ``UpdateExpression`` is signed. 
+ + """ + return self._table_operation_logic( + operation_input=kwargs, + input_encryption_transform_method=self._transformer.update_item_input_transform, + input_encryption_transform_shape=UpdateItemInputTransformInput, + input_resource_to_client_shape_transform_method=self._resource_shape_to_client_shape_converter.update_item_request, + input_client_to_resource_shape_transform_method=self._client_shape_to_resource_shape_converter.update_item_request, + output_encryption_transform_method=self._transformer.update_item_output_transform, + output_encryption_transform_shape=UpdateItemOutputTransformInput, + output_resource_to_client_shape_transform_method=self._resource_shape_to_client_shape_converter.update_item_response, + output_client_to_resource_shape_transform_method=self._client_shape_to_resource_shape_converter.update_item_response, + table_method=self._table.update_item, + ) + + def batch_writer(self, overwrite_by_pkeys: list[str] | None = None) -> BatchWriter: + """ + Create a batch writer object that will transparently encrypt requests to DynamoDB. + + https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/table/batch_writer.html + + Args: + overwrite_by_pkeys: De-duplicate request items in buffer if match new request + item on specified primary keys. i.e ``["partition_key1", "sort_key2", "sort_key3"]`` + + Returns: + BatchWriter: A batch writer that will transparently encrypt requests + + """ + encrypted_client = EncryptedClient( + client=self._table.meta.client, + encryption_config=self._encryption_config, + # The boto3 client comes from the underlying table, which is a ServiceResource. + # ServiceResource clients expect standard dictionaries, not DynamoDB JSON. + expect_standard_dictionaries=True, + ) + return BatchWriter(table_name=self._table.name, client=encrypted_client, overwrite_by_pkeys=overwrite_by_pkeys) + + def _table_operation_logic( + self, + *, + operation_input: dict[str, Any], + input_encryption_transform_method: Callable, + input_encryption_transform_shape: Any, + input_resource_to_client_shape_transform_method: Callable, + input_client_to_resource_shape_transform_method: Callable, + output_encryption_transform_method: Callable, + output_encryption_transform_shape: Any, + output_resource_to_client_shape_transform_method: Callable, + output_client_to_resource_shape_transform_method: Any, + table_method: Callable, + ) -> dict[str, Any]: + """ + Interface between user-supplied input, encryption/decryption transformers, and boto3 Tables. 
+ + Args: + operation_input: User-supplied input to the operation + input_encryption_transform_method: The method to transform the input for encryption/decryption + input_encryption_transform_shape: The shape to supply to the input encryption/decryption transform + input_resource_to_client_shape_transform_method: Method to transform resource-formatted input shape + to client-formattted input shape + input_client_to_resource_shape_transform_method: Method to transform client-formatted input shape + to resource-formattted input shape + output_encryption_transform_method: The method to transform the output for encryption/decryption + output_encryption_transform_shape: The shape to supply to the output encryption/decryption transform + output_resource_to_client_shape_transform_method: Method to transform resource-formatted output shape + to client-formattted output shape + output_client_to_resource_shape_transform_method: Method to transform client-formatted output shape + to resource-formattted output shape + table_method: The underlying table method to call + + Returns: + dict: The transformed response from DynamoDB + + """ + table_input = deepcopy(operation_input) + + # EncryptedTable inputs are formatted as standard dictionaries, but DBESDK transformations expect DynamoDB JSON. + # Convert from standard dictionaries to DynamoDB JSON. + input_transform_input = input_resource_to_client_shape_transform_method(table_input) + + # Apply DBESDK transformation to the input + input_transform_output = input_encryption_transform_method( + input_encryption_transform_shape(sdk_input=input_transform_input) + ).transformed_input + + # The encryption transformation result is formatted in DynamoDB JSON, + # but the underlying boto3 table expects standard dictionaries. + # Convert from DynamoDB JSON to standard dictionaries. + sdk_input = input_client_to_resource_shape_transform_method(input_transform_output) + + sdk_output = table_method(**sdk_input) + + # Table outputs are formatted as standard dictionaries, but DBESDK transformations expect DynamoDB JSON. + # Convert from standard dictionaries to DynamoDB JSON. + output_transform_input = output_resource_to_client_shape_transform_method(sdk_output) + + # Apply DBESDK transformation to boto3 output + output_transform_output = output_encryption_transform_method( + output_encryption_transform_shape( + original_input=input_transform_input, + sdk_output=output_transform_input, + ) + ).transformed_output + + # EncryptedTable outputs are formatted as standard dictionaries, + # but DBESDK transformations provide DynamoDB JSON. + # Convert from DynamoDB JSON to standard dictionaries. + dbesdk_response = output_client_to_resource_shape_transform_method(output_transform_output) + + # Copy any missing fields from the SDK output to the response (e.g. `ConsumedCapacity`) + dbesdk_response = self._copy_sdk_response_to_dbesdk_response(sdk_output, dbesdk_response) + + # Clean up the expression builder for the next operation + self._resource_shape_to_client_shape_converter.expression_builder.reset() + + return dbesdk_response + + @property + def _boto_client_attr_name(self) -> str: + """ + Name of the attribute containing the underlying boto3 client. 
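# Illustrative sketch (not part of this diff): the "resource shape" / "client shape"
# conversions described above correspond to boto3's own type (de)serializers. A plain
# Python value becomes DynamoDB JSON before the encryption transforms run and is
# converted back afterwards.
from boto3.dynamodb.types import TypeDeserializer, TypeSerializer

standard_value = {"name": "alice", "age": 30}
dynamodb_json_value = TypeSerializer().serialize(standard_value)
# -> {"M": {"name": {"S": "alice"}, "age": {"N": "30"}}}
round_tripped = TypeDeserializer().deserialize(dynamodb_json_value)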
+ + Returns: + str: '_table' + + """ + return "_table" diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/internal/client_to_resource.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/internal/client_to_resource.py new file mode 100644 index 000000000..9c4244310 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/internal/client_to_resource.py @@ -0,0 +1,147 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +from aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.boto3_conversions import ( + InternalBoto3DynamoDBFormatConverter, +) +from boto3.dynamodb.types import TypeDeserializer + + +class ClientShapeToResourceShapeConverter: + + def __init__(self, delete_table_name=True): + # Some callers expect the TableName kwarg to be removed from the outputs of this class. + # (EncryptedResource, EncryptedTable.) + # These callers' boto3 shapes do not include TableName. + # Other callers expect the TableName kwarg to be included in the outputs of this class. + # (EncryptedClient, EncryptedPaginator.) + # These callers' boto3 shapes include TableName. + self.delete_table_name = delete_table_name + self.boto3_converter = InternalBoto3DynamoDBFormatConverter( + item_handler=TypeDeserializer().deserialize, condition_handler=self.condition_handler + ) + + def condition_handler(self, expression_key, request): + """Returns the input condition/names/values as-is.""" + # Conditions do not need to be converted from strings to boto3 Attrs. + # Resources accept either strings or Attrs. + # Return the provided condition string. + condition = request[expression_key] + + # This conversion in client_to_resource does not update ExpressionAttributeNames or ExpressionAttributeValues. + # However, resource_to_client condition_handler may add new ExpressionAttributeNames and + # ExpressionAttributeValues. + # Smithy-generated code expects condition_handlers to return ExpressionAttributeNames and + # ExpressionAttributeValues, + # expecting empty dicts if there are none. + try: + names = request["ExpressionAttributeNames"] + except KeyError: + names = {} + + try: + values = request["ExpressionAttributeValues"] + except KeyError: + values = {} + return condition, names, values + + def put_item_request(self, put_item_request): + out = self.boto3_converter.PutItemInput(put_item_request) + # put_item requests on resources do not have a table name. + if self.delete_table_name: + del out["TableName"] + return out + + def put_item_response(self, put_item_response): + return self.boto3_converter.PutItemOutput(put_item_response) + + def get_item_request(self, get_item_request): + out = self.boto3_converter.GetItemInput(get_item_request) + # get_item requests on resources do not have a table name. + if self.delete_table_name: + del out["TableName"] + return out + + def get_item_response(self, get_item_response): + return self.boto3_converter.GetItemOutput(get_item_response) + + def query_request(self, query_request): + out = self.boto3_converter.QueryInput(query_request) + # query requests on resources do not have a table name. + if self.delete_table_name: + del out["TableName"] + return out + + def query_response(self, query_response): + return self.boto3_converter.QueryOutput(query_response) + + def scan_request(self, scan_request): + out = self.boto3_converter.ScanInput(scan_request) + # scan requests on resources do not have a table name. 
+ if self.delete_table_name: + del out["TableName"] + return out + + def scan_response(self, scan_response): + return self.boto3_converter.ScanOutput(scan_response) + + def delete_item_request(self, delete_item_request): + out = self.boto3_converter.DeleteItemInput(delete_item_request) + # delete_item requests on resources do not have a table name. + if self.delete_table_name: + del out["TableName"] + return out + + def delete_item_response(self, delete_item_response): + return self.boto3_converter.DeleteItemOutput(delete_item_response) + + def update_item_request(self, update_item_request): + out = self.boto3_converter.UpdateItemInput(update_item_request) + # update_item requests on resources do not have a table name. + if self.delete_table_name: + del out["TableName"] + return out + + def update_item_response(self, update_item_response): + return self.boto3_converter.UpdateItemOutput(update_item_response) + + def transact_get_items_request(self, transact_get_items_request): + return self.boto3_converter.TransactGetItemsInput(transact_get_items_request) + + def transact_get_items_response(self, transact_get_items_response): + return self.boto3_converter.TransactGetItemsOutput(transact_get_items_response) + + def transact_write_items_request(self, transact_write_items_request): + return self.boto3_converter.TransactWriteItemsInput(transact_write_items_request) + + def transact_write_items_response(self, transact_write_items_response): + return self.boto3_converter.TransactWriteItemsOutput(transact_write_items_response) + + def batch_get_item_request(self, batch_get_item_request): + return self.boto3_converter.BatchGetItemInput(batch_get_item_request) + + def batch_get_item_response(self, batch_get_item_response): + return self.boto3_converter.BatchGetItemOutput(batch_get_item_response) + + def batch_write_item_request(self, batch_write_item_request): + return self.boto3_converter.BatchWriteItemInput(batch_write_item_request) + + def batch_write_item_response(self, batch_write_item_response): + return self.boto3_converter.BatchWriteItemOutput(batch_write_item_response) + + def batch_execute_statement_request(self, batch_execute_statement_request): + return self.boto3_converter.BatchExecuteStatementInput(batch_execute_statement_request) + + def batch_execute_statement_response(self, batch_execute_statement_response): + return self.boto3_converter.BatchExecuteStatementOutput(batch_execute_statement_response) + + def execute_statement_request(self, execute_statement_request): + return self.boto3_converter.ExecuteStatementInput(execute_statement_request) + + def execute_statement_response(self, execute_statement_response): + return self.boto3_converter.ExecuteStatementOutput(execute_statement_response) + + def execute_transaction_request(self, execute_transaction_request): + return self.boto3_converter.ExecuteTransactionInput(execute_transaction_request) + + def execute_transaction_response(self, execute_transaction_response): + return self.boto3_converter.ExecuteTransactionOutput(execute_transaction_response) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/internal/resource_to_client.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/internal/resource_to_client.py new file mode 100644 index 000000000..aa3f94ad7 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/internal/resource_to_client.py @@ -0,0 +1,155 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +from aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.boto3_conversions import ( + InternalBoto3DynamoDBFormatConverter, +) +from boto3.dynamodb.conditions import ConditionExpressionBuilder +from boto3.dynamodb.types import TypeSerializer + + +class ResourceShapeToClientShapeConverter: + + def __init__(self, table_name=None): + self.boto3_converter = InternalBoto3DynamoDBFormatConverter( + item_handler=TypeSerializer().serialize, condition_handler=self.condition_handler + ) + # TableName is optional; + # Some requests require it (ex. put_item, update_item, delete_item), + # but others do not (ex. transact_get_items, batch_write_item). + self.table_name = table_name + self.expression_builder = ConditionExpressionBuilder() + + def condition_handler(self, expression_key, request): + """ + Converts an object from boto3.dynamodb.conditions to a string + and updates ExpressionAttributeNames and ExpressionAttributeValues with any new names/values. + The ExpressionAttributeValues are returned in resource format (Python dictionaries). + """ + condition_expression = request[expression_key] + + try: + existing_expression_attribute_names = request["ExpressionAttributeNames"] + except KeyError: + existing_expression_attribute_names = {} + try: + existing_expression_attribute_values = request["ExpressionAttributeValues"] + except KeyError: + existing_expression_attribute_values = {} + + # Only convert if the condition expression is a boto3.dynamodb.conditions object. + # Resources also accept strings. + # If condition is not from boto3.dynamodb.conditions, assume the condition is string-like, and return as-is. + if ( + hasattr(condition_expression, "__module__") + and condition_expression.__module__ == "boto3.dynamodb.conditions" + ): + built_condition_expression = self.expression_builder.build_expression(condition_expression) + return ( + built_condition_expression.condition_expression, + built_condition_expression.attribute_name_placeholders, + built_condition_expression.attribute_value_placeholders, + ) + else: + return condition_expression, existing_expression_attribute_names, existing_expression_attribute_values + + def put_item_request(self, put_item_request): + # put_item requests on a boto3.resource.Table require a configured table name. + if not self.table_name: + raise ValueError("Table name must be provided to ResourceShapeToClientShapeConverter to use put_item") + put_item_request["TableName"] = self.table_name + return self.boto3_converter.PutItemInput(put_item_request) + + def get_item_request(self, get_item_request): + # get_item requests on a boto3.resource.Table require a configured table name. + if not self.table_name: + raise ValueError("Table name must be provided to ResourceShapeToClientShapeConverter to use get_item") + get_item_request["TableName"] = self.table_name + return self.boto3_converter.GetItemInput(get_item_request) + + def query_request(self, query_request): + # query requests on a boto3.resource.Table require a configured table name. + if not self.table_name: + raise ValueError("Table name must be provided to ResourceShapeToClientShapeConverter to use query") + query_request["TableName"] = self.table_name + return self.boto3_converter.QueryInput(query_request) + + def scan_request(self, scan_request): + # scan requests on a boto3.resource.Table require a configured table name. 
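# Illustrative sketch (not part of this diff): condition_handler above relies on
# boto3's ConditionExpressionBuilder to turn condition objects into an expression
# string plus name/value placeholders, which is the form the client shape expects.
# Placeholder names below are examples of boto3's generated format.
from boto3.dynamodb.conditions import Attr, ConditionExpressionBuilder

built = ConditionExpressionBuilder().build_expression(Attr("status").eq("active"))
expression = built.condition_expression        # e.g. "#n0 = :v0"
names = built.attribute_name_placeholders      # e.g. {"#n0": "status"}
values = built.attribute_value_placeholders    # e.g. {":v0": "active"}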
+ if not self.table_name: + raise ValueError("Table name must be provided to ResourceShapeToClientShapeConverter to use scan") + scan_request["TableName"] = self.table_name + return self.boto3_converter.ScanInput(scan_request) + + def update_item_request(self, update_item_request): + # update_item requests on a boto3.resource.Table require a configured table name. + if not self.table_name: + raise ValueError("Table name must be provided to ResourceShapeToClientShapeConverter to use update_item") + update_item_request["TableName"] = self.table_name + return self.boto3_converter.UpdateItemInput(update_item_request) + + def delete_item_request(self, delete_item_request): + # delete_item requests on a boto3.resource.Table require a configured table name. + if not self.table_name: + raise ValueError("Table name must be provided to ResourceShapeToClientShapeConverter to use delete_item") + delete_item_request["TableName"] = self.table_name + return self.boto3_converter.DeleteItemInput(delete_item_request) + + def transact_get_items_request(self, transact_get_items_request): + return self.boto3_converter.TransactGetItemsInput(transact_get_items_request) + + def transact_get_items_response(self, transact_get_items_response): + return self.boto3_converter.TransactGetItemsOutput(transact_get_items_response) + + def transact_write_items_request(self, transact_write_items_request): + return self.boto3_converter.TransactWriteItemsInput(transact_write_items_request) + + def transact_write_items_response(self, transact_write_items_response): + return self.boto3_converter.TransactWriteItemsOutput(transact_write_items_response) + + def batch_get_item_request(self, batch_get_item_request): + return self.boto3_converter.BatchGetItemInput(batch_get_item_request) + + def batch_get_item_response(self, batch_get_item_response): + return self.boto3_converter.BatchGetItemOutput(batch_get_item_response) + + def batch_write_item_request(self, batch_write_item_request): + return self.boto3_converter.BatchWriteItemInput(batch_write_item_request) + + def batch_write_item_response(self, batch_write_item_response): + return self.boto3_converter.BatchWriteItemOutput(batch_write_item_response) + + def batch_execute_statement_request(self, batch_execute_statement_request): + return self.boto3_converter.BatchExecuteStatementInput(batch_execute_statement_request) + + def batch_execute_statement_response(self, batch_execute_statement_response): + return self.boto3_converter.BatchExecuteStatementOutput(batch_execute_statement_response) + + def execute_statement_request(self, execute_statement_request): + return self.boto3_converter.ExecuteStatementInput(execute_statement_request) + + def execute_statement_response(self, execute_statement_response): + return self.boto3_converter.ExecuteStatementOutput(execute_statement_response) + + def execute_transaction_request(self, execute_transaction_request): + return self.boto3_converter.ExecuteTransactionInput(execute_transaction_request) + + def execute_transaction_response(self, execute_transaction_response): + return self.boto3_converter.ExecuteTransactionOutput(execute_transaction_response) + + def scan_response(self, scan_response): + return self.boto3_converter.ScanOutput(scan_response) + + def query_response(self, query_response): + return self.boto3_converter.QueryOutput(query_response) + + def get_item_response(self, get_item_response): + return self.boto3_converter.GetItemOutput(get_item_response) + + def put_item_response(self, put_item_response): + return 
self.boto3_converter.PutItemOutput(put_item_response) + + def update_item_response(self, update_item_response): + return self.boto3_converter.UpdateItemOutput(update_item_response) + + def delete_item_response(self, delete_item_response): + return self.boto3_converter.DeleteItemOutput(delete_item_response) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/internaldafny/extern/InternalLegacyOverride.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/internaldafny/extern/InternalLegacyOverride.py new file mode 100644 index 000000000..8c42812e4 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/internaldafny/extern/InternalLegacyOverride.py @@ -0,0 +1,268 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +from _dafny import Seq + +import aws_dbesdk_dynamodb.internaldafny.generated.InternalLegacyOverride +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes import ( + DynamoDbItemEncryptorConfig_DynamoDbItemEncryptorConfig, + Error_DynamoDbItemEncryptorException, + Error_Opaque, + DecryptItemInput_DecryptItemInput, + EncryptItemInput_EncryptItemInput, +) +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes import ( + CryptoAction_ENCRYPT__AND__SIGN, + CryptoAction_SIGN__ONLY, + CryptoAction_DO__NOTHING, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.references import ( + ILegacyDynamoDbEncryptor, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models import ( + EncryptItemInput, + EncryptItemOutput, + DecryptItemOutput, + DecryptItemInput, +) + +try: + from dynamodb_encryption_sdk.encrypted.client import EncryptedClient + from dynamodb_encryption_sdk.encrypted.table import EncryptedTable + from dynamodb_encryption_sdk.encrypted.resource import EncryptedResource + from dynamodb_encryption_sdk.encrypted.client import EncryptedPaginator + from dynamodb_encryption_sdk.encrypted.item import encrypt_dynamodb_item, decrypt_dynamodb_item + from dynamodb_encryption_sdk.structures import EncryptionContext, AttributeActions + from dynamodb_encryption_sdk.identifiers import CryptoAction + from dynamodb_encryption_sdk.encrypted import CryptoConfig + from dynamodb_encryption_sdk.internal.identifiers import ReservedAttributes + + _HAS_LEGACY_DDBEC = True +except ImportError: + _HAS_LEGACY_DDBEC = False + + +class InternalLegacyOverride(aws_dbesdk_dynamodb.internaldafny.generated.InternalLegacyOverride.InternalLegacyOverride): + def __init__(self): + super().__init__() + self.crypto_config = None + self.policy = None + + @staticmethod + def Build(config: DynamoDbItemEncryptorConfig_DynamoDbItemEncryptorConfig): + # Check for early return (Postcondition): If there is no legacyOverride there is nothing to do. 
+ if config.legacyOverride.is_None: + return InternalLegacyOverride.CreateBuildSuccess(InternalLegacyOverride.CreateInternalLegacyOverrideNone()) + + legacy_override = config.legacyOverride.value + + # Precondition: The encryptor MUST be a DynamoDBEncryptor + if not _HAS_LEGACY_DDBEC: + return InternalLegacyOverride.CreateBuildFailure( + InternalLegacyOverride.CreateError("Could not find aws-dynamodb-encryption-python installation") + ) + + # Precondition: The encryptor MUST be one of the supported legacy types + if not ( + isinstance(legacy_override.encryptor, EncryptedClient) + or isinstance(legacy_override.encryptor, EncryptedTable) + or isinstance(legacy_override.encryptor, EncryptedResource) + ): + return InternalLegacyOverride.CreateBuildFailure( + InternalLegacyOverride.CreateError("Legacy encryptor is not supported") + ) + + # Preconditions: MUST be able to create valid encryption context + maybe_encryption_context = InternalLegacyOverride.legacyEncryptionContext(config) + if maybe_encryption_context.is_Failure: + return maybe_encryption_context + + # Precondition: All actions MUST be supported types + maybe_actions = InternalLegacyOverride.legacyActions(legacy_override.attributeActionsOnEncrypt) + if maybe_actions.is_Failure: + return maybe_actions + + # Create and return the legacy override instance + legacy_instance = InternalLegacyOverride() + legacy_instance.policy = legacy_override.policy + legacy_instance.crypto_config = CryptoConfig( + materials_provider=legacy_override.encryptor._materials_provider, + encryption_context=maybe_encryption_context.value, + attribute_actions=maybe_actions.value, + ) + return InternalLegacyOverride.CreateBuildSuccess( + InternalLegacyOverride.CreateInternalLegacyOverrideSome(legacy_instance) + ) + + @staticmethod + def legacyEncryptionContext(config: DynamoDbItemEncryptorConfig_DynamoDbItemEncryptorConfig): + """Create the legacy encryption context from the config.""" + try: + # Convert Dafny types to Python strings for the encryption context + table_name = InternalLegacyOverride.DafnyStringToNativeString(config.logicalTableName) + partition_key_name = InternalLegacyOverride.DafnyStringToNativeString(config.partitionKeyName) + sort_key_name = ( + InternalLegacyOverride.DafnyStringToNativeString(config.sortKeyName.value) + if config.sortKeyName.is_Some + else None + ) + + # Create the legacy encryption context with the extracted values + encryption_context = EncryptionContext( + table_name=table_name, + partition_key_name=partition_key_name, + sort_key_name=sort_key_name, + ) + + return InternalLegacyOverride.CreateBuildSuccess(encryption_context) + except Exception as ex: + return InternalLegacyOverride.CreateBuildFailure(Error_Opaque(ex)) + + @staticmethod + def legacyActions(attribute_actions_on_encrypt): + """Create the legacy attribute actions from the config.""" + try: + # Create a new AttributeActions with default ENCRYPT_AND_SIGN + # Default Action to take if no specific action is defined in ``attribute_actions`` + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/DDBEC-legacy-concepts.html#legacy-attribute-actions + legacy_actions = AttributeActions(default_action=CryptoAction.ENCRYPT_AND_SIGN) + + # Map the action from the config to legacy actions + attribute_actions = {} + for key, action in attribute_actions_on_encrypt.items: + key_str = InternalLegacyOverride.DafnyStringToNativeString(key) + + # Map the action type to the appropriate CryptoAction + if action == CryptoAction_ENCRYPT__AND__SIGN(): + 
attribute_actions[key_str] = CryptoAction.ENCRYPT_AND_SIGN + elif action == CryptoAction_SIGN__ONLY(): + attribute_actions[key_str] = CryptoAction.SIGN_ONLY + elif action == CryptoAction_DO__NOTHING(): + attribute_actions[key_str] = CryptoAction.DO_NOTHING + else: + return InternalLegacyOverride.CreateBuildFailure( + InternalLegacyOverride.CreateError(f"Unknown action type: {action}") + ) + + # Update the attribute_actions dictionary + legacy_actions.attribute_actions = attribute_actions + return InternalLegacyOverride.CreateBuildSuccess(legacy_actions) + except Exception as ex: + return InternalLegacyOverride.CreateBuildFailure(Error_Opaque(ex)) + + def EncryptItem(self, input: EncryptItemInput_EncryptItemInput): + """Encrypt an item using the legacy DynamoDB encryptor. + + :param input: EncryptItemInput containing the plaintext item to encrypt + :returns Result containing the encrypted item or an error + """ + try: + # Precondition: Policy MUST allow the caller to encrypt. + if not self.policy.is_FORCE__LEGACY__ENCRYPT__ALLOW__LEGACY__DECRYPT: + return self.CreateEncryptItemFailure( + InternalLegacyOverride.CreateError("Legacy policy does not support encrypt") + ) + + # Get the Native Plaintext Item + native_input = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_EncryptItemInput( + input + ) + + # Encrypt the item using the instance attributes + encrypted_item = encrypt_dynamodb_item( + item=native_input.plaintext_item, + crypto_config=self.crypto_config.with_item(native_input.plaintext_item), + ) + + # Return the encrypted item + # The legacy encryption method returns items in the format that Dafny expects, + # so no additional conversion is needed here + native_output = EncryptItemOutput(encrypted_item=encrypted_item, parsed_header=None) + dafny_output = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_EncryptItemOutput( + native_output + ) + return self.CreateEncryptItemSuccess(dafny_output) + + except Exception as ex: + return self.CreateEncryptItemFailure(InternalLegacyOverride.CreateError(Error_Opaque(ex))) + + def DecryptItem(self, input: DecryptItemInput_DecryptItemInput): + """Decrypt an item using the legacy DynamoDB encryptor. + + :param input: DecryptItemInput containing the encrypted item to decrypt + :returns Result containing the decrypted item or an error + """ + try: + # Precondition: Policy MUST allow the caller to decrypt. + # = specification/dynamodb-encryption-client/decrypt-item.md#behavior + ## If a [Legacy Policy](./ddb-table-encryption-config.md#legacy-policy) of + ## `FORBID_LEGACY_ENCRYPT_FORBID_LEGACY_DECRYPT` is configured, + ## and the input item [is an item written in the legacy format](#determining-legacy-items), + ## this operation MUST fail. 
+ if not ( + self.policy.is_FORCE__LEGACY__ENCRYPT__ALLOW__LEGACY__DECRYPT + or self.policy.is_FORBID__LEGACY__ENCRYPT__ALLOW__LEGACY__DECRYPT + ): + return self.CreateDecryptItemFailure( + InternalLegacyOverride.CreateError("Legacy policy does not support decrypt") + ) + + # Get the Native DecryptItemInput + native_input: DecryptItemInput = ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DecryptItemInput( + input + ) + ) + # Decrypt the item using the instance attributes + decrypted_item = decrypt_dynamodb_item( + item=native_input.encrypted_item, + crypto_config=self.crypto_config.with_item(native_input.encrypted_item), + ) + + native_output = DecryptItemOutput(plaintext_item=decrypted_item, parsed_header=None) + dafny_output = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DecryptItemOutput( + native_output + ) + return self.CreateDecryptItemSuccess(dafny_output) + except Exception as ex: + return self.CreateDecryptItemFailure(InternalLegacyOverride.CreateError(Error_Opaque(ex))) + + def IsLegacyInput(self, input: DecryptItemInput_DecryptItemInput): + """ + Determine if the input is from a legacy client. + + :param input: The decrypt item input to check + :returns Boolean indicating if the input is from a legacy client + """ + if not input.is_DecryptItemInput: + return False + + # Get the Native DecryptItemInput + native_input: DecryptItemInput = ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DecryptItemInput( + input + ) + ) + # = specification/dynamodb-encryption-client/decrypt-item.md#determining-legacy-items + ## An item MUST be determined to be encrypted under the legacy format if it contains + ## attributes for the material description and the signature. + return ( + "*amzn-ddb-map-desc*" in native_input.encrypted_item and "*amzn-ddb-map-sig*" in native_input.encrypted_item + ) + + @staticmethod + def DafnyStringToNativeString(dafny_input): + return b"".join(ord(c).to_bytes(2, "big") for c in dafny_input).decode("utf-16-be") + + @staticmethod + def NativeStringToDafnyString(native_input): + return Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(native_input.encode("utf-16-be"))] * 2)]) + ) + + @staticmethod + def CreateError(message): + """Create an Error with the given message.""" + return Error_DynamoDbItemEncryptorException(InternalLegacyOverride.NativeStringToDafnyString(message)) + + +aws_dbesdk_dynamodb.internaldafny.generated.InternalLegacyOverride.InternalLegacyOverride = InternalLegacyOverride diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/internaldafny/extern/__init__.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/internaldafny/extern/__init__.py new file mode 100644 index 000000000..34cab732e --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/internaldafny/extern/__init__.py @@ -0,0 +1,5 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +from . 
import ( + InternalLegacyOverride, +) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/__init__.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/__init__.py new file mode 100644 index 000000000..09be6133b --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/aws_sdk_to_dafny.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/aws_sdk_to_dafny.py new file mode 100644 index 000000000..c58eacb37 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/aws_sdk_to_dafny.py @@ -0,0 +1,113 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +from _dafny import Map, Seq +from aws_cryptography_internal_dynamodb.internaldafny.generated.ComAmazonawsDynamodbTypes import ( + AttributeValue_B, + AttributeValue_BOOL, + AttributeValue_BS, + AttributeValue_L, + AttributeValue_M, + AttributeValue_N, + AttributeValue_NS, + AttributeValue_NULL, + AttributeValue_S, + AttributeValue_SS, +) +import aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny + + +def com_amazonaws_dynamodb_AttributeValue(native_input): + if "S" in native_input.keys(): + AttributeValue_union_value = AttributeValue_S( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["S"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + elif "N" in native_input.keys(): + AttributeValue_union_value = AttributeValue_N( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["N"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + elif "B" in native_input.keys(): + AttributeValue_union_value = AttributeValue_B(Seq(native_input["B"])) + elif "SS" in native_input.keys(): + AttributeValue_union_value = AttributeValue_SS( + Seq( + [ + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(list_element.encode("utf-16-be"))] * 2) + ] + ) + ) + for list_element in native_input["SS"] + ] + ) + ) + elif "NS" in native_input.keys(): + AttributeValue_union_value = AttributeValue_NS( + Seq( + [ + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(list_element.encode("utf-16-be"))] * 2) + ] + ) + ) + for list_element in native_input["NS"] + ] + ) + ) + elif "BS" in native_input.keys(): + AttributeValue_union_value = AttributeValue_BS(Seq([Seq(list_element) for list_element in native_input["BS"]])) + elif "M" in native_input.keys(): + AttributeValue_union_value = AttributeValue_M( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): 
aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["M"].items() + } + ) + ) + elif "L" in native_input.keys(): + AttributeValue_union_value = AttributeValue_L( + Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + list_element + ) + for list_element in native_input["L"] + ] + ) + ) + elif "NULL" in native_input.keys(): + AttributeValue_union_value = AttributeValue_NULL(native_input["NULL"]) + elif "BOOL" in native_input.keys(): + AttributeValue_union_value = AttributeValue_BOOL(native_input["BOOL"]) + else: + raise ValueError("No recognized union value in union type: " + str(native_input)) + + return AttributeValue_union_value diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/client.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/client.py new file mode 100644 index 000000000..93914b228 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/client.py @@ -0,0 +1,348 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes import ( + IDynamoDbEncryptionClient, +) +from typing import Callable, TypeVar, cast + +from .config import Config, DynamoDbEncryptionConfig +from .dafny_protocol import DafnyRequest, DafnyResponse +from .plugin import set_config_impl +from smithy_python.exceptions import SmithyRetryException +from smithy_python.interfaces.interceptor import Interceptor, InterceptorContext +from smithy_python.interfaces.retries import RetryErrorInfo, RetryErrorType + +from .config import Plugin +from .deserialize import ( + _deserialize_create_dynamo_db_encryption_branch_key_id_supplier, + _deserialize_get_encrypted_data_key_description, +) +from .errors import ServiceError +from .models import ( + CreateDynamoDbEncryptionBranchKeyIdSupplierInput, + CreateDynamoDbEncryptionBranchKeyIdSupplierOutput, + GetEncryptedDataKeyDescriptionInput, + GetEncryptedDataKeyDescriptionOutput, +) +from .serialize import ( + _serialize_create_dynamo_db_encryption_branch_key_id_supplier, + _serialize_get_encrypted_data_key_description, +) + + +Input = TypeVar("Input") +Output = TypeVar("Output") + + +class DynamoDbEncryption: + """Client for DynamoDbEncryption. + + :param config: Configuration for the client. 
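# Illustrative usage sketch (not part of this diff): constructing the generated client.
# The operation input's fields are elided because they are not shown in this diff.
from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.client import (
    DynamoDbEncryption,
)
from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.config import (
    DynamoDbEncryptionConfig,
)
from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models import (
    CreateDynamoDbEncryptionBranchKeyIdSupplierInput,
)

client = DynamoDbEncryption(config=DynamoDbEncryptionConfig())
supplier_output = client.create_dynamo_db_encryption_branch_key_id_supplier(
    CreateDynamoDbEncryptionBranchKeyIdSupplierInput(...)  # fields elided
)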
+ """ + + def __init__( + self, + config: DynamoDbEncryptionConfig | None = None, + dafny_client: IDynamoDbEncryptionClient | None = None, + ): + if config is None: + self._config = Config() + else: + self._config = config + + client_plugins: list[Plugin] = [ + set_config_impl, + ] + + for plugin in client_plugins: + plugin(self._config) + + if dafny_client is not None: + self._config.dafnyImplInterface.impl = dafny_client + + def create_dynamo_db_encryption_branch_key_id_supplier( + self, input: CreateDynamoDbEncryptionBranchKeyIdSupplierInput + ) -> CreateDynamoDbEncryptionBranchKeyIdSupplierOutput: + """Create a Branch Key Supplier for use with the Hierarchical Keyring + that decides what Branch Key to use based on the primary key of the + DynamoDB item being read or written. + + :param input: Inputs for creating a Branch Key Supplier from a + DynamoDB Key Branch Key Id Supplier + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_create_dynamo_db_encryption_branch_key_id_supplier, + deserialize=_deserialize_create_dynamo_db_encryption_branch_key_id_supplier, + config=self._config, + operation_name="CreateDynamoDbEncryptionBranchKeyIdSupplier", + ) + + def get_encrypted_data_key_description( + self, input: GetEncryptedDataKeyDescriptionInput + ) -> GetEncryptedDataKeyDescriptionOutput: + """Returns encrypted data key description. + + :param input: Input for getting encrypted data key description. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_get_encrypted_data_key_description, + deserialize=_deserialize_get_encrypted_data_key_description, + config=self._config, + operation_name="GetEncryptedDataKeyDescription", + ) + + def _execute_operation( + self, + input: Input, + plugins: list[Plugin], + serialize: Callable[[Input, Config], DafnyRequest], + deserialize: Callable[[DafnyResponse, Config], Output], + config: Config, + operation_name: str, + ) -> Output: + try: + return self._handle_execution(input, plugins, serialize, deserialize, config, operation_name) + except Exception as e: + # Make sure every exception that we throw is an instance of ServiceError so + # customers can reliably catch everything we throw. + if not isinstance(e, ServiceError): + raise ServiceError(e) from e + raise e + + def _handle_execution( + self, + input: Input, + plugins: list[Plugin], + serialize: Callable[[Input, Config], DafnyRequest], + deserialize: Callable[[DafnyResponse, Config], Output], + config: Config, + operation_name: str, + ) -> Output: + context: InterceptorContext[Input, None, None, None] = InterceptorContext( + request=input, + response=None, + transport_request=None, + transport_response=None, + ) + try: + _client_interceptors = config.interceptors + except AttributeError: + config.interceptors = [] + _client_interceptors = config.interceptors + client_interceptors = cast( + list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + _client_interceptors, + ) + interceptors = client_interceptors + + try: + # Step 1a: Invoke read_before_execution on client-level interceptors + for interceptor in client_interceptors: + interceptor.read_before_execution(context) + + # Step 1b: Run operation-level plugins + for plugin in plugins: + plugin(config) + + _client_interceptors = config.interceptors + interceptors = cast( + list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + _client_interceptors, + ) + + # Step 1c: Invoke the read_before_execution hooks on newly added + # interceptors. 
+ for interceptor in interceptors: + if interceptor not in client_interceptors: + interceptor.read_before_execution(context) + + # Step 2: Invoke the modify_before_serialization hooks + for interceptor in interceptors: + context._request = interceptor.modify_before_serialization(context) + + # Step 3: Invoke the read_before_serialization hooks + for interceptor in interceptors: + interceptor.read_before_serialization(context) + + # Step 4: Serialize the request + context_with_transport_request = cast(InterceptorContext[Input, None, DafnyRequest, None], context) + context_with_transport_request._transport_request = serialize( + context_with_transport_request.request, config + ) + + # Step 5: Invoke read_after_serialization + for interceptor in interceptors: + interceptor.read_after_serialization(context_with_transport_request) + + # Step 6: Invoke modify_before_retry_loop + for interceptor in interceptors: + context_with_transport_request._transport_request = interceptor.modify_before_retry_loop( + context_with_transport_request + ) + + # Step 7: Acquire the retry token. + retry_strategy = config.retry_strategy + retry_token = retry_strategy.acquire_initial_retry_token() + + while True: + # Make an attempt, creating a copy of the context so we don't pass + # around old data. + context_with_response = self._handle_attempt( + deserialize, + interceptors, + context_with_transport_request.copy(), + config, + operation_name, + ) + + # We perform this type-ignored re-assignment because `context` needs + # to point at the latest context so it can be generically handled + # later on. This is only an issue here because we've created a copy, + # so we're no longer simply pointing at the same object in memory + # with different names and type hints. It is possible to address this + # without having to fall back to the type ignore, but it would impose + # unnecessary runtime costs. + context = context_with_response # type: ignore + + if isinstance(context_with_response.response, Exception): + # Step 7u: Reacquire retry token if the attempt failed + try: + retry_token = retry_strategy.refresh_retry_token_for_retry( + token_to_renew=retry_token, + error_info=RetryErrorInfo( + # TODO: Determine the error type. + error_type=RetryErrorType.CLIENT_ERROR, + ), + ) + except SmithyRetryException: + raise context_with_response.response + else: + # Step 8: Invoke record_success + retry_strategy.record_success(token=retry_token) + break + except Exception as e: + context._response = e + + # At this point, the context's request will have been definitively set, and + # The response will be set either with the modeled output or an exception. The + # transport_request and transport_response may be set or None. 
+ execution_context = cast( + InterceptorContext[Input, Output, DafnyRequest | None, DafnyResponse | None], + context, + ) + return self._finalize_execution(interceptors, execution_context) + + def _handle_attempt( + self, + deserialize: Callable[[DafnyResponse, Config], Output], + interceptors: list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + context: InterceptorContext[Input, None, DafnyRequest, None], + config: Config, + operation_name: str, + ) -> InterceptorContext[Input, Output, DafnyRequest, DafnyResponse | None]: + try: + # Step 7a: Invoke read_before_attempt + for interceptor in interceptors: + interceptor.read_before_attempt(context) + + # Step 7m: Involve client Dafny impl + if config.dafnyImplInterface.impl is None: + raise Exception("No impl found on the operation config.") + + context_with_response = cast(InterceptorContext[Input, None, DafnyRequest, DafnyResponse], context) + + context_with_response._transport_response = config.dafnyImplInterface.handle_request( + input=context_with_response.transport_request + ) + + # Step 7n: Invoke read_after_transmit + for interceptor in interceptors: + interceptor.read_after_transmit(context_with_response) + + # Step 7o: Invoke modify_before_deserialization + for interceptor in interceptors: + context_with_response._transport_response = interceptor.modify_before_deserialization( + context_with_response + ) + + # Step 7p: Invoke read_before_deserialization + for interceptor in interceptors: + interceptor.read_before_deserialization(context_with_response) + + # Step 7q: deserialize + context_with_output = cast( + InterceptorContext[Input, Output, DafnyRequest, DafnyResponse], + context_with_response, + ) + context_with_output._response = deserialize(context_with_output._transport_response, config) + + # Step 7r: Invoke read_after_deserialization + for interceptor in interceptors: + interceptor.read_after_deserialization(context_with_output) + except Exception as e: + context._response = e + + # At this point, the context's request and transport_request have definitively been set, + # the response is either set or an exception, and the transport_resposne is either set or + # None. This will also be true after _finalize_attempt because there is no opportunity + # there to set the transport_response. 
+ attempt_context = cast( + InterceptorContext[Input, Output, DafnyRequest, DafnyResponse | None], + context, + ) + return self._finalize_attempt(interceptors, attempt_context) + + def _finalize_attempt( + self, + interceptors: list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + context: InterceptorContext[Input, Output, DafnyRequest, DafnyResponse | None], + ) -> InterceptorContext[Input, Output, DafnyRequest, DafnyResponse | None]: + # Step 7s: Invoke modify_before_attempt_completion + try: + for interceptor in interceptors: + context._response = interceptor.modify_before_attempt_completion(context) + except Exception as e: + context._response = e + + # Step 7t: Invoke read_after_attempt + for interceptor in interceptors: + try: + interceptor.read_after_attempt(context) + except Exception as e: + context._response = e + + return context + + def _finalize_execution( + self, + interceptors: list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + context: InterceptorContext[Input, Output, DafnyRequest | None, DafnyResponse | None], + ) -> Output: + try: + # Step 9: Invoke modify_before_completion + for interceptor in interceptors: + context._response = interceptor.modify_before_completion(context) + + except Exception as e: + context._response = e + + # Step 11: Invoke read_after_execution + for interceptor in interceptors: + try: + interceptor.read_after_execution(context) + except Exception as e: + context._response = e + + # Step 12: Return / throw + if isinstance(context.response, Exception): + raise context.response + + # We may want to add some aspects of this context to the output types so we can + # return it to the end-users. + return context.response diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/config.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/config.py new file mode 100644 index 000000000..788f97a41 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/config.py @@ -0,0 +1,92 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
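# Illustrative sketch (not part of this diff): a Plugin, as defined below, is just a
# callable that customizes the Config, for example by registering an extra interceptor.
# `MyInterceptor` is hypothetical; any object implementing the smithy_python
# Interceptor hooks would work.
def register_my_interceptor(config):
    config.interceptors.append(MyInterceptor())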
+ +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes import ( + DynamoDbEncryptionConfig_DynamoDbEncryptionConfig as DafnyDynamoDbEncryptionConfig, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny +from dataclasses import dataclass +from typing import Any, Callable, Dict, TypeAlias + +from .dafnyImplInterface import DafnyImplInterface +from smithy_python._private.retries import SimpleRetryStrategy +from smithy_python.interfaces.retries import RetryStrategy + + +_ServiceInterceptor = Any + + +@dataclass(init=False) +class Config: + """Configuration for DynamoDbEncryption.""" + + interceptors: list[_ServiceInterceptor] + retry_strategy: RetryStrategy + dafnyImplInterface: DafnyImplInterface | None + + def __init__( + self, + *, + interceptors: list[_ServiceInterceptor] | None = None, + retry_strategy: RetryStrategy | None = None, + dafnyImplInterface: DafnyImplInterface | None = None, + ): + """Constructor. + + :param interceptors: The list of interceptors, which are hooks + that are called during the execution of a request. + :param retry_strategy: The retry strategy for issuing retry + tokens and computing retry delays. + :param dafnyImplInterface: + """ + self.interceptors = interceptors or [] + self.retry_strategy = retry_strategy or SimpleRetryStrategy() + self.dafnyImplInterface = dafnyImplInterface + + +# A callable that allows customizing the config object on each request. +Plugin: TypeAlias = Callable[[Config], None] + + +class DynamoDbEncryptionConfig(Config): + def __init__( + self, + ): + """Constructor for DynamoDbEncryptionConfig.""" + super().__init__() + + def as_dict(self) -> Dict[str, Any]: + """Converts the DynamoDbEncryptionConfig to a dictionary.""" + return {} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DynamoDbEncryptionConfig": + """Creates a DynamoDbEncryptionConfig from a dictionary.""" + return DynamoDbEncryptionConfig() + + def __repr__(self) -> str: + result = "DynamoDbEncryptionConfig(" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + return isinstance(other, DynamoDbEncryptionConfig) + + +def dafny_config_to_smithy_config(dafny_config) -> DynamoDbEncryptionConfig: + """Converts the provided Dafny shape for this localService's config into + the corresponding Smithy-modelled shape.""" + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbEncryptionConfig( + dafny_config + ) + + +def smithy_config_to_dafny_config(smithy_config) -> DafnyDynamoDbEncryptionConfig: + """Converts the provided Smithy-modelled shape for this localService's + config into the corresponding Dafny shape.""" + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbEncryptionConfig( + smithy_config + ) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/dafnyImplInterface.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/dafnyImplInterface.py new file mode 100644 index 000000000..2f25de773 --- /dev/null +++ 
b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/dafnyImplInterface.py @@ -0,0 +1,34 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +from aws_dbesdk_dynamodb.internaldafny.generated.DynamoDbEncryption import ( + DynamoDbEncryptionClient, +) +from .dafny_protocol import DafnyRequest + + +class DafnyImplInterface: + impl: DynamoDbEncryptionClient | None = None + + # operation_map cannot be created at dafnyImplInterface create time, + # as the map's values reference values inside `self.impl`, + # and impl is only populated at runtime. + # Accessing these before impl is populated results in an error. + # At runtime, the map is populated once and cached. + operation_map = None + + def handle_request(self, input: DafnyRequest): + if self.operation_map is None: + self.operation_map = { + "CreateDynamoDbEncryptionBranchKeyIdSupplier": self.impl.CreateDynamoDbEncryptionBranchKeyIdSupplier, + "GetEncryptedDataKeyDescription": self.impl.GetEncryptedDataKeyDescription, + } + + # This logic is where a typical Smithy client would expect the "server" to be. + # This code can be thought of as logic our Dafny "server" uses + # to route incoming client requests to the correct request handler code. + if input.dafny_operation_input is None: + return self.operation_map[input.operation_name]() + else: + return self.operation_map[input.operation_name](input.dafny_operation_input) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/dafny_protocol.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/dafny_protocol.py new file mode 100644 index 000000000..da77ee605 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/dafny_protocol.py @@ -0,0 +1,33 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
+ +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes import ( + CreateDynamoDbEncryptionBranchKeyIdSupplierInput_CreateDynamoDbEncryptionBranchKeyIdSupplierInput as DafnyCreateDynamoDbEncryptionBranchKeyIdSupplierInput, + GetEncryptedDataKeyDescriptionInput_GetEncryptedDataKeyDescriptionInput as DafnyGetEncryptedDataKeyDescriptionInput, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ + + +import smithy_dafny_standard_library.internaldafny.generated.Wrappers as Wrappers +from typing import Union + + +class DafnyRequest: + operation_name: str + + # dafny_operation_input can take on any one of the types + # of the input values passed to the Dafny implementation + dafny_operation_input: Union[ + DafnyCreateDynamoDbEncryptionBranchKeyIdSupplierInput, + DafnyGetEncryptedDataKeyDescriptionInput, + ] + + def __init__(self, operation_name, dafny_operation_input): + self.operation_name = operation_name + self.dafny_operation_input = dafny_operation_input + + +class DafnyResponse(Wrappers.Result): + def __init__(self): + super().__init__(self) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/dafny_to_aws_sdk.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/dafny_to_aws_sdk.py new file mode 100644 index 000000000..9aca0964e --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/dafny_to_aws_sdk.py @@ -0,0 +1,75 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
+ +from aws_cryptography_internal_dynamodb.internaldafny.generated.ComAmazonawsDynamodbTypes import ( + AttributeValue_B, + AttributeValue_BOOL, + AttributeValue_BS, + AttributeValue_L, + AttributeValue_M, + AttributeValue_N, + AttributeValue_NS, + AttributeValue_NULL, + AttributeValue_S, + AttributeValue_SS, +) +import aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk + + +def com_amazonaws_dynamodb_AttributeValue(dafny_input): + # Convert AttributeValue + if isinstance(dafny_input, AttributeValue_S): + AttributeValue_union_value = { + "S": b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.S).decode("utf-16-be") + } + elif isinstance(dafny_input, AttributeValue_N): + AttributeValue_union_value = { + "N": b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.N).decode("utf-16-be") + } + elif isinstance(dafny_input, AttributeValue_B): + AttributeValue_union_value = {"B": bytes(dafny_input.B)} + elif isinstance(dafny_input, AttributeValue_SS): + AttributeValue_union_value = { + "SS": [ + b"".join(ord(c).to_bytes(2, "big") for c in list_element).decode("utf-16-be") + for list_element in dafny_input.SS + ] + } + elif isinstance(dafny_input, AttributeValue_NS): + AttributeValue_union_value = { + "NS": [ + b"".join(ord(c).to_bytes(2, "big") for c in list_element).decode("utf-16-be") + for list_element in dafny_input.NS + ] + } + elif isinstance(dafny_input, AttributeValue_BS): + AttributeValue_union_value = {"BS": [bytes(list_element) for list_element in dafny_input.BS]} + elif isinstance(dafny_input, AttributeValue_M): + AttributeValue_union_value = { + "M": { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.M.items + } + } + elif isinstance(dafny_input, AttributeValue_L): + AttributeValue_union_value = { + "L": [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + list_element + ) + for list_element in dafny_input.L + ] + } + elif isinstance(dafny_input, AttributeValue_NULL): + AttributeValue_union_value = {"NULL": dafny_input.NULL} + elif isinstance(dafny_input, AttributeValue_BOOL): + AttributeValue_union_value = {"BOOL": dafny_input.BOOL} + else: + raise ValueError("No recognized union value in union type: " + str(dafny_input)) + + return AttributeValue_union_value diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/dafny_to_smithy.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/dafny_to_smithy.py new file mode 100644 index 000000000..9542044ab --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/dafny_to_smithy.py @@ -0,0 +1,820 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
+ +import aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy +import aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes import ( + BeaconKeySource_multi, + BeaconKeySource_single, + BeaconStyle_asSet, + BeaconStyle_partOnly, + BeaconStyle_shared, + BeaconStyle_sharedSet, + GetEncryptedDataKeyDescriptionUnion_header, + GetEncryptedDataKeyDescriptionUnion_item, + LegacyPolicy_FORBID__LEGACY__ENCRYPT__ALLOW__LEGACY__DECRYPT, + LegacyPolicy_FORBID__LEGACY__ENCRYPT__FORBID__LEGACY__DECRYPT, + LegacyPolicy_FORCE__LEGACY__ENCRYPT__ALLOW__LEGACY__DECRYPT, + PlaintextOverride_FORBID__PLAINTEXT__WRITE__ALLOW__PLAINTEXT__READ, + PlaintextOverride_FORBID__PLAINTEXT__WRITE__FORBID__PLAINTEXT__READ, + PlaintextOverride_FORCE__PLAINTEXT__WRITE__ALLOW__PLAINTEXT__READ, + VirtualTransform_insert, + VirtualTransform_lower, + VirtualTransform_prefix, + VirtualTransform_segment, + VirtualTransform_segments, + VirtualTransform_substring, + VirtualTransform_suffix, + VirtualTransform_upper, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetBranchKeyIdFromDdbKeyInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetBranchKeyIdFromDdbKeyInput( + ddb_key={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.ddbKey.items + }, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetBranchKeyIdFromDdbKeyOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetBranchKeyIdFromDdbKeyOutput( + branch_key_id=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.branchKeyId).decode("utf-16-be"), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbKeyBranchKeyIdSupplierReference( + dafny_input, +): + if hasattr(dafny_input, "_native_impl"): + return dafny_input._native_impl + + else: + from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.references import ( + DynamoDbKeyBranchKeyIdSupplier, + ) + + return DynamoDbKeyBranchKeyIdSupplier(_impl=dafny_input) + + +def aws_cryptography_dbencryptionsdk_dynamodb_CreateDynamoDbEncryptionBranchKeyIdSupplierInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.CreateDynamoDbEncryptionBranchKeyIdSupplierInput( + ddb_key_branch_key_id_supplier=( + ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbKeyBranchKeyIdSupplierReference( + dafny_input.ddbKeyBranchKeyIdSupplier + ) + ) + if (dafny_input.ddbKeyBranchKeyIdSupplier is not None) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetEncryptedDataKeyDescriptionUnion( + dafny_input, +): + # Convert 
GetEncryptedDataKeyDescriptionUnion + if isinstance(dafny_input, GetEncryptedDataKeyDescriptionUnion_header): + GetEncryptedDataKeyDescriptionUnion_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetEncryptedDataKeyDescriptionUnionHeader( + bytes(dafny_input.header) + ) + elif isinstance(dafny_input, GetEncryptedDataKeyDescriptionUnion_item): + GetEncryptedDataKeyDescriptionUnion_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetEncryptedDataKeyDescriptionUnionItem( + { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.item.items + } + ) + else: + raise ValueError("No recognized union value in union type: " + str(dafny_input)) + + return GetEncryptedDataKeyDescriptionUnion_union_value + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetEncryptedDataKeyDescriptionInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetEncryptedDataKeyDescriptionInput( + input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_GetEncryptedDataKeyDescriptionUnion( + dafny_input.input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_CreateDynamoDbEncryptionBranchKeyIdSupplierOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.CreateDynamoDbEncryptionBranchKeyIdSupplierOutput( + branch_key_id_supplier=( + ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_BranchKeyIdSupplierReference( + dafny_input.branchKeyIdSupplier + ) + ) + if (dafny_input.branchKeyIdSupplier is not None) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_EncryptedDataKeyDescription(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.EncryptedDataKeyDescription( + key_provider_id=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.keyProviderId).decode("utf-16-be"), + key_provider_info=( + (b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.keyProviderInfo.value).decode("utf-16-be")) + if (dafny_input.keyProviderInfo.is_Some) + else None + ), + branch_key_id=( + (b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.branchKeyId.value).decode("utf-16-be")) + if (dafny_input.branchKeyId.is_Some) + else None + ), + branch_key_version=( + (b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.branchKeyVersion.value).decode("utf-16-be")) + if (dafny_input.branchKeyVersion.is_Some) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetEncryptedDataKeyDescriptionOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetEncryptedDataKeyDescriptionOutput( + encrypted_data_key_description_output=[ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_EncryptedDataKeyDescription( + list_element + ) + for list_element in dafny_input.EncryptedDataKeyDescriptionOutput + ], + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_AsSet(dafny_input): + return 
aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.AsSet() + + +def aws_cryptography_dbencryptionsdk_dynamodb_AtomicPrimitivesReference(dafny_input): + from aws_cryptography_primitives.smithygenerated.aws_cryptography_primitives.client import ( + AwsCryptographicPrimitives, + ) + + return AwsCryptographicPrimitives(config=None, dafny_client=dafny_input) + + +def aws_cryptography_dbencryptionsdk_dynamodb_MultiKeyStore(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.MultiKeyStore( + key_field_name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.keyFieldName).decode("utf-16-be"), + cache_ttl=dafny_input.cacheTTL, + cache=( + ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_CacheType( + dafny_input.cache.value + ) + ) + if (dafny_input.cache.is_Some) + else None + ), + partition_id=( + (b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.partitionId.value).decode("utf-16-be")) + if (dafny_input.partitionId.is_Some) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_SingleKeyStore(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.SingleKeyStore( + key_id=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.keyId).decode("utf-16-be"), + cache_ttl=dafny_input.cacheTTL, + cache=( + ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_CacheType( + dafny_input.cache.value + ) + ) + if (dafny_input.cache.is_Some) + else None + ), + partition_id=( + (b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.partitionId.value).decode("utf-16-be")) + if (dafny_input.partitionId.is_Some) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_BeaconKeySource(dafny_input): + # Convert BeaconKeySource + if isinstance(dafny_input, BeaconKeySource_single): + BeaconKeySource_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.BeaconKeySourceSingle( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_SingleKeyStore( + dafny_input.single + ) + ) + elif isinstance(dafny_input, BeaconKeySource_multi): + BeaconKeySource_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.BeaconKeySourceMulti( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_MultiKeyStore( + dafny_input.multi + ) + ) + else: + raise ValueError("No recognized union value in union type: " + str(dafny_input)) + + return BeaconKeySource_union_value + + +def aws_cryptography_dbencryptionsdk_dynamodb_PartOnly(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.PartOnly() + + +def aws_cryptography_dbencryptionsdk_dynamodb_Shared(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.Shared( + other=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.other).decode("utf-16-be"), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_SharedSet(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.SharedSet( + other=b"".join(ord(c).to_bytes(2, "big") 
for c in dafny_input.other).decode("utf-16-be"), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_BeaconStyle(dafny_input): + # Convert BeaconStyle + if isinstance(dafny_input, BeaconStyle_partOnly): + BeaconStyle_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.BeaconStylePartOnly( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_PartOnly( + dafny_input.partOnly + ) + ) + elif isinstance(dafny_input, BeaconStyle_shared): + BeaconStyle_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.BeaconStyleShared( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_Shared( + dafny_input.shared + ) + ) + elif isinstance(dafny_input, BeaconStyle_asSet): + BeaconStyle_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.BeaconStyleAsSet( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_AsSet( + dafny_input.asSet + ) + ) + elif isinstance(dafny_input, BeaconStyle_sharedSet): + BeaconStyle_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.BeaconStyleSharedSet( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_SharedSet( + dafny_input.sharedSet + ) + ) + else: + raise ValueError("No recognized union value in union type: " + str(dafny_input)) + + return BeaconStyle_union_value + + +def aws_cryptography_dbencryptionsdk_dynamodb_ConstructorPart(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.ConstructorPart( + name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.name).decode("utf-16-be"), + required=dafny_input.required, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_Constructor(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.Constructor( + parts=[ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_ConstructorPart( + list_element + ) + for list_element in dafny_input.parts + ], + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_EncryptedPart(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.EncryptedPart( + name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.name).decode("utf-16-be"), + prefix=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.prefix).decode("utf-16-be"), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_SignedPart(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.SignedPart( + name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.name).decode("utf-16-be"), + prefix=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.prefix).decode("utf-16-be"), + loc=( + (b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.loc.value).decode("utf-16-be")) + if (dafny_input.loc.is_Some) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_CompoundBeacon(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.CompoundBeacon( + 
name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.name).decode("utf-16-be"), + split=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.split).decode("utf-16-be"), + encrypted=( + ( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_EncryptedPart( + list_element + ) + for list_element in dafny_input.encrypted.value + ] + ) + if (dafny_input.encrypted.is_Some) + else None + ), + signed=( + ( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_SignedPart( + list_element + ) + for list_element in dafny_input.signed.value + ] + ) + if (dafny_input.signed.is_Some) + else None + ), + constructors=( + ( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_Constructor( + list_element + ) + for list_element in dafny_input.constructors.value + ] + ) + if (dafny_input.constructors.is_Some) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_KeyStoreReference(dafny_input): + from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_keystore.client import ( + KeyStore, + ) + + return KeyStore(config=None, dafny_client=dafny_input) + + +def aws_cryptography_dbencryptionsdk_dynamodb_StandardBeacon(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.StandardBeacon( + name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.name).decode("utf-16-be"), + length=dafny_input.length, + loc=( + (b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.loc.value).decode("utf-16-be")) + if (dafny_input.loc.is_Some) + else None + ), + style=( + ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_BeaconStyle( + dafny_input.style.value + ) + ) + if (dafny_input.style.is_Some) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_Insert(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.Insert( + literal=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.literal).decode("utf-16-be"), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_Lower(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.Lower() + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetPrefix(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetPrefix( + length=dafny_input.length, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetSegment(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetSegment( + split=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.split).decode("utf-16-be"), + index=dafny_input.index, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetSegments(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetSegments( + split=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.split).decode("utf-16-be"), + low=dafny_input.low, + high=dafny_input.high, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetSubstring(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetSubstring( + 
low=dafny_input.low, + high=dafny_input.high, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetSuffix(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetSuffix( + length=dafny_input.length, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_Upper(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.Upper() + + +def aws_cryptography_dbencryptionsdk_dynamodb_VirtualTransform(dafny_input): + # Convert VirtualTransform + if isinstance(dafny_input, VirtualTransform_upper): + VirtualTransform_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualTransformUpper( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_Upper( + dafny_input.upper + ) + ) + elif isinstance(dafny_input, VirtualTransform_lower): + VirtualTransform_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualTransformLower( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_Lower( + dafny_input.lower + ) + ) + elif isinstance(dafny_input, VirtualTransform_insert): + VirtualTransform_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualTransformInsert( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_Insert( + dafny_input.insert + ) + ) + elif isinstance(dafny_input, VirtualTransform_prefix): + VirtualTransform_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualTransformPrefix( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_GetPrefix( + dafny_input.prefix + ) + ) + elif isinstance(dafny_input, VirtualTransform_suffix): + VirtualTransform_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualTransformSuffix( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_GetSuffix( + dafny_input.suffix + ) + ) + elif isinstance(dafny_input, VirtualTransform_substring): + VirtualTransform_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualTransformSubstring( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_GetSubstring( + dafny_input.substring + ) + ) + elif isinstance(dafny_input, VirtualTransform_segment): + VirtualTransform_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualTransformSegment( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_GetSegment( + dafny_input.segment + ) + ) + elif isinstance(dafny_input, VirtualTransform_segments): + VirtualTransform_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualTransformSegments( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_GetSegments( + 
dafny_input.segments + ) + ) + else: + raise ValueError("No recognized union value in union type: " + str(dafny_input)) + + return VirtualTransform_union_value + + +def aws_cryptography_dbencryptionsdk_dynamodb_VirtualPart(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualPart( + loc=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.loc).decode("utf-16-be"), + trans=( + ( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_VirtualTransform( + list_element + ) + for list_element in dafny_input.trans.value + ] + ) + if (dafny_input.trans.is_Some) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_VirtualField(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualField( + name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.name).decode("utf-16-be"), + parts=[ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_VirtualPart( + list_element + ) + for list_element in dafny_input.parts + ], + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_BeaconVersion(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.BeaconVersion( + version=dafny_input.version, + key_store=( + ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_KeyStoreReference( + dafny_input.keyStore + ) + ) + if (dafny_input.keyStore is not None) + else None + ), + key_source=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_BeaconKeySource( + dafny_input.keySource + ), + standard_beacons=[ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_StandardBeacon( + list_element + ) + for list_element in dafny_input.standardBeacons + ], + compound_beacons=( + ( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_CompoundBeacon( + list_element + ) + for list_element in dafny_input.compoundBeacons.value + ] + ) + if (dafny_input.compoundBeacons.is_Some) + else None + ), + virtual_fields=( + ( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_VirtualField( + list_element + ) + for list_element in dafny_input.virtualFields.value + ] + ) + if (dafny_input.virtualFields.is_Some) + else None + ), + encrypted_parts=( + ( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_EncryptedPart( + list_element + ) + for list_element in dafny_input.encryptedParts.value + ] + ) + if (dafny_input.encryptedParts.is_Some) + else None + ), + signed_parts=( + ( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_SignedPart( + list_element + ) + for list_element in dafny_input.signedParts.value + ] + ) + if (dafny_input.signedParts.is_Some) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbEncryptionConfig(dafny_input): + # Deferred import 
of .config to avoid circular dependency + import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.config + + return ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.config.DynamoDbEncryptionConfig() + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_LegacyDynamoDbEncryptorReference( + dafny_input, +): + if hasattr(dafny_input, "_native_impl"): + return dafny_input._native_impl + + else: + from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.references import ( + LegacyDynamoDbEncryptor, + ) + + return LegacyDynamoDbEncryptor(_impl=dafny_input) + + +def aws_cryptography_dbencryptionsdk_dynamodb_LegacyPolicy(dafny_input): + if isinstance(dafny_input, LegacyPolicy_FORCE__LEGACY__ENCRYPT__ALLOW__LEGACY__DECRYPT): + return "FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT" + + elif isinstance(dafny_input, LegacyPolicy_FORBID__LEGACY__ENCRYPT__ALLOW__LEGACY__DECRYPT): + return "FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT" + + elif isinstance(dafny_input, LegacyPolicy_FORBID__LEGACY__ENCRYPT__FORBID__LEGACY__DECRYPT): + return "FORBID_LEGACY_ENCRYPT_FORBID_LEGACY_DECRYPT" + + else: + raise ValueError(f"No recognized enum value in enum type: {dafny_input=}") + + +def aws_cryptography_dbencryptionsdk_dynamodb_LegacyOverride(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.LegacyOverride( + policy=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_LegacyPolicy( + dafny_input.policy + ), + encryptor=( + ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_LegacyDynamoDbEncryptorReference( + dafny_input.encryptor + ) + ) + if (dafny_input.encryptor is not None) + else None + ), + attribute_actions_on_encrypt={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + value + ) + for (key, value) in dafny_input.attributeActionsOnEncrypt.items + }, + default_attribute_flag=( + ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + dafny_input.defaultAttributeFlag.value + ) + ) + if (dafny_input.defaultAttributeFlag.is_Some) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_PlaintextOverride(dafny_input): + if isinstance(dafny_input, PlaintextOverride_FORCE__PLAINTEXT__WRITE__ALLOW__PLAINTEXT__READ): + return "FORCE_PLAINTEXT_WRITE_ALLOW_PLAINTEXT_READ" + + elif isinstance(dafny_input, PlaintextOverride_FORBID__PLAINTEXT__WRITE__ALLOW__PLAINTEXT__READ): + return "FORBID_PLAINTEXT_WRITE_ALLOW_PLAINTEXT_READ" + + elif isinstance(dafny_input, PlaintextOverride_FORBID__PLAINTEXT__WRITE__FORBID__PLAINTEXT__READ): + return "FORBID_PLAINTEXT_WRITE_FORBID_PLAINTEXT_READ" + + else: + raise ValueError(f"No recognized enum value in enum type: {dafny_input=}") + + +def aws_cryptography_dbencryptionsdk_dynamodb_SearchConfig(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.SearchConfig( + versions=[ + 
aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_BeaconVersion( + list_element + ) + for list_element in dafny_input.versions + ], + write_version=dafny_input.writeVersion, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbTableEncryptionConfig( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.DynamoDbTableEncryptionConfig( + logical_table_name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.logicalTableName).decode( + "utf-16-be" + ), + partition_key_name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.partitionKeyName).decode( + "utf-16-be" + ), + sort_key_name=( + (b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.sortKeyName.value).decode("utf-16-be")) + if (dafny_input.sortKeyName.is_Some) + else None + ), + search=( + ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_SearchConfig( + dafny_input.search.value + ) + ) + if (dafny_input.search.is_Some) + else None + ), + attribute_actions_on_encrypt={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + value + ) + for (key, value) in dafny_input.attributeActionsOnEncrypt.items + }, + allowed_unsigned_attributes=( + ( + [ + b"".join(ord(c).to_bytes(2, "big") for c in list_element).decode("utf-16-be") + for list_element in dafny_input.allowedUnsignedAttributes.value + ] + ) + if (dafny_input.allowedUnsignedAttributes.is_Some) + else None + ), + allowed_unsigned_attribute_prefix=( + ( + b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.allowedUnsignedAttributePrefix.value).decode( + "utf-16-be" + ) + ) + if (dafny_input.allowedUnsignedAttributePrefix.is_Some) + else None + ), + algorithm_suite_id=( + ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_DBEAlgorithmSuiteId( + dafny_input.algorithmSuiteId.value + ) + ) + if (dafny_input.algorithmSuiteId.is_Some) + else None + ), + keyring=( + ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_KeyringReference( + dafny_input.keyring.UnwrapOr(None) + ) + ) + if (dafny_input.keyring.UnwrapOr(None) is not None) + else None + ), + cmm=( + ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_CryptographicMaterialsManagerReference( + dafny_input.cmm.UnwrapOr(None) + ) + ) + if (dafny_input.cmm.UnwrapOr(None) is not None) + else None + ), + legacy_override=( + ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_LegacyOverride( + dafny_input.legacyOverride.value + ) + ) + if (dafny_input.legacyOverride.is_Some) + else None + ), + plaintext_override=( + ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_PlaintextOverride( + dafny_input.plaintextOverride.value + ) + ) + if (dafny_input.plaintextOverride.is_Some) + else None + ), + ) + + +def 
aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbTablesEncryptionConfig( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.DynamoDbTablesEncryptionConfig( + table_encryption_configs={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbTableEncryptionConfig( + value + ) + for (key, value) in dafny_input.tableEncryptionConfigs.items + }, + ) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/deserialize.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/deserialize.py new file mode 100644 index 000000000..425850b07 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/deserialize.py @@ -0,0 +1,96 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +import _dafny +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes import ( + CreateDynamoDbEncryptionBranchKeyIdSupplierOutput_CreateDynamoDbEncryptionBranchKeyIdSupplierOutput as DafnyCreateDynamoDbEncryptionBranchKeyIdSupplierOutput, + Error, + Error_DynamoDbEncryptionException, + GetEncryptedDataKeyDescriptionOutput_GetEncryptedDataKeyDescriptionOutput as DafnyGetEncryptedDataKeyDescriptionOutput, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy +from typing import Any + +from .dafny_protocol import DafnyResponse +from .errors import ( + AwsCryptographicMaterialProviders, + AwsCryptographicPrimitives, + CollectionOfErrors, + ComAmazonawsDynamodb, + DynamoDbEncryptionException, + KeyStore, + OpaqueError, + ServiceError, + StructuredEncryption, +) +from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_keystore.deserialize import ( + _deserialize_error as aws_cryptography_keystore_deserialize_error, +) +from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.deserialize import ( + _deserialize_error as aws_cryptography_materialproviders_deserialize_error, +) +from aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.shim import ( + _sdk_error_to_dafny_error as com_amazonaws_dynamodb_sdk_error_to_dafny_error, +) +from aws_cryptography_primitives.smithygenerated.aws_cryptography_primitives.deserialize import ( + _deserialize_error as aws_cryptography_primitives_deserialize_error, +) + +from ..aws_cryptography_dbencryptionsdk_structuredencryption.deserialize import ( + _deserialize_error as aws_cryptography_dbencryptionsdk_structuredencryption_deserialize_error, +) +from .config import Config + + +def _deserialize_create_dynamo_db_encryption_branch_key_id_supplier(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_CreateDynamoDbEncryptionBranchKeyIdSupplierOutput( + input.value + ) + + +def 
_deserialize_get_encrypted_data_key_description(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_GetEncryptedDataKeyDescriptionOutput( + input.value + ) + + +def _deserialize_error(error: Error) -> ServiceError: + if error.is_Opaque: + return OpaqueError(obj=error.obj) + elif error.is_OpaqueWithText: + return OpaqueWithTextError(obj=error.obj, obj_message=error.objMessage) + elif error.is_CollectionOfErrors: + return CollectionOfErrors( + message=_dafny.string_of(error.message), + list=[_deserialize_error(dafny_e) for dafny_e in error.list], + ) + elif error.is_DynamoDbEncryptionException: + return DynamoDbEncryptionException(message=_dafny.string_of(error.message)) + elif error.is_AwsCryptographyDbEncryptionSdkStructuredEncryption: + return StructuredEncryption( + aws_cryptography_dbencryptionsdk_structuredencryption_deserialize_error( + error.AwsCryptographyDbEncryptionSdkStructuredEncryption + ) + ) + elif error.is_AwsCryptographyPrimitives: + return AwsCryptographicPrimitives( + aws_cryptography_primitives_deserialize_error(error.AwsCryptographyPrimitives) + ) + elif error.is_AwsCryptographyMaterialProviders: + return AwsCryptographicMaterialProviders( + aws_cryptography_materialproviders_deserialize_error(error.AwsCryptographyMaterialProviders) + ) + elif error.is_AwsCryptographyKeyStore: + return KeyStore(aws_cryptography_keystore_deserialize_error(error.AwsCryptographyKeyStore)) + elif error.is_ComAmazonawsDynamodb: + return ComAmazonawsDynamodb(message=_dafny.string_of(error.ComAmazonawsDynamodb.message)) + else: + return OpaqueError(obj=error) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/errors.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/errors.py new file mode 100644 index 000000000..a749fd5e5 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/errors.py @@ -0,0 +1,335 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
+ +import _dafny +from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_keystore.errors import ( + _smithy_error_to_dafny_error as aws_cryptography_keystore_smithy_error_to_dafny_error, +) +from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.errors import ( + _smithy_error_to_dafny_error as aws_cryptography_materialproviders_smithy_error_to_dafny_error, +) +from aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.shim import ( + _sdk_error_to_dafny_error as com_amazonaws_dynamodb_sdk_error_to_dafny_error, +) +from aws_cryptography_primitives.smithygenerated.aws_cryptography_primitives.errors import ( + _smithy_error_to_dafny_error as aws_cryptography_primitives_smithy_error_to_dafny_error, +) +import aws_dbesdk_dynamodb.internaldafny.generated +import aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.errors +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.errors import ( + _smithy_error_to_dafny_error as aws_cryptography_dbencryptionsdk_structuredencryption_smithy_error_to_dafny_error, +) +from typing import Any, Dict, Generic, List, Literal, TypeVar + + +class ServiceError(Exception): + """Base error for all errors in the service.""" + + pass + + +T = TypeVar("T") + + +class ApiError(ServiceError, Generic[T]): + """Base error for all api errors in the service.""" + + code: T + + def __init__(self, message: str): + super().__init__(message) + self.message = message + + +class UnknownApiError(ApiError[Literal["Unknown"]]): + """Error representing any unknown api errors.""" + + code: Literal["Unknown"] = "Unknown" + + +class DynamoDbEncryptionException(ApiError[Literal["DynamoDbEncryptionException"]]): + code: Literal["DynamoDbEncryptionException"] = "DynamoDbEncryptionException" + message: str + + def __init__( + self, + *, + message: str, + ): + super().__init__(message) + + def as_dict(self) -> Dict[str, Any]: + """Converts the DynamoDbEncryptionException to a dictionary.""" + return { + "message": self.message, + "code": self.code, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DynamoDbEncryptionException": + """Creates a DynamoDbEncryptionException from a dictionary.""" + kwargs: Dict[str, Any] = { + "message": d["message"], + } + + return DynamoDbEncryptionException(**kwargs) + + def __repr__(self) -> str: + result = "DynamoDbEncryptionException(" + if self.message is not None: + result += f"message={repr(self.message)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DynamoDbEncryptionException): + return False + attributes: list[str] = [ + "message", + "message", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class DynamoDbEncryptionException(ApiError[Literal["DynamoDbEncryptionException"]]): + code: Literal["DynamoDbEncryptionException"] = "DynamoDbEncryptionException" + message: str + + +class AwsCryptographicPrimitives(ApiError[Literal["AwsCryptographicPrimitives"]]): + AwsCryptographicPrimitives: Any + + +class ComAmazonawsDynamodb(ApiError[Literal["ComAmazonawsDynamodb"]]): + ComAmazonawsDynamodb: Any + + +class AwsCryptographicMaterialProviders(ApiError[Literal["AwsCryptographicMaterialProviders"]]): + AwsCryptographicMaterialProviders: Any + + +class StructuredEncryption(ApiError[Literal["StructuredEncryption"]]): + 
StructuredEncryption: Any + + +class KeyStore(ApiError[Literal["KeyStore"]]): + KeyStore: Any + + +class CollectionOfErrors(ApiError[Literal["CollectionOfErrors"]]): + code: Literal["CollectionOfErrors"] = "CollectionOfErrors" + message: str + list: List[ServiceError] + + def __init__(self, *, message: str, list): + super().__init__(message) + self.list = list + + def as_dict(self) -> Dict[str, Any]: + """Converts the CollectionOfErrors to a dictionary. + + The dictionary uses the modeled shape names rather than the + parameter names as keys to be mostly compatible with boto3. + """ + return { + "message": self.message, + "code": self.code, + "list": self.list, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "CollectionOfErrors": + """Creates a CollectionOfErrors from a dictionary. + + The dictionary is expected to use the modeled shape names rather + than the parameter names as keys to be mostly compatible with + boto3. + """ + kwargs: Dict[str, Any] = {"message": d["message"], "list": d["list"]} + + return CollectionOfErrors(**kwargs) + + def __repr__(self) -> str: + result = "CollectionOfErrors(" + result += f"message={self.message}," + if self.message is not None: + result += f"message={repr(self.message)}" + result += f"list={self.list}" + result += ")" + return result + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, CollectionOfErrors): + return False + if not (self.list == other.list): + return False + attributes: list[str] = ["message", "message"] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class OpaqueError(ApiError[Literal["OpaqueError"]]): + code: Literal["OpaqueError"] = "OpaqueError" + obj: Any # As an OpaqueError, type of obj is unknown + + def __init__(self, *, obj): + super().__init__("") + self.obj = obj + + def as_dict(self) -> Dict[str, Any]: + """Converts the OpaqueError to a dictionary. + + The dictionary uses the modeled shape names rather than the + parameter names as keys to be mostly compatible with boto3. + """ + return { + "message": self.message, + "code": self.code, + "obj": self.obj, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "OpaqueError": + """Creates a OpaqueError from a dictionary. + + The dictionary is expected to use the modeled shape names rather + than the parameter names as keys to be mostly compatible with + boto3. + """ + kwargs: Dict[str, Any] = {"message": d["message"], "obj": d["obj"]} + + return OpaqueError(**kwargs) + + def __repr__(self) -> str: + result = "OpaqueError(" + result += f"message={self.message}," + if self.message is not None: + result += f"message={repr(self.message)}" + result += f"obj={self.obj}" + result += ")" + return result + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, OpaqueError): + return False + if not (self.obj == other.obj): + return False + attributes: list[str] = ["message", "message"] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class OpaqueWithTextError(ApiError[Literal["OpaqueWithTextError"]]): + code: Literal["OpaqueWithTextError"] = "OpaqueWithTextError" + obj: Any # As an OpaqueWithTextError, type of obj is unknown + obj_message: str # obj_message is a message representing the details of obj + + def __init__(self, *, obj, obj_message): + super().__init__("") + self.obj = obj + self.obj_message = obj_message + + def as_dict(self) -> Dict[str, Any]: + """Converts the OpaqueWithTextError to a dictionary. 
+ + The dictionary uses the modeled shape names rather than the + parameter names as keys to be mostly compatible with boto3. + """ + return { + "message": self.message, + "code": self.code, + "obj": self.obj, + "obj_message": self.obj_message, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "OpaqueWithTextError": + """Creates a OpaqueWithTextError from a dictionary. + + The dictionary is expected to use the modeled shape names rather + than the parameter names as keys to be mostly compatible with + boto3. + """ + kwargs: Dict[str, Any] = { + "message": d["message"], + "obj": d["obj"], + "obj_message": d["obj_message"], + } + + return OpaqueWithTextError(**kwargs) + + def __repr__(self) -> str: + result = "OpaqueWithTextError(" + result += f"message={self.message}," + if self.message is not None: + result += f"message={repr(self.message)}" + result += f"obj={self.obj}" + result += f"obj_message={self.obj_message}" + result += ")" + return result + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, OpaqueWithTextError): + return False + if not (self.obj == other.obj): + return False + attributes: list[str] = ["message", "message"] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +def _smithy_error_to_dafny_error(e: ServiceError): + """Converts the provided native Smithy-modeled error into the corresponding + Dafny error.""" + if isinstance( + e, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.errors.DynamoDbEncryptionException, + ): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes.Error_DynamoDbEncryptionException( + message=_dafny.Seq(e.message) + ) + + if isinstance(e, AwsCryptographicPrimitives): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes.Error_AwsCryptographyPrimitives( + aws_cryptography_primitives_smithy_error_to_dafny_error(e.message) + ) + + if isinstance(e, ComAmazonawsDynamodb): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes.Error_ComAmazonawsDynamodb( + com_amazonaws_dynamodb_sdk_error_to_dafny_error(e.message) + ) + + if isinstance(e, AwsCryptographicMaterialProviders): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes.Error_AwsCryptographyMaterialProviders( + aws_cryptography_materialproviders_smithy_error_to_dafny_error(e.message) + ) + + if isinstance(e, StructuredEncryption): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes.Error_AwsCryptographyDbEncryptionSdkStructuredEncryption( + aws_cryptography_dbencryptionsdk_structuredencryption_smithy_error_to_dafny_error(e.message) + ) + + if isinstance(e, KeyStore): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes.Error_AwsCryptographyKeyStore( + aws_cryptography_keystore_smithy_error_to_dafny_error(e.message) + ) + + if isinstance(e, CollectionOfErrors): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes.Error_CollectionOfErrors( + message=_dafny.Seq(e.message), + list=_dafny.Seq(_smithy_error_to_dafny_error(native_err) for native_err in e.list), + ) + + if isinstance(e, OpaqueError): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes.Error_Opaque( + obj=e.obj + ) + + if isinstance(e, OpaqueWithTextError): + return 
aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes.Error_OpaqueWithText( + obj=e.obj, objMessage=e.obj_message + ) + + else: + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes.Error_Opaque( + obj=e + ) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/models.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/models.py new file mode 100644 index 000000000..1bf09dab2 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/models.py @@ -0,0 +1,2999 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +import aws_cryptographic_material_providers.smithygenerated.aws_cryptography_keystore.client +import aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.references +from typing import Any, Dict, List, Optional, Union + +from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.models import ( + CacheType, + _cache_type_from_dict, +) + + +class CreateDynamoDbEncryptionBranchKeyIdSupplierInput: + ddb_key_branch_key_id_supplier: "aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.references.DynamoDbKeyBranchKeyIdSupplier" + + def __init__( + self, + *, + ddb_key_branch_key_id_supplier: "aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.references.DynamoDbKeyBranchKeyIdSupplier", + ): + """Inputs for creating a Branch Key Supplier from a DynamoDB Key Branch + Key Id Supplier. + + :param ddb_key_branch_key_id_supplier: An implementation of the + DynamoDbKeyBranchKeyIdSupplier interface, which determines + what Branch Key to use for data key wrapping/unwrapping + based on the DynamoDB item being written/read. 
+ """ + self.ddb_key_branch_key_id_supplier = ddb_key_branch_key_id_supplier + + def as_dict(self) -> Dict[str, Any]: + """Converts the CreateDynamoDbEncryptionBranchKeyIdSupplierInput to a + dictionary.""" + return { + "ddb_key_branch_key_id_supplier": self.ddb_key_branch_key_id_supplier.as_dict(), + } + + @staticmethod + def from_dict( + d: Dict[str, Any], + ) -> "CreateDynamoDbEncryptionBranchKeyIdSupplierInput": + """Creates a CreateDynamoDbEncryptionBranchKeyIdSupplierInput from a + dictionary.""" + from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.references import ( + DynamoDbKeyBranchKeyIdSupplier, + ) + + kwargs: Dict[str, Any] = { + "ddb_key_branch_key_id_supplier": DynamoDbKeyBranchKeyIdSupplier.from_dict( + d["ddb_key_branch_key_id_supplier"] + ), + } + + return CreateDynamoDbEncryptionBranchKeyIdSupplierInput(**kwargs) + + def __repr__(self) -> str: + result = "CreateDynamoDbEncryptionBranchKeyIdSupplierInput(" + if self.ddb_key_branch_key_id_supplier is not None: + result += f"ddb_key_branch_key_id_supplier={repr(self.ddb_key_branch_key_id_supplier)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, CreateDynamoDbEncryptionBranchKeyIdSupplierInput): + return False + attributes: list[str] = [ + "ddb_key_branch_key_id_supplier", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class CreateDynamoDbEncryptionBranchKeyIdSupplierOutput: + branch_key_id_supplier: "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.BranchKeyIdSupplier" + + def __init__( + self, + *, + branch_key_id_supplier: "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.BranchKeyIdSupplier", + ): + """Outputs for creating a Branch Key Supplier from a DynamoDB Key + Branch Key Id Supplier. + + :param branch_key_id_supplier: The Branch Key Supplier for use + with the Hierarchical Keyring. + """ + self.branch_key_id_supplier = branch_key_id_supplier + + def as_dict(self) -> Dict[str, Any]: + """Converts the CreateDynamoDbEncryptionBranchKeyIdSupplierOutput to a + dictionary.""" + return { + "branch_key_id_supplier": self.branch_key_id_supplier.as_dict(), + } + + @staticmethod + def from_dict( + d: Dict[str, Any], + ) -> "CreateDynamoDbEncryptionBranchKeyIdSupplierOutput": + """Creates a CreateDynamoDbEncryptionBranchKeyIdSupplierOutput from a + dictionary.""" + from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references import ( + BranchKeyIdSupplier, + ) + + kwargs: Dict[str, Any] = { + "branch_key_id_supplier": BranchKeyIdSupplier.from_dict(d["branch_key_id_supplier"]), + } + + return CreateDynamoDbEncryptionBranchKeyIdSupplierOutput(**kwargs) + + def __repr__(self) -> str: + result = "CreateDynamoDbEncryptionBranchKeyIdSupplierOutput(" + if self.branch_key_id_supplier is not None: + result += f"branch_key_id_supplier={repr(self.branch_key_id_supplier)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, CreateDynamoDbEncryptionBranchKeyIdSupplierOutput): + return False + attributes: list[str] = [ + "branch_key_id_supplier", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class GetBranchKeyIdFromDdbKeyOutput: + branch_key_id: str + + def __init__( + self, + *, + branch_key_id: str, + ): + """Outputs for getting the Branch Key that should be used for wrapping + and unwrapping data keys. 
+ + :param branch_key_id: The ID of the Branch Key that should be + used to wrap and unwrap data keys for this item. + """ + self.branch_key_id = branch_key_id + + def as_dict(self) -> Dict[str, Any]: + """Converts the GetBranchKeyIdFromDdbKeyOutput to a dictionary.""" + return { + "branch_key_id": self.branch_key_id, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "GetBranchKeyIdFromDdbKeyOutput": + """Creates a GetBranchKeyIdFromDdbKeyOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "branch_key_id": d["branch_key_id"], + } + + return GetBranchKeyIdFromDdbKeyOutput(**kwargs) + + def __repr__(self) -> str: + result = "GetBranchKeyIdFromDdbKeyOutput(" + if self.branch_key_id is not None: + result += f"branch_key_id={repr(self.branch_key_id)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, GetBranchKeyIdFromDdbKeyOutput): + return False + attributes: list[str] = [ + "branch_key_id", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class EncryptedDataKeyDescription: + key_provider_id: str + key_provider_info: Optional[str] + branch_key_id: Optional[str] + branch_key_version: Optional[str] + + def __init__( + self, + *, + key_provider_id: str, + key_provider_info: Optional[str] = None, + branch_key_id: Optional[str] = None, + branch_key_version: Optional[str] = None, + ): + """ + :param key_provider_id: Key provider id of the encrypted data key. + :param key_provider_info: Key provider information of the encrypted data key. + :param branch_key_id: Branch key id of the encrypted data key. + :param branch_key_version: Branch key version of the encrypted data key. + """ + self.key_provider_id = key_provider_id + self.key_provider_info = key_provider_info + self.branch_key_id = branch_key_id + self.branch_key_version = branch_key_version + + def as_dict(self) -> Dict[str, Any]: + """Converts the EncryptedDataKeyDescription to a dictionary.""" + d: Dict[str, Any] = { + "key_provider_id": self.key_provider_id, + } + + if self.key_provider_info is not None: + d["key_provider_info"] = self.key_provider_info + + if self.branch_key_id is not None: + d["branch_key_id"] = self.branch_key_id + + if self.branch_key_version is not None: + d["branch_key_version"] = self.branch_key_version + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "EncryptedDataKeyDescription": + """Creates a EncryptedDataKeyDescription from a dictionary.""" + kwargs: Dict[str, Any] = { + "key_provider_id": d["key_provider_id"], + } + + if "key_provider_info" in d: + kwargs["key_provider_info"] = d["key_provider_info"] + + if "branch_key_id" in d: + kwargs["branch_key_id"] = d["branch_key_id"] + + if "branch_key_version" in d: + kwargs["branch_key_version"] = d["branch_key_version"] + + return EncryptedDataKeyDescription(**kwargs) + + def __repr__(self) -> str: + result = "EncryptedDataKeyDescription(" + if self.key_provider_id is not None: + result += f"key_provider_id={repr(self.key_provider_id)}, " + + if self.key_provider_info is not None: + result += f"key_provider_info={repr(self.key_provider_info)}, " + + if self.branch_key_id is not None: + result += f"branch_key_id={repr(self.branch_key_id)}, " + + if self.branch_key_version is not None: + result += f"branch_key_version={repr(self.branch_key_version)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, EncryptedDataKeyDescription): + return False + attributes: list[str] = [ + "key_provider_id", + "key_provider_info", 
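A small usage sketch for EncryptedDataKeyDescription, assuming the generated models module from this diff is importable; the provider, branch key id, and version strings are placeholders. Optional members left as None are omitted from as_dict:

from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models import (
    EncryptedDataKeyDescription,
)

desc = EncryptedDataKeyDescription(
    key_provider_id="aws-kms-hierarchy",       # placeholder provider id
    branch_key_id="example-branch-key-id",     # placeholder branch key id
    branch_key_version="example-version",      # placeholder version
)

d = desc.as_dict()
assert "key_provider_info" not in d            # unset optional members are omitted
restored = EncryptedDataKeyDescription.from_dict(d)
assert restored.branch_key_id == "example-branch-key-id"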
+ "branch_key_id", + "branch_key_version", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class GetEncryptedDataKeyDescriptionOutput: + encrypted_data_key_description_output: list[EncryptedDataKeyDescription] + + def __init__( + self, + *, + encrypted_data_key_description_output: list[EncryptedDataKeyDescription], + ): + """Output for getting encrypted data key description. + + :param encrypted_data_key_description_output: A list of + encrypted data key description. + """ + self.encrypted_data_key_description_output = encrypted_data_key_description_output + + def as_dict(self) -> Dict[str, Any]: + """Converts the GetEncryptedDataKeyDescriptionOutput to a + dictionary.""" + return { + "encrypted_data_key_description_output": _encrypted_data_key_description_list_as_dict( + self.encrypted_data_key_description_output + ), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "GetEncryptedDataKeyDescriptionOutput": + """Creates a GetEncryptedDataKeyDescriptionOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "encrypted_data_key_description_output": _encrypted_data_key_description_list_from_dict( + d["encrypted_data_key_description_output"] + ), + } + + return GetEncryptedDataKeyDescriptionOutput(**kwargs) + + def __repr__(self) -> str: + result = "GetEncryptedDataKeyDescriptionOutput(" + if self.encrypted_data_key_description_output is not None: + result += f"encrypted_data_key_description_output={repr(self.encrypted_data_key_description_output)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, GetEncryptedDataKeyDescriptionOutput): + return False + attributes: list[str] = [ + "encrypted_data_key_description_output", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class GetBranchKeyIdFromDdbKeyInput: + ddb_key: "dict[str, dict[str, Any]]" + + def __init__( + self, + *, + ddb_key: "dict[str, dict[str, Any]]", + ): + """Inputs for getting the Branch Key that should be used for wrapping + and unwrapping data keys. + + :param ddb_key: The partition and sort (if it exists) attributes + on the item being read or written, along with the values of + any attributes configured as + SIGN_AND_INCLUDE_IN_ENCRYPTION_CONTEXT. 
+ """ + self.ddb_key = ddb_key + + def as_dict(self) -> Dict[str, Any]: + """Converts the GetBranchKeyIdFromDdbKeyInput to a dictionary.""" + return { + "ddb_key": self.ddb_key, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "GetBranchKeyIdFromDdbKeyInput": + """Creates a GetBranchKeyIdFromDdbKeyInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "ddb_key": d["ddb_key"], + } + + return GetBranchKeyIdFromDdbKeyInput(**kwargs) + + def __repr__(self) -> str: + result = "GetBranchKeyIdFromDdbKeyInput(" + if self.ddb_key is not None: + result += f"ddb_key={repr(self.ddb_key)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, GetBranchKeyIdFromDdbKeyInput): + return False + attributes: list[str] = [ + "ddb_key", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class GetEncryptedDataKeyDescriptionUnionHeader: + """A binary header value.""" + + def __init__(self, value: bytes | bytearray): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return {"header": self.value} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "GetEncryptedDataKeyDescriptionUnionHeader": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + return GetEncryptedDataKeyDescriptionUnionHeader(d["header"]) + + def __repr__(self) -> str: + return f"GetEncryptedDataKeyDescriptionUnionHeader(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, GetEncryptedDataKeyDescriptionUnionHeader): + return False + return self.value == other.value + + +class GetEncryptedDataKeyDescriptionUnionItem: + """A DynamoDB item.""" + + def __init__(self, value: "dict[str, dict[str, Any]]"): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return {"item": self.value} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "GetEncryptedDataKeyDescriptionUnionItem": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + return GetEncryptedDataKeyDescriptionUnionItem(d["item"]) + + def __repr__(self) -> str: + return f"GetEncryptedDataKeyDescriptionUnionItem(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, GetEncryptedDataKeyDescriptionUnionItem): + return False + return self.value == other.value + + +class GetEncryptedDataKeyDescriptionUnionUnknown: + """Represents an unknown variant. + + If you receive this value, you will need to update your library to + receive the parsed value. + + This value may not be deliberately sent. 
+ """ + + def __init__(self, tag: str): + self.tag = tag + + def as_dict(self) -> Dict[str, Any]: + return {"SDK_UNKNOWN_MEMBER": {"name": self.tag}} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "GetEncryptedDataKeyDescriptionUnionUnknown": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + return GetEncryptedDataKeyDescriptionUnionUnknown(d["SDK_UNKNOWN_MEMBER"]["name"]) + + def __repr__(self) -> str: + return f"GetEncryptedDataKeyDescriptionUnionUnknown(tag={self.tag})" + + +GetEncryptedDataKeyDescriptionUnion = Union[ + GetEncryptedDataKeyDescriptionUnionHeader, + GetEncryptedDataKeyDescriptionUnionItem, + GetEncryptedDataKeyDescriptionUnionUnknown, +] + + +def _get_encrypted_data_key_description_union_from_dict( + d: Dict[str, Any], +) -> GetEncryptedDataKeyDescriptionUnion: + if "header" in d: + return GetEncryptedDataKeyDescriptionUnionHeader.from_dict(d) + + if "item" in d: + return GetEncryptedDataKeyDescriptionUnionItem.from_dict(d) + + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + +class GetEncryptedDataKeyDescriptionInput: + input: "GetEncryptedDataKeyDescriptionUnion" + + def __init__( + self, + *, + input: "GetEncryptedDataKeyDescriptionUnion", + ): + """Input for getting encrypted data key description.""" + self.input = input + + def as_dict(self) -> Dict[str, Any]: + """Converts the GetEncryptedDataKeyDescriptionInput to a dictionary.""" + return { + "input": self.input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "GetEncryptedDataKeyDescriptionInput": + """Creates a GetEncryptedDataKeyDescriptionInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "input": _get_encrypted_data_key_description_union_from_dict(d["input"]), + } + + return GetEncryptedDataKeyDescriptionInput(**kwargs) + + def __repr__(self) -> str: + result = "GetEncryptedDataKeyDescriptionInput(" + if self.input is not None: + result += f"input={repr(self.input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, GetEncryptedDataKeyDescriptionInput): + return False + attributes: list[str] = [ + "input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class AsSet: + """Attribute must be a Set. + + Beacon value will also be a Set. + """ + + def as_dict(self) -> Dict[str, Any]: + """Converts the AsSet to a dictionary.""" + return {} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "AsSet": + """Creates a AsSet from a dictionary.""" + return AsSet() + + def __repr__(self) -> str: + result = "AsSet(" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + return isinstance(other, AsSet) + + +class MultiKeyStore: + key_field_name: str + cache_ttl: int + cache: Optional[CacheType] + partition_id: Optional[str] + + def __init__( + self, + *, + key_field_name: str, + cache_ttl: int, + cache: Optional[CacheType] = None, + partition_id: Optional[str] = None, + ): + """The configuration for using multiple Beacon Keys. + + :param key_field_name: The name of the field that stores the + Beacon Key. This may be a Virtual Field. + :param cache_ttl: How long (in seconds) the beacon key material + is cached locally before it is re-retrieved from DynamoDB + and re-authed with AWS KMS. + :param cache: Which type of local cache to use. + :param partition_id: Partition ID to distinguish Beacon Key + Sources writing to a Shared cache. 
If the Partition ID is + the same for two Beacon Key Sources, they can share the same + cache entries in the Shared cache. + """ + self.key_field_name = key_field_name + self.cache_ttl = cache_ttl + self.cache = cache + self.partition_id = partition_id + + def as_dict(self) -> Dict[str, Any]: + """Converts the MultiKeyStore to a dictionary.""" + d: Dict[str, Any] = { + "key_field_name": self.key_field_name, + "cache_ttl": self.cache_ttl, + } + + if self.cache is not None: + d["cache"] = self.cache.as_dict() + + if self.partition_id is not None: + d["partition_id"] = self.partition_id + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "MultiKeyStore": + """Creates a MultiKeyStore from a dictionary.""" + kwargs: Dict[str, Any] = { + "key_field_name": d["key_field_name"], + "cache_ttl": d["cache_ttl"], + } + + if "cache" in d: + kwargs["cache"] = (_cache_type_from_dict(d["cache"]),) + + if "partition_id" in d: + kwargs["partition_id"] = d["partition_id"] + + return MultiKeyStore(**kwargs) + + def __repr__(self) -> str: + result = "MultiKeyStore(" + if self.key_field_name is not None: + result += f"key_field_name={repr(self.key_field_name)}, " + + if self.cache_ttl is not None: + result += f"cache_ttl={repr(self.cache_ttl)}, " + + if self.cache is not None: + result += f"cache={repr(self.cache)}, " + + if self.partition_id is not None: + result += f"partition_id={repr(self.partition_id)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, MultiKeyStore): + return False + attributes: list[str] = [ + "key_field_name", + "cache_ttl", + "cache", + "partition_id", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class SingleKeyStore: + key_id: str + cache_ttl: int + cache: Optional[CacheType] + partition_id: Optional[str] + + def __init__( + self, + *, + key_id: str, + cache_ttl: int, + cache: Optional[CacheType] = None, + partition_id: Optional[str] = None, + ): + """The configuration for using a single Beacon Key. + + :param key_id: The Beacon Key ID. + :param cache_ttl: How long (in seconds) the beacon key material is cached + locally before it is re-retrieved from DynamoDB and re-authed with AWS KMS. + :param cache: Which type of local cache to use. Please see the + [spec](https://github.com/aws/aws-database-encryption-sdk-dynamodb/blob/main/specification/searchable-encryption/search-config.md#key-store-cache) + on how to provide a cache for a SingleKeyStore. + :param partition_id: Partition ID to distinguish Beacon Key Sources writing to a + Shared cache. If the Partition ID is the same for two Beacon Key Sources, they + can share the same cache entries in the Shared cache. 
+ """ + self.key_id = key_id + self.cache_ttl = cache_ttl + self.cache = cache + self.partition_id = partition_id + + def as_dict(self) -> Dict[str, Any]: + """Converts the SingleKeyStore to a dictionary.""" + d: Dict[str, Any] = { + "key_id": self.key_id, + "cache_ttl": self.cache_ttl, + } + + if self.cache is not None: + d["cache"] = self.cache.as_dict() + + if self.partition_id is not None: + d["partition_id"] = self.partition_id + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "SingleKeyStore": + """Creates a SingleKeyStore from a dictionary.""" + kwargs: Dict[str, Any] = { + "key_id": d["key_id"], + "cache_ttl": d["cache_ttl"], + } + + if "cache" in d: + kwargs["cache"] = (_cache_type_from_dict(d["cache"]),) + + if "partition_id" in d: + kwargs["partition_id"] = d["partition_id"] + + return SingleKeyStore(**kwargs) + + def __repr__(self) -> str: + result = "SingleKeyStore(" + if self.key_id is not None: + result += f"key_id={repr(self.key_id)}, " + + if self.cache_ttl is not None: + result += f"cache_ttl={repr(self.cache_ttl)}, " + + if self.cache is not None: + result += f"cache={repr(self.cache)}, " + + if self.partition_id is not None: + result += f"partition_id={repr(self.partition_id)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, SingleKeyStore): + return False + attributes: list[str] = [ + "key_id", + "cache_ttl", + "cache", + "partition_id", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class BeaconKeySourceSingle: + """The configuration for using a single Beacon Key.""" + + def __init__(self, value: SingleKeyStore): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return {"single": self.value.as_dict()} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BeaconKeySourceSingle": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + return BeaconKeySourceSingle(SingleKeyStore.from_dict(d["single"])) + + def __repr__(self) -> str: + return f"BeaconKeySourceSingle(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BeaconKeySourceSingle): + return False + return self.value == other.value + + +class BeaconKeySourceMulti: + """The configuration for using multiple Beacon Keys.""" + + def __init__(self, value: MultiKeyStore): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return {"multi": self.value.as_dict()} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BeaconKeySourceMulti": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + return BeaconKeySourceMulti(MultiKeyStore.from_dict(d["multi"])) + + def __repr__(self) -> str: + return f"BeaconKeySourceMulti(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BeaconKeySourceMulti): + return False + return self.value == other.value + + +class BeaconKeySourceUnknown: + """Represents an unknown variant. + + If you receive this value, you will need to update your library to + receive the parsed value. + + This value may not be deliberately sent. 
+ """ + + def __init__(self, tag: str): + self.tag = tag + + def as_dict(self) -> Dict[str, Any]: + return {"SDK_UNKNOWN_MEMBER": {"name": self.tag}} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BeaconKeySourceUnknown": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + return BeaconKeySourceUnknown(d["SDK_UNKNOWN_MEMBER"]["name"]) + + def __repr__(self) -> str: + return f"BeaconKeySourceUnknown(tag={self.tag})" + + +BeaconKeySource = Union[BeaconKeySourceSingle, BeaconKeySourceMulti, BeaconKeySourceUnknown] + + +def _beacon_key_source_from_dict(d: Dict[str, Any]) -> BeaconKeySource: + if "single" in d: + return BeaconKeySourceSingle.from_dict(d) + + if "multi" in d: + return BeaconKeySourceMulti.from_dict(d) + + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + +class PartOnly: + """Attribute must be used as part of a Compound Beacon, never alone.""" + + def as_dict(self) -> Dict[str, Any]: + """Converts the PartOnly to a dictionary.""" + return {} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "PartOnly": + """Creates a PartOnly from a dictionary.""" + return PartOnly() + + def __repr__(self) -> str: + result = "PartOnly(" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + return isinstance(other, PartOnly) + + +class Shared: + other: str + + def __init__( + self, + *, + other: str, + ): + """This beacon should calculate values like another beacon, so they can + be compared. + + :param other: Calculate beacon values as for this beacon. + """ + self.other = other + + def as_dict(self) -> Dict[str, Any]: + """Converts the Shared to a dictionary.""" + return { + "other": self.other, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "Shared": + """Creates a Shared from a dictionary.""" + kwargs: Dict[str, Any] = { + "other": d["other"], + } + + return Shared(**kwargs) + + def __repr__(self) -> str: + result = "Shared(" + if self.other is not None: + result += f"other={repr(self.other)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Shared): + return False + attributes: list[str] = [ + "other", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class SharedSet: + other: str + + def __init__( + self, + *, + other: str, + ): + """Both Shared and AsSet. + + :param other: Calculate beacon values as for this beacon. 
+ """ + self.other = other + + def as_dict(self) -> Dict[str, Any]: + """Converts the SharedSet to a dictionary.""" + return { + "other": self.other, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "SharedSet": + """Creates a SharedSet from a dictionary.""" + kwargs: Dict[str, Any] = { + "other": d["other"], + } + + return SharedSet(**kwargs) + + def __repr__(self) -> str: + result = "SharedSet(" + if self.other is not None: + result += f"other={repr(self.other)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, SharedSet): + return False + attributes: list[str] = [ + "other", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class BeaconStylePartOnly: + """Attribute must be used as part of a Compound Beacon, never alone.""" + + def __init__(self, value: PartOnly): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return {"partOnly": self.value.as_dict()} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BeaconStylePartOnly": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + return BeaconStylePartOnly(PartOnly.from_dict(d["partOnly"])) + + def __repr__(self) -> str: + return f"BeaconStylePartOnly(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BeaconStylePartOnly): + return False + return self.value == other.value + + +class BeaconStyleShared: + """This beacon should calculate values like another beacon, so they can be + compared.""" + + def __init__(self, value: Shared): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return {"shared": self.value.as_dict()} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BeaconStyleShared": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + return BeaconStyleShared(Shared.from_dict(d["shared"])) + + def __repr__(self) -> str: + return f"BeaconStyleShared(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BeaconStyleShared): + return False + return self.value == other.value + + +class BeaconStyleAsSet: + """Attribute must be a Set. + + Beacon value will also be a Set. 
+ """ + + def __init__(self, value: AsSet): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return {"asSet": self.value.as_dict()} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BeaconStyleAsSet": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + return BeaconStyleAsSet(AsSet.from_dict(d["asSet"])) + + def __repr__(self) -> str: + return f"BeaconStyleAsSet(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BeaconStyleAsSet): + return False + return self.value == other.value + + +class BeaconStyleSharedSet: + """Both Shared and AsSet.""" + + def __init__(self, value: SharedSet): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return {"sharedSet": self.value.as_dict()} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BeaconStyleSharedSet": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + return BeaconStyleSharedSet(SharedSet.from_dict(d["sharedSet"])) + + def __repr__(self) -> str: + return f"BeaconStyleSharedSet(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BeaconStyleSharedSet): + return False + return self.value == other.value + + +class BeaconStyleUnknown: + """Represents an unknown variant. + + If you receive this value, you will need to update your library to + receive the parsed value. + + This value may not be deliberately sent. + """ + + def __init__(self, tag: str): + self.tag = tag + + def as_dict(self) -> Dict[str, Any]: + return {"SDK_UNKNOWN_MEMBER": {"name": self.tag}} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BeaconStyleUnknown": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + return BeaconStyleUnknown(d["SDK_UNKNOWN_MEMBER"]["name"]) + + def __repr__(self) -> str: + return f"BeaconStyleUnknown(tag={self.tag})" + + +BeaconStyle = Union[ + BeaconStylePartOnly, + BeaconStyleShared, + BeaconStyleAsSet, + BeaconStyleSharedSet, + BeaconStyleUnknown, +] + + +def _beacon_style_from_dict(d: Dict[str, Any]) -> BeaconStyle: + if "partOnly" in d: + return BeaconStylePartOnly.from_dict(d) + + if "shared" in d: + return BeaconStyleShared.from_dict(d) + + if "asSet" in d: + return BeaconStyleAsSet.from_dict(d) + + if "sharedSet" in d: + return BeaconStyleSharedSet.from_dict(d) + + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + +class ConstructorPart: + name: str + required: bool + + def __init__( + self, + *, + name: str, + required: bool, + ): + """A part of a Compound Becaon Construction. + + :param name: The name of the Encrypted Part or Signed Part for + which this constructor part gets a value. + :param required: Whether this Encrypted Part or Signed Part is + required for this construction to succeed. 
+ """ + self.name = name + self.required = required + + def as_dict(self) -> Dict[str, Any]: + """Converts the ConstructorPart to a dictionary.""" + return { + "name": self.name, + "required": self.required, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ConstructorPart": + """Creates a ConstructorPart from a dictionary.""" + kwargs: Dict[str, Any] = { + "name": d["name"], + "required": d["required"], + } + + return ConstructorPart(**kwargs) + + def __repr__(self) -> str: + result = "ConstructorPart(" + if self.name is not None: + result += f"name={repr(self.name)}, " + + if self.required is not None: + result += f"required={repr(self.required)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ConstructorPart): + return False + attributes: list[str] = [ + "name", + "required", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class Constructor: + parts: list[ConstructorPart] + + def __init__( + self, + *, + parts: list[ConstructorPart], + ): + """The configuration for a particular Compound Beacon construction. + + :param parts: The ordered list of parts for a particular + Compound Beacon construction. If the item contains all + required Parts, a Compound beacon will be written using each + Part that exists on the item, in the order specified. + """ + if (parts is not None) and (len(parts) < 1): + raise ValueError("The size of parts must be greater than or equal to 1") + + self.parts = parts + + def as_dict(self) -> Dict[str, Any]: + """Converts the Constructor to a dictionary.""" + return { + "parts": _constructor_part_list_as_dict(self.parts), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "Constructor": + """Creates a Constructor from a dictionary.""" + kwargs: Dict[str, Any] = { + "parts": _constructor_part_list_from_dict(d["parts"]), + } + + return Constructor(**kwargs) + + def __repr__(self) -> str: + result = "Constructor(" + if self.parts is not None: + result += f"parts={repr(self.parts)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Constructor): + return False + attributes: list[str] = [ + "parts", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class EncryptedPart: + name: str + prefix: str + + def __init__( + self, + *, + name: str, + prefix: str, + ): + """A part of a Compound Beacon that contains a beacon over encrypted + data. + + :param name: The name of the Standard Beacon, whose value this + Part will hold. + :param prefix: The prefix that is written with this Encrypted + Part. 
+ """ + self.name = name + if (prefix is not None) and (len(prefix) < 1): + raise ValueError("The size of prefix must be greater than or equal to 1") + + self.prefix = prefix + + def as_dict(self) -> Dict[str, Any]: + """Converts the EncryptedPart to a dictionary.""" + return { + "name": self.name, + "prefix": self.prefix, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "EncryptedPart": + """Creates a EncryptedPart from a dictionary.""" + kwargs: Dict[str, Any] = { + "name": d["name"], + "prefix": d["prefix"], + } + + return EncryptedPart(**kwargs) + + def __repr__(self) -> str: + result = "EncryptedPart(" + if self.name is not None: + result += f"name={repr(self.name)}, " + + if self.prefix is not None: + result += f"prefix={repr(self.prefix)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, EncryptedPart): + return False + attributes: list[str] = [ + "name", + "prefix", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class SignedPart: + name: str + prefix: str + loc: Optional[str] + + def __init__( + self, + *, + name: str, + prefix: str, + loc: Optional[str] = None, + ): + """A part of a Compound Beacon that contains signed plaintext data. + + :param name: The name for this Signed Part. + :param prefix: The prefix that is written with this Signed Part. + :param loc: The DynamoDB document path to the value for this + Signed Part. If not provided, the 'name' is used for the + location. + """ + self.name = name + if (prefix is not None) and (len(prefix) < 1): + raise ValueError("The size of prefix must be greater than or equal to 1") + + self.prefix = prefix + if (loc is not None) and (len(loc) < 1): + raise ValueError("The size of loc must be greater than or equal to 1") + + self.loc = loc + + def as_dict(self) -> Dict[str, Any]: + """Converts the SignedPart to a dictionary.""" + d: Dict[str, Any] = { + "name": self.name, + "prefix": self.prefix, + } + + if self.loc is not None: + d["loc"] = self.loc + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "SignedPart": + """Creates a SignedPart from a dictionary.""" + kwargs: Dict[str, Any] = { + "name": d["name"], + "prefix": d["prefix"], + } + + if "loc" in d: + kwargs["loc"] = d["loc"] + + return SignedPart(**kwargs) + + def __repr__(self) -> str: + result = "SignedPart(" + if self.name is not None: + result += f"name={repr(self.name)}, " + + if self.prefix is not None: + result += f"prefix={repr(self.prefix)}, " + + if self.loc is not None: + result += f"loc={repr(self.loc)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, SignedPart): + return False + attributes: list[str] = [ + "name", + "prefix", + "loc", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class CompoundBeacon: + name: str + split: str + encrypted: Optional[list[EncryptedPart]] + signed: Optional[list[SignedPart]] + constructors: Optional[list[Constructor]] + + def __init__( + self, + *, + name: str, + split: str, + encrypted: Optional[list[EncryptedPart]] = None, + signed: Optional[list[SignedPart]] = None, + constructors: Optional[list[Constructor]] = None, + ): + """The configuration for a Compound Beacon. + + :param name: The name of the Compound Beacon. + :param split: The characters used to split parts of a compound + beacon. The split character should be a character that does + not appear in any Signed Part or Prefix used by the Compound + Beacon. 
+ :param encrypted: The list of Encrypted Parts that may be + included in the compound beacon. + :param signed: The list of Signed Parts that may be included in + the compound beacon. + :param constructors: The ordered list of constructors that may + be used to create the Compound Beacon. Each constructor is + checked, in order, to see if it can construct the beacon. + The first constructor that can construct the beacon is used. + If no constructor can construct the beacon, the Compound + Beacon is not written to the item. + """ + self.name = name + if (split is not None) and (len(split) < 1): + raise ValueError("The size of split must be greater than or equal to 1") + + if (split is not None) and (len(split) > 1): + raise ValueError("The size of split must be less than or equal to 1") + + self.split = split + if (encrypted is not None) and (len(encrypted) < 1): + raise ValueError("The size of encrypted must be greater than or equal to 1") + + self.encrypted = encrypted + if (signed is not None) and (len(signed) < 1): + raise ValueError("The size of signed must be greater than or equal to 1") + + self.signed = signed + if (constructors is not None) and (len(constructors) < 1): + raise ValueError("The size of constructors must be greater than or equal to 1") + + self.constructors = constructors + + def as_dict(self) -> Dict[str, Any]: + """Converts the CompoundBeacon to a dictionary.""" + d: Dict[str, Any] = { + "name": self.name, + "split": self.split, + } + + if self.encrypted is not None: + d["encrypted"] = (_encrypted_parts_list_as_dict(self.encrypted),) + + if self.signed is not None: + d["signed"] = (_signed_parts_list_as_dict(self.signed),) + + if self.constructors is not None: + d["constructors"] = (_constructor_list_as_dict(self.constructors),) + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "CompoundBeacon": + """Creates a CompoundBeacon from a dictionary.""" + kwargs: Dict[str, Any] = { + "name": d["name"], + "split": d["split"], + } + + if "encrypted" in d: + kwargs["encrypted"] = (_encrypted_parts_list_from_dict(d["encrypted"]),) + + if "signed" in d: + kwargs["signed"] = (_signed_parts_list_from_dict(d["signed"]),) + + if "constructors" in d: + kwargs["constructors"] = (_constructor_list_from_dict(d["constructors"]),) + + return CompoundBeacon(**kwargs) + + def __repr__(self) -> str: + result = "CompoundBeacon(" + if self.name is not None: + result += f"name={repr(self.name)}, " + + if self.split is not None: + result += f"split={repr(self.split)}, " + + if self.encrypted is not None: + result += f"encrypted={repr(self.encrypted)}, " + + if self.signed is not None: + result += f"signed={repr(self.signed)}, " + + if self.constructors is not None: + result += f"constructors={repr(self.constructors)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, CompoundBeacon): + return False + attributes: list[str] = [ + "name", + "split", + "encrypted", + "signed", + "constructors", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class StandardBeacon: + name: str + length: int + loc: Optional[str] + style: Optional[BeaconStyle] + + def __init__( + self, + *, + name: str, + length: int = 0, + loc: Optional[str] = None, + style: Optional[BeaconStyle] = None, + ): + """The configuration for a Standard Beacon. + + :param name: The name for this Standard Beacon. + :param length: The length of the calculated beacon. 
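A sketch of assembling a CompoundBeacon from the part shapes defined above; all names and prefixes are placeholders, and the single split character must not appear in any signed value or prefix:

from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models import (
    CompoundBeacon,
    Constructor,
    ConstructorPart,
    EncryptedPart,
    SignedPart,
)

compound = CompoundBeacon(
    name="last_name_and_created",
    split=".",                                        # exactly one character
    encrypted=[EncryptedPart(name="last_name_beacon", prefix="L-")],
    signed=[SignedPart(name="created", prefix="C-")],
    constructors=[
        Constructor(parts=[
            ConstructorPart(name="created", required=True),
            ConstructorPart(name="last_name_beacon", required=False),
        ])
    ],
)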
+ :param loc: The DynamoDB document path to the value this beacon + will calculate over. If not specified, the beacon will + calculate values for the attribute with the name specified + in 'name'. + :param style: Optional augmented behavior. + """ + self.name = name + if (length is not None) and (length < 1): + raise ValueError("length must be greater than or equal to 1") + + if (length is not None) and (length > 63): + raise ValueError("length must be less than or equal to 63") + + self.length = length + if (loc is not None) and (len(loc) < 1): + raise ValueError("The size of loc must be greater than or equal to 1") + + self.loc = loc + self.style = style + + def as_dict(self) -> Dict[str, Any]: + """Converts the StandardBeacon to a dictionary.""" + d: Dict[str, Any] = { + "name": self.name, + } + + if self.length is not None: + d["length"] = self.length + + if self.loc is not None: + d["loc"] = self.loc + + if self.style is not None: + d["style"] = self.style.as_dict() + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "StandardBeacon": + """Creates a StandardBeacon from a dictionary.""" + kwargs: Dict[str, Any] = { + "name": d["name"], + } + + if "length" in d: + kwargs["length"] = d["length"] + + if "loc" in d: + kwargs["loc"] = d["loc"] + + if "style" in d: + kwargs["style"] = (_beacon_style_from_dict(d["style"]),) + + return StandardBeacon(**kwargs) + + def __repr__(self) -> str: + result = "StandardBeacon(" + if self.name is not None: + result += f"name={repr(self.name)}, " + + if self.length is not None: + result += f"length={repr(self.length)}, " + + if self.loc is not None: + result += f"loc={repr(self.loc)}, " + + if self.style is not None: + result += f"style={repr(self.style)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, StandardBeacon): + return False + attributes: list[str] = [ + "name", + "length", + "loc", + "style", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class Insert: + literal: str + + def __init__( + self, + *, + literal: str, + ): + """The Virtual Part Transformation that appends a literal string. + + :param literal: The literal string to append. 
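A sketch of a StandardBeacon; the name, length, and document path are placeholders (length must fall in the validated range 1-63, so it cannot be left at the declared default of 0):

from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models import (
    StandardBeacon,
)

beacon = StandardBeacon(
    name="last_name_beacon",
    length=10,                       # truncated beacon length, valid range 1-63
    loc="personal.last_name",        # optional document path; defaults to the beacon name
)
assert StandardBeacon.from_dict(beacon.as_dict()) == beacon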
+ """ + self.literal = literal + + def as_dict(self) -> Dict[str, Any]: + """Converts the Insert to a dictionary.""" + return { + "literal": self.literal, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "Insert": + """Creates a Insert from a dictionary.""" + kwargs: Dict[str, Any] = { + "literal": d["literal"], + } + + return Insert(**kwargs) + + def __repr__(self) -> str: + result = "Insert(" + if self.literal is not None: + result += f"literal={repr(self.literal)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Insert): + return False + attributes: list[str] = [ + "literal", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class Lower: + """The Virtual Part Transformation that converts ASCII characters to lower + case.""" + + def as_dict(self) -> Dict[str, Any]: + """Converts the Lower to a dictionary.""" + return {} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "Lower": + """Creates a Lower from a dictionary.""" + return Lower() + + def __repr__(self) -> str: + result = "Lower(" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + return isinstance(other, Lower) + + +class GetPrefix: + length: int + + def __init__( + self, + *, + length: int, + ): + """The Virtual Part Transformation that gets the prefix of a string. + + :param length: If positive, the number of characters to return + from the front. If negative, the absolute number of + characters to exclude from the end. e.g. GetPrefix(-1) + returns all but the last character. + """ + self.length = length + + def as_dict(self) -> Dict[str, Any]: + """Converts the GetPrefix to a dictionary.""" + return { + "length": self.length, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "GetPrefix": + """Creates a GetPrefix from a dictionary.""" + kwargs: Dict[str, Any] = { + "length": d["length"], + } + + return GetPrefix(**kwargs) + + def __repr__(self) -> str: + result = "GetPrefix(" + if self.length is not None: + result += f"length={repr(self.length)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, GetPrefix): + return False + attributes: list[str] = [ + "length", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class GetSegment: + split: str + index: int + + def __init__( + self, + *, + split: str, + index: int, + ): + """The Virtual Part Transformation that splits a string and gets a + particular segment of that split. + + :param split: The characters to split on. + :param index: The index of the split string result to return. 0 + represents the segment before the first split character. -1 + respresents the segment after the last split character. 
+ """ + if (split is not None) and (len(split) < 1): + raise ValueError("The size of split must be greater than or equal to 1") + + if (split is not None) and (len(split) > 1): + raise ValueError("The size of split must be less than or equal to 1") + + self.split = split + self.index = index + + def as_dict(self) -> Dict[str, Any]: + """Converts the GetSegment to a dictionary.""" + return { + "split": self.split, + "index": self.index, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "GetSegment": + """Creates a GetSegment from a dictionary.""" + kwargs: Dict[str, Any] = { + "split": d["split"], + "index": d["index"], + } + + return GetSegment(**kwargs) + + def __repr__(self) -> str: + result = "GetSegment(" + if self.split is not None: + result += f"split={repr(self.split)}, " + + if self.index is not None: + result += f"index={repr(self.index)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, GetSegment): + return False + attributes: list[str] = [ + "split", + "index", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class GetSegments: + split: str + low: int + high: int + + def __init__( + self, + *, + split: str, + low: int, + high: int, + ): + """The Virtual Part Transformation that splits a string and gets a + range of segments of that split. + + :param split: The characters to split on. + :param low: The index to start the segments from, inclusive. + Negative numbers count from the end. -1 is the last segment. + :param high: The index to stop the segments at, exclusive. + Negative numbers count from the end. -1 is the last segment. + """ + if (split is not None) and (len(split) < 1): + raise ValueError("The size of split must be greater than or equal to 1") + + if (split is not None) and (len(split) > 1): + raise ValueError("The size of split must be less than or equal to 1") + + self.split = split + self.low = low + self.high = high + + def as_dict(self) -> Dict[str, Any]: + """Converts the GetSegments to a dictionary.""" + return { + "split": self.split, + "low": self.low, + "high": self.high, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "GetSegments": + """Creates a GetSegments from a dictionary.""" + kwargs: Dict[str, Any] = { + "split": d["split"], + "low": d["low"], + "high": d["high"], + } + + return GetSegments(**kwargs) + + def __repr__(self) -> str: + result = "GetSegments(" + if self.split is not None: + result += f"split={repr(self.split)}, " + + if self.low is not None: + result += f"low={repr(self.low)}, " + + if self.high is not None: + result += f"high={repr(self.high)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, GetSegments): + return False + attributes: list[str] = [ + "split", + "low", + "high", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class GetSubstring: + low: int + high: int + + def __init__( + self, + *, + low: int, + high: int, + ): + """The Virtual Part Transformation that gets a substring from a string. + + :param low: The index to start the substring from, inclusive. + Negative numbers count from the end. -1 is the last + character of a string. + :param high: The index to stop the substring at, exclusive. + Negative numbers count from the end. -1 is the last + character of a string. 
+ """ + self.low = low + self.high = high + + def as_dict(self) -> Dict[str, Any]: + """Converts the GetSubstring to a dictionary.""" + return { + "low": self.low, + "high": self.high, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "GetSubstring": + """Creates a GetSubstring from a dictionary.""" + kwargs: Dict[str, Any] = { + "low": d["low"], + "high": d["high"], + } + + return GetSubstring(**kwargs) + + def __repr__(self) -> str: + result = "GetSubstring(" + if self.low is not None: + result += f"low={repr(self.low)}, " + + if self.high is not None: + result += f"high={repr(self.high)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, GetSubstring): + return False + attributes: list[str] = [ + "low", + "high", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class GetSuffix: + length: int + + def __init__( + self, + *, + length: int, + ): + """The Virtual Part Transformation that gets the suffix of a string. + + :param length: If positive, the number of characters to return + from the end. If negative, the absolute number of characters + to exclude from the front. e.g. GetSuffix(-1) returns all + but the first character. + """ + self.length = length + + def as_dict(self) -> Dict[str, Any]: + """Converts the GetSuffix to a dictionary.""" + return { + "length": self.length, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "GetSuffix": + """Creates a GetSuffix from a dictionary.""" + kwargs: Dict[str, Any] = { + "length": d["length"], + } + + return GetSuffix(**kwargs) + + def __repr__(self) -> str: + result = "GetSuffix(" + if self.length is not None: + result += f"length={repr(self.length)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, GetSuffix): + return False + attributes: list[str] = [ + "length", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class Upper: + """The Virtual Part Transformation that converts ASCII characters to upper + case.""" + + def as_dict(self) -> Dict[str, Any]: + """Converts the Upper to a dictionary.""" + return {} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "Upper": + """Creates a Upper from a dictionary.""" + return Upper() + + def __repr__(self) -> str: + result = "Upper(" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + return isinstance(other, Upper) + + +class VirtualTransformUpper: + """The Virtual Part Transformation that converts ASCII characters to upper + case.""" + + def __init__(self, value: Upper): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return {"upper": self.value.as_dict()} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "VirtualTransformUpper": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + return VirtualTransformUpper(Upper.from_dict(d["upper"])) + + def __repr__(self) -> str: + return f"VirtualTransformUpper(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, VirtualTransformUpper): + return False + return self.value == other.value + + +class VirtualTransformLower: + """The Virtual Part Transformation that converts ASCII characters to lower + case.""" + + def __init__(self, value: Lower): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return {"lower": self.value.as_dict()} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "VirtualTransformLower": + if len(d) != 1: + raise TypeError(f"Unions 
may have exactly 1 value, but found {len(d)}") + + return VirtualTransformLower(Lower.from_dict(d["lower"])) + + def __repr__(self) -> str: + return f"VirtualTransformLower(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, VirtualTransformLower): + return False + return self.value == other.value + + +class VirtualTransformInsert: + """The Virtual Part Transformation that appends a literal string.""" + + def __init__(self, value: Insert): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return {"insert": self.value.as_dict()} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "VirtualTransformInsert": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + return VirtualTransformInsert(Insert.from_dict(d["insert"])) + + def __repr__(self) -> str: + return f"VirtualTransformInsert(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, VirtualTransformInsert): + return False + return self.value == other.value + + +class VirtualTransformPrefix: + """The Virtual Part Transformation that gets the prefix of a string.""" + + def __init__(self, value: GetPrefix): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return {"prefix": self.value.as_dict()} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "VirtualTransformPrefix": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + return VirtualTransformPrefix(GetPrefix.from_dict(d["prefix"])) + + def __repr__(self) -> str: + return f"VirtualTransformPrefix(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, VirtualTransformPrefix): + return False + return self.value == other.value + + +class VirtualTransformSuffix: + """The Virtual Part Transformation that gets the suffix of a string.""" + + def __init__(self, value: GetSuffix): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return {"suffix": self.value.as_dict()} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "VirtualTransformSuffix": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + return VirtualTransformSuffix(GetSuffix.from_dict(d["suffix"])) + + def __repr__(self) -> str: + return f"VirtualTransformSuffix(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, VirtualTransformSuffix): + return False + return self.value == other.value + + +class VirtualTransformSubstring: + """The Virtual Part Transformation that gets a substring from a string.""" + + def __init__(self, value: GetSubstring): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return {"substring": self.value.as_dict()} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "VirtualTransformSubstring": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + return VirtualTransformSubstring(GetSubstring.from_dict(d["substring"])) + + def __repr__(self) -> str: + return f"VirtualTransformSubstring(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, VirtualTransformSubstring): + return False + return self.value == other.value + + +class VirtualTransformSegment: + """The Virtual Part Transformation that splits a string and gets a + particular segment of that split.""" + + def __init__(self, value: GetSegment): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return 
{"segment": self.value.as_dict()} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "VirtualTransformSegment": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + return VirtualTransformSegment(GetSegment.from_dict(d["segment"])) + + def __repr__(self) -> str: + return f"VirtualTransformSegment(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, VirtualTransformSegment): + return False + return self.value == other.value + + +class VirtualTransformSegments: + """The Virtual Part Transformation that splits a string and gets a range of + segments of that split.""" + + def __init__(self, value: GetSegments): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return {"segments": self.value.as_dict()} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "VirtualTransformSegments": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + return VirtualTransformSegments(GetSegments.from_dict(d["segments"])) + + def __repr__(self) -> str: + return f"VirtualTransformSegments(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, VirtualTransformSegments): + return False + return self.value == other.value + + +class VirtualTransformUnknown: + """Represents an unknown variant. + + If you receive this value, you will need to update your library to + receive the parsed value. + + This value may not be deliberately sent. + """ + + def __init__(self, tag: str): + self.tag = tag + + def as_dict(self) -> Dict[str, Any]: + return {"SDK_UNKNOWN_MEMBER": {"name": self.tag}} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "VirtualTransformUnknown": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + return VirtualTransformUnknown(d["SDK_UNKNOWN_MEMBER"]["name"]) + + def __repr__(self) -> str: + return f"VirtualTransformUnknown(tag={self.tag})" + + +VirtualTransform = Union[ + VirtualTransformUpper, + VirtualTransformLower, + VirtualTransformInsert, + VirtualTransformPrefix, + VirtualTransformSuffix, + VirtualTransformSubstring, + VirtualTransformSegment, + VirtualTransformSegments, + VirtualTransformUnknown, +] + + +def _virtual_transform_from_dict(d: Dict[str, Any]) -> VirtualTransform: + if "upper" in d: + return VirtualTransformUpper.from_dict(d) + + if "lower" in d: + return VirtualTransformLower.from_dict(d) + + if "insert" in d: + return VirtualTransformInsert.from_dict(d) + + if "prefix" in d: + return VirtualTransformPrefix.from_dict(d) + + if "suffix" in d: + return VirtualTransformSuffix.from_dict(d) + + if "substring" in d: + return VirtualTransformSubstring.from_dict(d) + + if "segment" in d: + return VirtualTransformSegment.from_dict(d) + + if "segments" in d: + return VirtualTransformSegments.from_dict(d) + + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + +class VirtualPart: + loc: str + trans: Optional[list[VirtualTransform]] + + def __init__( + self, + *, + loc: str, + trans: Optional[list[VirtualTransform]] = None, + ): + """A Virtual Part is the configuration of a transformation on an + existing field in an item. + + :param loc: The DynamoDB document path to the value for this + part. + :param trans: A list of transformations performed on the value + for this part. 
+ """ + if (loc is not None) and (len(loc) < 1): + raise ValueError("The size of loc must be greater than or equal to 1") + + self.loc = loc + if (trans is not None) and (len(trans) < 1): + raise ValueError("The size of trans must be greater than or equal to 1") + + self.trans = trans + + def as_dict(self) -> Dict[str, Any]: + """Converts the VirtualPart to a dictionary.""" + d: Dict[str, Any] = { + "loc": self.loc, + } + + if self.trans is not None: + d["trans"] = (_virtual_transform_list_as_dict(self.trans),) + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "VirtualPart": + """Creates a VirtualPart from a dictionary.""" + kwargs: Dict[str, Any] = { + "loc": d["loc"], + } + + if "trans" in d: + kwargs["trans"] = (_virtual_transform_list_from_dict(d["trans"]),) + + return VirtualPart(**kwargs) + + def __repr__(self) -> str: + result = "VirtualPart(" + if self.loc is not None: + result += f"loc={repr(self.loc)}, " + + if self.trans is not None: + result += f"trans={repr(self.trans)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, VirtualPart): + return False + attributes: list[str] = [ + "loc", + "trans", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class VirtualField: + name: str + parts: list[VirtualPart] + + def __init__( + self, + *, + name: str, + parts: list[VirtualPart], + ): + """The configuration for a Virtual Field. A Virtual Field is a field + constructed from parts of other fields for use with beacons, but never + itself stored on items. + + :param name: The name of the Virtual Field. + :param parts: The list of ordered parts that make up a Virtual + Field. + """ + self.name = name + if (parts is not None) and (len(parts) < 1): + raise ValueError("The size of parts must be greater than or equal to 1") + + self.parts = parts + + def as_dict(self) -> Dict[str, Any]: + """Converts the VirtualField to a dictionary.""" + return { + "name": self.name, + "parts": _virtual_part_list_as_dict(self.parts), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "VirtualField": + """Creates a VirtualField from a dictionary.""" + kwargs: Dict[str, Any] = { + "name": d["name"], + "parts": _virtual_part_list_from_dict(d["parts"]), + } + + return VirtualField(**kwargs) + + def __repr__(self) -> str: + result = "VirtualField(" + if self.name is not None: + result += f"name={repr(self.name)}, " + + if self.parts is not None: + result += f"parts={repr(self.parts)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, VirtualField): + return False + attributes: list[str] = [ + "name", + "parts", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class BeaconVersion: + version: int + key_store: "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_keystore.client.KeyStore" + key_source: BeaconKeySource + standard_beacons: list[StandardBeacon] + compound_beacons: Optional[list[CompoundBeacon]] + virtual_fields: Optional[list[VirtualField]] + encrypted_parts: Optional[list[EncryptedPart]] + signed_parts: Optional[list[SignedPart]] + + def __init__( + self, + *, + key_store: "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_keystore.client.KeyStore", + key_source: BeaconKeySource, + standard_beacons: list[StandardBeacon], + version: int = 0, + compound_beacons: Optional[list[CompoundBeacon]] = None, + virtual_fields: Optional[list[VirtualField]] = None, + encrypted_parts: 
Optional[list[EncryptedPart]] = None, + signed_parts: Optional[list[SignedPart]] = None, + ): + """The configuration for a particular version of searchable encryption. + Currently the only supported version is '1'. + + :param key_store: The Key Store that contains the Beacon Keys to + use with searchable encryption. + :param key_source: The configuration for what beacon key(s) to + use. + :param standard_beacons: The Standard Beacons to be written with + items. + :param version: The version of searchable encryption configured. + This must be '1'. + :param compound_beacons: The Compound Beacons to be written with + items. + :param virtual_fields: The Virtual Fields to be calculated, + supporting other searchable enryption configurations. + :param encrypted_parts: The list of Encrypted Parts that may be + included in any compound beacon. + :param signed_parts: The list of Signed Parts that may be + included in any compound beacon. + """ + self.key_store = key_store + self.key_source = key_source + if (standard_beacons is not None) and (len(standard_beacons) < 1): + raise ValueError("The size of standard_beacons must be greater than or equal to 1") + + self.standard_beacons = standard_beacons + if (version is not None) and (version < 1): + raise ValueError("version must be greater than or equal to 1") + + self.version = version + if (compound_beacons is not None) and (len(compound_beacons) < 1): + raise ValueError("The size of compound_beacons must be greater than or equal to 1") + + self.compound_beacons = compound_beacons + if (virtual_fields is not None) and (len(virtual_fields) < 1): + raise ValueError("The size of virtual_fields must be greater than or equal to 1") + + self.virtual_fields = virtual_fields + if (encrypted_parts is not None) and (len(encrypted_parts) < 1): + raise ValueError("The size of encrypted_parts must be greater than or equal to 1") + + self.encrypted_parts = encrypted_parts + if (signed_parts is not None) and (len(signed_parts) < 1): + raise ValueError("The size of signed_parts must be greater than or equal to 1") + + self.signed_parts = signed_parts + + def as_dict(self) -> Dict[str, Any]: + """Converts the BeaconVersion to a dictionary.""" + d: Dict[str, Any] = { + "key_store": self.key_store.as_dict(), + "key_source": self.key_source.as_dict(), + "standard_beacons": _standard_beacon_list_as_dict(self.standard_beacons), + } + + if self.version is not None: + d["version"] = self.version + + if self.compound_beacons is not None: + d["compound_beacons"] = (_compound_beacon_list_as_dict(self.compound_beacons),) + + if self.virtual_fields is not None: + d["virtual_fields"] = (_virtual_field_list_as_dict(self.virtual_fields),) + + if self.encrypted_parts is not None: + d["encrypted_parts"] = (_encrypted_parts_list_as_dict(self.encrypted_parts),) + + if self.signed_parts is not None: + d["signed_parts"] = (_signed_parts_list_as_dict(self.signed_parts),) + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BeaconVersion": + """Creates a BeaconVersion from a dictionary.""" + from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_keystore.client import ( + KeyStore, + ) + + kwargs: Dict[str, Any] = { + "key_store": KeyStore.from_dict(d["key_store"]), + "key_source": _beacon_key_source_from_dict(d["key_source"]), + "standard_beacons": _standard_beacon_list_from_dict(d["standard_beacons"]), + } + + if "version" in d: + kwargs["version"] = d["version"] + + if "compound_beacons" in d: + kwargs["compound_beacons"] = 
(_compound_beacon_list_from_dict(d["compound_beacons"]),) + + if "virtual_fields" in d: + kwargs["virtual_fields"] = (_virtual_field_list_from_dict(d["virtual_fields"]),) + + if "encrypted_parts" in d: + kwargs["encrypted_parts"] = (_encrypted_parts_list_from_dict(d["encrypted_parts"]),) + + if "signed_parts" in d: + kwargs["signed_parts"] = (_signed_parts_list_from_dict(d["signed_parts"]),) + + return BeaconVersion(**kwargs) + + def __repr__(self) -> str: + result = "BeaconVersion(" + if self.version is not None: + result += f"version={repr(self.version)}, " + + if self.key_store is not None: + result += f"key_store={repr(self.key_store)}, " + + if self.key_source is not None: + result += f"key_source={repr(self.key_source)}, " + + if self.standard_beacons is not None: + result += f"standard_beacons={repr(self.standard_beacons)}, " + + if self.compound_beacons is not None: + result += f"compound_beacons={repr(self.compound_beacons)}, " + + if self.virtual_fields is not None: + result += f"virtual_fields={repr(self.virtual_fields)}, " + + if self.encrypted_parts is not None: + result += f"encrypted_parts={repr(self.encrypted_parts)}, " + + if self.signed_parts is not None: + result += f"signed_parts={repr(self.signed_parts)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BeaconVersion): + return False + attributes: list[str] = [ + "version", + "key_store", + "key_source", + "standard_beacons", + "compound_beacons", + "virtual_fields", + "encrypted_parts", + "signed_parts", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class LegacyPolicy: + FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT = "FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT" + + FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT = "FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT" + + FORBID_LEGACY_ENCRYPT_FORBID_LEGACY_DECRYPT = "FORBID_LEGACY_ENCRYPT_FORBID_LEGACY_DECRYPT" + + # This set contains every possible value known at the time this was generated. New + # values may be added in the future. + values = frozenset( + { + "FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT", + "FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT", + "FORBID_LEGACY_ENCRYPT_FORBID_LEGACY_DECRYPT", + } + ) + + +class LegacyOverride: + policy: str + encryptor: "aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.references.LegacyDynamoDbEncryptor" + attribute_actions_on_encrypt: dict[str, str] + default_attribute_flag: Optional[str] + + def __init__( + self, + *, + policy: str, + encryptor: "aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.references.LegacyDynamoDbEncryptor", + attribute_actions_on_encrypt: dict[str, str], + default_attribute_flag: Optional[str] = None, + ): + """A configuration for overriding encryption and/or decryption to + instead perform legacy encryption and decryption. + + :param policy: A policy which configurates whether legacy + behavior overrides encryption and/or decryption. + :param encryptor: A configuration for the legacy DynamoDB + Encryption Client's Encryptor. + :param attribute_actions_on_encrypt: Overrides which attributes + are encrypted and/or signed for any items read or written + with legacy behavior. + :param default_attribute_flag: This input is not used in the + Java Client and should not be specified. 
+ """ + self.policy = policy + self.encryptor = encryptor + self.attribute_actions_on_encrypt = attribute_actions_on_encrypt + self.default_attribute_flag = default_attribute_flag + + def as_dict(self) -> Dict[str, Any]: + """Converts the LegacyOverride to a dictionary.""" + d: Dict[str, Any] = { + "policy": self.policy, + "encryptor": self.encryptor.as_dict(), + "attribute_actions_on_encrypt": self.attribute_actions_on_encrypt, + } + + if self.default_attribute_flag is not None: + d["default_attribute_flag"] = self.default_attribute_flag + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "LegacyOverride": + """Creates a LegacyOverride from a dictionary.""" + from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.references import ( + LegacyDynamoDbEncryptor, + ) + + kwargs: Dict[str, Any] = { + "policy": d["policy"], + "encryptor": LegacyDynamoDbEncryptor.from_dict(d["encryptor"]), + "attribute_actions_on_encrypt": d["attribute_actions_on_encrypt"], + } + + if "default_attribute_flag" in d: + kwargs["default_attribute_flag"] = d["default_attribute_flag"] + + return LegacyOverride(**kwargs) + + def __repr__(self) -> str: + result = "LegacyOverride(" + if self.policy is not None: + result += f"policy={repr(self.policy)}, " + + if self.encryptor is not None: + result += f"encryptor={repr(self.encryptor)}, " + + if self.attribute_actions_on_encrypt is not None: + result += f"attribute_actions_on_encrypt={repr(self.attribute_actions_on_encrypt)}, " + + if self.default_attribute_flag is not None: + result += f"default_attribute_flag={repr(self.default_attribute_flag)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, LegacyOverride): + return False + attributes: list[str] = [ + "policy", + "encryptor", + "attribute_actions_on_encrypt", + "default_attribute_flag", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class PlaintextOverride: + FORCE_PLAINTEXT_WRITE_ALLOW_PLAINTEXT_READ = "FORCE_PLAINTEXT_WRITE_ALLOW_PLAINTEXT_READ" + + FORBID_PLAINTEXT_WRITE_ALLOW_PLAINTEXT_READ = "FORBID_PLAINTEXT_WRITE_ALLOW_PLAINTEXT_READ" + + FORBID_PLAINTEXT_WRITE_FORBID_PLAINTEXT_READ = "FORBID_PLAINTEXT_WRITE_FORBID_PLAINTEXT_READ" + + # This set contains every possible value known at the time this was generated. New + # values may be added in the future. + values = frozenset( + { + "FORCE_PLAINTEXT_WRITE_ALLOW_PLAINTEXT_READ", + "FORBID_PLAINTEXT_WRITE_ALLOW_PLAINTEXT_READ", + "FORBID_PLAINTEXT_WRITE_FORBID_PLAINTEXT_READ", + } + ) + + +class SearchConfig: + versions: list[BeaconVersion] + write_version: int + + def __init__( + self, + *, + versions: list[BeaconVersion], + write_version: int = 0, + ): + """The configuration for searchable encryption. + + :param versions: The versions of searchable encryption to + support reading. Currently must contain a single + configuration with version '1'. + :param write_version: The searchable encryption version to use + when writing new items. Must be '1'. 
+ """ + if (versions is not None) and (len(versions) < 1): + raise ValueError("The size of versions must be greater than or equal to 1") + + if (versions is not None) and (len(versions) > 1): + raise ValueError("The size of versions must be less than or equal to 1") + + self.versions = versions + if (write_version is not None) and (write_version < 1): + raise ValueError("write_version must be greater than or equal to 1") + + self.write_version = write_version + + def as_dict(self) -> Dict[str, Any]: + """Converts the SearchConfig to a dictionary.""" + d: Dict[str, Any] = { + "versions": _beacon_version_list_as_dict(self.versions), + } + + if self.write_version is not None: + d["write_version"] = self.write_version + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "SearchConfig": + """Creates a SearchConfig from a dictionary.""" + kwargs: Dict[str, Any] = { + "versions": _beacon_version_list_from_dict(d["versions"]), + } + + if "write_version" in d: + kwargs["write_version"] = d["write_version"] + + return SearchConfig(**kwargs) + + def __repr__(self) -> str: + result = "SearchConfig(" + if self.versions is not None: + result += f"versions={repr(self.versions)}, " + + if self.write_version is not None: + result += f"write_version={repr(self.write_version)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, SearchConfig): + return False + attributes: list[str] = [ + "versions", + "write_version", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class DynamoDbTableEncryptionConfig: + logical_table_name: str + partition_key_name: str + sort_key_name: Optional[str] + search: Optional[SearchConfig] + attribute_actions_on_encrypt: dict[str, str] + allowed_unsigned_attributes: Optional[list[str]] + allowed_unsigned_attribute_prefix: Optional[str] + algorithm_suite_id: Optional[str] + keyring: Optional[ + "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.Keyring" + ] + cmm: Optional[ + "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.CryptographicMaterialsManager" + ] + legacy_override: Optional[LegacyOverride] + plaintext_override: Optional[str] + + def __init__( + self, + *, + logical_table_name: str, + partition_key_name: str, + attribute_actions_on_encrypt: dict[str, str], + sort_key_name: Optional[str] = None, + search: Optional[SearchConfig] = None, + allowed_unsigned_attributes: Optional[list[str]] = None, + allowed_unsigned_attribute_prefix: Optional[str] = None, + algorithm_suite_id: Optional[str] = None, + keyring: Optional[ + "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.Keyring" + ] = None, + cmm: Optional[ + "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.CryptographicMaterialsManager" + ] = None, + legacy_override: Optional[LegacyOverride] = None, + plaintext_override: Optional[str] = None, + ): + """The configuration for client-side encryption for a particular + DynamoDB table. + + :param logical_table_name: The logical table name for this + table. This is the name that is cryptographically bound with + your data. This can be the same as the actual DynamoDB table + name. It's purpose is to be distinct from the DynamoDB table + name so that the data may still be authenticated if being + read from different (but logically similar) tables, such as + a backup table. 
+ :param partition_key_name: The name of the partition key on this + table. + :param attribute_actions_on_encrypt: A map that describes what + attributes should be encrypted and/or signed on encrypt. + This map must contain all attributes that might be + encountered during encryption. + :param sort_key_name: If this table contains a sort key, the + name of the sort key on this table. + :param search: The configuration for searchable encryption. + :param allowed_unsigned_attributes: A list of attribute names + such that, if encountered during decryption, those + attributes are treated as unsigned. + :param allowed_unsigned_attribute_prefix: A prefix such that, if + during decryption any attribute has a name with this prefix, + it is treated as unsigned. + :param algorithm_suite_id: An ID for the algorithm suite to use + during encryption and decryption. + :param keyring: The Keyring that should be used to wrap and + unwrap data keys. If specified a Default Cryptographic + Materials Manager with this Keyring is used to obtain + materials for encryption and decryption. Either a Keyring or + a Cryptographic Materials Manager must be specified. + :param cmm: The Cryptographic Materials Manager that is used to + obtain materials for encryption and decryption. Either a + Keyring or a Cryptographic Materials Manager must be + specified. + :param legacy_override: A configuration that override encryption + and/or decryption to instead perform legacy encryption + and/or decryption. Used as part of migration from version + 2.x to version 3.x. + :param plaintext_override: A configuration that override + encryption and/or decryption to instead passthrough and + write and/or read plaintext. Used to update plaintext tables + to fully use client-side encryption. + """ + self.logical_table_name = logical_table_name + if (partition_key_name is not None) and (len(partition_key_name) < 1): + raise ValueError("The size of partition_key_name must be greater than or equal to 1") + + if (partition_key_name is not None) and (len(partition_key_name) > 255): + raise ValueError("The size of partition_key_name must be less than or equal to 255") + + self.partition_key_name = partition_key_name + self.attribute_actions_on_encrypt = attribute_actions_on_encrypt + if (sort_key_name is not None) and (len(sort_key_name) < 1): + raise ValueError("The size of sort_key_name must be greater than or equal to 1") + + if (sort_key_name is not None) and (len(sort_key_name) > 255): + raise ValueError("The size of sort_key_name must be less than or equal to 255") + + self.sort_key_name = sort_key_name + self.search = search + if (allowed_unsigned_attributes is not None) and (len(allowed_unsigned_attributes) < 1): + raise ValueError("The size of allowed_unsigned_attributes must be greater than or equal to 1") + + self.allowed_unsigned_attributes = allowed_unsigned_attributes + self.allowed_unsigned_attribute_prefix = allowed_unsigned_attribute_prefix + self.algorithm_suite_id = algorithm_suite_id + self.keyring = keyring + self.cmm = cmm + self.legacy_override = legacy_override + self.plaintext_override = plaintext_override + + def as_dict(self) -> Dict[str, Any]: + """Converts the DynamoDbTableEncryptionConfig to a dictionary.""" + d: Dict[str, Any] = { + "logical_table_name": self.logical_table_name, + "partition_key_name": self.partition_key_name, + "attribute_actions_on_encrypt": self.attribute_actions_on_encrypt, + } + + if self.sort_key_name is not None: + d["sort_key_name"] = self.sort_key_name + + if self.search is not None: + 
d["search"] = self.search.as_dict() + + if self.allowed_unsigned_attributes is not None: + d["allowed_unsigned_attributes"] = self.allowed_unsigned_attributes + + if self.allowed_unsigned_attribute_prefix is not None: + d["allowed_unsigned_attribute_prefix"] = self.allowed_unsigned_attribute_prefix + + if self.algorithm_suite_id is not None: + d["algorithm_suite_id"] = self.algorithm_suite_id + + if self.keyring is not None: + d["keyring"] = self.keyring.as_dict() + + if self.cmm is not None: + d["cmm"] = self.cmm.as_dict() + + if self.legacy_override is not None: + d["legacy_override"] = self.legacy_override.as_dict() + + if self.plaintext_override is not None: + d["plaintext_override"] = self.plaintext_override + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DynamoDbTableEncryptionConfig": + """Creates a DynamoDbTableEncryptionConfig from a dictionary.""" + from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references import ( + Keyring, + ) + from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references import ( + CryptographicMaterialsManager, + ) + + kwargs: Dict[str, Any] = { + "logical_table_name": d["logical_table_name"], + "partition_key_name": d["partition_key_name"], + "attribute_actions_on_encrypt": d["attribute_actions_on_encrypt"], + } + + if "sort_key_name" in d: + kwargs["sort_key_name"] = d["sort_key_name"] + + if "search" in d: + kwargs["search"] = SearchConfig.from_dict(d["search"]) + + if "allowed_unsigned_attributes" in d: + kwargs["allowed_unsigned_attributes"] = d["allowed_unsigned_attributes"] + + if "allowed_unsigned_attribute_prefix" in d: + kwargs["allowed_unsigned_attribute_prefix"] = d["allowed_unsigned_attribute_prefix"] + + if "algorithm_suite_id" in d: + kwargs["algorithm_suite_id"] = d["algorithm_suite_id"] + + if "keyring" in d: + kwargs["keyring"] = Keyring.from_dict(d["keyring"]) + + if "cmm" in d: + kwargs["cmm"] = CryptographicMaterialsManager.from_dict(d["cmm"]) + + if "legacy_override" in d: + kwargs["legacy_override"] = LegacyOverride.from_dict(d["legacy_override"]) + + if "plaintext_override" in d: + kwargs["plaintext_override"] = d["plaintext_override"] + + return DynamoDbTableEncryptionConfig(**kwargs) + + def __repr__(self) -> str: + result = "DynamoDbTableEncryptionConfig(" + if self.logical_table_name is not None: + result += f"logical_table_name={repr(self.logical_table_name)}, " + + if self.partition_key_name is not None: + result += f"partition_key_name={repr(self.partition_key_name)}, " + + if self.sort_key_name is not None: + result += f"sort_key_name={repr(self.sort_key_name)}, " + + if self.search is not None: + result += f"search={repr(self.search)}, " + + if self.attribute_actions_on_encrypt is not None: + result += f"attribute_actions_on_encrypt={repr(self.attribute_actions_on_encrypt)}, " + + if self.allowed_unsigned_attributes is not None: + result += f"allowed_unsigned_attributes={repr(self.allowed_unsigned_attributes)}, " + + if self.allowed_unsigned_attribute_prefix is not None: + result += f"allowed_unsigned_attribute_prefix={repr(self.allowed_unsigned_attribute_prefix)}, " + + if self.algorithm_suite_id is not None: + result += f"algorithm_suite_id={repr(self.algorithm_suite_id)}, " + + if self.keyring is not None: + result += f"keyring={repr(self.keyring)}, " + + if self.cmm is not None: + result += f"cmm={repr(self.cmm)}, " + + if self.legacy_override is not None: + result += 
f"legacy_override={repr(self.legacy_override)}, " + + if self.plaintext_override is not None: + result += f"plaintext_override={repr(self.plaintext_override)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DynamoDbTableEncryptionConfig): + return False + attributes: list[str] = [ + "logical_table_name", + "partition_key_name", + "sort_key_name", + "search", + "attribute_actions_on_encrypt", + "allowed_unsigned_attributes", + "allowed_unsigned_attribute_prefix", + "algorithm_suite_id", + "keyring", + "cmm", + "legacy_override", + "plaintext_override", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class DynamoDbTablesEncryptionConfig: + table_encryption_configs: dict[str, DynamoDbTableEncryptionConfig] + + def __init__( + self, + *, + table_encryption_configs: dict[str, DynamoDbTableEncryptionConfig], + ): + """The configuration for client-side encryption with multiple DynamoDB + table. + + :param table_encryption_configs: A map of DynamoDB table name to + its configuration for client-side encryption. + """ + self.table_encryption_configs = table_encryption_configs + + def as_dict(self) -> Dict[str, Any]: + """Converts the DynamoDbTablesEncryptionConfig to a dictionary.""" + return { + "table_encryption_configs": _dynamo_db_table_encryption_config_list_as_dict(self.table_encryption_configs), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DynamoDbTablesEncryptionConfig": + """Creates a DynamoDbTablesEncryptionConfig from a dictionary.""" + kwargs: Dict[str, Any] = { + "table_encryption_configs": _dynamo_db_table_encryption_config_list_from_dict( + d["table_encryption_configs"] + ), + } + + return DynamoDbTablesEncryptionConfig(**kwargs) + + def __repr__(self) -> str: + result = "DynamoDbTablesEncryptionConfig(" + if self.table_encryption_configs is not None: + result += f"table_encryption_configs={repr(self.table_encryption_configs)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DynamoDbTablesEncryptionConfig): + return False + attributes: list[str] = [ + "table_encryption_configs", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +def _encrypted_data_key_description_list_as_dict( + given: list[EncryptedDataKeyDescription], +) -> List[Any]: + return [v.as_dict() for v in given] + + +def _encrypted_data_key_description_list_from_dict( + given: List[Any], +) -> list[EncryptedDataKeyDescription]: + return [EncryptedDataKeyDescription.from_dict(v) for v in given] + + +class Unit: + pass diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/plugin.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/plugin.py new file mode 100644 index 000000000..e4c614d01 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/plugin.py @@ -0,0 +1,49 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
+ +from .config import ( + Config, + Plugin, + smithy_config_to_dafny_config, + DynamoDbEncryptionConfig, +) +from smithy_python.interfaces.retries import RetryStrategy +from smithy_python.exceptions import SmithyRetryException +from .dafnyImplInterface import DafnyImplInterface + + +def set_config_impl(config: Config): + """Set the Dafny-compiled implementation in the Smithy-Python client Config + and load our custom NoRetriesStrategy.""" + config.dafnyImplInterface = DafnyImplInterface() + if isinstance(config, DynamoDbEncryptionConfig): + from aws_dbesdk_dynamodb.internaldafny.generated.DynamoDbEncryption import ( + default__, + ) + + config.dafnyImplInterface.impl = default__.DynamoDbEncryption(smithy_config_to_dafny_config(config)).value + config.retry_strategy = NoRetriesStrategy() + + +class ZeroRetryDelayToken: + """Placeholder class required by Smithy-Python client implementation. + + Do not wait to retry. + """ + + retry_delay = 0 + + +class NoRetriesStrategy(RetryStrategy): + """Placeholder class required by Smithy-Python client implementation. + + Do not retry calling Dafny code. + """ + + def acquire_initial_retry_token(self): + return ZeroRetryDelayToken() + + def refresh_retry_token_for_retry(self, token_to_renew, error_info): + # Do not retry + raise SmithyRetryException() diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/references.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/references.py new file mode 100644 index 000000000..66e8de9cb --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/references.py @@ -0,0 +1,142 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
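
A brief illustration of the retry behavior installed by set_config_impl above: the placeholder strategy hands out a token with zero delay and refuses every refresh, so calls into the Dafny implementation are never retried. This is a sketch only; the names come directly from plugin.py.

from smithy_python.exceptions import SmithyRetryException

from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.plugin import (
    NoRetriesStrategy,
)

strategy = NoRetriesStrategy()
token = strategy.acquire_initial_retry_token()
assert token.retry_delay == 0  # no delay before the single attempt

try:
    strategy.refresh_retry_token_for_retry(token, None)
except SmithyRetryException:
    pass  # expected: the strategy never grants a retry
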
+ +import abc +import aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes import ( + GetBranchKeyIdFromDdbKeyInput_GetBranchKeyIdFromDdbKeyInput as DafnyGetBranchKeyIdFromDdbKeyInput, + GetBranchKeyIdFromDdbKeyOutput_GetBranchKeyIdFromDdbKeyOutput as DafnyGetBranchKeyIdFromDdbKeyOutput, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.errors import ( + _smithy_error_to_dafny_error, +) +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny +from smithy_dafny_standard_library.internaldafny.generated import Wrappers +from typing import Any, Dict + + +class ILegacyDynamoDbEncryptor(metaclass=abc.ABCMeta): + + @classmethod + def __subclasshook__(cls, subclass): + return () + + +class LegacyDynamoDbEncryptor(ILegacyDynamoDbEncryptor): + + _impl: ( + aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes.ILegacyDynamoDbEncryptor + ) + + def __init__( + self, + _impl: aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes.ILegacyDynamoDbEncryptor, + ): + self._impl = _impl + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "LegacyDynamoDbEncryptor": + return LegacyDynamoDbEncryptor(d["_impl"]) + + def as_dict(self) -> Dict[str, Any]: + return {"_impl": self._impl} + + +class IDynamoDbKeyBranchKeyIdSupplier(metaclass=abc.ABCMeta): + + @classmethod + def __subclasshook__(cls, subclass): + return hasattr(subclass, "GetBranchKeyIdFromDdbKey") and callable(subclass.GetBranchKeyIdFromDdbKey) + + @abc.abstractmethod + def get_branch_key_id_from_ddb_key( + self, + param: "aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetBranchKeyIdFromDdbKeyInput", + ) -> "aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetBranchKeyIdFromDdbKeyOutput": + """Get the Branch Key that should be used for wrapping and unwrapping + data keys based on the primary key of the item being read or written, + along with the values of any attributes configured as + SIGN_AND_INCLUDE_IN_ENCRYPTION_CONTEXT. + + :param param: Inputs for getting the Branch Key that should be + used for wrapping and unwrapping data keys. + :returns: Outputs for getting the Branch Key that should be used + for wrapping and unwrapping data keys. + """ + raise NotImplementedError + + def GetBranchKeyIdFromDdbKey( + self, dafny_input: "DafnyGetBranchKeyIdFromDdbKeyInput" + ) -> "DafnyGetBranchKeyIdFromDdbKeyOutput": + """Do not use. + + This method allows custom implementations of this interface to + interact with generated code. 
+ """ + native_input = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_GetBranchKeyIdFromDdbKeyInput( + dafny_input + ) + try: + native_output = self.get_branch_key_id_from_ddb_key(native_input) + dafny_output = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_GetBranchKeyIdFromDdbKeyOutput( + native_output + ) + return Wrappers.Result_Success(dafny_output) + except Exception as e: + error = _smithy_error_to_dafny_error(e) + return Wrappers.Result_Failure(error) + + +class DynamoDbKeyBranchKeyIdSupplier(IDynamoDbKeyBranchKeyIdSupplier): + + _impl: ( + aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes.IDynamoDbKeyBranchKeyIdSupplier + ) + + def __init__( + self, + _impl: aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes.IDynamoDbKeyBranchKeyIdSupplier, + ): + self._impl = _impl + + def get_branch_key_id_from_ddb_key( + self, + param: "aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetBranchKeyIdFromDdbKeyInput", + ) -> "aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetBranchKeyIdFromDdbKeyOutput": + """Get the Branch Key that should be used for wrapping and unwrapping + data keys based on the primary key of the item being read or written, + along with the values of any attributes configured as + SIGN_AND_INCLUDE_IN_ENCRYPTION_CONTEXT. + + :param param: Inputs for getting the Branch Key that should be + used for wrapping and unwrapping data keys. + :returns: Outputs for getting the Branch Key that should be used + for wrapping and unwrapping data keys. + """ + dafny_output = self._impl.GetBranchKeyIdFromDdbKey( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_GetBranchKeyIdFromDdbKeyInput( + param + ) + ) + if dafny_output.IsFailure(): + from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.deserialize import ( + _deserialize_error as aws_cryptography_dbencryptionsdk_dynamodb_deserialize_error, + ) + + raise aws_cryptography_dbencryptionsdk_dynamodb_deserialize_error(dafny_output.error) + + else: + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_GetBranchKeyIdFromDdbKeyOutput( + dafny_output.value + ) + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DynamoDbKeyBranchKeyIdSupplier": + return DynamoDbKeyBranchKeyIdSupplier(d["_impl"]) + + def as_dict(self) -> Dict[str, Any]: + return {"_impl": self._impl} diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/serialize.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/serialize.py new file mode 100644 index 000000000..69429dbfa --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/serialize.py @@ -0,0 +1,27 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
+ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny + +from .dafny_protocol import DafnyRequest + +from .config import Config + + +def _serialize_create_dynamo_db_encryption_branch_key_id_supplier(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="CreateDynamoDbEncryptionBranchKeyIdSupplier", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_CreateDynamoDbEncryptionBranchKeyIdSupplierInput( + input + ), + ) + + +def _serialize_get_encrypted_data_key_description(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="GetEncryptedDataKeyDescription", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_GetEncryptedDataKeyDescriptionInput( + input + ), + ) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/smithy_to_dafny.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/smithy_to_dafny.py new file mode 100644 index 000000000..535d2dff4 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/smithy_to_dafny.py @@ -0,0 +1,1155 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +from _dafny import Map, Seq +import aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny +import aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes import ( + AsSet_AsSet as DafnyAsSet, + BeaconKeySource_multi, + BeaconKeySource_single, + BeaconStyle_asSet, + BeaconStyle_partOnly, + BeaconStyle_shared, + BeaconStyle_sharedSet, + BeaconVersion_BeaconVersion as DafnyBeaconVersion, + CompoundBeacon_CompoundBeacon as DafnyCompoundBeacon, + ConstructorPart_ConstructorPart as DafnyConstructorPart, + Constructor_Constructor as DafnyConstructor, + CreateDynamoDbEncryptionBranchKeyIdSupplierInput_CreateDynamoDbEncryptionBranchKeyIdSupplierInput as DafnyCreateDynamoDbEncryptionBranchKeyIdSupplierInput, + CreateDynamoDbEncryptionBranchKeyIdSupplierOutput_CreateDynamoDbEncryptionBranchKeyIdSupplierOutput as DafnyCreateDynamoDbEncryptionBranchKeyIdSupplierOutput, + DynamoDbEncryptionConfig_DynamoDbEncryptionConfig as DafnyDynamoDbEncryptionConfig, + DynamoDbTableEncryptionConfig_DynamoDbTableEncryptionConfig as DafnyDynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig_DynamoDbTablesEncryptionConfig as DafnyDynamoDbTablesEncryptionConfig, + EncryptedDataKeyDescription_EncryptedDataKeyDescription as DafnyEncryptedDataKeyDescription, + EncryptedPart_EncryptedPart as DafnyEncryptedPart, + GetBranchKeyIdFromDdbKeyInput_GetBranchKeyIdFromDdbKeyInput as DafnyGetBranchKeyIdFromDdbKeyInput, + GetBranchKeyIdFromDdbKeyOutput_GetBranchKeyIdFromDdbKeyOutput as DafnyGetBranchKeyIdFromDdbKeyOutput, + GetEncryptedDataKeyDescriptionInput_GetEncryptedDataKeyDescriptionInput as DafnyGetEncryptedDataKeyDescriptionInput, + 
GetEncryptedDataKeyDescriptionOutput_GetEncryptedDataKeyDescriptionOutput as DafnyGetEncryptedDataKeyDescriptionOutput, + GetEncryptedDataKeyDescriptionUnion_header, + GetEncryptedDataKeyDescriptionUnion_item, + GetPrefix_GetPrefix as DafnyGetPrefix, + GetSegment_GetSegment as DafnyGetSegment, + GetSegments_GetSegments as DafnyGetSegments, + GetSubstring_GetSubstring as DafnyGetSubstring, + GetSuffix_GetSuffix as DafnyGetSuffix, + Insert_Insert as DafnyInsert, + LegacyOverride_LegacyOverride as DafnyLegacyOverride, + LegacyPolicy_FORBID__LEGACY__ENCRYPT__ALLOW__LEGACY__DECRYPT, + LegacyPolicy_FORBID__LEGACY__ENCRYPT__FORBID__LEGACY__DECRYPT, + LegacyPolicy_FORCE__LEGACY__ENCRYPT__ALLOW__LEGACY__DECRYPT, + Lower_Lower as DafnyLower, + MultiKeyStore_MultiKeyStore as DafnyMultiKeyStore, + PartOnly_PartOnly as DafnyPartOnly, + PlaintextOverride_FORBID__PLAINTEXT__WRITE__ALLOW__PLAINTEXT__READ, + PlaintextOverride_FORBID__PLAINTEXT__WRITE__FORBID__PLAINTEXT__READ, + PlaintextOverride_FORCE__PLAINTEXT__WRITE__ALLOW__PLAINTEXT__READ, + SearchConfig_SearchConfig as DafnySearchConfig, + SharedSet_SharedSet as DafnySharedSet, + Shared_Shared as DafnyShared, + SignedPart_SignedPart as DafnySignedPart, + SingleKeyStore_SingleKeyStore as DafnySingleKeyStore, + StandardBeacon_StandardBeacon as DafnyStandardBeacon, + Upper_Upper as DafnyUpper, + VirtualField_VirtualField as DafnyVirtualField, + VirtualPart_VirtualPart as DafnyVirtualPart, + VirtualTransform_insert, + VirtualTransform_lower, + VirtualTransform_prefix, + VirtualTransform_segment, + VirtualTransform_segments, + VirtualTransform_substring, + VirtualTransform_suffix, + VirtualTransform_upper, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny +from smithy_dafny_standard_library.internaldafny.generated.Wrappers import ( + Option_None, + Option_Some, +) + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetBranchKeyIdFromDdbKeyInput( + native_input, +): + return DafnyGetBranchKeyIdFromDdbKeyInput( + ddbKey=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input.ddb_key.items() + } + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetBranchKeyIdFromDdbKeyOutput( + native_input, +): + return DafnyGetBranchKeyIdFromDdbKeyOutput( + branchKeyId=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.branch_key_id.encode("utf-16-be"))] * 2) + ] + ) + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_CreateDynamoDbEncryptionBranchKeyIdSupplierInput( + native_input, +): + return DafnyCreateDynamoDbEncryptionBranchKeyIdSupplierInput( + ddbKeyBranchKeyIdSupplier=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbKeyBranchKeyIdSupplierReference( + native_input.ddb_key_branch_key_id_supplier + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbKeyBranchKeyIdSupplierReference( + native_input, +): + if hasattr(native_input, "_impl"): + return 
native_input._impl + + else: + return native_input + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetEncryptedDataKeyDescriptionInput( + native_input, +): + return DafnyGetEncryptedDataKeyDescriptionInput( + input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_GetEncryptedDataKeyDescriptionUnion( + native_input.input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetEncryptedDataKeyDescriptionUnion( + native_input, +): + if isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetEncryptedDataKeyDescriptionUnionHeader, + ): + GetEncryptedDataKeyDescriptionUnion_union_value = GetEncryptedDataKeyDescriptionUnion_header( + Seq(native_input.value) + ) + elif isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetEncryptedDataKeyDescriptionUnionItem, + ): + GetEncryptedDataKeyDescriptionUnion_union_value = GetEncryptedDataKeyDescriptionUnion_item( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input.value.items() + } + ) + ) + else: + raise ValueError("No recognized union value in union type: " + str(native_input)) + + return GetEncryptedDataKeyDescriptionUnion_union_value + + +def aws_cryptography_dbencryptionsdk_dynamodb_CreateDynamoDbEncryptionBranchKeyIdSupplierOutput( + native_input, +): + return DafnyCreateDynamoDbEncryptionBranchKeyIdSupplierOutput( + branchKeyIdSupplier=aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_BranchKeyIdSupplierReference( + native_input.branch_key_id_supplier + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetEncryptedDataKeyDescriptionOutput( + native_input, +): + return DafnyGetEncryptedDataKeyDescriptionOutput( + EncryptedDataKeyDescriptionOutput=Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_EncryptedDataKeyDescription( + list_element + ) + for list_element in native_input.encrypted_data_key_description_output + ] + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_EncryptedDataKeyDescription(native_input): + return DafnyEncryptedDataKeyDescription( + keyProviderId=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.key_provider_id.encode("utf-16-be"))] * 2) + ] + ) + ), + keyProviderInfo=( + ( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.key_provider_info.encode("utf-16-be"))] * 2) + ] + ) + ) + ) + ) + if (native_input.key_provider_info is not None) + else (Option_None()) + ), + branchKeyId=( + ( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.branch_key_id.encode("utf-16-be"))] * 2) + ] + ) + ) + ) + ) + if (native_input.branch_key_id is not None) + else (Option_None()) + ), + branchKeyVersion=( + ( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.branch_key_version.encode("utf-16-be"))] * 2) + ] + ) + ) + ) + ) + if (native_input.branch_key_version 
is not None) + else (Option_None()) + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_AsSet(native_input): + return DafnyAsSet() + + +def aws_cryptography_dbencryptionsdk_dynamodb_AtomicPrimitivesReference(native_input): + return native_input._config.dafnyImplInterface.impl + + +def aws_cryptography_dbencryptionsdk_dynamodb_MultiKeyStore(native_input): + return DafnyMultiKeyStore( + keyFieldName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.key_field_name.encode("utf-16-be"))] * 2) + ] + ) + ), + cacheTTL=native_input.cache_ttl, + cache=( + ( + Option_Some( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_CacheType( + native_input.cache + ) + ) + ) + if (native_input.cache is not None) + else (Option_None()) + ), + partitionId=( + ( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.partition_id.encode("utf-16-be"))] * 2) + ] + ) + ) + ) + ) + if (native_input.partition_id is not None) + else (Option_None()) + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_SingleKeyStore(native_input): + return DafnySingleKeyStore( + keyId=Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(native_input.key_id.encode("utf-16-be"))] * 2)] + ) + ), + cacheTTL=native_input.cache_ttl, + cache=( + ( + Option_Some( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_CacheType( + native_input.cache + ) + ) + ) + if (native_input.cache is not None) + else (Option_None()) + ), + partitionId=( + ( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.partition_id.encode("utf-16-be"))] * 2) + ] + ) + ) + ) + ) + if (native_input.partition_id is not None) + else (Option_None()) + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_BeaconKeySource(native_input): + if isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.BeaconKeySourceSingle, + ): + BeaconKeySource_union_value = BeaconKeySource_single( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_SingleKeyStore( + native_input.value + ) + ) + elif isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.BeaconKeySourceMulti, + ): + BeaconKeySource_union_value = BeaconKeySource_multi( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_MultiKeyStore( + native_input.value + ) + ) + else: + raise ValueError("No recognized union value in union type: " + str(native_input)) + + return BeaconKeySource_union_value + + +def aws_cryptography_dbencryptionsdk_dynamodb_PartOnly(native_input): + return DafnyPartOnly() + + +def aws_cryptography_dbencryptionsdk_dynamodb_Shared(native_input): + return DafnyShared( + other=Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(native_input.other.encode("utf-16-be"))] * 2)] + ) + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_SharedSet(native_input): + return DafnySharedSet( + other=Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(native_input.other.encode("utf-16-be"))] * 2)] + ) + ), + ) + + +def 
aws_cryptography_dbencryptionsdk_dynamodb_BeaconStyle(native_input): + if isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.BeaconStylePartOnly, + ): + BeaconStyle_union_value = BeaconStyle_partOnly( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_PartOnly( + native_input.value + ) + ) + elif isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.BeaconStyleShared, + ): + BeaconStyle_union_value = BeaconStyle_shared( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_Shared( + native_input.value + ) + ) + elif isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.BeaconStyleAsSet, + ): + BeaconStyle_union_value = BeaconStyle_asSet( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_AsSet( + native_input.value + ) + ) + elif isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.BeaconStyleSharedSet, + ): + BeaconStyle_union_value = BeaconStyle_sharedSet( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_SharedSet( + native_input.value + ) + ) + else: + raise ValueError("No recognized union value in union type: " + str(native_input)) + + return BeaconStyle_union_value + + +def aws_cryptography_dbencryptionsdk_dynamodb_ConstructorPart(native_input): + return DafnyConstructorPart( + name=Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(native_input.name.encode("utf-16-be"))] * 2)] + ) + ), + required=native_input.required, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_Constructor(native_input): + return DafnyConstructor( + parts=Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_ConstructorPart( + list_element + ) + for list_element in native_input.parts + ] + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_EncryptedPart(native_input): + return DafnyEncryptedPart( + name=Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(native_input.name.encode("utf-16-be"))] * 2)] + ) + ), + prefix=Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(native_input.prefix.encode("utf-16-be"))] * 2)] + ) + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_SignedPart(native_input): + return DafnySignedPart( + name=Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(native_input.name.encode("utf-16-be"))] * 2)] + ) + ), + prefix=Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(native_input.prefix.encode("utf-16-be"))] * 2)] + ) + ), + loc=( + ( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.loc.encode("utf-16-be"))] * 2) + ] + ) + ) + ) + ) + if (native_input.loc is not None) + else (Option_None()) + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_CompoundBeacon(native_input): + return DafnyCompoundBeacon( + name=Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in 
zip(*[iter(native_input.name.encode("utf-16-be"))] * 2)] + ) + ), + split=Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(native_input.split.encode("utf-16-be"))] * 2)] + ) + ), + encrypted=( + ( + Option_Some( + Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_EncryptedPart( + list_element + ) + for list_element in native_input.encrypted + ] + ) + ) + ) + if (native_input.encrypted is not None) + else (Option_None()) + ), + signed=( + ( + Option_Some( + Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_SignedPart( + list_element + ) + for list_element in native_input.signed + ] + ) + ) + ) + if (native_input.signed is not None) + else (Option_None()) + ), + constructors=( + ( + Option_Some( + Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_Constructor( + list_element + ) + for list_element in native_input.constructors + ] + ) + ) + ) + if (native_input.constructors is not None) + else (Option_None()) + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_KeyStoreReference(native_input): + return native_input._config.dafnyImplInterface.impl + + +def aws_cryptography_dbencryptionsdk_dynamodb_StandardBeacon(native_input): + return DafnyStandardBeacon( + name=Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(native_input.name.encode("utf-16-be"))] * 2)] + ) + ), + length=native_input.length, + loc=( + ( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.loc.encode("utf-16-be"))] * 2) + ] + ) + ) + ) + ) + if (native_input.loc is not None) + else (Option_None()) + ), + style=( + ( + Option_Some( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_BeaconStyle( + native_input.style + ) + ) + ) + if (native_input.style is not None) + else (Option_None()) + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_Insert(native_input): + return DafnyInsert( + literal=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.literal.encode("utf-16-be"))] * 2) + ] + ) + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_Lower(native_input): + return DafnyLower() + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetPrefix(native_input): + return DafnyGetPrefix( + length=native_input.length, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetSegment(native_input): + return DafnyGetSegment( + split=Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(native_input.split.encode("utf-16-be"))] * 2)] + ) + ), + index=native_input.index, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetSegments(native_input): + return DafnyGetSegments( + split=Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(native_input.split.encode("utf-16-be"))] * 2)] + ) + ), + low=native_input.low, + high=native_input.high, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetSubstring(native_input): + return DafnyGetSubstring( + low=native_input.low, + high=native_input.high, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_GetSuffix(native_input): + return DafnyGetSuffix( + length=native_input.length, + ) + + +def 
aws_cryptography_dbencryptionsdk_dynamodb_Upper(native_input): + return DafnyUpper() + + +def aws_cryptography_dbencryptionsdk_dynamodb_VirtualTransform(native_input): + if isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualTransformUpper, + ): + VirtualTransform_union_value = VirtualTransform_upper( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_Upper( + native_input.value + ) + ) + elif isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualTransformLower, + ): + VirtualTransform_union_value = VirtualTransform_lower( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_Lower( + native_input.value + ) + ) + elif isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualTransformInsert, + ): + VirtualTransform_union_value = VirtualTransform_insert( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_Insert( + native_input.value + ) + ) + elif isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualTransformPrefix, + ): + VirtualTransform_union_value = VirtualTransform_prefix( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_GetPrefix( + native_input.value + ) + ) + elif isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualTransformSuffix, + ): + VirtualTransform_union_value = VirtualTransform_suffix( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_GetSuffix( + native_input.value + ) + ) + elif isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualTransformSubstring, + ): + VirtualTransform_union_value = VirtualTransform_substring( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_GetSubstring( + native_input.value + ) + ) + elif isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualTransformSegment, + ): + VirtualTransform_union_value = VirtualTransform_segment( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_GetSegment( + native_input.value + ) + ) + elif isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.VirtualTransformSegments, + ): + VirtualTransform_union_value = VirtualTransform_segments( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_GetSegments( + native_input.value + ) + ) + else: + raise ValueError("No recognized union value in union type: " + str(native_input)) + + return VirtualTransform_union_value + + +def aws_cryptography_dbencryptionsdk_dynamodb_VirtualPart(native_input): + return DafnyVirtualPart( + loc=Seq( + "".join( + [chr(int.from_bytes(pair, 
"big")) for pair in zip(*[iter(native_input.loc.encode("utf-16-be"))] * 2)] + ) + ), + trans=( + ( + Option_Some( + Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_VirtualTransform( + list_element + ) + for list_element in native_input.trans + ] + ) + ) + ) + if (native_input.trans is not None) + else (Option_None()) + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_VirtualField(native_input): + return DafnyVirtualField( + name=Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(native_input.name.encode("utf-16-be"))] * 2)] + ) + ), + parts=Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_VirtualPart( + list_element + ) + for list_element in native_input.parts + ] + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_BeaconVersion(native_input): + return DafnyBeaconVersion( + version=native_input.version, + keyStore=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_KeyStoreReference( + native_input.key_store + ), + keySource=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_BeaconKeySource( + native_input.key_source + ), + standardBeacons=Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_StandardBeacon( + list_element + ) + for list_element in native_input.standard_beacons + ] + ), + compoundBeacons=( + ( + Option_Some( + Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_CompoundBeacon( + list_element + ) + for list_element in native_input.compound_beacons + ] + ) + ) + ) + if (native_input.compound_beacons is not None) + else (Option_None()) + ), + virtualFields=( + ( + Option_Some( + Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_VirtualField( + list_element + ) + for list_element in native_input.virtual_fields + ] + ) + ) + ) + if (native_input.virtual_fields is not None) + else (Option_None()) + ), + encryptedParts=( + ( + Option_Some( + Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_EncryptedPart( + list_element + ) + for list_element in native_input.encrypted_parts + ] + ) + ) + ) + if (native_input.encrypted_parts is not None) + else (Option_None()) + ), + signedParts=( + ( + Option_Some( + Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_SignedPart( + list_element + ) + for list_element in native_input.signed_parts + ] + ) + ) + ) + if (native_input.signed_parts is not None) + else (Option_None()) + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbEncryptionConfig(native_input): + return DafnyDynamoDbEncryptionConfig() + + +def aws_cryptography_dbencryptionsdk_dynamodb_LegacyDynamoDbEncryptorReference( + native_input, +): + if hasattr(native_input, "_impl"): + return native_input._impl + + else: + return native_input + + +def 
aws_cryptography_dbencryptionsdk_dynamodb_LegacyPolicy(native_input): + if native_input == "FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT": + return LegacyPolicy_FORCE__LEGACY__ENCRYPT__ALLOW__LEGACY__DECRYPT() + + elif native_input == "FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT": + return LegacyPolicy_FORBID__LEGACY__ENCRYPT__ALLOW__LEGACY__DECRYPT() + + elif native_input == "FORBID_LEGACY_ENCRYPT_FORBID_LEGACY_DECRYPT": + return LegacyPolicy_FORBID__LEGACY__ENCRYPT__FORBID__LEGACY__DECRYPT() + + else: + raise ValueError(f"No recognized enum value in enum type: {native_input=}") + + +def aws_cryptography_dbencryptionsdk_dynamodb_LegacyOverride(native_input): + return DafnyLegacyOverride( + policy=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_LegacyPolicy( + native_input.policy + ), + encryptor=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_LegacyDynamoDbEncryptorReference( + native_input.encryptor + ), + attributeActionsOnEncrypt=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + value + ) + for (key, value) in native_input.attribute_actions_on_encrypt.items() + } + ), + defaultAttributeFlag=( + ( + Option_Some( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + native_input.default_attribute_flag + ) + ) + ) + if (native_input.default_attribute_flag is not None) + else (Option_None()) + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_PlaintextOverride(native_input): + if native_input == "FORCE_PLAINTEXT_WRITE_ALLOW_PLAINTEXT_READ": + return PlaintextOverride_FORCE__PLAINTEXT__WRITE__ALLOW__PLAINTEXT__READ() + + elif native_input == "FORBID_PLAINTEXT_WRITE_ALLOW_PLAINTEXT_READ": + return PlaintextOverride_FORBID__PLAINTEXT__WRITE__ALLOW__PLAINTEXT__READ() + + elif native_input == "FORBID_PLAINTEXT_WRITE_FORBID_PLAINTEXT_READ": + return PlaintextOverride_FORBID__PLAINTEXT__WRITE__FORBID__PLAINTEXT__READ() + + else: + raise ValueError(f"No recognized enum value in enum type: {native_input=}") + + +def aws_cryptography_dbencryptionsdk_dynamodb_SearchConfig(native_input): + return DafnySearchConfig( + versions=Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_BeaconVersion( + list_element + ) + for list_element in native_input.versions + ] + ), + writeVersion=native_input.write_version, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbTableEncryptionConfig( + native_input, +): + return DafnyDynamoDbTableEncryptionConfig( + logicalTableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.logical_table_name.encode("utf-16-be"))] * 2) + ] + ) + ), + partitionKeyName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.partition_key_name.encode("utf-16-be"))] * 2) + ] + ) + ), + sortKeyName=( + ( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.sort_key_name.encode("utf-16-be"))] * 2) + ] + ) + ) 
+ ) + ) + if (native_input.sort_key_name is not None) + else (Option_None()) + ), + search=( + ( + Option_Some( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_SearchConfig( + native_input.search + ) + ) + ) + if (native_input.search is not None) + else (Option_None()) + ), + attributeActionsOnEncrypt=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + value + ) + for (key, value) in native_input.attribute_actions_on_encrypt.items() + } + ), + allowedUnsignedAttributes=( + ( + Option_Some( + Seq( + [ + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(list_element.encode("utf-16-be"))] * 2) + ] + ) + ) + for list_element in native_input.allowed_unsigned_attributes + ] + ) + ) + ) + if (native_input.allowed_unsigned_attributes is not None) + else (Option_None()) + ), + allowedUnsignedAttributePrefix=( + ( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip( + *[iter(native_input.allowed_unsigned_attribute_prefix.encode("utf-16-be"))] * 2 + ) + ] + ) + ) + ) + ) + if (native_input.allowed_unsigned_attribute_prefix is not None) + else (Option_None()) + ), + algorithmSuiteId=( + ( + Option_Some( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_DBEAlgorithmSuiteId( + native_input.algorithm_suite_id + ) + ) + ) + if (native_input.algorithm_suite_id is not None) + else (Option_None()) + ), + keyring=( + ( + Option_Some( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_KeyringReference( + native_input.keyring + ) + ) + ) + if ( + (native_input.keyring is not None) + and ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_KeyringReference( + native_input.keyring + ) + is not None + ) + ) + else (Option_None()) + ), + cmm=( + ( + Option_Some( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_CryptographicMaterialsManagerReference( + native_input.cmm + ) + ) + ) + if ( + (native_input.cmm is not None) + and ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_CryptographicMaterialsManagerReference( + native_input.cmm + ) + is not None + ) + ) + else (Option_None()) + ), + legacyOverride=( + ( + Option_Some( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_LegacyOverride( + native_input.legacy_override + ) + ) + ) + if (native_input.legacy_override is not None) + else (Option_None()) + ), + plaintextOverride=( + ( + Option_Some( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_PlaintextOverride( + native_input.plaintext_override + ) + ) + ) + if (native_input.plaintext_override is not None) + else (Option_None()) + ), + ) + + +def 
aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbTablesEncryptionConfig( + native_input, +): + return DafnyDynamoDbTablesEncryptionConfig( + tableEncryptionConfigs=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbTableEncryptionConfig( + value + ) + for (key, value) in native_input.table_encryption_configs.items() + } + ), + ) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/__init__.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/__init__.py new file mode 100644 index 000000000..09be6133b --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/aws_sdk_to_dafny.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/aws_sdk_to_dafny.py new file mode 100644 index 000000000..c58eacb37 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/aws_sdk_to_dafny.py @@ -0,0 +1,113 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
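The converters above repeatedly turn a native Python str into the UTF-16 code-unit string that the Dafny runtime's Seq wraps, and the aws_sdk_to_dafny / dafny_to_smithy modules below reverse that mapping. A minimal standalone sketch of the idiom (plain Python, not generated code; the Seq/Option_Some/Option_None wrappers are omitted):

def _to_dafny_utf16(s: str) -> str:
    # Encode as UTF-16-BE, then fold each big-endian byte pair back into one code unit,
    # mirroring the generated expression:
    #   "".join(chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(s.encode("utf-16-be"))] * 2))
    return "".join(chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(s.encode("utf-16-be"))] * 2))

def _from_dafny_utf16(code_units: str) -> str:
    # Reverse direction, as used in dafny_to_smithy / dafny_to_aws_sdk:
    # expand each code unit to two bytes and decode as UTF-16-BE.
    return b"".join(ord(c).to_bytes(2, "big") for c in code_units).decode("utf-16-be")

assert _from_dafny_utf16(_to_dafny_utf16("café")) == "café"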
+ +from _dafny import Map, Seq +from aws_cryptography_internal_dynamodb.internaldafny.generated.ComAmazonawsDynamodbTypes import ( + AttributeValue_B, + AttributeValue_BOOL, + AttributeValue_BS, + AttributeValue_L, + AttributeValue_M, + AttributeValue_N, + AttributeValue_NS, + AttributeValue_NULL, + AttributeValue_S, + AttributeValue_SS, +) +import aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny + + +def com_amazonaws_dynamodb_AttributeValue(native_input): + if "S" in native_input.keys(): + AttributeValue_union_value = AttributeValue_S( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["S"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + elif "N" in native_input.keys(): + AttributeValue_union_value = AttributeValue_N( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["N"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + elif "B" in native_input.keys(): + AttributeValue_union_value = AttributeValue_B(Seq(native_input["B"])) + elif "SS" in native_input.keys(): + AttributeValue_union_value = AttributeValue_SS( + Seq( + [ + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(list_element.encode("utf-16-be"))] * 2) + ] + ) + ) + for list_element in native_input["SS"] + ] + ) + ) + elif "NS" in native_input.keys(): + AttributeValue_union_value = AttributeValue_NS( + Seq( + [ + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(list_element.encode("utf-16-be"))] * 2) + ] + ) + ) + for list_element in native_input["NS"] + ] + ) + ) + elif "BS" in native_input.keys(): + AttributeValue_union_value = AttributeValue_BS(Seq([Seq(list_element) for list_element in native_input["BS"]])) + elif "M" in native_input.keys(): + AttributeValue_union_value = AttributeValue_M( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["M"].items() + } + ) + ) + elif "L" in native_input.keys(): + AttributeValue_union_value = AttributeValue_L( + Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + list_element + ) + for list_element in native_input["L"] + ] + ) + ) + elif "NULL" in native_input.keys(): + AttributeValue_union_value = AttributeValue_NULL(native_input["NULL"]) + elif "BOOL" in native_input.keys(): + AttributeValue_union_value = AttributeValue_BOOL(native_input["BOOL"]) + else: + raise ValueError("No recognized union value in union type: " + str(native_input)) + + return AttributeValue_union_value diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/client.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/client.py new file mode 100644 index 000000000..80b139bad --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/client.py @@ -0,0 +1,335 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. 
This file is machine generated, and any changes to it will be overwritten. + +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes import ( + IDynamoDbItemEncryptorClient, +) +from typing import Callable, TypeVar, cast + +from .config import Config, DynamoDbItemEncryptorConfig +from .dafny_protocol import DafnyRequest, DafnyResponse +from .plugin import set_config_impl +from smithy_python.exceptions import SmithyRetryException +from smithy_python.interfaces.interceptor import Interceptor, InterceptorContext +from smithy_python.interfaces.retries import RetryErrorInfo, RetryErrorType + +from .config import Plugin +from .deserialize import _deserialize_decrypt_item, _deserialize_encrypt_item +from .errors import ServiceError +from .models import ( + DecryptItemInput, + DecryptItemOutput, + EncryptItemInput, + EncryptItemOutput, +) +from .serialize import _serialize_decrypt_item, _serialize_encrypt_item + + +Input = TypeVar("Input") +Output = TypeVar("Output") + + +class DynamoDbItemEncryptor: + """Client for DynamoDbItemEncryptor. + + :param config: Configuration for the client. + """ + + def __init__( + self, + config: DynamoDbItemEncryptorConfig | None = None, + dafny_client: IDynamoDbItemEncryptorClient | None = None, + ): + if config is None: + self._config = Config() + else: + self._config = config + + client_plugins: list[Plugin] = [ + set_config_impl, + ] + + for plugin in client_plugins: + plugin(self._config) + + if dafny_client is not None: + self._config.dafnyImplInterface.impl = dafny_client + + def encrypt_item(self, input: EncryptItemInput) -> EncryptItemOutput: + """Encrypt a DynamoDB Item. + + :param input: Inputs for encrypting a DynamoDB Item. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_encrypt_item, + deserialize=_deserialize_encrypt_item, + config=self._config, + operation_name="EncryptItem", + ) + + def decrypt_item(self, input: DecryptItemInput) -> DecryptItemOutput: + """Decrypt a DynamoDB Item. + + :param input: Inputs for decrypting a DynamoDB Item. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_decrypt_item, + deserialize=_deserialize_decrypt_item, + config=self._config, + operation_name="DecryptItem", + ) + + def _execute_operation( + self, + input: Input, + plugins: list[Plugin], + serialize: Callable[[Input, Config], DafnyRequest], + deserialize: Callable[[DafnyResponse, Config], Output], + config: Config, + operation_name: str, + ) -> Output: + try: + return self._handle_execution(input, plugins, serialize, deserialize, config, operation_name) + except Exception as e: + # Make sure every exception that we throw is an instance of ServiceError so + # customers can reliably catch everything we throw. 
+ if not isinstance(e, ServiceError): + raise ServiceError(e) from e + raise e + + def _handle_execution( + self, + input: Input, + plugins: list[Plugin], + serialize: Callable[[Input, Config], DafnyRequest], + deserialize: Callable[[DafnyResponse, Config], Output], + config: Config, + operation_name: str, + ) -> Output: + context: InterceptorContext[Input, None, None, None] = InterceptorContext( + request=input, + response=None, + transport_request=None, + transport_response=None, + ) + try: + _client_interceptors = config.interceptors + except AttributeError: + config.interceptors = [] + _client_interceptors = config.interceptors + client_interceptors = cast( + list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + _client_interceptors, + ) + interceptors = client_interceptors + + try: + # Step 1a: Invoke read_before_execution on client-level interceptors + for interceptor in client_interceptors: + interceptor.read_before_execution(context) + + # Step 1b: Run operation-level plugins + for plugin in plugins: + plugin(config) + + _client_interceptors = config.interceptors + interceptors = cast( + list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + _client_interceptors, + ) + + # Step 1c: Invoke the read_before_execution hooks on newly added + # interceptors. + for interceptor in interceptors: + if interceptor not in client_interceptors: + interceptor.read_before_execution(context) + + # Step 2: Invoke the modify_before_serialization hooks + for interceptor in interceptors: + context._request = interceptor.modify_before_serialization(context) + + # Step 3: Invoke the read_before_serialization hooks + for interceptor in interceptors: + interceptor.read_before_serialization(context) + + # Step 4: Serialize the request + context_with_transport_request = cast(InterceptorContext[Input, None, DafnyRequest, None], context) + context_with_transport_request._transport_request = serialize( + context_with_transport_request.request, config + ) + + # Step 5: Invoke read_after_serialization + for interceptor in interceptors: + interceptor.read_after_serialization(context_with_transport_request) + + # Step 6: Invoke modify_before_retry_loop + for interceptor in interceptors: + context_with_transport_request._transport_request = interceptor.modify_before_retry_loop( + context_with_transport_request + ) + + # Step 7: Acquire the retry token. + retry_strategy = config.retry_strategy + retry_token = retry_strategy.acquire_initial_retry_token() + + while True: + # Make an attempt, creating a copy of the context so we don't pass + # around old data. + context_with_response = self._handle_attempt( + deserialize, + interceptors, + context_with_transport_request.copy(), + config, + operation_name, + ) + + # We perform this type-ignored re-assignment because `context` needs + # to point at the latest context so it can be generically handled + # later on. This is only an issue here because we've created a copy, + # so we're no longer simply pointing at the same object in memory + # with different names and type hints. It is possible to address this + # without having to fall back to the type ignore, but it would impose + # unnecessary runtime costs. + context = context_with_response # type: ignore + + if isinstance(context_with_response.response, Exception): + # Step 7u: Reacquire retry token if the attempt failed + try: + retry_token = retry_strategy.refresh_retry_token_for_retry( + token_to_renew=retry_token, + error_info=RetryErrorInfo( + # TODO: Determine the error type. 
+ error_type=RetryErrorType.CLIENT_ERROR, + ), + ) + except SmithyRetryException: + raise context_with_response.response + else: + # Step 8: Invoke record_success + retry_strategy.record_success(token=retry_token) + break + except Exception as e: + context._response = e + + # At this point, the context's request will have been definitively set, and + # The response will be set either with the modeled output or an exception. The + # transport_request and transport_response may be set or None. + execution_context = cast( + InterceptorContext[Input, Output, DafnyRequest | None, DafnyResponse | None], + context, + ) + return self._finalize_execution(interceptors, execution_context) + + def _handle_attempt( + self, + deserialize: Callable[[DafnyResponse, Config], Output], + interceptors: list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + context: InterceptorContext[Input, None, DafnyRequest, None], + config: Config, + operation_name: str, + ) -> InterceptorContext[Input, Output, DafnyRequest, DafnyResponse | None]: + try: + # Step 7a: Invoke read_before_attempt + for interceptor in interceptors: + interceptor.read_before_attempt(context) + + # Step 7m: Involve client Dafny impl + if config.dafnyImplInterface.impl is None: + raise Exception("No impl found on the operation config.") + + context_with_response = cast(InterceptorContext[Input, None, DafnyRequest, DafnyResponse], context) + + context_with_response._transport_response = config.dafnyImplInterface.handle_request( + input=context_with_response.transport_request + ) + + # Step 7n: Invoke read_after_transmit + for interceptor in interceptors: + interceptor.read_after_transmit(context_with_response) + + # Step 7o: Invoke modify_before_deserialization + for interceptor in interceptors: + context_with_response._transport_response = interceptor.modify_before_deserialization( + context_with_response + ) + + # Step 7p: Invoke read_before_deserialization + for interceptor in interceptors: + interceptor.read_before_deserialization(context_with_response) + + # Step 7q: deserialize + context_with_output = cast( + InterceptorContext[Input, Output, DafnyRequest, DafnyResponse], + context_with_response, + ) + context_with_output._response = deserialize(context_with_output._transport_response, config) + + # Step 7r: Invoke read_after_deserialization + for interceptor in interceptors: + interceptor.read_after_deserialization(context_with_output) + except Exception as e: + context._response = e + + # At this point, the context's request and transport_request have definitively been set, + # the response is either set or an exception, and the transport_resposne is either set or + # None. This will also be true after _finalize_attempt because there is no opportunity + # there to set the transport_response. 
+ attempt_context = cast( + InterceptorContext[Input, Output, DafnyRequest, DafnyResponse | None], + context, + ) + return self._finalize_attempt(interceptors, attempt_context) + + def _finalize_attempt( + self, + interceptors: list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + context: InterceptorContext[Input, Output, DafnyRequest, DafnyResponse | None], + ) -> InterceptorContext[Input, Output, DafnyRequest, DafnyResponse | None]: + # Step 7s: Invoke modify_before_attempt_completion + try: + for interceptor in interceptors: + context._response = interceptor.modify_before_attempt_completion(context) + except Exception as e: + context._response = e + + # Step 7t: Invoke read_after_attempt + for interceptor in interceptors: + try: + interceptor.read_after_attempt(context) + except Exception as e: + context._response = e + + return context + + def _finalize_execution( + self, + interceptors: list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + context: InterceptorContext[Input, Output, DafnyRequest | None, DafnyResponse | None], + ) -> Output: + try: + # Step 9: Invoke modify_before_completion + for interceptor in interceptors: + context._response = interceptor.modify_before_completion(context) + + except Exception as e: + context._response = e + + # Step 11: Invoke read_after_execution + for interceptor in interceptors: + try: + interceptor.read_after_execution(context) + except Exception as e: + context._response = e + + # Step 12: Return / throw + if isinstance(context.response, Exception): + raise context.response + + # We may want to add some aspects of this context to the output types so we can + # return it to the end-users. + return context.response diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/config.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/config.py new file mode 100644 index 000000000..4391e5b97 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/config.py @@ -0,0 +1,310 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
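For orientation, a hypothetical usage sketch of the client and config generated above (not part of the generated sources): my_keyring stands for any Keyring built with the AWS Cryptographic Material Providers Library, and the CryptoAction strings for attribute_actions_on_encrypt are assumptions about the accepted values.

from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.client import (
    DynamoDbItemEncryptor,
)
from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.config import (
    DynamoDbItemEncryptorConfig,
)
from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models import (
    EncryptItemInput,
)

config = DynamoDbItemEncryptorConfig(
    logical_table_name="my_table",
    partition_key_name="pk",
    attribute_actions_on_encrypt={"pk": "SIGN_ONLY", "data": "ENCRYPT_AND_SIGN"},
    keyring=my_keyring,  # hypothetical: any MPL Keyring; a cmm could be supplied instead
)
item_encryptor = DynamoDbItemEncryptor(config=config)
# Items use the low-level DynamoDB AttributeValue shape handled by aws_sdk_to_dafny above.
encrypted = item_encryptor.encrypt_item(
    EncryptItemInput(plaintext_item={"pk": {"S": "id-1"}, "data": {"S": "sensitive"}})
)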
+ +import aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes import ( + DynamoDbItemEncryptorConfig_DynamoDbItemEncryptorConfig as DafnyDynamoDbItemEncryptorConfig, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.dafny_to_smithy +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.smithy_to_dafny +from dataclasses import dataclass +from typing import Any, Callable, Dict, Optional, TypeAlias + +from .dafnyImplInterface import DafnyImplInterface +from smithy_python._private.retries import SimpleRetryStrategy +from smithy_python.interfaces.retries import RetryStrategy + +from ..aws_cryptography_dbencryptionsdk_dynamodb.models import LegacyOverride + + +_ServiceInterceptor = Any + + +@dataclass(init=False) +class Config: + """Configuration for DynamoDbItemEncryptor.""" + + interceptors: list[_ServiceInterceptor] + retry_strategy: RetryStrategy + dafnyImplInterface: DafnyImplInterface | None + + def __init__( + self, + *, + interceptors: list[_ServiceInterceptor] | None = None, + retry_strategy: RetryStrategy | None = None, + dafnyImplInterface: DafnyImplInterface | None = None, + ): + """Constructor. + + :param interceptors: The list of interceptors, which are hooks + that are called during the execution of a request. + :param retry_strategy: The retry strategy for issuing retry + tokens and computing retry delays. + :param dafnyImplInterface: + """ + self.interceptors = interceptors or [] + self.retry_strategy = retry_strategy or SimpleRetryStrategy() + self.dafnyImplInterface = dafnyImplInterface + + +# A callable that allows customizing the config object on each request. +Plugin: TypeAlias = Callable[[Config], None] + + +class DynamoDbItemEncryptorConfig(Config): + logical_table_name: str + partition_key_name: str + sort_key_name: Optional[str] + attribute_actions_on_encrypt: dict[str, str] + allowed_unsigned_attributes: Optional[list[str]] + allowed_unsigned_attribute_prefix: Optional[str] + algorithm_suite_id: Optional[str] + keyring: Optional[ + "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.Keyring" + ] + cmm: Optional[ + "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.CryptographicMaterialsManager" + ] + legacy_override: Optional[LegacyOverride] + plaintext_override: Optional[str] + + def __init__( + self, + *, + logical_table_name: str, + partition_key_name: str, + attribute_actions_on_encrypt: dict[str, str], + sort_key_name: Optional[str] = None, + allowed_unsigned_attributes: Optional[list[str]] = None, + allowed_unsigned_attribute_prefix: Optional[str] = None, + algorithm_suite_id: Optional[str] = None, + keyring: Optional[ + "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.Keyring" + ] = None, + cmm: Optional[ + "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.CryptographicMaterialsManager" + ] = None, + legacy_override: Optional[LegacyOverride] = None, + plaintext_override: Optional[str] = None, + ): + """The configuration for the client-side encryption of DynamoDB items. + + :param logical_table_name: The logical table name for this + table. 
This is the name that is cryptographically bound with + your data. This can be the same as the actual DynamoDB table + name. Its purpose is to be distinct from the DynamoDB table + name so that the data may still be authenticated if being + read from different (but logically similar) tables, such as + a backup table. + :param partition_key_name: The name of the partition key on the + table this item will be written to or was read from. + :param attribute_actions_on_encrypt: A map that describes what + attributes should be encrypted and/or signed on encrypt. + This map must contain all attributes that might be + encountered during encryption. + :param sort_key_name: If this table contains a sort key, the + name of the sort key on the table this item will be written + to or was read from. + :param allowed_unsigned_attributes: A list of attribute names + such that, if encountered during decryption, those + attributes are treated as unsigned. + :param allowed_unsigned_attribute_prefix: A prefix such that, if + during decryption any attribute has a name with this prefix, + it is treated as unsigned. + :param algorithm_suite_id: An ID for the algorithm suite to use + during encryption and decryption. + :param keyring: The Keyring that should be used to wrap and + unwrap data keys. If specified, a Default Cryptographic + Materials Manager with this Keyring is used to obtain + materials for encryption and decryption. Either a Keyring or + a Cryptographic Materials Manager must be specified. + :param cmm: The Cryptographic Materials Manager that is used to + obtain materials for encryption and decryption. Either a + Keyring or a Cryptographic Materials Manager must be + specified. + :param legacy_override: A configuration that overrides encryption + and/or decryption to instead perform legacy encryption + and/or decryption. Used as part of migration from version + 2.x to version 3.x. + :param plaintext_override: A configuration that overrides + encryption and/or decryption to instead pass through and + write and/or read plaintext. Used to update plaintext tables + to fully use client-side encryption. 
+ """ + super().__init__() + self.logical_table_name = logical_table_name + if (partition_key_name is not None) and (len(partition_key_name) < 1): + raise ValueError("The size of partition_key_name must be greater than or equal to 1") + + if (partition_key_name is not None) and (len(partition_key_name) > 255): + raise ValueError("The size of partition_key_name must be less than or equal to 255") + + self.partition_key_name = partition_key_name + self.attribute_actions_on_encrypt = attribute_actions_on_encrypt + if (sort_key_name is not None) and (len(sort_key_name) < 1): + raise ValueError("The size of sort_key_name must be greater than or equal to 1") + + if (sort_key_name is not None) and (len(sort_key_name) > 255): + raise ValueError("The size of sort_key_name must be less than or equal to 255") + + self.sort_key_name = sort_key_name + if (allowed_unsigned_attributes is not None) and (len(allowed_unsigned_attributes) < 1): + raise ValueError("The size of allowed_unsigned_attributes must be greater than or equal to 1") + + self.allowed_unsigned_attributes = allowed_unsigned_attributes + self.allowed_unsigned_attribute_prefix = allowed_unsigned_attribute_prefix + self.algorithm_suite_id = algorithm_suite_id + self.keyring = keyring + self.cmm = cmm + self.legacy_override = legacy_override + self.plaintext_override = plaintext_override + + def as_dict(self) -> Dict[str, Any]: + """Converts the DynamoDbItemEncryptorConfig to a dictionary.""" + d: Dict[str, Any] = { + "logical_table_name": self.logical_table_name, + "partition_key_name": self.partition_key_name, + "attribute_actions_on_encrypt": self.attribute_actions_on_encrypt, + } + + if self.sort_key_name is not None: + d["sort_key_name"] = self.sort_key_name + + if self.allowed_unsigned_attributes is not None: + d["allowed_unsigned_attributes"] = self.allowed_unsigned_attributes + + if self.allowed_unsigned_attribute_prefix is not None: + d["allowed_unsigned_attribute_prefix"] = self.allowed_unsigned_attribute_prefix + + if self.algorithm_suite_id is not None: + d["algorithm_suite_id"] = self.algorithm_suite_id + + if self.keyring is not None: + d["keyring"] = self.keyring.as_dict() + + if self.cmm is not None: + d["cmm"] = self.cmm.as_dict() + + if self.legacy_override is not None: + d["legacy_override"] = self.legacy_override.as_dict() + + if self.plaintext_override is not None: + d["plaintext_override"] = self.plaintext_override + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DynamoDbItemEncryptorConfig": + """Creates a DynamoDbItemEncryptorConfig from a dictionary.""" + from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references import ( + Keyring, + ) + from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references import ( + CryptographicMaterialsManager, + ) + + kwargs: Dict[str, Any] = { + "logical_table_name": d["logical_table_name"], + "partition_key_name": d["partition_key_name"], + "attribute_actions_on_encrypt": d["attribute_actions_on_encrypt"], + } + + if "sort_key_name" in d: + kwargs["sort_key_name"] = d["sort_key_name"] + + if "allowed_unsigned_attributes" in d: + kwargs["allowed_unsigned_attributes"] = d["allowed_unsigned_attributes"] + + if "allowed_unsigned_attribute_prefix" in d: + kwargs["allowed_unsigned_attribute_prefix"] = d["allowed_unsigned_attribute_prefix"] + + if "algorithm_suite_id" in d: + kwargs["algorithm_suite_id"] = d["algorithm_suite_id"] + + if "keyring" in d: + kwargs["keyring"] = 
Keyring.from_dict(d["keyring"]) + + if "cmm" in d: + kwargs["cmm"] = CryptographicMaterialsManager.from_dict(d["cmm"]) + + if "legacy_override" in d: + kwargs["legacy_override"] = LegacyOverride.from_dict(d["legacy_override"]) + + if "plaintext_override" in d: + kwargs["plaintext_override"] = d["plaintext_override"] + + return DynamoDbItemEncryptorConfig(**kwargs) + + def __repr__(self) -> str: + result = "DynamoDbItemEncryptorConfig(" + if self.logical_table_name is not None: + result += f"logical_table_name={repr(self.logical_table_name)}, " + + if self.partition_key_name is not None: + result += f"partition_key_name={repr(self.partition_key_name)}, " + + if self.sort_key_name is not None: + result += f"sort_key_name={repr(self.sort_key_name)}, " + + if self.attribute_actions_on_encrypt is not None: + result += f"attribute_actions_on_encrypt={repr(self.attribute_actions_on_encrypt)}, " + + if self.allowed_unsigned_attributes is not None: + result += f"allowed_unsigned_attributes={repr(self.allowed_unsigned_attributes)}, " + + if self.allowed_unsigned_attribute_prefix is not None: + result += f"allowed_unsigned_attribute_prefix={repr(self.allowed_unsigned_attribute_prefix)}, " + + if self.algorithm_suite_id is not None: + result += f"algorithm_suite_id={repr(self.algorithm_suite_id)}, " + + if self.keyring is not None: + result += f"keyring={repr(self.keyring)}, " + + if self.cmm is not None: + result += f"cmm={repr(self.cmm)}, " + + if self.legacy_override is not None: + result += f"legacy_override={repr(self.legacy_override)}, " + + if self.plaintext_override is not None: + result += f"plaintext_override={repr(self.plaintext_override)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DynamoDbItemEncryptorConfig): + return False + attributes: list[str] = [ + "logical_table_name", + "partition_key_name", + "sort_key_name", + "attribute_actions_on_encrypt", + "allowed_unsigned_attributes", + "allowed_unsigned_attribute_prefix", + "algorithm_suite_id", + "keyring", + "cmm", + "legacy_override", + "plaintext_override", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +def dafny_config_to_smithy_config(dafny_config) -> DynamoDbItemEncryptorConfig: + """Converts the provided Dafny shape for this localService's config into + the corresponding Smithy-modelled shape.""" + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DynamoDbItemEncryptorConfig( + dafny_config + ) + + +def smithy_config_to_dafny_config(smithy_config) -> DafnyDynamoDbItemEncryptorConfig: + """Converts the provided Smithy-modelled shape for this localService's + config into the corresponding Dafny shape.""" + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DynamoDbItemEncryptorConfig( + smithy_config + ) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/dafnyImplInterface.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/dafnyImplInterface.py new file mode 100644 index 000000000..06ead47b7 --- /dev/null +++ 
b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/dafnyImplInterface.py @@ -0,0 +1,34 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +from aws_dbesdk_dynamodb.internaldafny.generated.DynamoDbItemEncryptor import ( + DynamoDbItemEncryptorClient, +) +from .dafny_protocol import DafnyRequest + + +class DafnyImplInterface: + impl: DynamoDbItemEncryptorClient | None = None + + # operation_map cannot be created at dafnyImplInterface create time, + # as the map's values reference values inside `self.impl`, + # and impl is only populated at runtime. + # Accessing these before impl is populated results in an error. + # At runtime, the map is populated once and cached. + operation_map = None + + def handle_request(self, input: DafnyRequest): + if self.operation_map is None: + self.operation_map = { + "EncryptItem": self.impl.EncryptItem, + "DecryptItem": self.impl.DecryptItem, + } + + # This logic is where a typical Smithy client would expect the "server" to be. + # This code can be thought of as logic our Dafny "server" uses + # to route incoming client requests to the correct request handler code. + if input.dafny_operation_input is None: + return self.operation_map[input.operation_name]() + else: + return self.operation_map[input.operation_name](input.dafny_operation_input) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/dafny_protocol.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/dafny_protocol.py new file mode 100644 index 000000000..bb2adb9a8 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/dafny_protocol.py @@ -0,0 +1,33 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
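DafnyImplInterface above is the thin routing layer between the Smithy-style client and the Dafny-generated implementation: serialize.py builds a DafnyRequest naming the operation, and handle_request dispatches it through the lazily built operation map. A hedged walk-through of that flow follows; dafny_client and dafny_input are stand-ins for objects produced elsewhere in this package, not generated code.

# Assumes a DynamoDbItemEncryptorClient (dafny_client) from config.py's plugin and a
# DafnyEncryptItemInput (dafny_input) produced by smithy_to_dafny.py.
interface = DafnyImplInterface()
interface.impl = dafny_client

request = DafnyRequest(operation_name="EncryptItem", dafny_operation_input=dafny_input)
result = interface.handle_request(input=request)  # a Wrappers.Result from the Dafny impl

if result.IsFailure():
    # deserialize.py maps the Dafny Error into one of the ServiceError subclasses.
    raise RuntimeError("EncryptItem failed")
dafny_output = result.value  # unwrapped DafnyEncryptItemOutput on success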
+ +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes import ( + DecryptItemInput_DecryptItemInput as DafnyDecryptItemInput, + EncryptItemInput_EncryptItemInput as DafnyEncryptItemInput, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ + + +import smithy_dafny_standard_library.internaldafny.generated.Wrappers as Wrappers +from typing import Union + + +class DafnyRequest: + operation_name: str + + # dafny_operation_input can take on any one of the types + # of the input values passed to the Dafny implementation + dafny_operation_input: Union[ + DafnyEncryptItemInput, + DafnyDecryptItemInput, + ] + + def __init__(self, operation_name, dafny_operation_input): + self.operation_name = operation_name + self.dafny_operation_input = dafny_operation_input + + +class DafnyResponse(Wrappers.Result): + def __init__(self): + super().__init__(self) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/dafny_to_aws_sdk.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/dafny_to_aws_sdk.py new file mode 100644 index 000000000..9aca0964e --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/dafny_to_aws_sdk.py @@ -0,0 +1,75 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +from aws_cryptography_internal_dynamodb.internaldafny.generated.ComAmazonawsDynamodbTypes import ( + AttributeValue_B, + AttributeValue_BOOL, + AttributeValue_BS, + AttributeValue_L, + AttributeValue_M, + AttributeValue_N, + AttributeValue_NS, + AttributeValue_NULL, + AttributeValue_S, + AttributeValue_SS, +) +import aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk + + +def com_amazonaws_dynamodb_AttributeValue(dafny_input): + # Convert AttributeValue + if isinstance(dafny_input, AttributeValue_S): + AttributeValue_union_value = { + "S": b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.S).decode("utf-16-be") + } + elif isinstance(dafny_input, AttributeValue_N): + AttributeValue_union_value = { + "N": b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.N).decode("utf-16-be") + } + elif isinstance(dafny_input, AttributeValue_B): + AttributeValue_union_value = {"B": bytes(dafny_input.B)} + elif isinstance(dafny_input, AttributeValue_SS): + AttributeValue_union_value = { + "SS": [ + b"".join(ord(c).to_bytes(2, "big") for c in list_element).decode("utf-16-be") + for list_element in dafny_input.SS + ] + } + elif isinstance(dafny_input, AttributeValue_NS): + AttributeValue_union_value = { + "NS": [ + b"".join(ord(c).to_bytes(2, "big") for c in list_element).decode("utf-16-be") + for list_element in dafny_input.NS + ] + } + elif isinstance(dafny_input, AttributeValue_BS): + AttributeValue_union_value = {"BS": [bytes(list_element) for list_element in dafny_input.BS]} + elif isinstance(dafny_input, AttributeValue_M): + AttributeValue_union_value = { + "M": { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in 
dafny_input.M.items + } + } + elif isinstance(dafny_input, AttributeValue_L): + AttributeValue_union_value = { + "L": [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + list_element + ) + for list_element in dafny_input.L + ] + } + elif isinstance(dafny_input, AttributeValue_NULL): + AttributeValue_union_value = {"NULL": dafny_input.NULL} + elif isinstance(dafny_input, AttributeValue_BOOL): + AttributeValue_union_value = {"BOOL": dafny_input.BOOL} + else: + raise ValueError("No recognized union value in union type: " + str(dafny_input)) + + return AttributeValue_union_value diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/dafny_to_smithy.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/dafny_to_smithy.py new file mode 100644 index 000000000..9922ce15d --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/dafny_to_smithy.py @@ -0,0 +1,239 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +import aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy +import aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.dafny_to_smithy +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy + + +def aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_EncryptItemInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models.EncryptItemInput( + plaintext_item={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.plaintextItem.items + }, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DecryptItemInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models.DecryptItemInput( + encrypted_item={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.encryptedItem.items + }, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_ParsedHeader(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models.ParsedHeader( + attribute_actions_on_encrypt={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): 
aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + value + ) + for (key, value) in dafny_input.attributeActionsOnEncrypt.items + }, + algorithm_suite_id=aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_DBEAlgorithmSuiteId( + dafny_input.algorithmSuiteId + ), + encrypted_data_keys=[ + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_EncryptedDataKey( + list_element + ) + for list_element in dafny_input.encryptedDataKeys + ], + stored_encryption_context={ + bytes(key.Elements).decode("utf-8"): bytes(value.Elements).decode("utf-8") + for (key, value) in dafny_input.storedEncryptionContext.items + }, + encryption_context={ + bytes(key.Elements).decode("utf-8"): bytes(value.Elements).decode("utf-8") + for (key, value) in dafny_input.encryptionContext.items + }, + selector_context={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.selectorContext.items + }, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_EncryptItemOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models.EncryptItemOutput( + encrypted_item={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.encryptedItem.items + }, + parsed_header=( + ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_ParsedHeader( + dafny_input.parsedHeader.value + ) + ) + if (dafny_input.parsedHeader.is_Some) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DecryptItemOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models.DecryptItemOutput( + plaintext_item={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.plaintextItem.items + }, + parsed_header=( + ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_ParsedHeader( + dafny_input.parsedHeader.value + ) + ) + if (dafny_input.parsedHeader.is_Some) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_AtomicPrimitivesReference( + dafny_input, +): + from aws_cryptography_primitives.smithygenerated.aws_cryptography_primitives.client import ( + AwsCryptographicPrimitives, + ) + + return AwsCryptographicPrimitives(config=None, dafny_client=dafny_input) + + +def aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DynamoDbItemEncryptorConfig( + dafny_input, +): + # Deferred import of .config to avoid circular dependency + import 
aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.config + + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.config.DynamoDbItemEncryptorConfig( + logical_table_name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.logicalTableName).decode( + "utf-16-be" + ), + partition_key_name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.partitionKeyName).decode( + "utf-16-be" + ), + sort_key_name=( + (b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.sortKeyName.value).decode("utf-16-be")) + if (dafny_input.sortKeyName.is_Some) + else None + ), + attribute_actions_on_encrypt={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + value + ) + for (key, value) in dafny_input.attributeActionsOnEncrypt.items + }, + allowed_unsigned_attributes=( + ( + [ + b"".join(ord(c).to_bytes(2, "big") for c in list_element).decode("utf-16-be") + for list_element in dafny_input.allowedUnsignedAttributes.value + ] + ) + if (dafny_input.allowedUnsignedAttributes.is_Some) + else None + ), + allowed_unsigned_attribute_prefix=( + ( + b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.allowedUnsignedAttributePrefix.value).decode( + "utf-16-be" + ) + ) + if (dafny_input.allowedUnsignedAttributePrefix.is_Some) + else None + ), + algorithm_suite_id=( + ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_DBEAlgorithmSuiteId( + dafny_input.algorithmSuiteId.value + ) + ) + if (dafny_input.algorithmSuiteId.is_Some) + else None + ), + keyring=( + ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_KeyringReference( + dafny_input.keyring.UnwrapOr(None) + ) + ) + if (dafny_input.keyring.UnwrapOr(None) is not None) + else None + ), + cmm=( + ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_CryptographicMaterialsManagerReference( + dafny_input.cmm.UnwrapOr(None) + ) + ) + if (dafny_input.cmm.UnwrapOr(None) is not None) + else None + ), + legacy_override=( + ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_LegacyOverride( + dafny_input.legacyOverride.value + ) + ) + if (dafny_input.legacyOverride.is_Some) + else None + ), + plaintext_override=( + ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_PlaintextOverride( + dafny_input.plaintextOverride.value + ) + ) + if (dafny_input.plaintextOverride.is_Some) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_StructuredEncryptionReference( + dafny_input, +): + from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.client import ( + StructuredEncryption, + ) + + return StructuredEncryption(config=None, dafny_client=dafny_input) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/deserialize.py 
b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/deserialize.py new file mode 100644 index 000000000..fb27cecff --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/deserialize.py @@ -0,0 +1,98 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +import _dafny +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes import ( + DecryptItemOutput_DecryptItemOutput as DafnyDecryptItemOutput, + EncryptItemOutput_EncryptItemOutput as DafnyEncryptItemOutput, + Error, + Error_DynamoDbItemEncryptorException, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.dafny_to_smithy +from typing import Any + +from .dafny_protocol import DafnyResponse +from .errors import ( + AwsCryptographicMaterialProviders, + AwsCryptographicPrimitives, + CollectionOfErrors, + ComAmazonawsDynamodb, + DynamoDbEncryption, + DynamoDbItemEncryptorException, + OpaqueError, + ServiceError, + StructuredEncryption, +) +from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.deserialize import ( + _deserialize_error as aws_cryptography_materialproviders_deserialize_error, +) +from aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.shim import ( + _sdk_error_to_dafny_error as com_amazonaws_dynamodb_sdk_error_to_dafny_error, +) +from aws_cryptography_primitives.smithygenerated.aws_cryptography_primitives.deserialize import ( + _deserialize_error as aws_cryptography_primitives_deserialize_error, +) + +from ..aws_cryptography_dbencryptionsdk_dynamodb.deserialize import ( + _deserialize_error as aws_cryptography_dbencryptionsdk_dynamodb_deserialize_error, +) +from ..aws_cryptography_dbencryptionsdk_structuredencryption.deserialize import ( + _deserialize_error as aws_cryptography_dbencryptionsdk_structuredencryption_deserialize_error, +) +from .config import Config + + +def _deserialize_encrypt_item(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_EncryptItemOutput( + input.value + ) + + +def _deserialize_decrypt_item(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DecryptItemOutput( + input.value + ) + + +def _deserialize_error(error: Error) -> ServiceError: + if error.is_Opaque: + return OpaqueError(obj=error.obj) + elif error.is_OpaqueWithText: + return OpaqueErrorWithText(obj=error.obj, obj_message=error.objMessage) + elif error.is_CollectionOfErrors: + return CollectionOfErrors( + message=_dafny.string_of(error.message), + list=[_deserialize_error(dafny_e) for dafny_e in error.list], + ) + elif error.is_DynamoDbItemEncryptorException: + return DynamoDbItemEncryptorException(message=_dafny.string_of(error.message)) + elif 
error.is_AwsCryptographyDbEncryptionSdkStructuredEncryption: + return StructuredEncryption( + aws_cryptography_dbencryptionsdk_structuredencryption_deserialize_error( + error.AwsCryptographyDbEncryptionSdkStructuredEncryption + ) + ) + elif error.is_AwsCryptographyPrimitives: + return AwsCryptographicPrimitives( + aws_cryptography_primitives_deserialize_error(error.AwsCryptographyPrimitives) + ) + elif error.is_AwsCryptographyDbEncryptionSdkDynamoDb: + return DynamoDbEncryption( + aws_cryptography_dbencryptionsdk_dynamodb_deserialize_error(error.AwsCryptographyDbEncryptionSdkDynamoDb) + ) + elif error.is_AwsCryptographyMaterialProviders: + return AwsCryptographicMaterialProviders( + aws_cryptography_materialproviders_deserialize_error(error.AwsCryptographyMaterialProviders) + ) + elif error.is_ComAmazonawsDynamodb: + return ComAmazonawsDynamodb(message=_dafny.string_of(error.ComAmazonawsDynamodb.message)) + else: + return OpaqueError(obj=error) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/errors.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/errors.py new file mode 100644 index 000000000..207a1df12 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/errors.py @@ -0,0 +1,335 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +import _dafny +from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.errors import ( + _smithy_error_to_dafny_error as aws_cryptography_materialproviders_smithy_error_to_dafny_error, +) +from aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.shim import ( + _sdk_error_to_dafny_error as com_amazonaws_dynamodb_sdk_error_to_dafny_error, +) +from aws_cryptography_primitives.smithygenerated.aws_cryptography_primitives.errors import ( + _smithy_error_to_dafny_error as aws_cryptography_primitives_smithy_error_to_dafny_error, +) +import aws_dbesdk_dynamodb.internaldafny.generated +import aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.errors import ( + _smithy_error_to_dafny_error as aws_cryptography_dbencryptionsdk_dynamodb_smithy_error_to_dafny_error, +) +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.errors +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.errors import ( + _smithy_error_to_dafny_error as aws_cryptography_dbencryptionsdk_structuredencryption_smithy_error_to_dafny_error, +) +from typing import Any, Dict, Generic, List, Literal, TypeVar + + +class ServiceError(Exception): + """Base error for all errors in the service.""" + + pass + + +T = TypeVar("T") + + +class ApiError(ServiceError, Generic[T]): + """Base error for all api errors in the service.""" + + code: T + + def __init__(self, message: str): + super().__init__(message) + self.message = message + + +class UnknownApiError(ApiError[Literal["Unknown"]]): + """Error representing any unknown api errors.""" + + code: Literal["Unknown"] = "Unknown" + + +class 
DynamoDbItemEncryptorException(ApiError[Literal["DynamoDbItemEncryptorException"]]): + code: Literal["DynamoDbItemEncryptorException"] = "DynamoDbItemEncryptorException" + message: str + + def __init__( + self, + *, + message: str, + ): + super().__init__(message) + + def as_dict(self) -> Dict[str, Any]: + """Converts the DynamoDbItemEncryptorException to a dictionary.""" + return { + "message": self.message, + "code": self.code, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DynamoDbItemEncryptorException": + """Creates a DynamoDbItemEncryptorException from a dictionary.""" + kwargs: Dict[str, Any] = { + "message": d["message"], + } + + return DynamoDbItemEncryptorException(**kwargs) + + def __repr__(self) -> str: + result = "DynamoDbItemEncryptorException(" + if self.message is not None: + result += f"message={repr(self.message)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DynamoDbItemEncryptorException): + return False + attributes: list[str] = [ + "message", + "message", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class DynamoDbItemEncryptorException(ApiError[Literal["DynamoDbItemEncryptorException"]]): + code: Literal["DynamoDbItemEncryptorException"] = "DynamoDbItemEncryptorException" + message: str + + +class AwsCryptographicPrimitives(ApiError[Literal["AwsCryptographicPrimitives"]]): + AwsCryptographicPrimitives: Any + + +class ComAmazonawsDynamodb(ApiError[Literal["ComAmazonawsDynamodb"]]): + ComAmazonawsDynamodb: Any + + +class AwsCryptographicMaterialProviders(ApiError[Literal["AwsCryptographicMaterialProviders"]]): + AwsCryptographicMaterialProviders: Any + + +class StructuredEncryption(ApiError[Literal["StructuredEncryption"]]): + StructuredEncryption: Any + + +class DynamoDbEncryption(ApiError[Literal["DynamoDbEncryption"]]): + DynamoDbEncryption: Any + + +class CollectionOfErrors(ApiError[Literal["CollectionOfErrors"]]): + code: Literal["CollectionOfErrors"] = "CollectionOfErrors" + message: str + list: List[ServiceError] + + def __init__(self, *, message: str, list): + super().__init__(message) + self.list = list + + def as_dict(self) -> Dict[str, Any]: + """Converts the CollectionOfErrors to a dictionary. + + The dictionary uses the modeled shape names rather than the + parameter names as keys to be mostly compatible with boto3. + """ + return { + "message": self.message, + "code": self.code, + "list": self.list, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "CollectionOfErrors": + """Creates a CollectionOfErrors from a dictionary. + + The dictionary is expected to use the modeled shape names rather + than the parameter names as keys to be mostly compatible with + boto3. 
+ """ + kwargs: Dict[str, Any] = {"message": d["message"], "list": d["list"]} + + return CollectionOfErrors(**kwargs) + + def __repr__(self) -> str: + result = "CollectionOfErrors(" + result += f"message={self.message}," + if self.message is not None: + result += f"message={repr(self.message)}" + result += f"list={self.list}" + result += ")" + return result + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, CollectionOfErrors): + return False + if not (self.list == other.list): + return False + attributes: list[str] = ["message", "message"] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class OpaqueError(ApiError[Literal["OpaqueError"]]): + code: Literal["OpaqueError"] = "OpaqueError" + obj: Any # As an OpaqueError, type of obj is unknown + + def __init__(self, *, obj): + super().__init__("") + self.obj = obj + + def as_dict(self) -> Dict[str, Any]: + """Converts the OpaqueError to a dictionary. + + The dictionary uses the modeled shape names rather than the + parameter names as keys to be mostly compatible with boto3. + """ + return { + "message": self.message, + "code": self.code, + "obj": self.obj, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "OpaqueError": + """Creates a OpaqueError from a dictionary. + + The dictionary is expected to use the modeled shape names rather + than the parameter names as keys to be mostly compatible with + boto3. + """ + kwargs: Dict[str, Any] = {"message": d["message"], "obj": d["obj"]} + + return OpaqueError(**kwargs) + + def __repr__(self) -> str: + result = "OpaqueError(" + result += f"message={self.message}," + if self.message is not None: + result += f"message={repr(self.message)}" + result += f"obj={self.obj}" + result += ")" + return result + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, OpaqueError): + return False + if not (self.obj == other.obj): + return False + attributes: list[str] = ["message", "message"] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class OpaqueWithTextError(ApiError[Literal["OpaqueWithTextError"]]): + code: Literal["OpaqueWithTextError"] = "OpaqueWithTextError" + obj: Any # As an OpaqueWithTextError, type of obj is unknown + obj_message: str # obj_message is a message representing the details of obj + + def __init__(self, *, obj, obj_message): + super().__init__("") + self.obj = obj + self.obj_message = obj_message + + def as_dict(self) -> Dict[str, Any]: + """Converts the OpaqueWithTextError to a dictionary. + + The dictionary uses the modeled shape names rather than the + parameter names as keys to be mostly compatible with boto3. + """ + return { + "message": self.message, + "code": self.code, + "obj": self.obj, + "obj_message": self.obj_message, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "OpaqueWithTextError": + """Creates a OpaqueWithTextError from a dictionary. + + The dictionary is expected to use the modeled shape names rather + than the parameter names as keys to be mostly compatible with + boto3. 
+ """ + kwargs: Dict[str, Any] = { + "message": d["message"], + "obj": d["obj"], + "obj_message": d["obj_message"], + } + + return OpaqueWithTextError(**kwargs) + + def __repr__(self) -> str: + result = "OpaqueWithTextError(" + result += f"message={self.message}," + if self.message is not None: + result += f"message={repr(self.message)}" + result += f"obj={self.obj}" + result += f"obj_message={self.obj_message}" + result += ")" + return result + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, OpaqueWithTextError): + return False + if not (self.obj == other.obj): + return False + attributes: list[str] = ["message", "message"] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +def _smithy_error_to_dafny_error(e: ServiceError): + """Converts the provided native Smithy-modeled error into the corresponding + Dafny error.""" + if isinstance( + e, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.errors.DynamoDbItemEncryptorException, + ): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes.Error_DynamoDbItemEncryptorException( + message=_dafny.Seq(e.message) + ) + + if isinstance(e, AwsCryptographicPrimitives): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes.Error_AwsCryptographyPrimitives( + aws_cryptography_primitives_smithy_error_to_dafny_error(e.message) + ) + + if isinstance(e, ComAmazonawsDynamodb): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes.Error_ComAmazonawsDynamodb( + com_amazonaws_dynamodb_sdk_error_to_dafny_error(e.message) + ) + + if isinstance(e, AwsCryptographicMaterialProviders): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes.Error_AwsCryptographyMaterialProviders( + aws_cryptography_materialproviders_smithy_error_to_dafny_error(e.message) + ) + + if isinstance(e, StructuredEncryption): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes.Error_AwsCryptographyDbEncryptionSdkStructuredEncryption( + aws_cryptography_dbencryptionsdk_structuredencryption_smithy_error_to_dafny_error(e.message) + ) + + if isinstance(e, DynamoDbEncryption): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes.Error_AwsCryptographyDbEncryptionSdkDynamoDb( + aws_cryptography_dbencryptionsdk_dynamodb_smithy_error_to_dafny_error(e.message) + ) + + if isinstance(e, CollectionOfErrors): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes.Error_CollectionOfErrors( + message=_dafny.Seq(e.message), + list=_dafny.Seq(_smithy_error_to_dafny_error(native_err) for native_err in e.list), + ) + + if isinstance(e, OpaqueError): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes.Error_Opaque( + obj=e.obj + ) + + if isinstance(e, OpaqueWithTextError): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes.Error_OpaqueWithText( + obj=e.obj, objMessage=e.obj_message + ) + + else: + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes.Error_Opaque( + obj=e + ) diff --git 
a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/models.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/models.py new file mode 100644 index 000000000..96f5b86d3 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/models.py @@ -0,0 +1,329 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +from typing import Any, Dict, Optional + +from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.models import ( + EncryptedDataKey, +) + + +class DecryptItemInput: + encrypted_item: "dict[str, dict[str, Any]]" + + def __init__( + self, + *, + encrypted_item: "dict[str, dict[str, Any]]", + ): + """Inputs for decrypting a DynamoDB Item. + + :param encrypted_item: The encrypted DynamoDB item to decrypt. + """ + self.encrypted_item = encrypted_item + + def as_dict(self) -> Dict[str, Any]: + """Converts the DecryptItemInput to a dictionary.""" + return { + "encrypted_item": self.encrypted_item, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DecryptItemInput": + """Creates a DecryptItemInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "encrypted_item": d["encrypted_item"], + } + + return DecryptItemInput(**kwargs) + + def __repr__(self) -> str: + result = "DecryptItemInput(" + if self.encrypted_item is not None: + result += f"encrypted_item={repr(self.encrypted_item)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DecryptItemInput): + return False + attributes: list[str] = [ + "encrypted_item", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class EncryptItemInput: + plaintext_item: "dict[str, dict[str, Any]]" + + def __init__( + self, + *, + plaintext_item: "dict[str, dict[str, Any]]", + ): + """Inputs for encrypting a DynamoDB Item. + + :param plaintext_item: The DynamoDB item to encrypt. 
+ """ + self.plaintext_item = plaintext_item + + def as_dict(self) -> Dict[str, Any]: + """Converts the EncryptItemInput to a dictionary.""" + return { + "plaintext_item": self.plaintext_item, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "EncryptItemInput": + """Creates a EncryptItemInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "plaintext_item": d["plaintext_item"], + } + + return EncryptItemInput(**kwargs) + + def __repr__(self) -> str: + result = "EncryptItemInput(" + if self.plaintext_item is not None: + result += f"plaintext_item={repr(self.plaintext_item)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, EncryptItemInput): + return False + attributes: list[str] = [ + "plaintext_item", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ParsedHeader: + attribute_actions_on_encrypt: dict[str, str] + algorithm_suite_id: str + encrypted_data_keys: list[EncryptedDataKey] + stored_encryption_context: dict[str, str] + encryption_context: dict[str, str] + selector_context: "dict[str, dict[str, Any]]" + + def __init__( + self, + *, + attribute_actions_on_encrypt: dict[str, str], + algorithm_suite_id: str, + encrypted_data_keys: list[EncryptedDataKey], + stored_encryption_context: dict[str, str], + encryption_context: dict[str, str], + selector_context: "dict[str, dict[str, Any]]", + ): + """A parsed version of the header that was written with or read on an + encrypted DynamoDB item. + + :param attribute_actions_on_encrypt: The non-DO_NOTHING Crypto + Actions that were configured when this item was originally + encrypted. + :param algorithm_suite_id: The ID of the algorithm suite that + was used to encrypt this item. + :param encrypted_data_keys: The encrypted data keys that are + stored in the header of this item. + :param stored_encryption_context: The portion of the encryption + context that was stored in the header of this item. + :param encryption_context: The full encryption context. + :param selector_context: The encryption context as presented to + the branch key selector. 
+ """ + self.attribute_actions_on_encrypt = attribute_actions_on_encrypt + self.algorithm_suite_id = algorithm_suite_id + self.encrypted_data_keys = encrypted_data_keys + self.stored_encryption_context = stored_encryption_context + self.encryption_context = encryption_context + self.selector_context = selector_context + + def as_dict(self) -> Dict[str, Any]: + """Converts the ParsedHeader to a dictionary.""" + return { + "attribute_actions_on_encrypt": self.attribute_actions_on_encrypt, + "algorithm_suite_id": self.algorithm_suite_id, + "encrypted_data_keys": self.encrypted_data_keys, + "stored_encryption_context": self.stored_encryption_context, + "encryption_context": self.encryption_context, + "selector_context": self.selector_context, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ParsedHeader": + """Creates a ParsedHeader from a dictionary.""" + kwargs: Dict[str, Any] = { + "attribute_actions_on_encrypt": d["attribute_actions_on_encrypt"], + "algorithm_suite_id": d["algorithm_suite_id"], + "encrypted_data_keys": d["encrypted_data_keys"], + "stored_encryption_context": d["stored_encryption_context"], + "encryption_context": d["encryption_context"], + "selector_context": d["selector_context"], + } + + return ParsedHeader(**kwargs) + + def __repr__(self) -> str: + result = "ParsedHeader(" + if self.attribute_actions_on_encrypt is not None: + result += f"attribute_actions_on_encrypt={repr(self.attribute_actions_on_encrypt)}, " + + if self.algorithm_suite_id is not None: + result += f"algorithm_suite_id={repr(self.algorithm_suite_id)}, " + + if self.encrypted_data_keys is not None: + result += f"encrypted_data_keys={repr(self.encrypted_data_keys)}, " + + if self.stored_encryption_context is not None: + result += f"stored_encryption_context={repr(self.stored_encryption_context)}, " + + if self.encryption_context is not None: + result += f"encryption_context={repr(self.encryption_context)}, " + + if self.selector_context is not None: + result += f"selector_context={repr(self.selector_context)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ParsedHeader): + return False + attributes: list[str] = [ + "attribute_actions_on_encrypt", + "algorithm_suite_id", + "encrypted_data_keys", + "stored_encryption_context", + "encryption_context", + "selector_context", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class DecryptItemOutput: + plaintext_item: "dict[str, dict[str, Any]]" + parsed_header: Optional["ParsedHeader"] + + def __init__( + self, + *, + plaintext_item: "dict[str, dict[str, Any]]", + parsed_header: Optional["ParsedHeader"] = None, + ): + """Outputs for decrypting a DynamoDB Item. + + :param plaintext_item: The decrypted DynamoDB item. + :param parsed_header: A parsed version of the header on the + encrypted DynamoDB item. 
+ """ + self.plaintext_item = plaintext_item + self.parsed_header = parsed_header + + def as_dict(self) -> Dict[str, Any]: + """Converts the DecryptItemOutput to a dictionary.""" + d: Dict[str, Any] = { + "plaintext_item": self.plaintext_item, + } + + if self.parsed_header is not None: + d["parsed_header"] = self.parsed_header.as_dict() + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DecryptItemOutput": + """Creates a DecryptItemOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "plaintext_item": d["plaintext_item"], + } + + if "parsed_header" in d: + kwargs["parsed_header"] = ParsedHeader.from_dict(d["parsed_header"]) + + return DecryptItemOutput(**kwargs) + + def __repr__(self) -> str: + result = "DecryptItemOutput(" + if self.plaintext_item is not None: + result += f"plaintext_item={repr(self.plaintext_item)}, " + + if self.parsed_header is not None: + result += f"parsed_header={repr(self.parsed_header)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DecryptItemOutput): + return False + attributes: list[str] = [ + "plaintext_item", + "parsed_header", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class EncryptItemOutput: + encrypted_item: "dict[str, dict[str, Any]]" + parsed_header: Optional["ParsedHeader"] + + def __init__( + self, + *, + encrypted_item: "dict[str, dict[str, Any]]", + parsed_header: Optional["ParsedHeader"] = None, + ): + """Outputs for encrypting a DynamoDB Item. + + :param encrypted_item: The encrypted DynamoDB item. + :param parsed_header: A parsed version of the header written + with the encrypted DynamoDB item. + """ + self.encrypted_item = encrypted_item + self.parsed_header = parsed_header + + def as_dict(self) -> Dict[str, Any]: + """Converts the EncryptItemOutput to a dictionary.""" + d: Dict[str, Any] = { + "encrypted_item": self.encrypted_item, + } + + if self.parsed_header is not None: + d["parsed_header"] = self.parsed_header.as_dict() + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "EncryptItemOutput": + """Creates a EncryptItemOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "encrypted_item": d["encrypted_item"], + } + + if "parsed_header" in d: + kwargs["parsed_header"] = ParsedHeader.from_dict(d["parsed_header"]) + + return EncryptItemOutput(**kwargs) + + def __repr__(self) -> str: + result = "EncryptItemOutput(" + if self.encrypted_item is not None: + result += f"encrypted_item={repr(self.encrypted_item)}, " + + if self.parsed_header is not None: + result += f"parsed_header={repr(self.parsed_header)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, EncryptItemOutput): + return False + attributes: list[str] = [ + "encrypted_item", + "parsed_header", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class Unit: + pass diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/plugin.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/plugin.py new file mode 100644 index 000000000..35a252355 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/plugin.py @@ -0,0 +1,49 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +from .config import ( + Config, + Plugin, + smithy_config_to_dafny_config, + DynamoDbItemEncryptorConfig, +) +from smithy_python.interfaces.retries import RetryStrategy +from smithy_python.exceptions import SmithyRetryException +from .dafnyImplInterface import DafnyImplInterface + + +def set_config_impl(config: Config): + """Set the Dafny-compiled implementation in the Smithy-Python client Config + and load our custom NoRetriesStrategy.""" + config.dafnyImplInterface = DafnyImplInterface() + if isinstance(config, DynamoDbItemEncryptorConfig): + from aws_dbesdk_dynamodb.internaldafny.generated.DynamoDbItemEncryptor import ( + default__, + ) + + config.dafnyImplInterface.impl = default__.DynamoDbItemEncryptor(smithy_config_to_dafny_config(config)).value + config.retry_strategy = NoRetriesStrategy() + + +class ZeroRetryDelayToken: + """Placeholder class required by Smithy-Python client implementation. + + Do not wait to retry. + """ + + retry_delay = 0 + + +class NoRetriesStrategy(RetryStrategy): + """Placeholder class required by Smithy-Python client implementation. + + Do not retry calling Dafny code. + """ + + def acquire_initial_retry_token(self): + return ZeroRetryDelayToken() + + def refresh_retry_token_for_retry(self, token_to_renew, error_info): + # Do not retry + raise SmithyRetryException() diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/serialize.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/serialize.py new file mode 100644 index 000000000..8764734dd --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/serialize.py @@ -0,0 +1,27 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
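+
+# Each serializer below wraps a native Smithy-modeled input into a DafnyRequest:
+# the operation name identifies the operation being invoked, and the input is
+# converted to its Dafny shape by the matching smithy_to_dafny function.
+# The Config argument is part of the serializer signature but is not used here.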
+ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.smithy_to_dafny + +from .dafny_protocol import DafnyRequest + +from .config import Config + + +def _serialize_encrypt_item(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="EncryptItem", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_EncryptItemInput( + input + ), + ) + + +def _serialize_decrypt_item(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="DecryptItem", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DecryptItemInput( + input + ), + ) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/smithy_to_dafny.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/smithy_to_dafny.py new file mode 100644 index 000000000..215ceeff3 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/smithy_to_dafny.py @@ -0,0 +1,332 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +from _dafny import Map, Seq +import aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny +import aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes import ( + DecryptItemInput_DecryptItemInput as DafnyDecryptItemInput, + DecryptItemOutput_DecryptItemOutput as DafnyDecryptItemOutput, + DynamoDbItemEncryptorConfig_DynamoDbItemEncryptorConfig as DafnyDynamoDbItemEncryptorConfig, + EncryptItemInput_EncryptItemInput as DafnyEncryptItemInput, + EncryptItemOutput_EncryptItemOutput as DafnyEncryptItemOutput, + ParsedHeader_ParsedHeader as DafnyParsedHeader, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.smithy_to_dafny +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny +from smithy_dafny_standard_library.internaldafny.generated.Wrappers import ( + Option_None, + Option_Some, +) + + +def aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_EncryptItemInput( + native_input, +): + return DafnyEncryptItemInput( + plaintextItem=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input.plaintext_item.items() + } + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DecryptItemInput( + native_input, +): + return DafnyDecryptItemInput( + 
encryptedItem=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input.encrypted_item.items() + } + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_EncryptItemOutput( + native_input, +): + return DafnyEncryptItemOutput( + encryptedItem=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input.encrypted_item.items() + } + ), + parsedHeader=( + ( + Option_Some( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_ParsedHeader( + native_input.parsed_header + ) + ) + ) + if (native_input.parsed_header is not None) + else (Option_None()) + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_ParsedHeader(native_input): + return DafnyParsedHeader( + attributeActionsOnEncrypt=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + value + ) + for (key, value) in native_input.attribute_actions_on_encrypt.items() + } + ), + algorithmSuiteId=aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_DBEAlgorithmSuiteId( + native_input.algorithm_suite_id + ), + encryptedDataKeys=Seq( + [ + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_EncryptedDataKey( + list_element + ) + for list_element in native_input.encrypted_data_keys + ] + ), + storedEncryptionContext=Map( + { + Seq(key.encode("utf-8")): Seq(value.encode("utf-8")) + for (key, value) in native_input.stored_encryption_context.items() + } + ), + encryptionContext=Map( + { + Seq(key.encode("utf-8")): Seq(value.encode("utf-8")) + for (key, value) in native_input.encryption_context.items() + } + ), + selectorContext=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input.selector_context.items() + } + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DecryptItemOutput( + native_input, +): + return DafnyDecryptItemOutput( + plaintextItem=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input.plaintext_item.items() + } + ), + parsedHeader=( + ( + Option_Some( + 
aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_ParsedHeader( + native_input.parsed_header + ) + ) + ) + if (native_input.parsed_header is not None) + else (Option_None()) + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_AtomicPrimitivesReference( + native_input, +): + return native_input._config.dafnyImplInterface.impl + + +def aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DynamoDbItemEncryptorConfig( + native_input, +): + return DafnyDynamoDbItemEncryptorConfig( + logicalTableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.logical_table_name.encode("utf-16-be"))] * 2) + ] + ) + ), + partitionKeyName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.partition_key_name.encode("utf-16-be"))] * 2) + ] + ) + ), + sortKeyName=( + ( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.sort_key_name.encode("utf-16-be"))] * 2) + ] + ) + ) + ) + ) + if (native_input.sort_key_name is not None) + else (Option_None()) + ), + attributeActionsOnEncrypt=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + value + ) + for (key, value) in native_input.attribute_actions_on_encrypt.items() + } + ), + allowedUnsignedAttributes=( + ( + Option_Some( + Seq( + [ + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(list_element.encode("utf-16-be"))] * 2) + ] + ) + ) + for list_element in native_input.allowed_unsigned_attributes + ] + ) + ) + ) + if (native_input.allowed_unsigned_attributes is not None) + else (Option_None()) + ), + allowedUnsignedAttributePrefix=( + ( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip( + *[iter(native_input.allowed_unsigned_attribute_prefix.encode("utf-16-be"))] * 2 + ) + ] + ) + ) + ) + ) + if (native_input.allowed_unsigned_attribute_prefix is not None) + else (Option_None()) + ), + algorithmSuiteId=( + ( + Option_Some( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_DBEAlgorithmSuiteId( + native_input.algorithm_suite_id + ) + ) + ) + if (native_input.algorithm_suite_id is not None) + else (Option_None()) + ), + keyring=( + ( + Option_Some( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_KeyringReference( + native_input.keyring + ) + ) + ) + if ( + (native_input.keyring is not None) + and ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_KeyringReference( + native_input.keyring + ) + is not None + ) + ) + else (Option_None()) + ), + cmm=( + ( + Option_Some( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_CryptographicMaterialsManagerReference( + native_input.cmm + ) + ) + ) + if ( + (native_input.cmm is not None) + and ( + 
aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_CryptographicMaterialsManagerReference( + native_input.cmm + ) + is not None + ) + ) + else (Option_None()) + ), + legacyOverride=( + ( + Option_Some( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_LegacyOverride( + native_input.legacy_override + ) + ) + ) + if (native_input.legacy_override is not None) + else (Option_None()) + ), + plaintextOverride=( + ( + Option_Some( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_PlaintextOverride( + native_input.plaintext_override + ) + ) + ) + if (native_input.plaintext_override is not None) + else (Option_None()) + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_StructuredEncryptionReference( + native_input, +): + return native_input._config.dafnyImplInterface.impl diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/__init__.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/__init__.py new file mode 100644 index 000000000..09be6133b --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/aws_sdk_to_dafny.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/aws_sdk_to_dafny.py new file mode 100644 index 000000000..12d8711b8 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/aws_sdk_to_dafny.py @@ -0,0 +1,3174 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
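+
+# The converters below map boto3-style request/response dictionaries onto the
+# corresponding Dafny types. Two patterns repeat throughout this module:
+#
+#   * Strings: Dafny strings are represented here as sequences of UTF-16 code
+#     units, so a native Python string is encoded as UTF-16-BE and regrouped
+#     into 2-byte units, e.g.
+#       Seq("".join([chr(int.from_bytes(pair, "big"))
+#                    for pair in zip(*[iter(s.encode("utf-16-be"))] * 2)]))
+#
+#   * Optional members: a key that is present in the native dict becomes
+#     Option_Some(converted_value); a missing key becomes Option_None().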
+ +from _dafny import Map, Seq +from aws_cryptography_internal_dynamodb.internaldafny.generated.ComAmazonawsDynamodbTypes import ( + AttributeAction_ADD, + AttributeAction_DELETE, + AttributeAction_PUT, + AttributeValueUpdate_AttributeValueUpdate as DafnyAttributeValueUpdate, + AttributeValue_B, + AttributeValue_BOOL, + AttributeValue_BS, + AttributeValue_L, + AttributeValue_M, + AttributeValue_N, + AttributeValue_NS, + AttributeValue_NULL, + AttributeValue_S, + AttributeValue_SS, + BatchExecuteStatementInput_BatchExecuteStatementInput as DafnyBatchExecuteStatementInput, + BatchExecuteStatementOutput_BatchExecuteStatementOutput as DafnyBatchExecuteStatementOutput, + BatchGetItemInput_BatchGetItemInput as DafnyBatchGetItemInput, + BatchGetItemOutput_BatchGetItemOutput as DafnyBatchGetItemOutput, + BatchStatementErrorCodeEnum_AccessDenied, + BatchStatementErrorCodeEnum_ConditionalCheckFailed, + BatchStatementErrorCodeEnum_DuplicateItem, + BatchStatementErrorCodeEnum_InternalServerError, + BatchStatementErrorCodeEnum_ItemCollectionSizeLimitExceeded, + BatchStatementErrorCodeEnum_ProvisionedThroughputExceeded, + BatchStatementErrorCodeEnum_RequestLimitExceeded, + BatchStatementErrorCodeEnum_ResourceNotFound, + BatchStatementErrorCodeEnum_ThrottlingError, + BatchStatementErrorCodeEnum_TransactionConflict, + BatchStatementErrorCodeEnum_ValidationError, + BatchStatementError_BatchStatementError as DafnyBatchStatementError, + BatchStatementRequest_BatchStatementRequest as DafnyBatchStatementRequest, + BatchStatementResponse_BatchStatementResponse as DafnyBatchStatementResponse, + BatchWriteItemInput_BatchWriteItemInput as DafnyBatchWriteItemInput, + BatchWriteItemOutput_BatchWriteItemOutput as DafnyBatchWriteItemOutput, + Capacity_Capacity as DafnyCapacity, + ComparisonOperator_BEGINS__WITH, + ComparisonOperator_BETWEEN, + ComparisonOperator_CONTAINS, + ComparisonOperator_EQ, + ComparisonOperator_GE, + ComparisonOperator_GT, + ComparisonOperator_IN, + ComparisonOperator_LE, + ComparisonOperator_LT, + ComparisonOperator_NE, + ComparisonOperator_NOT__CONTAINS, + ComparisonOperator_NOT__NULL, + ComparisonOperator_NULL, + ConditionCheck_ConditionCheck as DafnyConditionCheck, + Condition_Condition as DafnyCondition, + ConditionalOperator_AND, + ConditionalOperator_OR, + ConsumedCapacity_ConsumedCapacity as DafnyConsumedCapacity, + DeleteItemInput_DeleteItemInput as DafnyDeleteItemInput, + DeleteItemOutput_DeleteItemOutput as DafnyDeleteItemOutput, + DeleteRequest_DeleteRequest as DafnyDeleteRequest, + Delete_Delete as DafnyDelete, + ExecuteStatementInput_ExecuteStatementInput as DafnyExecuteStatementInput, + ExecuteStatementOutput_ExecuteStatementOutput as DafnyExecuteStatementOutput, + ExecuteTransactionInput_ExecuteTransactionInput as DafnyExecuteTransactionInput, + ExecuteTransactionOutput_ExecuteTransactionOutput as DafnyExecuteTransactionOutput, + ExpectedAttributeValue_ExpectedAttributeValue as DafnyExpectedAttributeValue, + GetItemInput_GetItemInput as DafnyGetItemInput, + GetItemOutput_GetItemOutput as DafnyGetItemOutput, + Get_Get as DafnyGet, + ItemCollectionMetrics_ItemCollectionMetrics as DafnyItemCollectionMetrics, + ItemResponse_ItemResponse as DafnyItemResponse, + KeysAndAttributes_KeysAndAttributes as DafnyKeysAndAttributes, + ParameterizedStatement_ParameterizedStatement as DafnyParameterizedStatement, + PutItemInput_PutItemInput as DafnyPutItemInput, + PutItemOutput_PutItemOutput as DafnyPutItemOutput, + PutRequest_PutRequest as DafnyPutRequest, + Put_Put as DafnyPut, + 
QueryInput_QueryInput as DafnyQueryInput, + QueryOutput_QueryOutput as DafnyQueryOutput, + ReturnConsumedCapacity_INDEXES, + ReturnConsumedCapacity_NONE, + ReturnConsumedCapacity_TOTAL, + ReturnItemCollectionMetrics_NONE, + ReturnItemCollectionMetrics_SIZE, + ReturnValue_ALL__NEW, + ReturnValue_ALL__OLD, + ReturnValue_NONE, + ReturnValue_UPDATED__NEW, + ReturnValue_UPDATED__OLD, + ReturnValuesOnConditionCheckFailure_ALL__OLD, + ReturnValuesOnConditionCheckFailure_NONE, + ScanInput_ScanInput as DafnyScanInput, + ScanOutput_ScanOutput as DafnyScanOutput, + Select_ALL__ATTRIBUTES, + Select_ALL__PROJECTED__ATTRIBUTES, + Select_COUNT, + Select_SPECIFIC__ATTRIBUTES, + TransactGetItem_TransactGetItem as DafnyTransactGetItem, + TransactGetItemsInput_TransactGetItemsInput as DafnyTransactGetItemsInput, + TransactGetItemsOutput_TransactGetItemsOutput as DafnyTransactGetItemsOutput, + TransactWriteItem_TransactWriteItem as DafnyTransactWriteItem, + TransactWriteItemsInput_TransactWriteItemsInput as DafnyTransactWriteItemsInput, + TransactWriteItemsOutput_TransactWriteItemsOutput as DafnyTransactWriteItemsOutput, + UpdateItemInput_UpdateItemInput as DafnyUpdateItemInput, + UpdateItemOutput_UpdateItemOutput as DafnyUpdateItemOutput, + Update_Update as DafnyUpdate, + WriteRequest_WriteRequest as DafnyWriteRequest, +) +import aws_cryptography_internal_dynamodb.internaldafny.generated.module_ +import aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny +from smithy_dafny_standard_library.internaldafny.generated.Wrappers import ( + Option_None, + Option_Some, +) + + +def com_amazonaws_dynamodb_PutItemInput(native_input): + return DafnyPutItemInput( + TableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["TableName"].encode("utf-16-be"))] * 2) + ] + ) + ), + Item=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Item"].items() + } + ), + Expected=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ExpectedAttributeValue( + value + ) + for (key, value) in native_input["Expected"].items() + } + ) + ) + if "Expected" in native_input.keys() + else Option_None() + ), + ReturnValues=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnValue( + native_input["ReturnValues"] + ) + ) + if "ReturnValues" in native_input.keys() + else Option_None() + ), + ReturnConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnConsumedCapacity( + native_input["ReturnConsumedCapacity"] + ) + ) + if "ReturnConsumedCapacity" in native_input.keys() + else Option_None() + ), + ReturnItemCollectionMetrics=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnItemCollectionMetrics( + native_input["ReturnItemCollectionMetrics"] + ) + ) + if "ReturnItemCollectionMetrics" in native_input.keys() + else Option_None() + ), + 
ConditionalOperator=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConditionalOperator( + native_input["ConditionalOperator"] + ) + ) + if "ConditionalOperator" in native_input.keys() + else Option_None() + ), + ConditionExpression=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["ConditionExpression"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "ConditionExpression" in native_input.keys() + else Option_None() + ), + ExpressionAttributeNames=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(value.encode("utf-16-be"))] * 2) + ] + ) + ) + for (key, value) in native_input["ExpressionAttributeNames"].items() + } + ) + ) + if "ExpressionAttributeNames" in native_input.keys() + else Option_None() + ), + ExpressionAttributeValues=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["ExpressionAttributeValues"].items() + } + ) + ) + if "ExpressionAttributeValues" in native_input.keys() + else Option_None() + ), + ReturnValuesOnConditionCheckFailure=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + native_input["ReturnValuesOnConditionCheckFailure"] + ) + ) + if "ReturnValuesOnConditionCheckFailure" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_AttributeValue(native_input): + if "S" in native_input.keys(): + AttributeValue_union_value = AttributeValue_S( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["S"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + elif "N" in native_input.keys(): + AttributeValue_union_value = AttributeValue_N( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["N"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + elif "B" in native_input.keys(): + AttributeValue_union_value = AttributeValue_B(Seq(native_input["B"])) + elif "SS" in native_input.keys(): + AttributeValue_union_value = AttributeValue_SS( + Seq( + [ + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(list_element.encode("utf-16-be"))] * 2) + ] + ) + ) + for list_element in native_input["SS"] + ] + ) + ) + elif "NS" in native_input.keys(): + AttributeValue_union_value = AttributeValue_NS( + Seq( + [ + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(list_element.encode("utf-16-be"))] * 2) + ] + ) + ) + for list_element in native_input["NS"] + ] + ) + ) + elif "BS" in native_input.keys(): + AttributeValue_union_value = AttributeValue_BS(Seq([Seq(list_element) for list_element in native_input["BS"]])) + elif "M" in native_input.keys(): + AttributeValue_union_value = AttributeValue_M( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): 
aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["M"].items() + } + ) + ) + elif "L" in native_input.keys(): + AttributeValue_union_value = AttributeValue_L( + Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + list_element + ) + for list_element in native_input["L"] + ] + ) + ) + elif "NULL" in native_input.keys(): + AttributeValue_union_value = AttributeValue_NULL(native_input["NULL"]) + elif "BOOL" in native_input.keys(): + AttributeValue_union_value = AttributeValue_BOOL(native_input["BOOL"]) + else: + raise ValueError("No recognized union value in union type: " + str(native_input)) + + return AttributeValue_union_value + + +def com_amazonaws_dynamodb_ExpectedAttributeValue(native_input): + return DafnyExpectedAttributeValue( + Value=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + native_input["Value"] + ) + ) + if "Value" in native_input.keys() + else Option_None() + ), + Exists=(Option_Some(native_input["Exists"]) if "Exists" in native_input.keys() else Option_None()), + ComparisonOperator=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ComparisonOperator( + native_input["ComparisonOperator"] + ) + ) + if "ComparisonOperator" in native_input.keys() + else Option_None() + ), + AttributeValueList=( + Option_Some( + Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + list_element + ) + for list_element in native_input["AttributeValueList"] + ] + ) + ) + if "AttributeValueList" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_ComparisonOperator(native_input): + # Convert ComparisonOperator + if native_input == "EQ": + return ComparisonOperator_EQ() + elif native_input == "NE": + return ComparisonOperator_NE() + elif native_input == "IN": + return ComparisonOperator_IN() + elif native_input == "LE": + return ComparisonOperator_LE() + elif native_input == "LT": + return ComparisonOperator_LT() + elif native_input == "GE": + return ComparisonOperator_GE() + elif native_input == "GT": + return ComparisonOperator_GT() + elif native_input == "BETWEEN": + return ComparisonOperator_BETWEEN() + elif native_input == "NOT_NULL": + return ComparisonOperator_NOT__NULL() + elif native_input == "NULL": + return ComparisonOperator_NULL() + elif native_input == "CONTAINS": + return ComparisonOperator_CONTAINS() + elif native_input == "NOT_CONTAINS": + return ComparisonOperator_NOT__CONTAINS() + elif native_input == "BEGINS_WITH": + return ComparisonOperator_BEGINS__WITH() + else: + raise ValueError("No recognized enum value in enum type: " + native_input) + + +def com_amazonaws_dynamodb_ReturnValue(native_input): + # Convert ReturnValue + if native_input == "NONE": + return ReturnValue_NONE() + elif native_input == "ALL_OLD": + return ReturnValue_ALL__OLD() + elif native_input == "UPDATED_OLD": + return ReturnValue_UPDATED__OLD() + elif native_input == "ALL_NEW": + return ReturnValue_ALL__NEW() + elif native_input == "UPDATED_NEW": + return ReturnValue_UPDATED__NEW() + else: + raise ValueError("No recognized enum value in enum type: " + native_input) + + +def 
com_amazonaws_dynamodb_ReturnConsumedCapacity(native_input): + # Convert ReturnConsumedCapacity + if native_input == "INDEXES": + return ReturnConsumedCapacity_INDEXES() + elif native_input == "TOTAL": + return ReturnConsumedCapacity_TOTAL() + elif native_input == "NONE": + return ReturnConsumedCapacity_NONE() + else: + raise ValueError("No recognized enum value in enum type: " + native_input) + + +def com_amazonaws_dynamodb_ReturnItemCollectionMetrics(native_input): + # Convert ReturnItemCollectionMetrics + if native_input == "SIZE": + return ReturnItemCollectionMetrics_SIZE() + elif native_input == "NONE": + return ReturnItemCollectionMetrics_NONE() + else: + raise ValueError("No recognized enum value in enum type: " + native_input) + + +def com_amazonaws_dynamodb_ConditionalOperator(native_input): + # Convert ConditionalOperator + if native_input == "AND": + return ConditionalOperator_AND() + elif native_input == "OR": + return ConditionalOperator_OR() + else: + raise ValueError("No recognized enum value in enum type: " + native_input) + + +def com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure(native_input): + # Convert ReturnValuesOnConditionCheckFailure + if native_input == "ALL_OLD": + return ReturnValuesOnConditionCheckFailure_ALL__OLD() + elif native_input == "NONE": + return ReturnValuesOnConditionCheckFailure_NONE() + else: + raise ValueError("No recognized enum value in enum type: " + native_input) + + +def com_amazonaws_dynamodb_PutItemOutput(native_input): + return DafnyPutItemOutput( + Attributes=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Attributes"].items() + } + ) + ) + if "Attributes" in native_input.keys() + else Option_None() + ), + ConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConsumedCapacity( + native_input["ConsumedCapacity"] + ) + ) + if "ConsumedCapacity" in native_input.keys() + else Option_None() + ), + ItemCollectionMetrics=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ItemCollectionMetrics( + native_input["ItemCollectionMetrics"] + ) + ) + if "ItemCollectionMetrics" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_ConsumedCapacity(native_input): + return DafnyConsumedCapacity( + TableName=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["TableName"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "TableName" in native_input.keys() + else Option_None() + ), + CapacityUnits=( + Option_Some(native_input["CapacityUnits"]) if "CapacityUnits" in native_input.keys() else Option_None() + ), + ReadCapacityUnits=( + Option_Some(native_input["ReadCapacityUnits"]) + if "ReadCapacityUnits" in native_input.keys() + else Option_None() + ), + WriteCapacityUnits=( + Option_Some(native_input["WriteCapacityUnits"]) + if "WriteCapacityUnits" in native_input.keys() + else Option_None() + ), + Table=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_Capacity( + native_input["Table"] + ) + ) + if "Table" in native_input.keys() + else 
Option_None() + ), + LocalSecondaryIndexes=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_Capacity( + value + ) + for (key, value) in native_input["LocalSecondaryIndexes"].items() + } + ) + ) + if "LocalSecondaryIndexes" in native_input.keys() + else Option_None() + ), + GlobalSecondaryIndexes=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_Capacity( + value + ) + for (key, value) in native_input["GlobalSecondaryIndexes"].items() + } + ) + ) + if "GlobalSecondaryIndexes" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_Capacity(native_input): + return DafnyCapacity( + ReadCapacityUnits=( + Option_Some(native_input["ReadCapacityUnits"]) + if "ReadCapacityUnits" in native_input.keys() + else Option_None() + ), + WriteCapacityUnits=( + Option_Some(native_input["WriteCapacityUnits"]) + if "WriteCapacityUnits" in native_input.keys() + else Option_None() + ), + CapacityUnits=( + Option_Some(native_input["CapacityUnits"]) if "CapacityUnits" in native_input.keys() else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_ItemCollectionMetrics(native_input): + return DafnyItemCollectionMetrics( + ItemCollectionKey=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["ItemCollectionKey"].items() + } + ) + ) + if "ItemCollectionKey" in native_input.keys() + else Option_None() + ), + SizeEstimateRangeGB=( + Option_Some(Seq([list_element for list_element in native_input["SizeEstimateRangeGB"]])) + if "SizeEstimateRangeGB" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_GetItemInput(native_input): + return DafnyGetItemInput( + TableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["TableName"].encode("utf-16-be"))] * 2) + ] + ) + ), + Key=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Key"].items() + } + ), + AttributesToGet=( + Option_Some( + Seq( + [ + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(list_element.encode("utf-16-be"))] * 2) + ] + ) + ) + for list_element in native_input["AttributesToGet"] + ] + ) + ) + if "AttributesToGet" in native_input.keys() + else Option_None() + ), + ConsistentRead=( + Option_Some(native_input["ConsistentRead"]) if "ConsistentRead" in native_input.keys() else Option_None() + ), + ReturnConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnConsumedCapacity( + native_input["ReturnConsumedCapacity"] + ) + ) + if "ReturnConsumedCapacity" in native_input.keys() + else Option_None() + ), + 
ProjectionExpression=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["ProjectionExpression"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "ProjectionExpression" in native_input.keys() + else Option_None() + ), + ExpressionAttributeNames=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(value.encode("utf-16-be"))] * 2) + ] + ) + ) + for (key, value) in native_input["ExpressionAttributeNames"].items() + } + ) + ) + if "ExpressionAttributeNames" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_GetItemOutput(native_input): + return DafnyGetItemOutput( + Item=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Item"].items() + } + ) + ) + if "Item" in native_input.keys() + else Option_None() + ), + ConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConsumedCapacity( + native_input["ConsumedCapacity"] + ) + ) + if "ConsumedCapacity" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_BatchWriteItemInput(native_input): + return DafnyBatchWriteItemInput( + RequestItems=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_WriteRequest( + list_element + ) + for list_element in value + ] + ) + for (key, value) in native_input["RequestItems"].items() + } + ), + ReturnConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnConsumedCapacity( + native_input["ReturnConsumedCapacity"] + ) + ) + if "ReturnConsumedCapacity" in native_input.keys() + else Option_None() + ), + ReturnItemCollectionMetrics=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnItemCollectionMetrics( + native_input["ReturnItemCollectionMetrics"] + ) + ) + if "ReturnItemCollectionMetrics" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_WriteRequest(native_input): + return DafnyWriteRequest( + PutRequest=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_PutRequest( + native_input["PutRequest"] + ) + ) + if "PutRequest" in native_input.keys() + else Option_None() + ), + DeleteRequest=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_DeleteRequest( + native_input["DeleteRequest"] + ) + ) + if "DeleteRequest" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_PutRequest(native_input): + return DafnyPutRequest( + Item=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): 
aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Item"].items() + } + ), + ) + + +def com_amazonaws_dynamodb_DeleteRequest(native_input): + return DafnyDeleteRequest( + Key=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Key"].items() + } + ), + ) + + +def com_amazonaws_dynamodb_BatchWriteItemOutput(native_input): + return DafnyBatchWriteItemOutput( + UnprocessedItems=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_WriteRequest( + list_element + ) + for list_element in value + ] + ) + for (key, value) in native_input["UnprocessedItems"].items() + } + ) + ) + if "UnprocessedItems" in native_input.keys() + else Option_None() + ), + ItemCollectionMetrics=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ItemCollectionMetrics( + list_element + ) + for list_element in value + ] + ) + for (key, value) in native_input["ItemCollectionMetrics"].items() + } + ) + ) + if "ItemCollectionMetrics" in native_input.keys() + else Option_None() + ), + ConsumedCapacity=( + Option_Some( + Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConsumedCapacity( + list_element + ) + for list_element in native_input["ConsumedCapacity"] + ] + ) + ) + if "ConsumedCapacity" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_BatchGetItemInput(native_input): + return DafnyBatchGetItemInput( + RequestItems=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_KeysAndAttributes( + value + ) + for (key, value) in native_input["RequestItems"].items() + } + ), + ReturnConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnConsumedCapacity( + native_input["ReturnConsumedCapacity"] + ) + ) + if "ReturnConsumedCapacity" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_KeysAndAttributes(native_input): + return DafnyKeysAndAttributes( + Keys=Seq( + [ + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in list_element.items() + } + ) + for list_element in native_input["Keys"] + ] + ), + AttributesToGet=( + Option_Some( + Seq( + [ + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(list_element.encode("utf-16-be"))] * 2) + ] + ) + ) + for 
list_element in native_input["AttributesToGet"] + ] + ) + ) + if "AttributesToGet" in native_input.keys() + else Option_None() + ), + ConsistentRead=( + Option_Some(native_input["ConsistentRead"]) if "ConsistentRead" in native_input.keys() else Option_None() + ), + ProjectionExpression=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["ProjectionExpression"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "ProjectionExpression" in native_input.keys() + else Option_None() + ), + ExpressionAttributeNames=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(value.encode("utf-16-be"))] * 2) + ] + ) + ) + for (key, value) in native_input["ExpressionAttributeNames"].items() + } + ) + ) + if "ExpressionAttributeNames" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_BatchGetItemOutput(native_input): + return DafnyBatchGetItemOutput( + Responses=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): Seq( + [ + Map( + { + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(key.encode("utf-16-be"))] * 2) + ] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in list_element.items() + } + ) + for list_element in value + ] + ) + for (key, value) in native_input["Responses"].items() + } + ) + ) + if "Responses" in native_input.keys() + else Option_None() + ), + UnprocessedKeys=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_KeysAndAttributes( + value + ) + for (key, value) in native_input["UnprocessedKeys"].items() + } + ) + ) + if "UnprocessedKeys" in native_input.keys() + else Option_None() + ), + ConsumedCapacity=( + Option_Some( + Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConsumedCapacity( + list_element + ) + for list_element in native_input["ConsumedCapacity"] + ] + ) + ) + if "ConsumedCapacity" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_ScanInput(native_input): + return DafnyScanInput( + TableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["TableName"].encode("utf-16-be"))] * 2) + ] + ) + ), + IndexName=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["IndexName"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "IndexName" in native_input.keys() + else Option_None() + ), + AttributesToGet=( + Option_Some( + Seq( + [ + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(list_element.encode("utf-16-be"))] * 2) + ] + ) + ) + for list_element in native_input["AttributesToGet"] + ] + ) + ) + if "AttributesToGet" in native_input.keys() + else Option_None() + ), + Limit=(Option_Some(native_input["Limit"]) if "Limit" in native_input.keys() else Option_None()), + Select=( + Option_Some( + 
aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_Select( + native_input["Select"] + ) + ) + if "Select" in native_input.keys() + else Option_None() + ), + ScanFilter=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_Condition( + value + ) + for (key, value) in native_input["ScanFilter"].items() + } + ) + ) + if "ScanFilter" in native_input.keys() + else Option_None() + ), + ConditionalOperator=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConditionalOperator( + native_input["ConditionalOperator"] + ) + ) + if "ConditionalOperator" in native_input.keys() + else Option_None() + ), + ExclusiveStartKey=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["ExclusiveStartKey"].items() + } + ) + ) + if "ExclusiveStartKey" in native_input.keys() + else Option_None() + ), + ReturnConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnConsumedCapacity( + native_input["ReturnConsumedCapacity"] + ) + ) + if "ReturnConsumedCapacity" in native_input.keys() + else Option_None() + ), + TotalSegments=( + Option_Some(native_input["TotalSegments"]) if "TotalSegments" in native_input.keys() else Option_None() + ), + Segment=(Option_Some(native_input["Segment"]) if "Segment" in native_input.keys() else Option_None()), + ProjectionExpression=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["ProjectionExpression"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "ProjectionExpression" in native_input.keys() + else Option_None() + ), + FilterExpression=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["FilterExpression"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "FilterExpression" in native_input.keys() + else Option_None() + ), + ExpressionAttributeNames=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(value.encode("utf-16-be"))] * 2) + ] + ) + ) + for (key, value) in native_input["ExpressionAttributeNames"].items() + } + ) + ) + if "ExpressionAttributeNames" in native_input.keys() + else Option_None() + ), + ExpressionAttributeValues=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["ExpressionAttributeValues"].items() + } + ) + ) + if "ExpressionAttributeValues" in native_input.keys() + else Option_None() + ), + ConsistentRead=( + Option_Some(native_input["ConsistentRead"]) if "ConsistentRead" in native_input.keys() else 
Option_None() + ), + ) + + +def com_amazonaws_dynamodb_Select(native_input): + # Convert Select + if native_input == "ALL_ATTRIBUTES": + return Select_ALL__ATTRIBUTES() + elif native_input == "ALL_PROJECTED_ATTRIBUTES": + return Select_ALL__PROJECTED__ATTRIBUTES() + elif native_input == "SPECIFIC_ATTRIBUTES": + return Select_SPECIFIC__ATTRIBUTES() + elif native_input == "COUNT": + return Select_COUNT() + else: + raise ValueError("No recognized enum value in enum type: " + native_input) + + +def com_amazonaws_dynamodb_Condition(native_input): + return DafnyCondition( + AttributeValueList=( + Option_Some( + Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + list_element + ) + for list_element in native_input["AttributeValueList"] + ] + ) + ) + if "AttributeValueList" in native_input.keys() + else Option_None() + ), + ComparisonOperator=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ComparisonOperator( + native_input["ComparisonOperator"] + ), + ) + + +def com_amazonaws_dynamodb_ScanOutput(native_input): + return DafnyScanOutput( + Items=( + Option_Some( + Seq( + [ + Map( + { + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(key.encode("utf-16-be"))] * 2) + ] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in list_element.items() + } + ) + for list_element in native_input["Items"] + ] + ) + ) + if "Items" in native_input.keys() + else Option_None() + ), + Count=(Option_Some(native_input["Count"]) if "Count" in native_input.keys() else Option_None()), + ScannedCount=( + Option_Some(native_input["ScannedCount"]) if "ScannedCount" in native_input.keys() else Option_None() + ), + LastEvaluatedKey=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["LastEvaluatedKey"].items() + } + ) + ) + if "LastEvaluatedKey" in native_input.keys() + else Option_None() + ), + ConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConsumedCapacity( + native_input["ConsumedCapacity"] + ) + ) + if "ConsumedCapacity" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_QueryInput(native_input): + return DafnyQueryInput( + TableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["TableName"].encode("utf-16-be"))] * 2) + ] + ) + ), + IndexName=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["IndexName"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "IndexName" in native_input.keys() + else Option_None() + ), + Select=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_Select( + native_input["Select"] + ) + ) + if "Select" in native_input.keys() + else Option_None() + ), + AttributesToGet=( + Option_Some( + Seq( + [ + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(list_element.encode("utf-16-be"))] * 
2) + ] + ) + ) + for list_element in native_input["AttributesToGet"] + ] + ) + ) + if "AttributesToGet" in native_input.keys() + else Option_None() + ), + Limit=(Option_Some(native_input["Limit"]) if "Limit" in native_input.keys() else Option_None()), + ConsistentRead=( + Option_Some(native_input["ConsistentRead"]) if "ConsistentRead" in native_input.keys() else Option_None() + ), + KeyConditions=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_Condition( + value + ) + for (key, value) in native_input["KeyConditions"].items() + } + ) + ) + if "KeyConditions" in native_input.keys() + else Option_None() + ), + QueryFilter=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_Condition( + value + ) + for (key, value) in native_input["QueryFilter"].items() + } + ) + ) + if "QueryFilter" in native_input.keys() + else Option_None() + ), + ConditionalOperator=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConditionalOperator( + native_input["ConditionalOperator"] + ) + ) + if "ConditionalOperator" in native_input.keys() + else Option_None() + ), + ScanIndexForward=( + Option_Some(native_input["ScanIndexForward"]) + if "ScanIndexForward" in native_input.keys() + else Option_None() + ), + ExclusiveStartKey=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["ExclusiveStartKey"].items() + } + ) + ) + if "ExclusiveStartKey" in native_input.keys() + else Option_None() + ), + ReturnConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnConsumedCapacity( + native_input["ReturnConsumedCapacity"] + ) + ) + if "ReturnConsumedCapacity" in native_input.keys() + else Option_None() + ), + ProjectionExpression=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["ProjectionExpression"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "ProjectionExpression" in native_input.keys() + else Option_None() + ), + FilterExpression=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["FilterExpression"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "FilterExpression" in native_input.keys() + else Option_None() + ), + KeyConditionExpression=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["KeyConditionExpression"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "KeyConditionExpression" in native_input.keys() + else Option_None() + ), + ExpressionAttributeNames=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in 
zip(*[iter(value.encode("utf-16-be"))] * 2) + ] + ) + ) + for (key, value) in native_input["ExpressionAttributeNames"].items() + } + ) + ) + if "ExpressionAttributeNames" in native_input.keys() + else Option_None() + ), + ExpressionAttributeValues=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["ExpressionAttributeValues"].items() + } + ) + ) + if "ExpressionAttributeValues" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_QueryOutput(native_input): + return DafnyQueryOutput( + Items=( + Option_Some( + Seq( + [ + Map( + { + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(key.encode("utf-16-be"))] * 2) + ] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in list_element.items() + } + ) + for list_element in native_input["Items"] + ] + ) + ) + if "Items" in native_input.keys() + else Option_None() + ), + Count=(Option_Some(native_input["Count"]) if "Count" in native_input.keys() else Option_None()), + ScannedCount=( + Option_Some(native_input["ScannedCount"]) if "ScannedCount" in native_input.keys() else Option_None() + ), + LastEvaluatedKey=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["LastEvaluatedKey"].items() + } + ) + ) + if "LastEvaluatedKey" in native_input.keys() + else Option_None() + ), + ConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConsumedCapacity( + native_input["ConsumedCapacity"] + ) + ) + if "ConsumedCapacity" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_TransactWriteItemsInput(native_input): + return DafnyTransactWriteItemsInput( + TransactItems=Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_TransactWriteItem( + list_element + ) + for list_element in native_input["TransactItems"] + ] + ), + ReturnConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnConsumedCapacity( + native_input["ReturnConsumedCapacity"] + ) + ) + if "ReturnConsumedCapacity" in native_input.keys() + else Option_None() + ), + ReturnItemCollectionMetrics=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnItemCollectionMetrics( + native_input["ReturnItemCollectionMetrics"] + ) + ) + if "ReturnItemCollectionMetrics" in native_input.keys() + else Option_None() + ), + ClientRequestToken=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["ClientRequestToken"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "ClientRequestToken" in native_input.keys() + else Option_None() + ), + ) + + +def 
com_amazonaws_dynamodb_TransactWriteItem(native_input): + return DafnyTransactWriteItem( + ConditionCheck=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConditionCheck( + native_input["ConditionCheck"] + ) + ) + if "ConditionCheck" in native_input.keys() + else Option_None() + ), + Put=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_Put( + native_input["Put"] + ) + ) + if "Put" in native_input.keys() + else Option_None() + ), + Delete=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_Delete( + native_input["Delete"] + ) + ) + if "Delete" in native_input.keys() + else Option_None() + ), + Update=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_Update( + native_input["Update"] + ) + ) + if "Update" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_ConditionCheck(native_input): + return DafnyConditionCheck( + Key=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Key"].items() + } + ), + TableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["TableName"].encode("utf-16-be"))] * 2) + ] + ) + ), + ConditionExpression=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["ConditionExpression"].encode("utf-16-be"))] * 2) + ] + ) + ), + ExpressionAttributeNames=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(value.encode("utf-16-be"))] * 2) + ] + ) + ) + for (key, value) in native_input["ExpressionAttributeNames"].items() + } + ) + ) + if "ExpressionAttributeNames" in native_input.keys() + else Option_None() + ), + ExpressionAttributeValues=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["ExpressionAttributeValues"].items() + } + ) + ) + if "ExpressionAttributeValues" in native_input.keys() + else Option_None() + ), + ReturnValuesOnConditionCheckFailure=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + native_input["ReturnValuesOnConditionCheckFailure"] + ) + ) + if "ReturnValuesOnConditionCheckFailure" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_Put(native_input): + return DafnyPut( + Item=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Item"].items() 
+ } + ), + TableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["TableName"].encode("utf-16-be"))] * 2) + ] + ) + ), + ConditionExpression=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["ConditionExpression"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "ConditionExpression" in native_input.keys() + else Option_None() + ), + ExpressionAttributeNames=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(value.encode("utf-16-be"))] * 2) + ] + ) + ) + for (key, value) in native_input["ExpressionAttributeNames"].items() + } + ) + ) + if "ExpressionAttributeNames" in native_input.keys() + else Option_None() + ), + ExpressionAttributeValues=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["ExpressionAttributeValues"].items() + } + ) + ) + if "ExpressionAttributeValues" in native_input.keys() + else Option_None() + ), + ReturnValuesOnConditionCheckFailure=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + native_input["ReturnValuesOnConditionCheckFailure"] + ) + ) + if "ReturnValuesOnConditionCheckFailure" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_Delete(native_input): + return DafnyDelete( + Key=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Key"].items() + } + ), + TableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["TableName"].encode("utf-16-be"))] * 2) + ] + ) + ), + ConditionExpression=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["ConditionExpression"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "ConditionExpression" in native_input.keys() + else Option_None() + ), + ExpressionAttributeNames=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(value.encode("utf-16-be"))] * 2) + ] + ) + ) + for (key, value) in native_input["ExpressionAttributeNames"].items() + } + ) + ) + if "ExpressionAttributeNames" in native_input.keys() + else Option_None() + ), + ExpressionAttributeValues=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["ExpressionAttributeValues"].items() + } + ) + ) + if "ExpressionAttributeValues" in native_input.keys() + else Option_None() + ), + 
ReturnValuesOnConditionCheckFailure=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + native_input["ReturnValuesOnConditionCheckFailure"] + ) + ) + if "ReturnValuesOnConditionCheckFailure" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_Update(native_input): + return DafnyUpdate( + Key=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Key"].items() + } + ), + UpdateExpression=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["UpdateExpression"].encode("utf-16-be"))] * 2) + ] + ) + ), + TableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["TableName"].encode("utf-16-be"))] * 2) + ] + ) + ), + ConditionExpression=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["ConditionExpression"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "ConditionExpression" in native_input.keys() + else Option_None() + ), + ExpressionAttributeNames=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(value.encode("utf-16-be"))] * 2) + ] + ) + ) + for (key, value) in native_input["ExpressionAttributeNames"].items() + } + ) + ) + if "ExpressionAttributeNames" in native_input.keys() + else Option_None() + ), + ExpressionAttributeValues=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["ExpressionAttributeValues"].items() + } + ) + ) + if "ExpressionAttributeValues" in native_input.keys() + else Option_None() + ), + ReturnValuesOnConditionCheckFailure=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + native_input["ReturnValuesOnConditionCheckFailure"] + ) + ) + if "ReturnValuesOnConditionCheckFailure" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_TransactWriteItemsOutput(native_input): + return DafnyTransactWriteItemsOutput( + ConsumedCapacity=( + Option_Some( + Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConsumedCapacity( + list_element + ) + for list_element in native_input["ConsumedCapacity"] + ] + ) + ) + if "ConsumedCapacity" in native_input.keys() + else Option_None() + ), + ItemCollectionMetrics=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ItemCollectionMetrics( + list_element + ) + for list_element in value + ] + ) + for (key, value) in 
native_input["ItemCollectionMetrics"].items() + } + ) + ) + if "ItemCollectionMetrics" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_UpdateItemInput(native_input): + return DafnyUpdateItemInput( + TableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["TableName"].encode("utf-16-be"))] * 2) + ] + ) + ), + Key=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Key"].items() + } + ), + AttributeUpdates=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValueUpdate( + value + ) + for (key, value) in native_input["AttributeUpdates"].items() + } + ) + ) + if "AttributeUpdates" in native_input.keys() + else Option_None() + ), + Expected=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ExpectedAttributeValue( + value + ) + for (key, value) in native_input["Expected"].items() + } + ) + ) + if "Expected" in native_input.keys() + else Option_None() + ), + ConditionalOperator=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConditionalOperator( + native_input["ConditionalOperator"] + ) + ) + if "ConditionalOperator" in native_input.keys() + else Option_None() + ), + ReturnValues=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnValue( + native_input["ReturnValues"] + ) + ) + if "ReturnValues" in native_input.keys() + else Option_None() + ), + ReturnConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnConsumedCapacity( + native_input["ReturnConsumedCapacity"] + ) + ) + if "ReturnConsumedCapacity" in native_input.keys() + else Option_None() + ), + ReturnItemCollectionMetrics=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnItemCollectionMetrics( + native_input["ReturnItemCollectionMetrics"] + ) + ) + if "ReturnItemCollectionMetrics" in native_input.keys() + else Option_None() + ), + UpdateExpression=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["UpdateExpression"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "UpdateExpression" in native_input.keys() + else Option_None() + ), + ConditionExpression=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["ConditionExpression"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "ConditionExpression" in native_input.keys() + else Option_None() + ), + ExpressionAttributeNames=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): Seq( + 
"".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(value.encode("utf-16-be"))] * 2) + ] + ) + ) + for (key, value) in native_input["ExpressionAttributeNames"].items() + } + ) + ) + if "ExpressionAttributeNames" in native_input.keys() + else Option_None() + ), + ExpressionAttributeValues=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["ExpressionAttributeValues"].items() + } + ) + ) + if "ExpressionAttributeValues" in native_input.keys() + else Option_None() + ), + ReturnValuesOnConditionCheckFailure=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + native_input["ReturnValuesOnConditionCheckFailure"] + ) + ) + if "ReturnValuesOnConditionCheckFailure" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_AttributeValueUpdate(native_input): + return DafnyAttributeValueUpdate( + Value=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + native_input["Value"] + ) + ) + if "Value" in native_input.keys() + else Option_None() + ), + Action=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeAction( + native_input["Action"] + ) + ) + if "Action" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_AttributeAction(native_input): + # Convert AttributeAction + if native_input == "ADD": + return AttributeAction_ADD() + elif native_input == "PUT": + return AttributeAction_PUT() + elif native_input == "DELETE": + return AttributeAction_DELETE() + else: + raise ValueError("No recognized enum value in enum type: " + native_input) + + +def com_amazonaws_dynamodb_UpdateItemOutput(native_input): + return DafnyUpdateItemOutput( + Attributes=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Attributes"].items() + } + ) + ) + if "Attributes" in native_input.keys() + else Option_None() + ), + ConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConsumedCapacity( + native_input["ConsumedCapacity"] + ) + ) + if "ConsumedCapacity" in native_input.keys() + else Option_None() + ), + ItemCollectionMetrics=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ItemCollectionMetrics( + native_input["ItemCollectionMetrics"] + ) + ) + if "ItemCollectionMetrics" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_DeleteItemInput(native_input): + return DafnyDeleteItemInput( + TableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["TableName"].encode("utf-16-be"))] * 2) + ] + ) + ), + Key=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) 
for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Key"].items() + } + ), + Expected=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ExpectedAttributeValue( + value + ) + for (key, value) in native_input["Expected"].items() + } + ) + ) + if "Expected" in native_input.keys() + else Option_None() + ), + ConditionalOperator=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConditionalOperator( + native_input["ConditionalOperator"] + ) + ) + if "ConditionalOperator" in native_input.keys() + else Option_None() + ), + ReturnValues=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnValue( + native_input["ReturnValues"] + ) + ) + if "ReturnValues" in native_input.keys() + else Option_None() + ), + ReturnConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnConsumedCapacity( + native_input["ReturnConsumedCapacity"] + ) + ) + if "ReturnConsumedCapacity" in native_input.keys() + else Option_None() + ), + ReturnItemCollectionMetrics=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnItemCollectionMetrics( + native_input["ReturnItemCollectionMetrics"] + ) + ) + if "ReturnItemCollectionMetrics" in native_input.keys() + else Option_None() + ), + ConditionExpression=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["ConditionExpression"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "ConditionExpression" in native_input.keys() + else Option_None() + ), + ExpressionAttributeNames=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(value.encode("utf-16-be"))] * 2) + ] + ) + ) + for (key, value) in native_input["ExpressionAttributeNames"].items() + } + ) + ) + if "ExpressionAttributeNames" in native_input.keys() + else Option_None() + ), + ExpressionAttributeValues=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["ExpressionAttributeValues"].items() + } + ) + ) + if "ExpressionAttributeValues" in native_input.keys() + else Option_None() + ), + ReturnValuesOnConditionCheckFailure=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + native_input["ReturnValuesOnConditionCheckFailure"] + ) + ) + if "ReturnValuesOnConditionCheckFailure" in native_input.keys() + else Option_None() + ), + ) + + +def 
com_amazonaws_dynamodb_DeleteItemOutput(native_input): + return DafnyDeleteItemOutput( + Attributes=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Attributes"].items() + } + ) + ) + if "Attributes" in native_input.keys() + else Option_None() + ), + ConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConsumedCapacity( + native_input["ConsumedCapacity"] + ) + ) + if "ConsumedCapacity" in native_input.keys() + else Option_None() + ), + ItemCollectionMetrics=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ItemCollectionMetrics( + native_input["ItemCollectionMetrics"] + ) + ) + if "ItemCollectionMetrics" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_TransactGetItemsInput(native_input): + return DafnyTransactGetItemsInput( + TransactItems=Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_TransactGetItem( + list_element + ) + for list_element in native_input["TransactItems"] + ] + ), + ReturnConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnConsumedCapacity( + native_input["ReturnConsumedCapacity"] + ) + ) + if "ReturnConsumedCapacity" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_TransactGetItem(native_input): + return DafnyTransactGetItem( + Get=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_Get( + native_input["Get"] + ), + ) + + +def com_amazonaws_dynamodb_Get(native_input): + return DafnyGet( + Key=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Key"].items() + } + ), + TableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["TableName"].encode("utf-16-be"))] * 2) + ] + ) + ), + ProjectionExpression=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["ProjectionExpression"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "ProjectionExpression" in native_input.keys() + else Option_None() + ), + ExpressionAttributeNames=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(value.encode("utf-16-be"))] * 2) + ] + ) + ) + for (key, value) in native_input["ExpressionAttributeNames"].items() + } + ) + ) + if "ExpressionAttributeNames" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_TransactGetItemsOutput(native_input): + return DafnyTransactGetItemsOutput( + ConsumedCapacity=( + Option_Some( + Seq( + [ + 
aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConsumedCapacity( + list_element + ) + for list_element in native_input["ConsumedCapacity"] + ] + ) + ) + if "ConsumedCapacity" in native_input.keys() + else Option_None() + ), + Responses=( + Option_Some( + Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ItemResponse( + list_element + ) + for list_element in native_input["Responses"] + ] + ) + ) + if "Responses" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_ItemResponse(native_input): + return DafnyItemResponse( + Item=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Item"].items() + } + ) + ) + if "Item" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_ExecuteStatementInput(native_input): + return DafnyExecuteStatementInput( + Statement=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["Statement"].encode("utf-16-be"))] * 2) + ] + ) + ), + Parameters=( + Option_Some( + Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + list_element + ) + for list_element in native_input["Parameters"] + ] + ) + ) + if "Parameters" in native_input.keys() + else Option_None() + ), + ConsistentRead=( + Option_Some(native_input["ConsistentRead"]) if "ConsistentRead" in native_input.keys() else Option_None() + ), + NextToken=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["NextToken"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "NextToken" in native_input.keys() + else Option_None() + ), + ReturnConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnConsumedCapacity( + native_input["ReturnConsumedCapacity"] + ) + ) + if "ReturnConsumedCapacity" in native_input.keys() + else Option_None() + ), + Limit=(Option_Some(native_input["Limit"]) if "Limit" in native_input.keys() else Option_None()), + ReturnValuesOnConditionCheckFailure=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + native_input["ReturnValuesOnConditionCheckFailure"] + ) + ) + if "ReturnValuesOnConditionCheckFailure" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_ExecuteStatementOutput(native_input): + return DafnyExecuteStatementOutput( + Items=( + Option_Some( + Seq( + [ + Map( + { + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(key.encode("utf-16-be"))] * 2) + ] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in list_element.items() + } + ) + for list_element in native_input["Items"] + ] + ) + ) + if "Items" in native_input.keys() + else Option_None() + ), + NextToken=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in 
zip(*[iter(native_input["NextToken"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "NextToken" in native_input.keys() + else Option_None() + ), + ConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConsumedCapacity( + native_input["ConsumedCapacity"] + ) + ) + if "ConsumedCapacity" in native_input.keys() + else Option_None() + ), + LastEvaluatedKey=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["LastEvaluatedKey"].items() + } + ) + ) + if "LastEvaluatedKey" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_BatchExecuteStatementInput(native_input): + return DafnyBatchExecuteStatementInput( + Statements=Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchStatementRequest( + list_element + ) + for list_element in native_input["Statements"] + ] + ), + ReturnConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnConsumedCapacity( + native_input["ReturnConsumedCapacity"] + ) + ) + if "ReturnConsumedCapacity" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_BatchStatementRequest(native_input): + return DafnyBatchStatementRequest( + Statement=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["Statement"].encode("utf-16-be"))] * 2) + ] + ) + ), + Parameters=( + Option_Some( + Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + list_element + ) + for list_element in native_input["Parameters"] + ] + ) + ) + if "Parameters" in native_input.keys() + else Option_None() + ), + ConsistentRead=( + Option_Some(native_input["ConsistentRead"]) if "ConsistentRead" in native_input.keys() else Option_None() + ), + ReturnValuesOnConditionCheckFailure=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + native_input["ReturnValuesOnConditionCheckFailure"] + ) + ) + if "ReturnValuesOnConditionCheckFailure" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_BatchExecuteStatementOutput(native_input): + return DafnyBatchExecuteStatementOutput( + Responses=( + Option_Some( + Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchStatementResponse( + list_element + ) + for list_element in native_input["Responses"] + ] + ) + ) + if "Responses" in native_input.keys() + else Option_None() + ), + ConsumedCapacity=( + Option_Some( + Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConsumedCapacity( + list_element + ) + for list_element in native_input["ConsumedCapacity"] + ] + ) + ) + if "ConsumedCapacity" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_BatchStatementResponse(native_input): + return DafnyBatchStatementResponse( + Error=( + 
Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchStatementError( + native_input["Error"] + ) + ) + if "Error" in native_input.keys() + else Option_None() + ), + TableName=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["TableName"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "TableName" in native_input.keys() + else Option_None() + ), + Item=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Item"].items() + } + ) + ) + if "Item" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_BatchStatementError(native_input): + return DafnyBatchStatementError( + Code=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchStatementErrorCodeEnum( + native_input["Code"] + ) + ) + if "Code" in native_input.keys() + else Option_None() + ), + Message=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["Message"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "Message" in native_input.keys() + else Option_None() + ), + Item=( + Option_Some( + Map( + { + Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)] + ) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input["Item"].items() + } + ) + ) + if "Item" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_BatchStatementErrorCodeEnum(native_input): + # Convert BatchStatementErrorCodeEnum + if native_input == "ConditionalCheckFailed": + return BatchStatementErrorCodeEnum_ConditionalCheckFailed() + elif native_input == "ItemCollectionSizeLimitExceeded": + return BatchStatementErrorCodeEnum_ItemCollectionSizeLimitExceeded() + elif native_input == "RequestLimitExceeded": + return BatchStatementErrorCodeEnum_RequestLimitExceeded() + elif native_input == "ValidationError": + return BatchStatementErrorCodeEnum_ValidationError() + elif native_input == "ProvisionedThroughputExceeded": + return BatchStatementErrorCodeEnum_ProvisionedThroughputExceeded() + elif native_input == "TransactionConflict": + return BatchStatementErrorCodeEnum_TransactionConflict() + elif native_input == "ThrottlingError": + return BatchStatementErrorCodeEnum_ThrottlingError() + elif native_input == "InternalServerError": + return BatchStatementErrorCodeEnum_InternalServerError() + elif native_input == "ResourceNotFound": + return BatchStatementErrorCodeEnum_ResourceNotFound() + elif native_input == "AccessDenied": + return BatchStatementErrorCodeEnum_AccessDenied() + elif native_input == "DuplicateItem": + return BatchStatementErrorCodeEnum_DuplicateItem() + else: + raise ValueError("No recognized enum value in enum type: " + native_input) + + +def com_amazonaws_dynamodb_ExecuteTransactionInput(native_input): + return DafnyExecuteTransactionInput( + TransactStatements=Seq( + [ + 
aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ParameterizedStatement( + list_element + ) + for list_element in native_input["TransactStatements"] + ] + ), + ClientRequestToken=( + Option_Some( + Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["ClientRequestToken"].encode("utf-16-be"))] * 2) + ] + ) + ) + ) + if "ClientRequestToken" in native_input.keys() + else Option_None() + ), + ReturnConsumedCapacity=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnConsumedCapacity( + native_input["ReturnConsumedCapacity"] + ) + ) + if "ReturnConsumedCapacity" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_ParameterizedStatement(native_input): + return DafnyParameterizedStatement( + Statement=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input["Statement"].encode("utf-16-be"))] * 2) + ] + ) + ), + Parameters=( + Option_Some( + Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + list_element + ) + for list_element in native_input["Parameters"] + ] + ) + ) + if "Parameters" in native_input.keys() + else Option_None() + ), + ReturnValuesOnConditionCheckFailure=( + Option_Some( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + native_input["ReturnValuesOnConditionCheckFailure"] + ) + ) + if "ReturnValuesOnConditionCheckFailure" in native_input.keys() + else Option_None() + ), + ) + + +def com_amazonaws_dynamodb_ExecuteTransactionOutput(native_input): + return DafnyExecuteTransactionOutput( + Responses=( + Option_Some( + Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ItemResponse( + list_element + ) + for list_element in native_input["Responses"] + ] + ) + ) + if "Responses" in native_input.keys() + else Option_None() + ), + ConsumedCapacity=( + Option_Some( + Seq( + [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ConsumedCapacity( + list_element + ) + for list_element in native_input["ConsumedCapacity"] + ] + ) + ) + if "ConsumedCapacity" in native_input.keys() + else Option_None() + ), + ) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/client.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/client.py new file mode 100644 index 000000000..dfb98d3f7 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/client.py @@ -0,0 +1,820 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
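+#
+# This generated client wraps the Dafny-transpiled DynamoDbEncryptionTransforms
+# implementation behind a Smithy-style interface: each operation serializes its
+# Smithy-modelled input into a DafnyRequest, hands it to the Dafny impl through
+# the interceptor/retry pipeline below, and deserializes the DafnyResponse back
+# into a Smithy-modelled output. Hedged usage sketch (the constructor signature
+# comes from this file; field values are illustrative only):
+#
+#   client = DynamoDbEncryptionTransforms(
+#       config=DynamoDbTablesEncryptionConfig(...)
+#   )
+#   output = client.resolve_attributes(ResolveAttributesInput(...))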
+ +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes import ( + IDynamoDbEncryptionTransformsClient, +) +from typing import Callable, TypeVar, cast + +from .config import Config, DynamoDbTablesEncryptionConfig +from .dafny_protocol import DafnyRequest, DafnyResponse +from .plugin import set_config_impl +from smithy_python.exceptions import SmithyRetryException +from smithy_python.interfaces.interceptor import Interceptor, InterceptorContext +from smithy_python.interfaces.retries import RetryErrorInfo, RetryErrorType + +from .config import Plugin +from .deserialize import ( + _deserialize_batch_execute_statement_input_transform, + _deserialize_batch_execute_statement_output_transform, + _deserialize_batch_get_item_input_transform, + _deserialize_batch_get_item_output_transform, + _deserialize_batch_write_item_input_transform, + _deserialize_batch_write_item_output_transform, + _deserialize_delete_item_input_transform, + _deserialize_delete_item_output_transform, + _deserialize_execute_statement_input_transform, + _deserialize_execute_statement_output_transform, + _deserialize_execute_transaction_input_transform, + _deserialize_execute_transaction_output_transform, + _deserialize_get_item_input_transform, + _deserialize_get_item_output_transform, + _deserialize_put_item_input_transform, + _deserialize_put_item_output_transform, + _deserialize_query_input_transform, + _deserialize_query_output_transform, + _deserialize_resolve_attributes, + _deserialize_scan_input_transform, + _deserialize_scan_output_transform, + _deserialize_transact_get_items_input_transform, + _deserialize_transact_get_items_output_transform, + _deserialize_transact_write_items_input_transform, + _deserialize_transact_write_items_output_transform, + _deserialize_update_item_input_transform, + _deserialize_update_item_output_transform, +) +from .errors import ServiceError +from .models import ( + BatchExecuteStatementInputTransformInput, + BatchExecuteStatementInputTransformOutput, + BatchExecuteStatementOutputTransformInput, + BatchExecuteStatementOutputTransformOutput, + BatchGetItemInputTransformInput, + BatchGetItemInputTransformOutput, + BatchGetItemOutputTransformInput, + BatchGetItemOutputTransformOutput, + BatchWriteItemInputTransformInput, + BatchWriteItemInputTransformOutput, + BatchWriteItemOutputTransformInput, + BatchWriteItemOutputTransformOutput, + DeleteItemInputTransformInput, + DeleteItemInputTransformOutput, + DeleteItemOutputTransformInput, + DeleteItemOutputTransformOutput, + ExecuteStatementInputTransformInput, + ExecuteStatementInputTransformOutput, + ExecuteStatementOutputTransformInput, + ExecuteStatementOutputTransformOutput, + ExecuteTransactionInputTransformInput, + ExecuteTransactionInputTransformOutput, + ExecuteTransactionOutputTransformInput, + ExecuteTransactionOutputTransformOutput, + GetItemInputTransformInput, + GetItemInputTransformOutput, + GetItemOutputTransformInput, + GetItemOutputTransformOutput, + PutItemInputTransformInput, + PutItemInputTransformOutput, + PutItemOutputTransformInput, + PutItemOutputTransformOutput, + QueryInputTransformInput, + QueryInputTransformOutput, + QueryOutputTransformInput, + QueryOutputTransformOutput, + ResolveAttributesInput, + ResolveAttributesOutput, + ScanInputTransformInput, + ScanInputTransformOutput, + ScanOutputTransformInput, + ScanOutputTransformOutput, + TransactGetItemsInputTransformInput, + TransactGetItemsInputTransformOutput, + TransactGetItemsOutputTransformInput, + 
TransactGetItemsOutputTransformOutput, + TransactWriteItemsInputTransformInput, + TransactWriteItemsInputTransformOutput, + TransactWriteItemsOutputTransformInput, + TransactWriteItemsOutputTransformOutput, + UpdateItemInputTransformInput, + UpdateItemInputTransformOutput, + UpdateItemOutputTransformInput, + UpdateItemOutputTransformOutput, +) +from .serialize import ( + _serialize_batch_execute_statement_input_transform, + _serialize_batch_execute_statement_output_transform, + _serialize_batch_get_item_input_transform, + _serialize_batch_get_item_output_transform, + _serialize_batch_write_item_input_transform, + _serialize_batch_write_item_output_transform, + _serialize_delete_item_input_transform, + _serialize_delete_item_output_transform, + _serialize_execute_statement_input_transform, + _serialize_execute_statement_output_transform, + _serialize_execute_transaction_input_transform, + _serialize_execute_transaction_output_transform, + _serialize_get_item_input_transform, + _serialize_get_item_output_transform, + _serialize_put_item_input_transform, + _serialize_put_item_output_transform, + _serialize_query_input_transform, + _serialize_query_output_transform, + _serialize_resolve_attributes, + _serialize_scan_input_transform, + _serialize_scan_output_transform, + _serialize_transact_get_items_input_transform, + _serialize_transact_get_items_output_transform, + _serialize_transact_write_items_input_transform, + _serialize_transact_write_items_output_transform, + _serialize_update_item_input_transform, + _serialize_update_item_output_transform, +) + + +Input = TypeVar("Input") +Output = TypeVar("Output") + + +class DynamoDbEncryptionTransforms: + """Client for DynamoDbEncryptionTransforms. + + :param config: Configuration for the client. + """ + + def __init__( + self, + config: DynamoDbTablesEncryptionConfig | None = None, + dafny_client: IDynamoDbEncryptionTransformsClient | None = None, + ): + if config is None: + self._config = Config() + else: + self._config = config + + client_plugins: list[Plugin] = [ + set_config_impl, + ] + + for plugin in client_plugins: + plugin(self._config) + + if dafny_client is not None: + self._config.dafnyImplInterface.impl = dafny_client + + def put_item_input_transform(self, input: PutItemInputTransformInput) -> PutItemInputTransformOutput: + """Invokes the PutItemInputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_put_item_input_transform, + deserialize=_deserialize_put_item_input_transform, + config=self._config, + operation_name="PutItemInputTransform", + ) + + def put_item_output_transform(self, input: PutItemOutputTransformInput) -> PutItemOutputTransformOutput: + """Invokes the PutItemOutputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_put_item_output_transform, + deserialize=_deserialize_put_item_output_transform, + config=self._config, + operation_name="PutItemOutputTransform", + ) + + def get_item_input_transform(self, input: GetItemInputTransformInput) -> GetItemInputTransformOutput: + """Invokes the GetItemInputTransform operation. + + :param input: The operation's input. 
+ """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_get_item_input_transform, + deserialize=_deserialize_get_item_input_transform, + config=self._config, + operation_name="GetItemInputTransform", + ) + + def get_item_output_transform(self, input: GetItemOutputTransformInput) -> GetItemOutputTransformOutput: + """Invokes the GetItemOutputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_get_item_output_transform, + deserialize=_deserialize_get_item_output_transform, + config=self._config, + operation_name="GetItemOutputTransform", + ) + + def batch_write_item_input_transform( + self, input: BatchWriteItemInputTransformInput + ) -> BatchWriteItemInputTransformOutput: + """Invokes the BatchWriteItemInputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_batch_write_item_input_transform, + deserialize=_deserialize_batch_write_item_input_transform, + config=self._config, + operation_name="BatchWriteItemInputTransform", + ) + + def batch_write_item_output_transform( + self, input: BatchWriteItemOutputTransformInput + ) -> BatchWriteItemOutputTransformOutput: + """Invokes the BatchWriteItemOutputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_batch_write_item_output_transform, + deserialize=_deserialize_batch_write_item_output_transform, + config=self._config, + operation_name="BatchWriteItemOutputTransform", + ) + + def batch_get_item_input_transform( + self, input: BatchGetItemInputTransformInput + ) -> BatchGetItemInputTransformOutput: + """Invokes the BatchGetItemInputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_batch_get_item_input_transform, + deserialize=_deserialize_batch_get_item_input_transform, + config=self._config, + operation_name="BatchGetItemInputTransform", + ) + + def batch_get_item_output_transform( + self, input: BatchGetItemOutputTransformInput + ) -> BatchGetItemOutputTransformOutput: + """Invokes the BatchGetItemOutputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_batch_get_item_output_transform, + deserialize=_deserialize_batch_get_item_output_transform, + config=self._config, + operation_name="BatchGetItemOutputTransform", + ) + + def scan_input_transform(self, input: ScanInputTransformInput) -> ScanInputTransformOutput: + """Invokes the ScanInputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_scan_input_transform, + deserialize=_deserialize_scan_input_transform, + config=self._config, + operation_name="ScanInputTransform", + ) + + def scan_output_transform(self, input: ScanOutputTransformInput) -> ScanOutputTransformOutput: + """Invokes the ScanOutputTransform operation. + + :param input: The operation's input. 
+ """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_scan_output_transform, + deserialize=_deserialize_scan_output_transform, + config=self._config, + operation_name="ScanOutputTransform", + ) + + def query_input_transform(self, input: QueryInputTransformInput) -> QueryInputTransformOutput: + """Invokes the QueryInputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_query_input_transform, + deserialize=_deserialize_query_input_transform, + config=self._config, + operation_name="QueryInputTransform", + ) + + def query_output_transform(self, input: QueryOutputTransformInput) -> QueryOutputTransformOutput: + """Invokes the QueryOutputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_query_output_transform, + deserialize=_deserialize_query_output_transform, + config=self._config, + operation_name="QueryOutputTransform", + ) + + def transact_write_items_input_transform( + self, input: TransactWriteItemsInputTransformInput + ) -> TransactWriteItemsInputTransformOutput: + """Invokes the TransactWriteItemsInputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_transact_write_items_input_transform, + deserialize=_deserialize_transact_write_items_input_transform, + config=self._config, + operation_name="TransactWriteItemsInputTransform", + ) + + def transact_write_items_output_transform( + self, input: TransactWriteItemsOutputTransformInput + ) -> TransactWriteItemsOutputTransformOutput: + """Invokes the TransactWriteItemsOutputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_transact_write_items_output_transform, + deserialize=_deserialize_transact_write_items_output_transform, + config=self._config, + operation_name="TransactWriteItemsOutputTransform", + ) + + def update_item_input_transform(self, input: UpdateItemInputTransformInput) -> UpdateItemInputTransformOutput: + """Invokes the UpdateItemInputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_update_item_input_transform, + deserialize=_deserialize_update_item_input_transform, + config=self._config, + operation_name="UpdateItemInputTransform", + ) + + def update_item_output_transform(self, input: UpdateItemOutputTransformInput) -> UpdateItemOutputTransformOutput: + """Invokes the UpdateItemOutputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_update_item_output_transform, + deserialize=_deserialize_update_item_output_transform, + config=self._config, + operation_name="UpdateItemOutputTransform", + ) + + def delete_item_input_transform(self, input: DeleteItemInputTransformInput) -> DeleteItemInputTransformOutput: + """Invokes the DeleteItemInputTransform operation. + + :param input: The operation's input. 
+ """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_delete_item_input_transform, + deserialize=_deserialize_delete_item_input_transform, + config=self._config, + operation_name="DeleteItemInputTransform", + ) + + def delete_item_output_transform(self, input: DeleteItemOutputTransformInput) -> DeleteItemOutputTransformOutput: + """Invokes the DeleteItemOutputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_delete_item_output_transform, + deserialize=_deserialize_delete_item_output_transform, + config=self._config, + operation_name="DeleteItemOutputTransform", + ) + + def transact_get_items_input_transform( + self, input: TransactGetItemsInputTransformInput + ) -> TransactGetItemsInputTransformOutput: + """Invokes the TransactGetItemsInputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_transact_get_items_input_transform, + deserialize=_deserialize_transact_get_items_input_transform, + config=self._config, + operation_name="TransactGetItemsInputTransform", + ) + + def transact_get_items_output_transform( + self, input: TransactGetItemsOutputTransformInput + ) -> TransactGetItemsOutputTransformOutput: + """Invokes the TransactGetItemsOutputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_transact_get_items_output_transform, + deserialize=_deserialize_transact_get_items_output_transform, + config=self._config, + operation_name="TransactGetItemsOutputTransform", + ) + + def execute_statement_input_transform( + self, input: ExecuteStatementInputTransformInput + ) -> ExecuteStatementInputTransformOutput: + """Invokes the ExecuteStatementInputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_execute_statement_input_transform, + deserialize=_deserialize_execute_statement_input_transform, + config=self._config, + operation_name="ExecuteStatementInputTransform", + ) + + def execute_statement_output_transform( + self, input: ExecuteStatementOutputTransformInput + ) -> ExecuteStatementOutputTransformOutput: + """Invokes the ExecuteStatementOutputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_execute_statement_output_transform, + deserialize=_deserialize_execute_statement_output_transform, + config=self._config, + operation_name="ExecuteStatementOutputTransform", + ) + + def batch_execute_statement_input_transform( + self, input: BatchExecuteStatementInputTransformInput + ) -> BatchExecuteStatementInputTransformOutput: + """Invokes the BatchExecuteStatementInputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_batch_execute_statement_input_transform, + deserialize=_deserialize_batch_execute_statement_input_transform, + config=self._config, + operation_name="BatchExecuteStatementInputTransform", + ) + + def batch_execute_statement_output_transform( + self, input: BatchExecuteStatementOutputTransformInput + ) -> BatchExecuteStatementOutputTransformOutput: + """Invokes the BatchExecuteStatementOutputTransform operation. 
+ + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_batch_execute_statement_output_transform, + deserialize=_deserialize_batch_execute_statement_output_transform, + config=self._config, + operation_name="BatchExecuteStatementOutputTransform", + ) + + def execute_transaction_input_transform( + self, input: ExecuteTransactionInputTransformInput + ) -> ExecuteTransactionInputTransformOutput: + """Invokes the ExecuteTransactionInputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_execute_transaction_input_transform, + deserialize=_deserialize_execute_transaction_input_transform, + config=self._config, + operation_name="ExecuteTransactionInputTransform", + ) + + def execute_transaction_output_transform( + self, input: ExecuteTransactionOutputTransformInput + ) -> ExecuteTransactionOutputTransformOutput: + """Invokes the ExecuteTransactionOutputTransform operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_execute_transaction_output_transform, + deserialize=_deserialize_execute_transaction_output_transform, + config=self._config, + operation_name="ExecuteTransactionOutputTransform", + ) + + def resolve_attributes(self, input: ResolveAttributesInput) -> ResolveAttributesOutput: + """Given an Item, show the intermediate values (e.g. compound beacons, + virtual fields). + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_resolve_attributes, + deserialize=_deserialize_resolve_attributes, + config=self._config, + operation_name="ResolveAttributes", + ) + + def _execute_operation( + self, + input: Input, + plugins: list[Plugin], + serialize: Callable[[Input, Config], DafnyRequest], + deserialize: Callable[[DafnyResponse, Config], Output], + config: Config, + operation_name: str, + ) -> Output: + try: + return self._handle_execution(input, plugins, serialize, deserialize, config, operation_name) + except Exception as e: + # Make sure every exception that we throw is an instance of ServiceError so + # customers can reliably catch everything we throw. 
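+            # Callers can therefore rely on a single catch-all. Hedged caller-side
+            # sketch (not part of the generated client; names are illustrative):
+            #
+            #   try:
+            #       client.put_item_input_transform(input)
+            #   except ServiceError:
+            #       ...  # every failure surfaces here, wrapped if necessary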
+ if not isinstance(e, ServiceError): + raise ServiceError(e) from e + raise e + + def _handle_execution( + self, + input: Input, + plugins: list[Plugin], + serialize: Callable[[Input, Config], DafnyRequest], + deserialize: Callable[[DafnyResponse, Config], Output], + config: Config, + operation_name: str, + ) -> Output: + context: InterceptorContext[Input, None, None, None] = InterceptorContext( + request=input, + response=None, + transport_request=None, + transport_response=None, + ) + try: + _client_interceptors = config.interceptors + except AttributeError: + config.interceptors = [] + _client_interceptors = config.interceptors + client_interceptors = cast( + list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + _client_interceptors, + ) + interceptors = client_interceptors + + try: + # Step 1a: Invoke read_before_execution on client-level interceptors + for interceptor in client_interceptors: + interceptor.read_before_execution(context) + + # Step 1b: Run operation-level plugins + for plugin in plugins: + plugin(config) + + _client_interceptors = config.interceptors + interceptors = cast( + list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + _client_interceptors, + ) + + # Step 1c: Invoke the read_before_execution hooks on newly added + # interceptors. + for interceptor in interceptors: + if interceptor not in client_interceptors: + interceptor.read_before_execution(context) + + # Step 2: Invoke the modify_before_serialization hooks + for interceptor in interceptors: + context._request = interceptor.modify_before_serialization(context) + + # Step 3: Invoke the read_before_serialization hooks + for interceptor in interceptors: + interceptor.read_before_serialization(context) + + # Step 4: Serialize the request + context_with_transport_request = cast(InterceptorContext[Input, None, DafnyRequest, None], context) + context_with_transport_request._transport_request = serialize( + context_with_transport_request.request, config + ) + + # Step 5: Invoke read_after_serialization + for interceptor in interceptors: + interceptor.read_after_serialization(context_with_transport_request) + + # Step 6: Invoke modify_before_retry_loop + for interceptor in interceptors: + context_with_transport_request._transport_request = interceptor.modify_before_retry_loop( + context_with_transport_request + ) + + # Step 7: Acquire the retry token. + retry_strategy = config.retry_strategy + retry_token = retry_strategy.acquire_initial_retry_token() + + while True: + # Make an attempt, creating a copy of the context so we don't pass + # around old data. + context_with_response = self._handle_attempt( + deserialize, + interceptors, + context_with_transport_request.copy(), + config, + operation_name, + ) + + # We perform this type-ignored re-assignment because `context` needs + # to point at the latest context so it can be generically handled + # later on. This is only an issue here because we've created a copy, + # so we're no longer simply pointing at the same object in memory + # with different names and type hints. It is possible to address this + # without having to fall back to the type ignore, but it would impose + # unnecessary runtime costs. + context = context_with_response # type: ignore + + if isinstance(context_with_response.response, Exception): + # Step 7u: Reacquire retry token if the attempt failed + try: + retry_token = retry_strategy.refresh_retry_token_for_retry( + token_to_renew=retry_token, + error_info=RetryErrorInfo( + # TODO: Determine the error type. 
+ error_type=RetryErrorType.CLIENT_ERROR, + ), + ) + except SmithyRetryException: + raise context_with_response.response + else: + # Step 8: Invoke record_success + retry_strategy.record_success(token=retry_token) + break + except Exception as e: + context._response = e + + # At this point, the context's request will have been definitively set, and + # The response will be set either with the modeled output or an exception. The + # transport_request and transport_response may be set or None. + execution_context = cast( + InterceptorContext[Input, Output, DafnyRequest | None, DafnyResponse | None], + context, + ) + return self._finalize_execution(interceptors, execution_context) + + def _handle_attempt( + self, + deserialize: Callable[[DafnyResponse, Config], Output], + interceptors: list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + context: InterceptorContext[Input, None, DafnyRequest, None], + config: Config, + operation_name: str, + ) -> InterceptorContext[Input, Output, DafnyRequest, DafnyResponse | None]: + try: + # Step 7a: Invoke read_before_attempt + for interceptor in interceptors: + interceptor.read_before_attempt(context) + + # Step 7m: Involve client Dafny impl + if config.dafnyImplInterface.impl is None: + raise Exception("No impl found on the operation config.") + + context_with_response = cast(InterceptorContext[Input, None, DafnyRequest, DafnyResponse], context) + + context_with_response._transport_response = config.dafnyImplInterface.handle_request( + input=context_with_response.transport_request + ) + + # Step 7n: Invoke read_after_transmit + for interceptor in interceptors: + interceptor.read_after_transmit(context_with_response) + + # Step 7o: Invoke modify_before_deserialization + for interceptor in interceptors: + context_with_response._transport_response = interceptor.modify_before_deserialization( + context_with_response + ) + + # Step 7p: Invoke read_before_deserialization + for interceptor in interceptors: + interceptor.read_before_deserialization(context_with_response) + + # Step 7q: deserialize + context_with_output = cast( + InterceptorContext[Input, Output, DafnyRequest, DafnyResponse], + context_with_response, + ) + context_with_output._response = deserialize(context_with_output._transport_response, config) + + # Step 7r: Invoke read_after_deserialization + for interceptor in interceptors: + interceptor.read_after_deserialization(context_with_output) + except Exception as e: + context._response = e + + # At this point, the context's request and transport_request have definitively been set, + # the response is either set or an exception, and the transport_resposne is either set or + # None. This will also be true after _finalize_attempt because there is no opportunity + # there to set the transport_response. 
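+        # Note: typing.cast is a no-op at runtime, so attempt_context below is the
+        # same object as context, merely re-typed to reflect that a response (or
+        # exception) has now been recorded on it.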
+ attempt_context = cast( + InterceptorContext[Input, Output, DafnyRequest, DafnyResponse | None], + context, + ) + return self._finalize_attempt(interceptors, attempt_context) + + def _finalize_attempt( + self, + interceptors: list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + context: InterceptorContext[Input, Output, DafnyRequest, DafnyResponse | None], + ) -> InterceptorContext[Input, Output, DafnyRequest, DafnyResponse | None]: + # Step 7s: Invoke modify_before_attempt_completion + try: + for interceptor in interceptors: + context._response = interceptor.modify_before_attempt_completion(context) + except Exception as e: + context._response = e + + # Step 7t: Invoke read_after_attempt + for interceptor in interceptors: + try: + interceptor.read_after_attempt(context) + except Exception as e: + context._response = e + + return context + + def _finalize_execution( + self, + interceptors: list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + context: InterceptorContext[Input, Output, DafnyRequest | None, DafnyResponse | None], + ) -> Output: + try: + # Step 9: Invoke modify_before_completion + for interceptor in interceptors: + context._response = interceptor.modify_before_completion(context) + + except Exception as e: + context._response = e + + # Step 11: Invoke read_after_execution + for interceptor in interceptors: + try: + interceptor.read_after_execution(context) + except Exception as e: + context._response = e + + # Step 12: Return / throw + if isinstance(context.response, Exception): + raise context.response + + # We may want to add some aspects of this context to the output types so we can + # return it to the end-users. + return context.response diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/config.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/config.py new file mode 100644 index 000000000..b41b0b30b --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/config.py @@ -0,0 +1,70 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
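+#
+# Config carries the interceptors, the retry strategy, and the DafnyImplInterface
+# used to reach the Dafny-transpiled implementation, while the converter
+# functions below translate the Smithy-modelled DynamoDbTablesEncryptionConfig
+# to and from its Dafny shape. A Plugin is just a callable that mutates a Config
+# in place; hedged sketch of a custom plugin (the interceptor class named here
+# is hypothetical):
+#
+#   def add_logging_interceptor(config: Config) -> None:
+#       config.interceptors.append(MyLoggingInterceptor())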
+ +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes import ( + DynamoDbTablesEncryptionConfig_DynamoDbTablesEncryptionConfig as DafnyDynamoDbTablesEncryptionConfig, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models import ( + DynamoDbTablesEncryptionConfig, +) +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny +from dataclasses import dataclass +from typing import Any, Callable, TypeAlias + +from .dafnyImplInterface import DafnyImplInterface +from smithy_python._private.retries import SimpleRetryStrategy +from smithy_python.interfaces.retries import RetryStrategy + + +_ServiceInterceptor = Any + + +@dataclass(init=False) +class Config: + """Configuration for DynamoDbEncryptionTransforms.""" + + interceptors: list[_ServiceInterceptor] + retry_strategy: RetryStrategy + dafnyImplInterface: DafnyImplInterface | None + + def __init__( + self, + *, + interceptors: list[_ServiceInterceptor] | None = None, + retry_strategy: RetryStrategy | None = None, + dafnyImplInterface: DafnyImplInterface | None = None, + ): + """Constructor. + + :param interceptors: The list of interceptors, which are hooks + that are called during the execution of a request. + :param retry_strategy: The retry strategy for issuing retry + tokens and computing retry delays. + :param dafnyImplInterface: + """ + self.interceptors = interceptors or [] + self.retry_strategy = retry_strategy or SimpleRetryStrategy() + self.dafnyImplInterface = dafnyImplInterface + + +# A callable that allows customizing the config object on each request. +Plugin: TypeAlias = Callable[[Config], None] + + +def dafny_config_to_smithy_config(dafny_config) -> DynamoDbTablesEncryptionConfig: + """Converts the provided Dafny shape for this localService's config into + the corresponding Smithy-modelled shape.""" + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbTablesEncryptionConfig( + dafny_config + ) + + +def smithy_config_to_dafny_config(smithy_config) -> DafnyDynamoDbTablesEncryptionConfig: + """Converts the provided Smithy-modelled shape for this localService's + config into the corresponding Dafny shape.""" + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbTablesEncryptionConfig( + smithy_config + ) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/dafnyImplInterface.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/dafnyImplInterface.py new file mode 100644 index 000000000..68d924bc1 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/dafnyImplInterface.py @@ -0,0 +1,59 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
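+#
+# DafnyImplInterface is the thin bridge between the Smithy-style client and the
+# Dafny-generated DynamoDbEncryptionTransformsClient: `impl` is populated at
+# client-construction time (directly when a dafny_client is supplied, and
+# otherwise presumably by the set_config_impl plugin applied in client.py), and
+# handle_request dispatches on the operation name carried by the DafnyRequest.
+# Hedged sketch of the call path (dafny_input stands for a Dafny-shaped input):
+#
+#   request = DafnyRequest("GetItemInputTransform", dafny_input)
+#   response = config.dafnyImplInterface.handle_request(input=request)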
+ +from aws_dbesdk_dynamodb.internaldafny.generated.DynamoDbEncryptionTransforms import ( + DynamoDbEncryptionTransformsClient, +) +from .dafny_protocol import DafnyRequest + + +class DafnyImplInterface: + impl: DynamoDbEncryptionTransformsClient | None = None + + # operation_map cannot be created at dafnyImplInterface create time, + # as the map's values reference values inside `self.impl`, + # and impl is only populated at runtime. + # Accessing these before impl is populated results in an error. + # At runtime, the map is populated once and cached. + operation_map = None + + def handle_request(self, input: DafnyRequest): + if self.operation_map is None: + self.operation_map = { + "PutItemInputTransform": self.impl.PutItemInputTransform, + "PutItemOutputTransform": self.impl.PutItemOutputTransform, + "GetItemInputTransform": self.impl.GetItemInputTransform, + "GetItemOutputTransform": self.impl.GetItemOutputTransform, + "BatchWriteItemInputTransform": self.impl.BatchWriteItemInputTransform, + "BatchWriteItemOutputTransform": self.impl.BatchWriteItemOutputTransform, + "BatchGetItemInputTransform": self.impl.BatchGetItemInputTransform, + "BatchGetItemOutputTransform": self.impl.BatchGetItemOutputTransform, + "ScanInputTransform": self.impl.ScanInputTransform, + "ScanOutputTransform": self.impl.ScanOutputTransform, + "QueryInputTransform": self.impl.QueryInputTransform, + "QueryOutputTransform": self.impl.QueryOutputTransform, + "TransactWriteItemsInputTransform": self.impl.TransactWriteItemsInputTransform, + "TransactWriteItemsOutputTransform": self.impl.TransactWriteItemsOutputTransform, + "UpdateItemInputTransform": self.impl.UpdateItemInputTransform, + "UpdateItemOutputTransform": self.impl.UpdateItemOutputTransform, + "DeleteItemInputTransform": self.impl.DeleteItemInputTransform, + "DeleteItemOutputTransform": self.impl.DeleteItemOutputTransform, + "TransactGetItemsInputTransform": self.impl.TransactGetItemsInputTransform, + "TransactGetItemsOutputTransform": self.impl.TransactGetItemsOutputTransform, + "ExecuteStatementInputTransform": self.impl.ExecuteStatementInputTransform, + "ExecuteStatementOutputTransform": self.impl.ExecuteStatementOutputTransform, + "BatchExecuteStatementInputTransform": self.impl.BatchExecuteStatementInputTransform, + "BatchExecuteStatementOutputTransform": self.impl.BatchExecuteStatementOutputTransform, + "ExecuteTransactionInputTransform": self.impl.ExecuteTransactionInputTransform, + "ExecuteTransactionOutputTransform": self.impl.ExecuteTransactionOutputTransform, + "ResolveAttributes": self.impl.ResolveAttributes, + } + + # This logic is where a typical Smithy client would expect the "server" to be. + # This code can be thought of as logic our Dafny "server" uses + # to route incoming client requests to the correct request handler code. 
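+        # For example, DafnyRequest("PutItemInputTransform", dafny_input) is routed
+        # to self.impl.PutItemInputTransform(dafny_input); an operation whose
+        # dafny_operation_input is None is invoked with no arguments.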
+ if input.dafny_operation_input is None: + return self.operation_map[input.operation_name]() + else: + return self.operation_map[input.operation_name](input.dafny_operation_input) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/dafny_protocol.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/dafny_protocol.py new file mode 100644 index 000000000..1da8aec1a --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/dafny_protocol.py @@ -0,0 +1,83 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes import ( + BatchExecuteStatementInputTransformInput_BatchExecuteStatementInputTransformInput as DafnyBatchExecuteStatementInputTransformInput, + BatchExecuteStatementOutputTransformInput_BatchExecuteStatementOutputTransformInput as DafnyBatchExecuteStatementOutputTransformInput, + BatchGetItemInputTransformInput_BatchGetItemInputTransformInput as DafnyBatchGetItemInputTransformInput, + BatchGetItemOutputTransformInput_BatchGetItemOutputTransformInput as DafnyBatchGetItemOutputTransformInput, + BatchWriteItemInputTransformInput_BatchWriteItemInputTransformInput as DafnyBatchWriteItemInputTransformInput, + BatchWriteItemOutputTransformInput_BatchWriteItemOutputTransformInput as DafnyBatchWriteItemOutputTransformInput, + DeleteItemInputTransformInput_DeleteItemInputTransformInput as DafnyDeleteItemInputTransformInput, + DeleteItemOutputTransformInput_DeleteItemOutputTransformInput as DafnyDeleteItemOutputTransformInput, + ExecuteStatementInputTransformInput_ExecuteStatementInputTransformInput as DafnyExecuteStatementInputTransformInput, + ExecuteStatementOutputTransformInput_ExecuteStatementOutputTransformInput as DafnyExecuteStatementOutputTransformInput, + ExecuteTransactionInputTransformInput_ExecuteTransactionInputTransformInput as DafnyExecuteTransactionInputTransformInput, + ExecuteTransactionOutputTransformInput_ExecuteTransactionOutputTransformInput as DafnyExecuteTransactionOutputTransformInput, + GetItemInputTransformInput_GetItemInputTransformInput as DafnyGetItemInputTransformInput, + GetItemOutputTransformInput_GetItemOutputTransformInput as DafnyGetItemOutputTransformInput, + PutItemInputTransformInput_PutItemInputTransformInput as DafnyPutItemInputTransformInput, + PutItemOutputTransformInput_PutItemOutputTransformInput as DafnyPutItemOutputTransformInput, + QueryInputTransformInput_QueryInputTransformInput as DafnyQueryInputTransformInput, + QueryOutputTransformInput_QueryOutputTransformInput as DafnyQueryOutputTransformInput, + ResolveAttributesInput_ResolveAttributesInput as DafnyResolveAttributesInput, + ScanInputTransformInput_ScanInputTransformInput as DafnyScanInputTransformInput, + ScanOutputTransformInput_ScanOutputTransformInput as DafnyScanOutputTransformInput, + TransactGetItemsInputTransformInput_TransactGetItemsInputTransformInput as DafnyTransactGetItemsInputTransformInput, + TransactGetItemsOutputTransformInput_TransactGetItemsOutputTransformInput as DafnyTransactGetItemsOutputTransformInput, + 
TransactWriteItemsInputTransformInput_TransactWriteItemsInputTransformInput as DafnyTransactWriteItemsInputTransformInput, + TransactWriteItemsOutputTransformInput_TransactWriteItemsOutputTransformInput as DafnyTransactWriteItemsOutputTransformInput, + UpdateItemInputTransformInput_UpdateItemInputTransformInput as DafnyUpdateItemInputTransformInput, + UpdateItemOutputTransformInput_UpdateItemOutputTransformInput as DafnyUpdateItemOutputTransformInput, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ + + +import smithy_dafny_standard_library.internaldafny.generated.Wrappers as Wrappers +from typing import Union + + +class DafnyRequest: + operation_name: str + + # dafny_operation_input can take on any one of the types + # of the input values passed to the Dafny implementation + dafny_operation_input: Union[ + DafnyTransactWriteItemsInputTransformInput, + DafnyExecuteTransactionInputTransformInput, + DafnyBatchExecuteStatementOutputTransformInput, + DafnyQueryOutputTransformInput, + DafnyExecuteStatementOutputTransformInput, + DafnyDeleteItemInputTransformInput, + DafnyBatchGetItemOutputTransformInput, + DafnyQueryInputTransformInput, + DafnyTransactWriteItemsOutputTransformInput, + DafnyPutItemOutputTransformInput, + DafnyBatchWriteItemOutputTransformInput, + DafnyUpdateItemOutputTransformInput, + DafnyUpdateItemInputTransformInput, + DafnyExecuteStatementInputTransformInput, + DafnyBatchGetItemInputTransformInput, + DafnyTransactGetItemsOutputTransformInput, + DafnyGetItemInputTransformInput, + DafnyBatchExecuteStatementInputTransformInput, + DafnyBatchWriteItemInputTransformInput, + DafnyDeleteItemOutputTransformInput, + DafnyExecuteTransactionOutputTransformInput, + DafnyResolveAttributesInput, + DafnyTransactGetItemsInputTransformInput, + DafnyScanInputTransformInput, + DafnyPutItemInputTransformInput, + DafnyGetItemOutputTransformInput, + DafnyScanOutputTransformInput, + ] + + def __init__(self, operation_name, dafny_operation_input): + self.operation_name = operation_name + self.dafny_operation_input = dafny_operation_input + + +class DafnyResponse(Wrappers.Result): + def __init__(self): + super().__init__(self) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/dafny_to_aws_sdk.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/dafny_to_aws_sdk.py new file mode 100644 index 000000000..4eacf1895 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/dafny_to_aws_sdk.py @@ -0,0 +1,1977 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
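+#
+# Dafny models strings as sequences of UTF-16 code units, so these converters
+# rebuild native `str` values by emitting each character as a big-endian 16-bit
+# unit and decoding the result as UTF-16-BE (the aws_sdk_to_dafny module does
+# the inverse). Hedged round-trip sketch in plain Python, not part of codegen:
+#
+#   s = "TableName"
+#   units = [chr(int.from_bytes(p, "big"))
+#            for p in zip(*[iter(s.encode("utf-16-be"))] * 2)]
+#   assert b"".join(ord(c).to_bytes(2, "big") for c in units).decode("utf-16-be") == s
+#
+# Optional members follow the Wrappers.Option convention: a Dafny field is read
+# only when `.is_Some` holds, mirroring the Option_Some / Option_None checks on
+# the native-to-Dafny side.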
+ +from aws_cryptography_internal_dynamodb.internaldafny.generated.ComAmazonawsDynamodbTypes import ( + AttributeAction_ADD, + AttributeAction_DELETE, + AttributeAction_PUT, + AttributeValue_B, + AttributeValue_BOOL, + AttributeValue_BS, + AttributeValue_L, + AttributeValue_M, + AttributeValue_N, + AttributeValue_NS, + AttributeValue_NULL, + AttributeValue_S, + AttributeValue_SS, + BatchStatementErrorCodeEnum_AccessDenied, + BatchStatementErrorCodeEnum_ConditionalCheckFailed, + BatchStatementErrorCodeEnum_DuplicateItem, + BatchStatementErrorCodeEnum_InternalServerError, + BatchStatementErrorCodeEnum_ItemCollectionSizeLimitExceeded, + BatchStatementErrorCodeEnum_ProvisionedThroughputExceeded, + BatchStatementErrorCodeEnum_RequestLimitExceeded, + BatchStatementErrorCodeEnum_ResourceNotFound, + BatchStatementErrorCodeEnum_ThrottlingError, + BatchStatementErrorCodeEnum_TransactionConflict, + BatchStatementErrorCodeEnum_ValidationError, + ComparisonOperator_BEGINS__WITH, + ComparisonOperator_BETWEEN, + ComparisonOperator_CONTAINS, + ComparisonOperator_EQ, + ComparisonOperator_GE, + ComparisonOperator_GT, + ComparisonOperator_IN, + ComparisonOperator_LE, + ComparisonOperator_LT, + ComparisonOperator_NE, + ComparisonOperator_NOT__CONTAINS, + ComparisonOperator_NOT__NULL, + ComparisonOperator_NULL, + ConditionalOperator_AND, + ConditionalOperator_OR, + ReturnConsumedCapacity_INDEXES, + ReturnConsumedCapacity_NONE, + ReturnConsumedCapacity_TOTAL, + ReturnItemCollectionMetrics_NONE, + ReturnItemCollectionMetrics_SIZE, + ReturnValue_ALL__NEW, + ReturnValue_ALL__OLD, + ReturnValue_NONE, + ReturnValue_UPDATED__NEW, + ReturnValue_UPDATED__OLD, + ReturnValuesOnConditionCheckFailure_ALL__OLD, + ReturnValuesOnConditionCheckFailure_NONE, + Select_ALL__ATTRIBUTES, + Select_ALL__PROJECTED__ATTRIBUTES, + Select_COUNT, + Select_SPECIFIC__ATTRIBUTES, +) +import aws_cryptography_internal_dynamodb.internaldafny.generated.module_ +import aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk + + +def com_amazonaws_dynamodb_AttributeValue(dafny_input): + # Convert AttributeValue + if isinstance(dafny_input, AttributeValue_S): + AttributeValue_union_value = { + "S": b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.S).decode("utf-16-be") + } + elif isinstance(dafny_input, AttributeValue_N): + AttributeValue_union_value = { + "N": b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.N).decode("utf-16-be") + } + elif isinstance(dafny_input, AttributeValue_B): + AttributeValue_union_value = {"B": bytes(dafny_input.B)} + elif isinstance(dafny_input, AttributeValue_SS): + AttributeValue_union_value = { + "SS": [ + b"".join(ord(c).to_bytes(2, "big") for c in list_element).decode("utf-16-be") + for list_element in dafny_input.SS + ] + } + elif isinstance(dafny_input, AttributeValue_NS): + AttributeValue_union_value = { + "NS": [ + b"".join(ord(c).to_bytes(2, "big") for c in list_element).decode("utf-16-be") + for list_element in dafny_input.NS + ] + } + elif isinstance(dafny_input, AttributeValue_BS): + AttributeValue_union_value = {"BS": [bytes(list_element) for list_element in dafny_input.BS]} + elif isinstance(dafny_input, AttributeValue_M): + AttributeValue_union_value = { + "M": { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.M.items + } + } + elif 
isinstance(dafny_input, AttributeValue_L): + AttributeValue_union_value = { + "L": [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + list_element + ) + for list_element in dafny_input.L + ] + } + elif isinstance(dafny_input, AttributeValue_NULL): + AttributeValue_union_value = {"NULL": dafny_input.NULL} + elif isinstance(dafny_input, AttributeValue_BOOL): + AttributeValue_union_value = {"BOOL": dafny_input.BOOL} + else: + raise ValueError("No recognized union value in union type: " + str(dafny_input)) + + return AttributeValue_union_value + + +def com_amazonaws_dynamodb_ExpectedAttributeValue(dafny_input): + output = {} + if dafny_input.Value.is_Some: + output["Value"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + dafny_input.Value.value + ) + ) + + if dafny_input.Exists.is_Some: + output["Exists"] = dafny_input.Exists.value + + if dafny_input.ComparisonOperator.is_Some: + output["ComparisonOperator"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ComparisonOperator( + dafny_input.ComparisonOperator.value + ) + ) + + if dafny_input.AttributeValueList.is_Some: + output["AttributeValueList"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + list_element + ) + for list_element in dafny_input.AttributeValueList.value + ] + + return output + + +def com_amazonaws_dynamodb_ComparisonOperator(dafny_input): + # Convert ComparisonOperator + if isinstance(dafny_input, ComparisonOperator_EQ): + return "EQ" + + elif isinstance(dafny_input, ComparisonOperator_NE): + return "NE" + + elif isinstance(dafny_input, ComparisonOperator_IN): + return "IN" + + elif isinstance(dafny_input, ComparisonOperator_LE): + return "LE" + + elif isinstance(dafny_input, ComparisonOperator_LT): + return "LT" + + elif isinstance(dafny_input, ComparisonOperator_GE): + return "GE" + + elif isinstance(dafny_input, ComparisonOperator_GT): + return "GT" + + elif isinstance(dafny_input, ComparisonOperator_BETWEEN): + return "BETWEEN" + + elif isinstance(dafny_input, ComparisonOperator_NOT__NULL): + return "NOT_NULL" + + elif isinstance(dafny_input, ComparisonOperator_NULL): + return "NULL" + + elif isinstance(dafny_input, ComparisonOperator_CONTAINS): + return "CONTAINS" + + elif isinstance(dafny_input, ComparisonOperator_NOT__CONTAINS): + return "NOT_CONTAINS" + + elif isinstance(dafny_input, ComparisonOperator_BEGINS__WITH): + return "BEGINS_WITH" + + else: + raise ValueError("No recognized enum value in enum type: " + dafny_input) + + +def com_amazonaws_dynamodb_ReturnValue(dafny_input): + # Convert ReturnValue + if isinstance(dafny_input, ReturnValue_NONE): + return "NONE" + + elif isinstance(dafny_input, ReturnValue_ALL__OLD): + return "ALL_OLD" + + elif isinstance(dafny_input, ReturnValue_UPDATED__OLD): + return "UPDATED_OLD" + + elif isinstance(dafny_input, ReturnValue_ALL__NEW): + return "ALL_NEW" + + elif isinstance(dafny_input, ReturnValue_UPDATED__NEW): + return "UPDATED_NEW" + + else: + raise ValueError("No recognized enum value in enum type: " + dafny_input) + + +def com_amazonaws_dynamodb_ReturnConsumedCapacity(dafny_input): + # Convert ReturnConsumedCapacity + if isinstance(dafny_input, ReturnConsumedCapacity_INDEXES): + return "INDEXES" + + elif isinstance(dafny_input, 
ReturnConsumedCapacity_TOTAL): + return "TOTAL" + + elif isinstance(dafny_input, ReturnConsumedCapacity_NONE): + return "NONE" + + else: + raise ValueError("No recognized enum value in enum type: " + dafny_input) + + +def com_amazonaws_dynamodb_ReturnItemCollectionMetrics(dafny_input): + # Convert ReturnItemCollectionMetrics + if isinstance(dafny_input, ReturnItemCollectionMetrics_SIZE): + return "SIZE" + + elif isinstance(dafny_input, ReturnItemCollectionMetrics_NONE): + return "NONE" + + else: + raise ValueError("No recognized enum value in enum type: " + dafny_input) + + +def com_amazonaws_dynamodb_ConditionalOperator(dafny_input): + # Convert ConditionalOperator + if isinstance(dafny_input, ConditionalOperator_AND): + return "AND" + + elif isinstance(dafny_input, ConditionalOperator_OR): + return "OR" + + else: + raise ValueError("No recognized enum value in enum type: " + dafny_input) + + +def com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure(dafny_input): + # Convert ReturnValuesOnConditionCheckFailure + if isinstance(dafny_input, ReturnValuesOnConditionCheckFailure_ALL__OLD): + return "ALL_OLD" + + elif isinstance(dafny_input, ReturnValuesOnConditionCheckFailure_NONE): + return "NONE" + + else: + raise ValueError("No recognized enum value in enum type: " + dafny_input) + + +def com_amazonaws_dynamodb_PutItemInput(dafny_input): + output = {} + output["TableName"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.TableName).decode("utf-16-be") + output["Item"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Item.items + } + if dafny_input.Expected.is_Some: + output["Expected"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ExpectedAttributeValue( + value + ) + for (key, value) in dafny_input.Expected.value.items + } + + if dafny_input.ReturnValues.is_Some: + output["ReturnValues"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnValue( + dafny_input.ReturnValues.value + ) + ) + + if dafny_input.ReturnConsumedCapacity.is_Some: + output["ReturnConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnConsumedCapacity( + dafny_input.ReturnConsumedCapacity.value + ) + ) + + if dafny_input.ReturnItemCollectionMetrics.is_Some: + output["ReturnItemCollectionMetrics"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnItemCollectionMetrics( + dafny_input.ReturnItemCollectionMetrics.value + ) + ) + + if dafny_input.ConditionalOperator.is_Some: + output["ConditionalOperator"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConditionalOperator( + dafny_input.ConditionalOperator.value + ) + ) + + if dafny_input.ConditionExpression.is_Some: + output["ConditionExpression"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.ConditionExpression.value + ).decode("utf-16-be") + + if dafny_input.ExpressionAttributeNames.is_Some: + output["ExpressionAttributeNames"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key) 
+ .decode("utf-16-be"): b"".join(ord(c).to_bytes(2, "big") for c in value) + .decode("utf-16-be") + for (key, value) in dafny_input.ExpressionAttributeNames.value.items + } + + if dafny_input.ExpressionAttributeValues.is_Some: + output["ExpressionAttributeValues"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.ExpressionAttributeValues.value.items + } + + if dafny_input.ReturnValuesOnConditionCheckFailure.is_Some: + output["ReturnValuesOnConditionCheckFailure"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + dafny_input.ReturnValuesOnConditionCheckFailure.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_ConsumedCapacity(dafny_input): + output = {} + if dafny_input.TableName.is_Some: + output["TableName"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.TableName.value).decode( + "utf-16-be" + ) + + if dafny_input.CapacityUnits.is_Some: + output["CapacityUnits"] = dafny_input.CapacityUnits.value + + if dafny_input.ReadCapacityUnits.is_Some: + output["ReadCapacityUnits"] = dafny_input.ReadCapacityUnits.value + + if dafny_input.WriteCapacityUnits.is_Some: + output["WriteCapacityUnits"] = dafny_input.WriteCapacityUnits.value + + if dafny_input.Table.is_Some: + output["Table"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_Capacity( + dafny_input.Table.value + ) + ) + + if dafny_input.LocalSecondaryIndexes.is_Some: + output["LocalSecondaryIndexes"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_Capacity( + value + ) + for (key, value) in dafny_input.LocalSecondaryIndexes.value.items + } + + if dafny_input.GlobalSecondaryIndexes.is_Some: + output["GlobalSecondaryIndexes"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_Capacity( + value + ) + for (key, value) in dafny_input.GlobalSecondaryIndexes.value.items + } + + return output + + +def com_amazonaws_dynamodb_Capacity(dafny_input): + output = {} + if dafny_input.ReadCapacityUnits.is_Some: + output["ReadCapacityUnits"] = dafny_input.ReadCapacityUnits.value + + if dafny_input.WriteCapacityUnits.is_Some: + output["WriteCapacityUnits"] = dafny_input.WriteCapacityUnits.value + + if dafny_input.CapacityUnits.is_Some: + output["CapacityUnits"] = dafny_input.CapacityUnits.value + + return output + + +def com_amazonaws_dynamodb_ItemCollectionMetrics(dafny_input): + output = {} + if dafny_input.ItemCollectionKey.is_Some: + output["ItemCollectionKey"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.ItemCollectionKey.value.items + } + + if dafny_input.SizeEstimateRangeGB.is_Some: + output["SizeEstimateRangeGB"] = [list_element for list_element in dafny_input.SizeEstimateRangeGB.value] + + return output + + +def 
com_amazonaws_dynamodb_PutItemOutput(dafny_input): + output = {} + if dafny_input.Attributes.is_Some: + output["Attributes"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Attributes.value.items + } + + if dafny_input.ConsumedCapacity.is_Some: + output["ConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConsumedCapacity( + dafny_input.ConsumedCapacity.value + ) + ) + + if dafny_input.ItemCollectionMetrics.is_Some: + output["ItemCollectionMetrics"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ItemCollectionMetrics( + dafny_input.ItemCollectionMetrics.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_GetItemInput(dafny_input): + output = {} + output["TableName"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.TableName).decode("utf-16-be") + output["Key"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Key.items + } + if dafny_input.AttributesToGet.is_Some: + output["AttributesToGet"] = [ + b"".join(ord(c).to_bytes(2, "big") for c in list_element).decode("utf-16-be") + for list_element in dafny_input.AttributesToGet.value + ] + + if dafny_input.ConsistentRead.is_Some: + output["ConsistentRead"] = dafny_input.ConsistentRead.value + + if dafny_input.ReturnConsumedCapacity.is_Some: + output["ReturnConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnConsumedCapacity( + dafny_input.ReturnConsumedCapacity.value + ) + ) + + if dafny_input.ProjectionExpression.is_Some: + output["ProjectionExpression"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.ProjectionExpression.value + ).decode("utf-16-be") + + if dafny_input.ExpressionAttributeNames.is_Some: + output["ExpressionAttributeNames"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key) + .decode("utf-16-be"): b"".join(ord(c).to_bytes(2, "big") for c in value) + .decode("utf-16-be") + for (key, value) in dafny_input.ExpressionAttributeNames.value.items + } + + return output + + +def com_amazonaws_dynamodb_GetItemOutput(dafny_input): + output = {} + if dafny_input.Item.is_Some: + output["Item"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Item.value.items + } + + if dafny_input.ConsumedCapacity.is_Some: + output["ConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConsumedCapacity( + dafny_input.ConsumedCapacity.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_WriteRequest(dafny_input): + output = {} + if dafny_input.PutRequest.is_Some: + output["PutRequest"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_PutRequest( + dafny_input.PutRequest.value + ) + ) + + if 
dafny_input.DeleteRequest.is_Some: + output["DeleteRequest"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_DeleteRequest( + dafny_input.DeleteRequest.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_PutRequest(dafny_input): + output = {} + output["Item"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Item.items + } + return output + + +def com_amazonaws_dynamodb_DeleteRequest(dafny_input): + output = {} + output["Key"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Key.items + } + return output + + +def com_amazonaws_dynamodb_BatchWriteItemInput(dafny_input): + output = {} + output["RequestItems"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode("utf-16-be"): [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_WriteRequest( + list_element + ) + for list_element in value + ] + for (key, value) in dafny_input.RequestItems.items + } + if dafny_input.ReturnConsumedCapacity.is_Some: + output["ReturnConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnConsumedCapacity( + dafny_input.ReturnConsumedCapacity.value + ) + ) + + if dafny_input.ReturnItemCollectionMetrics.is_Some: + output["ReturnItemCollectionMetrics"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnItemCollectionMetrics( + dafny_input.ReturnItemCollectionMetrics.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_BatchWriteItemOutput(dafny_input): + output = {} + if dafny_input.UnprocessedItems.is_Some: + output["UnprocessedItems"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode("utf-16-be"): [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_WriteRequest( + list_element + ) + for list_element in value + ] + for (key, value) in dafny_input.UnprocessedItems.value.items + } + + if dafny_input.ItemCollectionMetrics.is_Some: + output["ItemCollectionMetrics"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode("utf-16-be"): [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ItemCollectionMetrics( + list_element + ) + for list_element in value + ] + for (key, value) in dafny_input.ItemCollectionMetrics.value.items + } + + if dafny_input.ConsumedCapacity.is_Some: + output["ConsumedCapacity"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConsumedCapacity( + list_element + ) + for list_element in dafny_input.ConsumedCapacity.value + ] + + return output + + +def com_amazonaws_dynamodb_KeysAndAttributes(dafny_input): + output = {} + output["Keys"] = [ + { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( 
+ value + ) + for (key, value) in list_element.items + } + for list_element in dafny_input.Keys + ] + if dafny_input.AttributesToGet.is_Some: + output["AttributesToGet"] = [ + b"".join(ord(c).to_bytes(2, "big") for c in list_element).decode("utf-16-be") + for list_element in dafny_input.AttributesToGet.value + ] + + if dafny_input.ConsistentRead.is_Some: + output["ConsistentRead"] = dafny_input.ConsistentRead.value + + if dafny_input.ProjectionExpression.is_Some: + output["ProjectionExpression"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.ProjectionExpression.value + ).decode("utf-16-be") + + if dafny_input.ExpressionAttributeNames.is_Some: + output["ExpressionAttributeNames"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key) + .decode("utf-16-be"): b"".join(ord(c).to_bytes(2, "big") for c in value) + .decode("utf-16-be") + for (key, value) in dafny_input.ExpressionAttributeNames.value.items + } + + return output + + +def com_amazonaws_dynamodb_BatchGetItemInput(dafny_input): + output = {} + output["RequestItems"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_KeysAndAttributes( + value + ) + for (key, value) in dafny_input.RequestItems.items + } + if dafny_input.ReturnConsumedCapacity.is_Some: + output["ReturnConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnConsumedCapacity( + dafny_input.ReturnConsumedCapacity.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_BatchGetItemOutput(dafny_input): + output = {} + if dafny_input.Responses.is_Some: + output["Responses"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode("utf-16-be"): [ + { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in list_element.items + } + for list_element in value + ] + for (key, value) in dafny_input.Responses.value.items + } + + if dafny_input.UnprocessedKeys.is_Some: + output["UnprocessedKeys"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_KeysAndAttributes( + value + ) + for (key, value) in dafny_input.UnprocessedKeys.value.items + } + + if dafny_input.ConsumedCapacity.is_Some: + output["ConsumedCapacity"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConsumedCapacity( + list_element + ) + for list_element in dafny_input.ConsumedCapacity.value + ] + + return output + + +def com_amazonaws_dynamodb_Select(dafny_input): + # Convert Select + if isinstance(dafny_input, Select_ALL__ATTRIBUTES): + return "ALL_ATTRIBUTES" + + elif isinstance(dafny_input, Select_ALL__PROJECTED__ATTRIBUTES): + return "ALL_PROJECTED_ATTRIBUTES" + + elif isinstance(dafny_input, Select_SPECIFIC__ATTRIBUTES): + return "SPECIFIC_ATTRIBUTES" + + elif isinstance(dafny_input, Select_COUNT): + return "COUNT" + + else: + raise ValueError("No recognized enum value in enum type: " + dafny_input) + + +def com_amazonaws_dynamodb_Condition(dafny_input): + output = {} + if dafny_input.AttributeValueList.is_Some: + output["AttributeValueList"] = [ + 
aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + list_element + ) + for list_element in dafny_input.AttributeValueList.value + ] + + output["ComparisonOperator"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ComparisonOperator( + dafny_input.ComparisonOperator + ) + ) + return output + + +def com_amazonaws_dynamodb_ScanInput(dafny_input): + output = {} + output["TableName"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.TableName).decode("utf-16-be") + if dafny_input.IndexName.is_Some: + output["IndexName"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.IndexName.value).decode( + "utf-16-be" + ) + + if dafny_input.AttributesToGet.is_Some: + output["AttributesToGet"] = [ + b"".join(ord(c).to_bytes(2, "big") for c in list_element).decode("utf-16-be") + for list_element in dafny_input.AttributesToGet.value + ] + + if dafny_input.Limit.is_Some: + output["Limit"] = dafny_input.Limit.value + + if dafny_input.Select.is_Some: + output["Select"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_Select( + dafny_input.Select.value + ) + ) + + if dafny_input.ScanFilter.is_Some: + output["ScanFilter"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_Condition( + value + ) + for (key, value) in dafny_input.ScanFilter.value.items + } + + if dafny_input.ConditionalOperator.is_Some: + output["ConditionalOperator"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConditionalOperator( + dafny_input.ConditionalOperator.value + ) + ) + + if dafny_input.ExclusiveStartKey.is_Some: + output["ExclusiveStartKey"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.ExclusiveStartKey.value.items + } + + if dafny_input.ReturnConsumedCapacity.is_Some: + output["ReturnConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnConsumedCapacity( + dafny_input.ReturnConsumedCapacity.value + ) + ) + + if dafny_input.TotalSegments.is_Some: + output["TotalSegments"] = dafny_input.TotalSegments.value + + if dafny_input.Segment.is_Some: + output["Segment"] = dafny_input.Segment.value + + if dafny_input.ProjectionExpression.is_Some: + output["ProjectionExpression"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.ProjectionExpression.value + ).decode("utf-16-be") + + if dafny_input.FilterExpression.is_Some: + output["FilterExpression"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.FilterExpression.value + ).decode("utf-16-be") + + if dafny_input.ExpressionAttributeNames.is_Some: + output["ExpressionAttributeNames"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key) + .decode("utf-16-be"): b"".join(ord(c).to_bytes(2, "big") for c in value) + .decode("utf-16-be") + for (key, value) in dafny_input.ExpressionAttributeNames.value.items + } + + if dafny_input.ExpressionAttributeValues.is_Some: + output["ExpressionAttributeValues"] = { + 
b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.ExpressionAttributeValues.value.items + } + + if dafny_input.ConsistentRead.is_Some: + output["ConsistentRead"] = dafny_input.ConsistentRead.value + + return output + + +def com_amazonaws_dynamodb_ScanOutput(dafny_input): + output = {} + if dafny_input.Items.is_Some: + output["Items"] = [ + { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in list_element.items + } + for list_element in dafny_input.Items.value + ] + + if dafny_input.Count.is_Some: + output["Count"] = dafny_input.Count.value + + if dafny_input.ScannedCount.is_Some: + output["ScannedCount"] = dafny_input.ScannedCount.value + + if dafny_input.LastEvaluatedKey.is_Some: + output["LastEvaluatedKey"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.LastEvaluatedKey.value.items + } + + if dafny_input.ConsumedCapacity.is_Some: + output["ConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConsumedCapacity( + dafny_input.ConsumedCapacity.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_QueryInput(dafny_input): + output = {} + output["TableName"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.TableName).decode("utf-16-be") + if dafny_input.IndexName.is_Some: + output["IndexName"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.IndexName.value).decode( + "utf-16-be" + ) + + if dafny_input.Select.is_Some: + output["Select"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_Select( + dafny_input.Select.value + ) + ) + + if dafny_input.AttributesToGet.is_Some: + output["AttributesToGet"] = [ + b"".join(ord(c).to_bytes(2, "big") for c in list_element).decode("utf-16-be") + for list_element in dafny_input.AttributesToGet.value + ] + + if dafny_input.Limit.is_Some: + output["Limit"] = dafny_input.Limit.value + + if dafny_input.ConsistentRead.is_Some: + output["ConsistentRead"] = dafny_input.ConsistentRead.value + + if dafny_input.KeyConditions.is_Some: + output["KeyConditions"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_Condition( + value + ) + for (key, value) in dafny_input.KeyConditions.value.items + } + + if dafny_input.QueryFilter.is_Some: + output["QueryFilter"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_Condition( + value + ) + for (key, value) in dafny_input.QueryFilter.value.items + } + + if dafny_input.ConditionalOperator.is_Some: + output["ConditionalOperator"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConditionalOperator( + 
dafny_input.ConditionalOperator.value + ) + ) + + if dafny_input.ScanIndexForward.is_Some: + output["ScanIndexForward"] = dafny_input.ScanIndexForward.value + + if dafny_input.ExclusiveStartKey.is_Some: + output["ExclusiveStartKey"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.ExclusiveStartKey.value.items + } + + if dafny_input.ReturnConsumedCapacity.is_Some: + output["ReturnConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnConsumedCapacity( + dafny_input.ReturnConsumedCapacity.value + ) + ) + + if dafny_input.ProjectionExpression.is_Some: + output["ProjectionExpression"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.ProjectionExpression.value + ).decode("utf-16-be") + + if dafny_input.FilterExpression.is_Some: + output["FilterExpression"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.FilterExpression.value + ).decode("utf-16-be") + + if dafny_input.KeyConditionExpression.is_Some: + output["KeyConditionExpression"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.KeyConditionExpression.value + ).decode("utf-16-be") + + if dafny_input.ExpressionAttributeNames.is_Some: + output["ExpressionAttributeNames"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key) + .decode("utf-16-be"): b"".join(ord(c).to_bytes(2, "big") for c in value) + .decode("utf-16-be") + for (key, value) in dafny_input.ExpressionAttributeNames.value.items + } + + if dafny_input.ExpressionAttributeValues.is_Some: + output["ExpressionAttributeValues"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.ExpressionAttributeValues.value.items + } + + return output + + +def com_amazonaws_dynamodb_QueryOutput(dafny_input): + output = {} + if dafny_input.Items.is_Some: + output["Items"] = [ + { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in list_element.items + } + for list_element in dafny_input.Items.value + ] + + if dafny_input.Count.is_Some: + output["Count"] = dafny_input.Count.value + + if dafny_input.ScannedCount.is_Some: + output["ScannedCount"] = dafny_input.ScannedCount.value + + if dafny_input.LastEvaluatedKey.is_Some: + output["LastEvaluatedKey"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.LastEvaluatedKey.value.items + } + + if dafny_input.ConsumedCapacity.is_Some: + output["ConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConsumedCapacity( + dafny_input.ConsumedCapacity.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_TransactWriteItem(dafny_input): + output = {} + if dafny_input.ConditionCheck.is_Some: + output["ConditionCheck"] = ( + 
aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConditionCheck( + dafny_input.ConditionCheck.value + ) + ) + + if dafny_input.Put.is_Some: + output["Put"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_Put( + dafny_input.Put.value + ) + ) + + if dafny_input.Delete.is_Some: + output["Delete"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_Delete( + dafny_input.Delete.value + ) + ) + + if dafny_input.Update.is_Some: + output["Update"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_Update( + dafny_input.Update.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_ConditionCheck(dafny_input): + output = {} + output["Key"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Key.items + } + output["TableName"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.TableName).decode("utf-16-be") + output["ConditionExpression"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.ConditionExpression).decode( + "utf-16-be" + ) + if dafny_input.ExpressionAttributeNames.is_Some: + output["ExpressionAttributeNames"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key) + .decode("utf-16-be"): b"".join(ord(c).to_bytes(2, "big") for c in value) + .decode("utf-16-be") + for (key, value) in dafny_input.ExpressionAttributeNames.value.items + } + + if dafny_input.ExpressionAttributeValues.is_Some: + output["ExpressionAttributeValues"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.ExpressionAttributeValues.value.items + } + + if dafny_input.ReturnValuesOnConditionCheckFailure.is_Some: + output["ReturnValuesOnConditionCheckFailure"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + dafny_input.ReturnValuesOnConditionCheckFailure.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_Put(dafny_input): + output = {} + output["Item"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Item.items + } + output["TableName"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.TableName).decode("utf-16-be") + if dafny_input.ConditionExpression.is_Some: + output["ConditionExpression"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.ConditionExpression.value + ).decode("utf-16-be") + + if dafny_input.ExpressionAttributeNames.is_Some: + output["ExpressionAttributeNames"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key) + .decode("utf-16-be"): b"".join(ord(c).to_bytes(2, "big") for c in value) + .decode("utf-16-be") + for (key, value) in dafny_input.ExpressionAttributeNames.value.items + } + + if dafny_input.ExpressionAttributeValues.is_Some: + 
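# Optional members all follow one guard pattern: a Dafny Option exposes
# is_Some, and the wrapped value is read from .value only when the option is
# populated, so absent members never appear as keys in the request dict. A
# small illustration of that shape; _FakeOption and _copy_if_present are
# stand-ins for this sketch only, not types from the Dafny runtime:
class _FakeOption:
    def __init__(self, value=None, is_some=False):
        self.value = value
        self.is_Some = is_some


def _copy_if_present(output, key, option, convert=lambda v: v):
    # Mirrors the `if x.is_Some: output[key] = convert(x.value)` guard above.
    if option.is_Some:
        output[key] = convert(option.value)


# Example: _copy_if_present(out, "Limit", _FakeOption(10, True)) adds the key,
# while _copy_if_present(out, "Limit", _FakeOption()) leaves out unchanged.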
output["ExpressionAttributeValues"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.ExpressionAttributeValues.value.items + } + + if dafny_input.ReturnValuesOnConditionCheckFailure.is_Some: + output["ReturnValuesOnConditionCheckFailure"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + dafny_input.ReturnValuesOnConditionCheckFailure.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_Delete(dafny_input): + output = {} + output["Key"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Key.items + } + output["TableName"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.TableName).decode("utf-16-be") + if dafny_input.ConditionExpression.is_Some: + output["ConditionExpression"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.ConditionExpression.value + ).decode("utf-16-be") + + if dafny_input.ExpressionAttributeNames.is_Some: + output["ExpressionAttributeNames"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key) + .decode("utf-16-be"): b"".join(ord(c).to_bytes(2, "big") for c in value) + .decode("utf-16-be") + for (key, value) in dafny_input.ExpressionAttributeNames.value.items + } + + if dafny_input.ExpressionAttributeValues.is_Some: + output["ExpressionAttributeValues"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.ExpressionAttributeValues.value.items + } + + if dafny_input.ReturnValuesOnConditionCheckFailure.is_Some: + output["ReturnValuesOnConditionCheckFailure"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + dafny_input.ReturnValuesOnConditionCheckFailure.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_Update(dafny_input): + output = {} + output["Key"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Key.items + } + output["UpdateExpression"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.UpdateExpression).decode( + "utf-16-be" + ) + output["TableName"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.TableName).decode("utf-16-be") + if dafny_input.ConditionExpression.is_Some: + output["ConditionExpression"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.ConditionExpression.value + ).decode("utf-16-be") + + if dafny_input.ExpressionAttributeNames.is_Some: + output["ExpressionAttributeNames"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key) + .decode("utf-16-be"): b"".join(ord(c).to_bytes(2, "big") for c in value) + .decode("utf-16-be") + for (key, value) in dafny_input.ExpressionAttributeNames.value.items + } + + if 
dafny_input.ExpressionAttributeValues.is_Some: + output["ExpressionAttributeValues"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.ExpressionAttributeValues.value.items + } + + if dafny_input.ReturnValuesOnConditionCheckFailure.is_Some: + output["ReturnValuesOnConditionCheckFailure"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + dafny_input.ReturnValuesOnConditionCheckFailure.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_TransactWriteItemsInput(dafny_input): + output = {} + output["TransactItems"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_TransactWriteItem( + list_element + ) + for list_element in dafny_input.TransactItems + ] + if dafny_input.ReturnConsumedCapacity.is_Some: + output["ReturnConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnConsumedCapacity( + dafny_input.ReturnConsumedCapacity.value + ) + ) + + if dafny_input.ReturnItemCollectionMetrics.is_Some: + output["ReturnItemCollectionMetrics"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnItemCollectionMetrics( + dafny_input.ReturnItemCollectionMetrics.value + ) + ) + + if dafny_input.ClientRequestToken.is_Some: + output["ClientRequestToken"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.ClientRequestToken.value + ).decode("utf-16-be") + + return output + + +def com_amazonaws_dynamodb_TransactWriteItemsOutput(dafny_input): + output = {} + if dafny_input.ConsumedCapacity.is_Some: + output["ConsumedCapacity"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConsumedCapacity( + list_element + ) + for list_element in dafny_input.ConsumedCapacity.value + ] + + if dafny_input.ItemCollectionMetrics.is_Some: + output["ItemCollectionMetrics"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode("utf-16-be"): [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ItemCollectionMetrics( + list_element + ) + for list_element in value + ] + for (key, value) in dafny_input.ItemCollectionMetrics.value.items + } + + return output + + +def com_amazonaws_dynamodb_AttributeValueUpdate(dafny_input): + output = {} + if dafny_input.Value.is_Some: + output["Value"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + dafny_input.Value.value + ) + ) + + if dafny_input.Action.is_Some: + output["Action"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeAction( + dafny_input.Action.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_AttributeAction(dafny_input): + # Convert AttributeAction + if isinstance(dafny_input, AttributeAction_ADD): + return "ADD" + + elif isinstance(dafny_input, AttributeAction_PUT): + return "PUT" + + elif isinstance(dafny_input, AttributeAction_DELETE): + return "DELETE" + + else: + raise ValueError("No recognized enum 
value in enum type: " + dafny_input) + + +def com_amazonaws_dynamodb_UpdateItemInput(dafny_input): + output = {} + output["TableName"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.TableName).decode("utf-16-be") + output["Key"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Key.items + } + if dafny_input.AttributeUpdates.is_Some: + output["AttributeUpdates"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValueUpdate( + value + ) + for (key, value) in dafny_input.AttributeUpdates.value.items + } + + if dafny_input.Expected.is_Some: + output["Expected"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ExpectedAttributeValue( + value + ) + for (key, value) in dafny_input.Expected.value.items + } + + if dafny_input.ConditionalOperator.is_Some: + output["ConditionalOperator"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConditionalOperator( + dafny_input.ConditionalOperator.value + ) + ) + + if dafny_input.ReturnValues.is_Some: + output["ReturnValues"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnValue( + dafny_input.ReturnValues.value + ) + ) + + if dafny_input.ReturnConsumedCapacity.is_Some: + output["ReturnConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnConsumedCapacity( + dafny_input.ReturnConsumedCapacity.value + ) + ) + + if dafny_input.ReturnItemCollectionMetrics.is_Some: + output["ReturnItemCollectionMetrics"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnItemCollectionMetrics( + dafny_input.ReturnItemCollectionMetrics.value + ) + ) + + if dafny_input.UpdateExpression.is_Some: + output["UpdateExpression"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.UpdateExpression.value + ).decode("utf-16-be") + + if dafny_input.ConditionExpression.is_Some: + output["ConditionExpression"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.ConditionExpression.value + ).decode("utf-16-be") + + if dafny_input.ExpressionAttributeNames.is_Some: + output["ExpressionAttributeNames"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key) + .decode("utf-16-be"): b"".join(ord(c).to_bytes(2, "big") for c in value) + .decode("utf-16-be") + for (key, value) in dafny_input.ExpressionAttributeNames.value.items + } + + if dafny_input.ExpressionAttributeValues.is_Some: + output["ExpressionAttributeValues"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.ExpressionAttributeValues.value.items + } + + if dafny_input.ReturnValuesOnConditionCheckFailure.is_Some: + output["ReturnValuesOnConditionCheckFailure"] = ( + 
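# Enum values cross the Dafny/Python boundary as datatype constructors (for
# example AttributeAction_PUT) and are mapped to the plain strings the AWS SDK
# expects by isinstance dispatch, as in com_amazonaws_dynamodb_AttributeAction
# above. A minimal sketch of the same dispatch over a made-up two-value enum;
# the classes below are illustrations, not generated Dafny types:
class _Fruit_APPLE:
    pass


class _Fruit_PEAR:
    pass


def _fruit_to_str(dafny_enum):
    if isinstance(dafny_enum, _Fruit_APPLE):
        return "APPLE"
    elif isinstance(dafny_enum, _Fruit_PEAR):
        return "PEAR"
    # The generated converters raise ValueError on an unrecognized value; this
    # sketch wraps the value in str() because concatenating a non-string onto
    # the message would itself raise TypeError.
    raise ValueError("No recognized enum value in enum type: " + str(dafny_enum))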
aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + dafny_input.ReturnValuesOnConditionCheckFailure.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_UpdateItemOutput(dafny_input): + output = {} + if dafny_input.Attributes.is_Some: + output["Attributes"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Attributes.value.items + } + + if dafny_input.ConsumedCapacity.is_Some: + output["ConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConsumedCapacity( + dafny_input.ConsumedCapacity.value + ) + ) + + if dafny_input.ItemCollectionMetrics.is_Some: + output["ItemCollectionMetrics"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ItemCollectionMetrics( + dafny_input.ItemCollectionMetrics.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_DeleteItemInput(dafny_input): + output = {} + output["TableName"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.TableName).decode("utf-16-be") + output["Key"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Key.items + } + if dafny_input.Expected.is_Some: + output["Expected"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ExpectedAttributeValue( + value + ) + for (key, value) in dafny_input.Expected.value.items + } + + if dafny_input.ConditionalOperator.is_Some: + output["ConditionalOperator"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConditionalOperator( + dafny_input.ConditionalOperator.value + ) + ) + + if dafny_input.ReturnValues.is_Some: + output["ReturnValues"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnValue( + dafny_input.ReturnValues.value + ) + ) + + if dafny_input.ReturnConsumedCapacity.is_Some: + output["ReturnConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnConsumedCapacity( + dafny_input.ReturnConsumedCapacity.value + ) + ) + + if dafny_input.ReturnItemCollectionMetrics.is_Some: + output["ReturnItemCollectionMetrics"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnItemCollectionMetrics( + dafny_input.ReturnItemCollectionMetrics.value + ) + ) + + if dafny_input.ConditionExpression.is_Some: + output["ConditionExpression"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.ConditionExpression.value + ).decode("utf-16-be") + + if dafny_input.ExpressionAttributeNames.is_Some: + output["ExpressionAttributeNames"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key) + .decode("utf-16-be"): b"".join(ord(c).to_bytes(2, "big") for c in value) + .decode("utf-16-be") 
+ for (key, value) in dafny_input.ExpressionAttributeNames.value.items + } + + if dafny_input.ExpressionAttributeValues.is_Some: + output["ExpressionAttributeValues"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.ExpressionAttributeValues.value.items + } + + if dafny_input.ReturnValuesOnConditionCheckFailure.is_Some: + output["ReturnValuesOnConditionCheckFailure"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + dafny_input.ReturnValuesOnConditionCheckFailure.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_DeleteItemOutput(dafny_input): + output = {} + if dafny_input.Attributes.is_Some: + output["Attributes"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Attributes.value.items + } + + if dafny_input.ConsumedCapacity.is_Some: + output["ConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConsumedCapacity( + dafny_input.ConsumedCapacity.value + ) + ) + + if dafny_input.ItemCollectionMetrics.is_Some: + output["ItemCollectionMetrics"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ItemCollectionMetrics( + dafny_input.ItemCollectionMetrics.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_TransactGetItem(dafny_input): + output = {} + output["Get"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_Get( + dafny_input.Get + ) + ) + return output + + +def com_amazonaws_dynamodb_Get(dafny_input): + output = {} + output["Key"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Key.items + } + output["TableName"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.TableName).decode("utf-16-be") + if dafny_input.ProjectionExpression.is_Some: + output["ProjectionExpression"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.ProjectionExpression.value + ).decode("utf-16-be") + + if dafny_input.ExpressionAttributeNames.is_Some: + output["ExpressionAttributeNames"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key) + .decode("utf-16-be"): b"".join(ord(c).to_bytes(2, "big") for c in value) + .decode("utf-16-be") + for (key, value) in dafny_input.ExpressionAttributeNames.value.items + } + + return output + + +def com_amazonaws_dynamodb_TransactGetItemsInput(dafny_input): + output = {} + output["TransactItems"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_TransactGetItem( + list_element + ) + for list_element in dafny_input.TransactItems + ] + if dafny_input.ReturnConsumedCapacity.is_Some: + output["ReturnConsumedCapacity"] = ( + 
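# The dicts these converters build use the same key names and value shapes as
# the boto3 DynamoDB client parameters. As an illustration only (this is not a
# call site from this package, and the shim that drives the client may be
# wired differently), a converted request could be passed straight to a
# low-level client:
def _transact_get_items_via_boto3(dafny_transact_get_items_input, ddb_client=None):
    import boto3  # local import so the sketch stands alone

    # dafny_transact_get_items_input stands in for a TransactGetItemsInput
    # value produced by Dafny-compiled code.
    ddb_client = ddb_client or boto3.client("dynamodb")
    request = com_amazonaws_dynamodb_TransactGetItemsInput(dafny_transact_get_items_input)
    return ddb_client.transact_get_items(**request)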
aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnConsumedCapacity( + dafny_input.ReturnConsumedCapacity.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_ItemResponse(dafny_input): + output = {} + if dafny_input.Item.is_Some: + output["Item"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Item.value.items + } + + return output + + +def com_amazonaws_dynamodb_TransactGetItemsOutput(dafny_input): + output = {} + if dafny_input.ConsumedCapacity.is_Some: + output["ConsumedCapacity"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConsumedCapacity( + list_element + ) + for list_element in dafny_input.ConsumedCapacity.value + ] + + if dafny_input.Responses.is_Some: + output["Responses"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ItemResponse( + list_element + ) + for list_element in dafny_input.Responses.value + ] + + return output + + +def com_amazonaws_dynamodb_ExecuteStatementInput(dafny_input): + output = {} + output["Statement"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.Statement).decode("utf-16-be") + if dafny_input.Parameters.is_Some: + output["Parameters"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + list_element + ) + for list_element in dafny_input.Parameters.value + ] + + if dafny_input.ConsistentRead.is_Some: + output["ConsistentRead"] = dafny_input.ConsistentRead.value + + if dafny_input.NextToken.is_Some: + output["NextToken"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.NextToken.value).decode( + "utf-16-be" + ) + + if dafny_input.ReturnConsumedCapacity.is_Some: + output["ReturnConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnConsumedCapacity( + dafny_input.ReturnConsumedCapacity.value + ) + ) + + if dafny_input.Limit.is_Some: + output["Limit"] = dafny_input.Limit.value + + if dafny_input.ReturnValuesOnConditionCheckFailure.is_Some: + output["ReturnValuesOnConditionCheckFailure"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + dafny_input.ReturnValuesOnConditionCheckFailure.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_ExecuteStatementOutput(dafny_input): + output = {} + if dafny_input.Items.is_Some: + output["Items"] = [ + { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in list_element.items + } + for list_element in dafny_input.Items.value + ] + + if dafny_input.NextToken.is_Some: + output["NextToken"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.NextToken.value).decode( + "utf-16-be" + ) + + if dafny_input.ConsumedCapacity.is_Some: + output["ConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConsumedCapacity( + 
dafny_input.ConsumedCapacity.value + ) + ) + + if dafny_input.LastEvaluatedKey.is_Some: + output["LastEvaluatedKey"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.LastEvaluatedKey.value.items + } + + return output + + +def com_amazonaws_dynamodb_BatchStatementRequest(dafny_input): + output = {} + output["Statement"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.Statement).decode("utf-16-be") + if dafny_input.Parameters.is_Some: + output["Parameters"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + list_element + ) + for list_element in dafny_input.Parameters.value + ] + + if dafny_input.ConsistentRead.is_Some: + output["ConsistentRead"] = dafny_input.ConsistentRead.value + + if dafny_input.ReturnValuesOnConditionCheckFailure.is_Some: + output["ReturnValuesOnConditionCheckFailure"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + dafny_input.ReturnValuesOnConditionCheckFailure.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_BatchExecuteStatementInput(dafny_input): + output = {} + output["Statements"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchStatementRequest( + list_element + ) + for list_element in dafny_input.Statements + ] + if dafny_input.ReturnConsumedCapacity.is_Some: + output["ReturnConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnConsumedCapacity( + dafny_input.ReturnConsumedCapacity.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_BatchStatementResponse(dafny_input): + output = {} + if dafny_input.Error.is_Some: + output["Error"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchStatementError( + dafny_input.Error.value + ) + ) + + if dafny_input.TableName.is_Some: + output["TableName"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.TableName.value).decode( + "utf-16-be" + ) + + if dafny_input.Item.is_Some: + output["Item"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Item.value.items + } + + return output + + +def com_amazonaws_dynamodb_BatchStatementError(dafny_input): + output = {} + if dafny_input.Code.is_Some: + output["Code"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchStatementErrorCodeEnum( + dafny_input.Code.value + ) + ) + + if dafny_input.Message.is_Some: + output["Message"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.Message.value).decode("utf-16-be") + + if dafny_input.Item.is_Some: + output["Item"] = { + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in 
dafny_input.Item.value.items + } + + return output + + +def com_amazonaws_dynamodb_BatchStatementErrorCodeEnum(dafny_input): + # Convert BatchStatementErrorCodeEnum + if isinstance(dafny_input, BatchStatementErrorCodeEnum_ConditionalCheckFailed): + return "ConditionalCheckFailed" + + elif isinstance(dafny_input, BatchStatementErrorCodeEnum_ItemCollectionSizeLimitExceeded): + return "ItemCollectionSizeLimitExceeded" + + elif isinstance(dafny_input, BatchStatementErrorCodeEnum_RequestLimitExceeded): + return "RequestLimitExceeded" + + elif isinstance(dafny_input, BatchStatementErrorCodeEnum_ValidationError): + return "ValidationError" + + elif isinstance(dafny_input, BatchStatementErrorCodeEnum_ProvisionedThroughputExceeded): + return "ProvisionedThroughputExceeded" + + elif isinstance(dafny_input, BatchStatementErrorCodeEnum_TransactionConflict): + return "TransactionConflict" + + elif isinstance(dafny_input, BatchStatementErrorCodeEnum_ThrottlingError): + return "ThrottlingError" + + elif isinstance(dafny_input, BatchStatementErrorCodeEnum_InternalServerError): + return "InternalServerError" + + elif isinstance(dafny_input, BatchStatementErrorCodeEnum_ResourceNotFound): + return "ResourceNotFound" + + elif isinstance(dafny_input, BatchStatementErrorCodeEnum_AccessDenied): + return "AccessDenied" + + elif isinstance(dafny_input, BatchStatementErrorCodeEnum_DuplicateItem): + return "DuplicateItem" + + else: + raise ValueError("No recognized enum value in enum type: " + dafny_input) + + +def com_amazonaws_dynamodb_BatchExecuteStatementOutput(dafny_input): + output = {} + if dafny_input.Responses.is_Some: + output["Responses"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchStatementResponse( + list_element + ) + for list_element in dafny_input.Responses.value + ] + + if dafny_input.ConsumedCapacity.is_Some: + output["ConsumedCapacity"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConsumedCapacity( + list_element + ) + for list_element in dafny_input.ConsumedCapacity.value + ] + + return output + + +def com_amazonaws_dynamodb_ParameterizedStatement(dafny_input): + output = {} + output["Statement"] = b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.Statement).decode("utf-16-be") + if dafny_input.Parameters.is_Some: + output["Parameters"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + list_element + ) + for list_element in dafny_input.Parameters.value + ] + + if dafny_input.ReturnValuesOnConditionCheckFailure.is_Some: + output["ReturnValuesOnConditionCheckFailure"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnValuesOnConditionCheckFailure( + dafny_input.ReturnValuesOnConditionCheckFailure.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_ExecuteTransactionInput(dafny_input): + output = {} + output["TransactStatements"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ParameterizedStatement( + list_element + ) + for list_element in dafny_input.TransactStatements + ] + if dafny_input.ClientRequestToken.is_Some: + output["ClientRequestToken"] = b"".join( + ord(c).to_bytes(2, "big") for c in dafny_input.ClientRequestToken.value + ).decode("utf-16-be") + + if 
dafny_input.ReturnConsumedCapacity.is_Some: + output["ReturnConsumedCapacity"] = ( + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ReturnConsumedCapacity( + dafny_input.ReturnConsumedCapacity.value + ) + ) + + return output + + +def com_amazonaws_dynamodb_ExecuteTransactionOutput(dafny_input): + output = {} + if dafny_input.Responses.is_Some: + output["Responses"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ItemResponse( + list_element + ) + for list_element in dafny_input.Responses.value + ] + + if dafny_input.ConsumedCapacity.is_Some: + output["ConsumedCapacity"] = [ + aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ConsumedCapacity( + list_element + ) + for list_element in dafny_input.ConsumedCapacity.value + ] + + return output diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/dafny_to_smithy.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/dafny_to_smithy.py new file mode 100644 index 000000000..11826b3ec --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/dafny_to_smithy.py @@ -0,0 +1,641 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +import aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_PutItemInputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.PutItemInputTransformInput( + sdk_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_PutItemInput( + dafny_input.sdkInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_PutItemOutputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.PutItemOutputTransformInput( + sdk_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_PutItemOutput( + dafny_input.sdkOutput + ), + original_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_PutItemInput( + dafny_input.originalInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_GetItemInputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.GetItemInputTransformInput( + sdk_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_GetItemInput( + dafny_input.sdkInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_GetItemOutputTransformInput( + dafny_input, +): + return 
aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.GetItemOutputTransformInput( + sdk_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_GetItemOutput( + dafny_input.sdkOutput + ), + original_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_GetItemInput( + dafny_input.originalInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchWriteItemInputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.BatchWriteItemInputTransformInput( + sdk_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchWriteItemInput( + dafny_input.sdkInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchWriteItemOutputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.BatchWriteItemOutputTransformInput( + sdk_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchWriteItemOutput( + dafny_input.sdkOutput + ), + original_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchWriteItemInput( + dafny_input.originalInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchGetItemInputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.BatchGetItemInputTransformInput( + sdk_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchGetItemInput( + dafny_input.sdkInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchGetItemOutputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.BatchGetItemOutputTransformInput( + sdk_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchGetItemOutput( + dafny_input.sdkOutput + ), + original_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchGetItemInput( + dafny_input.originalInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ScanInputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.ScanInputTransformInput( + sdk_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ScanInput( + dafny_input.sdkInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ScanOutputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.ScanOutputTransformInput( + sdk_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ScanOutput( + dafny_input.sdkOutput + ), + original_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ScanInput( + 
dafny_input.originalInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_QueryInputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.QueryInputTransformInput( + sdk_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_QueryInput( + dafny_input.sdkInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_QueryOutputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.QueryOutputTransformInput( + sdk_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_QueryOutput( + dafny_input.sdkOutput + ), + original_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_QueryInput( + dafny_input.originalInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactWriteItemsInputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.TransactWriteItemsInputTransformInput( + sdk_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_TransactWriteItemsInput( + dafny_input.sdkInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactWriteItemsOutputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.TransactWriteItemsOutputTransformInput( + sdk_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_TransactWriteItemsOutput( + dafny_input.sdkOutput + ), + original_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_TransactWriteItemsInput( + dafny_input.originalInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_UpdateItemInputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.UpdateItemInputTransformInput( + sdk_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_UpdateItemInput( + dafny_input.sdkInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_UpdateItemOutputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.UpdateItemOutputTransformInput( + sdk_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_UpdateItemOutput( + dafny_input.sdkOutput + ), + original_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_UpdateItemInput( + dafny_input.originalInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_DeleteItemInputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.DeleteItemInputTransformInput( + sdk_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_DeleteItemInput( + 
dafny_input.sdkInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_DeleteItemOutputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.DeleteItemOutputTransformInput( + sdk_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_DeleteItemOutput( + dafny_input.sdkOutput + ), + original_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_DeleteItemInput( + dafny_input.originalInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactGetItemsInputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.TransactGetItemsInputTransformInput( + sdk_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_TransactGetItemsInput( + dafny_input.sdkInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactGetItemsOutputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.TransactGetItemsOutputTransformInput( + sdk_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_TransactGetItemsOutput( + dafny_input.sdkOutput + ), + original_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_TransactGetItemsInput( + dafny_input.originalInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteStatementInputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.ExecuteStatementInputTransformInput( + sdk_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ExecuteStatementInput( + dafny_input.sdkInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteStatementOutputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.ExecuteStatementOutputTransformInput( + sdk_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ExecuteStatementOutput( + dafny_input.sdkOutput + ), + original_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ExecuteStatementInput( + dafny_input.originalInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchExecuteStatementInputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.BatchExecuteStatementInputTransformInput( + sdk_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchExecuteStatementInput( + dafny_input.sdkInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchExecuteStatementOutputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.BatchExecuteStatementOutputTransformInput( + 
sdk_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchExecuteStatementOutput( + dafny_input.sdkOutput + ), + original_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchExecuteStatementInput( + dafny_input.originalInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteTransactionInputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.ExecuteTransactionInputTransformInput( + sdk_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ExecuteTransactionInput( + dafny_input.sdkInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteTransactionOutputTransformInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.ExecuteTransactionOutputTransformInput( + sdk_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ExecuteTransactionOutput( + dafny_input.sdkOutput + ), + original_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ExecuteTransactionInput( + dafny_input.originalInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ResolveAttributesInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.ResolveAttributesInput( + table_name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.TableName).decode("utf-16-be"), + item={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in dafny_input.Item.items + }, + version=(dafny_input.Version.value) if (dafny_input.Version.is_Some) else None, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_PutItemInputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.PutItemInputTransformOutput( + transformed_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_PutItemInput( + dafny_input.transformedInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_PutItemOutputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.PutItemOutputTransformOutput( + transformed_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_PutItemOutput( + dafny_input.transformedOutput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_GetItemInputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.GetItemInputTransformOutput( + transformed_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_GetItemInput( + dafny_input.transformedInput + ), + ) + + +def 
aws_cryptography_dbencryptionsdk_dynamodb_transforms_GetItemOutputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.GetItemOutputTransformOutput( + transformed_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_GetItemOutput( + dafny_input.transformedOutput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchWriteItemInputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.BatchWriteItemInputTransformOutput( + transformed_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchWriteItemInput( + dafny_input.transformedInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchWriteItemOutputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.BatchWriteItemOutputTransformOutput( + transformed_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchWriteItemOutput( + dafny_input.transformedOutput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchGetItemInputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.BatchGetItemInputTransformOutput( + transformed_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchGetItemInput( + dafny_input.transformedInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchGetItemOutputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.BatchGetItemOutputTransformOutput( + transformed_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchGetItemOutput( + dafny_input.transformedOutput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ScanInputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.ScanInputTransformOutput( + transformed_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ScanInput( + dafny_input.transformedInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ScanOutputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.ScanOutputTransformOutput( + transformed_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ScanOutput( + dafny_input.transformedOutput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_QueryInputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.QueryInputTransformOutput( + transformed_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_QueryInput( + dafny_input.transformedInput + ), + ) + + +def 
aws_cryptography_dbencryptionsdk_dynamodb_transforms_QueryOutputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.QueryOutputTransformOutput( + transformed_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_QueryOutput( + dafny_input.transformedOutput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactWriteItemsInputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.TransactWriteItemsInputTransformOutput( + transformed_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_TransactWriteItemsInput( + dafny_input.transformedInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactWriteItemsOutputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.TransactWriteItemsOutputTransformOutput( + transformed_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_TransactWriteItemsOutput( + dafny_input.transformedOutput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_UpdateItemInputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.UpdateItemInputTransformOutput( + transformed_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_UpdateItemInput( + dafny_input.transformedInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_UpdateItemOutputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.UpdateItemOutputTransformOutput( + transformed_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_UpdateItemOutput( + dafny_input.transformedOutput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_DeleteItemInputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.DeleteItemInputTransformOutput( + transformed_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_DeleteItemInput( + dafny_input.transformedInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_DeleteItemOutputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.DeleteItemOutputTransformOutput( + transformed_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_DeleteItemOutput( + dafny_input.transformedOutput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactGetItemsInputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.TransactGetItemsInputTransformOutput( + transformed_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_TransactGetItemsInput( + 
dafny_input.transformedInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactGetItemsOutputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.TransactGetItemsOutputTransformOutput( + transformed_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_TransactGetItemsOutput( + dafny_input.transformedOutput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteStatementInputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.ExecuteStatementInputTransformOutput( + transformed_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ExecuteStatementInput( + dafny_input.transformedInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteStatementOutputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.ExecuteStatementOutputTransformOutput( + transformed_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ExecuteStatementOutput( + dafny_input.transformedOutput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchExecuteStatementInputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.BatchExecuteStatementInputTransformOutput( + transformed_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchExecuteStatementInput( + dafny_input.transformedInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchExecuteStatementOutputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.BatchExecuteStatementOutputTransformOutput( + transformed_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_BatchExecuteStatementOutput( + dafny_input.transformedOutput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteTransactionInputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.ExecuteTransactionInputTransformOutput( + transformed_input=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ExecuteTransactionInput( + dafny_input.transformedInput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteTransactionOutputTransformOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.ExecuteTransactionOutputTransformOutput( + transformed_output=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.dafny_to_aws_sdk.com_amazonaws_dynamodb_ExecuteTransactionOutput( + dafny_input.transformedOutput + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ResolveAttributesOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models.ResolveAttributesOutput( + 
virtual_fields={ + b"".join(ord(c).to_bytes(2, "big") for c in key) + .decode("utf-16-be"): b"".join(ord(c).to_bytes(2, "big") for c in value) + .decode("utf-16-be") + for (key, value) in dafny_input.VirtualFields.items + }, + compound_beacons={ + b"".join(ord(c).to_bytes(2, "big") for c in key) + .decode("utf-16-be"): b"".join(ord(c).to_bytes(2, "big") for c in value) + .decode("utf-16-be") + for (key, value) in dafny_input.CompoundBeacons.items + }, + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_AwsCryptographicMaterialProvidersReference( + dafny_input, +): + from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.client import ( + AwsCryptographicMaterialProviders, + ) + + return AwsCryptographicMaterialProviders(config=None, dafny_client=dafny_input) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_DynamoDbEncryptionReference( + dafny_input, +): + from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.client import ( + DynamoDbEncryption, + ) + + return DynamoDbEncryption(config=None, dafny_client=dafny_input) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_DynamoDbItemEncryptorReference( + dafny_input, +): + from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.client import ( + DynamoDbItemEncryptor, + ) + + return DynamoDbItemEncryptor(config=None, dafny_client=dafny_input) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_StructuredEncryptionReference( + dafny_input, +): + from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.client import ( + StructuredEncryption, + ) + + return StructuredEncryption(config=None, dafny_client=dafny_input) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/deserialize.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/deserialize.py new file mode 100644 index 000000000..59bc7ba5d --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/deserialize.py @@ -0,0 +1,350 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
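+#
+# Each _deserialize_* handler below follows the same pattern: it receives the
+# Dafny Result produced by the corresponding transform operation, routes
+# failures through _deserialize_error, and otherwise converts the success
+# value back into the Smithy-modeled Python type via dafny_to_smithy.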
+ +import _dafny +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes import ( + BatchExecuteStatementInputTransformOutput_BatchExecuteStatementInputTransformOutput as DafnyBatchExecuteStatementInputTransformOutput, + BatchExecuteStatementOutputTransformOutput_BatchExecuteStatementOutputTransformOutput as DafnyBatchExecuteStatementOutputTransformOutput, + BatchGetItemInputTransformOutput_BatchGetItemInputTransformOutput as DafnyBatchGetItemInputTransformOutput, + BatchGetItemOutputTransformOutput_BatchGetItemOutputTransformOutput as DafnyBatchGetItemOutputTransformOutput, + BatchWriteItemInputTransformOutput_BatchWriteItemInputTransformOutput as DafnyBatchWriteItemInputTransformOutput, + BatchWriteItemOutputTransformOutput_BatchWriteItemOutputTransformOutput as DafnyBatchWriteItemOutputTransformOutput, + DeleteItemInputTransformOutput_DeleteItemInputTransformOutput as DafnyDeleteItemInputTransformOutput, + DeleteItemOutputTransformOutput_DeleteItemOutputTransformOutput as DafnyDeleteItemOutputTransformOutput, + Error, + Error_DynamoDbEncryptionTransformsException, + ExecuteStatementInputTransformOutput_ExecuteStatementInputTransformOutput as DafnyExecuteStatementInputTransformOutput, + ExecuteStatementOutputTransformOutput_ExecuteStatementOutputTransformOutput as DafnyExecuteStatementOutputTransformOutput, + ExecuteTransactionInputTransformOutput_ExecuteTransactionInputTransformOutput as DafnyExecuteTransactionInputTransformOutput, + ExecuteTransactionOutputTransformOutput_ExecuteTransactionOutputTransformOutput as DafnyExecuteTransactionOutputTransformOutput, + GetItemInputTransformOutput_GetItemInputTransformOutput as DafnyGetItemInputTransformOutput, + GetItemOutputTransformOutput_GetItemOutputTransformOutput as DafnyGetItemOutputTransformOutput, + PutItemInputTransformOutput_PutItemInputTransformOutput as DafnyPutItemInputTransformOutput, + PutItemOutputTransformOutput_PutItemOutputTransformOutput as DafnyPutItemOutputTransformOutput, + QueryInputTransformOutput_QueryInputTransformOutput as DafnyQueryInputTransformOutput, + QueryOutputTransformOutput_QueryOutputTransformOutput as DafnyQueryOutputTransformOutput, + ResolveAttributesOutput_ResolveAttributesOutput as DafnyResolveAttributesOutput, + ScanInputTransformOutput_ScanInputTransformOutput as DafnyScanInputTransformOutput, + ScanOutputTransformOutput_ScanOutputTransformOutput as DafnyScanOutputTransformOutput, + TransactGetItemsInputTransformOutput_TransactGetItemsInputTransformOutput as DafnyTransactGetItemsInputTransformOutput, + TransactGetItemsOutputTransformOutput_TransactGetItemsOutputTransformOutput as DafnyTransactGetItemsOutputTransformOutput, + TransactWriteItemsInputTransformOutput_TransactWriteItemsInputTransformOutput as DafnyTransactWriteItemsInputTransformOutput, + TransactWriteItemsOutputTransformOutput_TransactWriteItemsOutputTransformOutput as DafnyTransactWriteItemsOutputTransformOutput, + UpdateItemInputTransformOutput_UpdateItemInputTransformOutput as DafnyUpdateItemInputTransformOutput, + UpdateItemOutputTransformOutput_UpdateItemOutputTransformOutput as DafnyUpdateItemOutputTransformOutput, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy +from typing import Any + +from .dafny_protocol import DafnyResponse +from .errors import ( + AwsCryptographicMaterialProviders, + CollectionOfErrors, + ComAmazonawsDynamodb, + DynamoDbEncryption, + 
DynamoDbEncryptionTransformsException, + DynamoDbItemEncryptor, + OpaqueError, + ServiceError, + StructuredEncryption, +) +from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.deserialize import ( + _deserialize_error as aws_cryptography_materialproviders_deserialize_error, +) +from aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.shim import ( + _sdk_error_to_dafny_error as com_amazonaws_dynamodb_sdk_error_to_dafny_error, +) + +from ..aws_cryptography_dbencryptionsdk_dynamodb.deserialize import ( + _deserialize_error as aws_cryptography_dbencryptionsdk_dynamodb_deserialize_error, +) +from ..aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.deserialize import ( + _deserialize_error as aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_deserialize_error, +) +from ..aws_cryptography_dbencryptionsdk_structuredencryption.deserialize import ( + _deserialize_error as aws_cryptography_dbencryptionsdk_structuredencryption_deserialize_error, +) +from .config import Config + + +def _deserialize_put_item_input_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_PutItemInputTransformOutput( + input.value + ) + + +def _deserialize_put_item_output_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_PutItemOutputTransformOutput( + input.value + ) + + +def _deserialize_get_item_input_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_GetItemInputTransformOutput( + input.value + ) + + +def _deserialize_get_item_output_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_GetItemOutputTransformOutput( + input.value + ) + + +def _deserialize_batch_write_item_input_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchWriteItemInputTransformOutput( + input.value + ) + + +def _deserialize_batch_write_item_output_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchWriteItemOutputTransformOutput( + input.value + ) + + +def _deserialize_batch_get_item_input_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return 
aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchGetItemInputTransformOutput( + input.value + ) + + +def _deserialize_batch_get_item_output_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchGetItemOutputTransformOutput( + input.value + ) + + +def _deserialize_scan_input_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_ScanInputTransformOutput( + input.value + ) + + +def _deserialize_scan_output_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_ScanOutputTransformOutput( + input.value + ) + + +def _deserialize_query_input_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_QueryInputTransformOutput( + input.value + ) + + +def _deserialize_query_output_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_QueryOutputTransformOutput( + input.value + ) + + +def _deserialize_transact_write_items_input_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactWriteItemsInputTransformOutput( + input.value + ) + + +def _deserialize_transact_write_items_output_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactWriteItemsOutputTransformOutput( + input.value + ) + + +def _deserialize_update_item_input_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_UpdateItemInputTransformOutput( + input.value + ) + + +def _deserialize_update_item_output_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_UpdateItemOutputTransformOutput( + 
input.value + ) + + +def _deserialize_delete_item_input_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_DeleteItemInputTransformOutput( + input.value + ) + + +def _deserialize_delete_item_output_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_DeleteItemOutputTransformOutput( + input.value + ) + + +def _deserialize_transact_get_items_input_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactGetItemsInputTransformOutput( + input.value + ) + + +def _deserialize_transact_get_items_output_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactGetItemsOutputTransformOutput( + input.value + ) + + +def _deserialize_execute_statement_input_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteStatementInputTransformOutput( + input.value + ) + + +def _deserialize_execute_statement_output_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteStatementOutputTransformOutput( + input.value + ) + + +def _deserialize_batch_execute_statement_input_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchExecuteStatementInputTransformOutput( + input.value + ) + + +def _deserialize_batch_execute_statement_output_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchExecuteStatementOutputTransformOutput( + input.value + ) + + +def _deserialize_execute_transaction_input_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteTransactionInputTransformOutput( + input.value + ) + + +def 
_deserialize_execute_transaction_output_transform(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteTransactionOutputTransformOutput( + input.value + ) + + +def _deserialize_resolve_attributes(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_transforms_ResolveAttributesOutput( + input.value + ) + + +def _deserialize_error(error: Error) -> ServiceError: + if error.is_Opaque: + return OpaqueError(obj=error.obj) + elif error.is_OpaqueWithText: + return OpaqueErrorWithText(obj=error.obj, obj_message=error.objMessage) + elif error.is_CollectionOfErrors: + return CollectionOfErrors( + message=_dafny.string_of(error.message), + list=[_deserialize_error(dafny_e) for dafny_e in error.list], + ) + elif error.is_DynamoDbEncryptionTransformsException: + return DynamoDbEncryptionTransformsException(message=_dafny.string_of(error.message)) + elif error.is_AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptor: + return DynamoDbItemEncryptor( + aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_deserialize_error( + error.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptor + ) + ) + elif error.is_AwsCryptographyDbEncryptionSdkStructuredEncryption: + return StructuredEncryption( + aws_cryptography_dbencryptionsdk_structuredencryption_deserialize_error( + error.AwsCryptographyDbEncryptionSdkStructuredEncryption + ) + ) + elif error.is_AwsCryptographyDbEncryptionSdkDynamoDb: + return DynamoDbEncryption( + aws_cryptography_dbencryptionsdk_dynamodb_deserialize_error(error.AwsCryptographyDbEncryptionSdkDynamoDb) + ) + elif error.is_AwsCryptographyMaterialProviders: + return AwsCryptographicMaterialProviders( + aws_cryptography_materialproviders_deserialize_error(error.AwsCryptographyMaterialProviders) + ) + elif error.is_ComAmazonawsDynamodb: + return ComAmazonawsDynamodb(message=_dafny.string_of(error.ComAmazonawsDynamodb.message)) + else: + return OpaqueError(obj=error) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/errors.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/errors.py new file mode 100644 index 000000000..bfb2b7726 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/errors.py @@ -0,0 +1,337 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
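+#
+# This module defines the native error hierarchy for the transforms service:
+# ServiceError/ApiError form the base, DynamoDbEncryptionTransformsException is
+# the modeled service exception, and the remaining wrapper classes carry errors
+# raised by dependency services (DynamoDB, the item encryptor, structured
+# encryption, and the material providers library). _smithy_error_to_dafny_error
+# maps these native errors back onto the Dafny Error datatype.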
+ +import _dafny +from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.errors import ( + _smithy_error_to_dafny_error as aws_cryptography_materialproviders_smithy_error_to_dafny_error, +) +from aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.shim import ( + _sdk_error_to_dafny_error as com_amazonaws_dynamodb_sdk_error_to_dafny_error, +) +import aws_dbesdk_dynamodb.internaldafny.generated +import aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.errors import ( + _smithy_error_to_dafny_error as aws_cryptography_dbencryptionsdk_dynamodb_smithy_error_to_dafny_error, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.errors import ( + _smithy_error_to_dafny_error as aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_smithy_error_to_dafny_error, +) +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.errors +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.errors import ( + _smithy_error_to_dafny_error as aws_cryptography_dbencryptionsdk_structuredencryption_smithy_error_to_dafny_error, +) +from typing import Any, Dict, Generic, List, Literal, TypeVar + + +class ServiceError(Exception): + """Base error for all errors in the service.""" + + pass + + +T = TypeVar("T") + + +class ApiError(ServiceError, Generic[T]): + """Base error for all api errors in the service.""" + + code: T + + def __init__(self, message: str): + super().__init__(message) + self.message = message + + +class UnknownApiError(ApiError[Literal["Unknown"]]): + """Error representing any unknown api errors.""" + + code: Literal["Unknown"] = "Unknown" + + +class DynamoDbEncryptionTransformsException(ApiError[Literal["DynamoDbEncryptionTransformsException"]]): + code: Literal["DynamoDbEncryptionTransformsException"] = "DynamoDbEncryptionTransformsException" + message: str + + def __init__( + self, + *, + message: str, + ): + super().__init__(message) + + def as_dict(self) -> Dict[str, Any]: + """Converts the DynamoDbEncryptionTransformsException to a + dictionary.""" + return { + "message": self.message, + "code": self.code, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DynamoDbEncryptionTransformsException": + """Creates a DynamoDbEncryptionTransformsException from a + dictionary.""" + kwargs: Dict[str, Any] = { + "message": d["message"], + } + + return DynamoDbEncryptionTransformsException(**kwargs) + + def __repr__(self) -> str: + result = "DynamoDbEncryptionTransformsException(" + if self.message is not None: + result += f"message={repr(self.message)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DynamoDbEncryptionTransformsException): + return False + attributes: list[str] = [ + "message", + "message", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class DynamoDbEncryptionTransformsException(ApiError[Literal["DynamoDbEncryptionTransformsException"]]): + code: Literal["DynamoDbEncryptionTransformsException"] = "DynamoDbEncryptionTransformsException" + message: str + + +class ComAmazonawsDynamodb(ApiError[Literal["ComAmazonawsDynamodb"]]): + ComAmazonawsDynamodb: Any + + +class DynamoDbEncryption(ApiError[Literal["DynamoDbEncryption"]]): + DynamoDbEncryption: Any + + +class 
DynamoDbItemEncryptor(ApiError[Literal["DynamoDbItemEncryptor"]]): + DynamoDbItemEncryptor: Any + + +class StructuredEncryption(ApiError[Literal["StructuredEncryption"]]): + StructuredEncryption: Any + + +class AwsCryptographicMaterialProviders(ApiError[Literal["AwsCryptographicMaterialProviders"]]): + AwsCryptographicMaterialProviders: Any + + +class CollectionOfErrors(ApiError[Literal["CollectionOfErrors"]]): + code: Literal["CollectionOfErrors"] = "CollectionOfErrors" + message: str + list: List[ServiceError] + + def __init__(self, *, message: str, list): + super().__init__(message) + self.list = list + + def as_dict(self) -> Dict[str, Any]: + """Converts the CollectionOfErrors to a dictionary. + + The dictionary uses the modeled shape names rather than the + parameter names as keys to be mostly compatible with boto3. + """ + return { + "message": self.message, + "code": self.code, + "list": self.list, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "CollectionOfErrors": + """Creates a CollectionOfErrors from a dictionary. + + The dictionary is expected to use the modeled shape names rather + than the parameter names as keys to be mostly compatible with + boto3. + """ + kwargs: Dict[str, Any] = {"message": d["message"], "list": d["list"]} + + return CollectionOfErrors(**kwargs) + + def __repr__(self) -> str: + result = "CollectionOfErrors(" + result += f"message={self.message}," + if self.message is not None: + result += f"message={repr(self.message)}" + result += f"list={self.list}" + result += ")" + return result + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, CollectionOfErrors): + return False + if not (self.list == other.list): + return False + attributes: list[str] = ["message", "message"] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class OpaqueError(ApiError[Literal["OpaqueError"]]): + code: Literal["OpaqueError"] = "OpaqueError" + obj: Any # As an OpaqueError, type of obj is unknown + + def __init__(self, *, obj): + super().__init__("") + self.obj = obj + + def as_dict(self) -> Dict[str, Any]: + """Converts the OpaqueError to a dictionary. + + The dictionary uses the modeled shape names rather than the + parameter names as keys to be mostly compatible with boto3. + """ + return { + "message": self.message, + "code": self.code, + "obj": self.obj, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "OpaqueError": + """Creates a OpaqueError from a dictionary. + + The dictionary is expected to use the modeled shape names rather + than the parameter names as keys to be mostly compatible with + boto3. 
+ """ + kwargs: Dict[str, Any] = {"message": d["message"], "obj": d["obj"]} + + return OpaqueError(**kwargs) + + def __repr__(self) -> str: + result = "OpaqueError(" + result += f"message={self.message}," + if self.message is not None: + result += f"message={repr(self.message)}" + result += f"obj={self.obj}" + result += ")" + return result + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, OpaqueError): + return False + if not (self.obj == other.obj): + return False + attributes: list[str] = ["message", "message"] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class OpaqueWithTextError(ApiError[Literal["OpaqueWithTextError"]]): + code: Literal["OpaqueWithTextError"] = "OpaqueWithTextError" + obj: Any # As an OpaqueWithTextError, type of obj is unknown + obj_message: str # obj_message is a message representing the details of obj + + def __init__(self, *, obj, obj_message): + super().__init__("") + self.obj = obj + self.obj_message = obj_message + + def as_dict(self) -> Dict[str, Any]: + """Converts the OpaqueWithTextError to a dictionary. + + The dictionary uses the modeled shape names rather than the + parameter names as keys to be mostly compatible with boto3. + """ + return { + "message": self.message, + "code": self.code, + "obj": self.obj, + "obj_message": self.obj_message, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "OpaqueWithTextError": + """Creates a OpaqueWithTextError from a dictionary. + + The dictionary is expected to use the modeled shape names rather + than the parameter names as keys to be mostly compatible with + boto3. + """ + kwargs: Dict[str, Any] = { + "message": d["message"], + "obj": d["obj"], + "obj_message": d["obj_message"], + } + + return OpaqueWithTextError(**kwargs) + + def __repr__(self) -> str: + result = "OpaqueWithTextError(" + result += f"message={self.message}," + if self.message is not None: + result += f"message={repr(self.message)}" + result += f"obj={self.obj}" + result += f"obj_message={self.obj_message}" + result += ")" + return result + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, OpaqueWithTextError): + return False + if not (self.obj == other.obj): + return False + attributes: list[str] = ["message", "message"] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +def _smithy_error_to_dafny_error(e: ServiceError): + """Converts the provided native Smithy-modeled error into the corresponding + Dafny error.""" + if isinstance( + e, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.errors.DynamoDbEncryptionTransformsException, + ): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes.Error_DynamoDbEncryptionTransformsException( + message=_dafny.Seq(e.message) + ) + + if isinstance(e, ComAmazonawsDynamodb): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes.Error_ComAmazonawsDynamodb( + com_amazonaws_dynamodb_sdk_error_to_dafny_error(e.message) + ) + + if isinstance(e, DynamoDbEncryption): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes.Error_AwsCryptographyDbEncryptionSdkDynamoDb( + aws_cryptography_dbencryptionsdk_dynamodb_smithy_error_to_dafny_error(e.message) + ) + + if isinstance(e, DynamoDbItemEncryptor): + return 
aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes.Error_AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptor( + aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_smithy_error_to_dafny_error(e.message) + ) + + if isinstance(e, StructuredEncryption): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes.Error_AwsCryptographyDbEncryptionSdkStructuredEncryption( + aws_cryptography_dbencryptionsdk_structuredencryption_smithy_error_to_dafny_error(e.message) + ) + + if isinstance(e, AwsCryptographicMaterialProviders): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes.Error_AwsCryptographyMaterialProviders( + aws_cryptography_materialproviders_smithy_error_to_dafny_error(e.message) + ) + + if isinstance(e, CollectionOfErrors): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes.Error_CollectionOfErrors( + message=_dafny.Seq(e.message), + list=_dafny.Seq(_smithy_error_to_dafny_error(native_err) for native_err in e.list), + ) + + if isinstance(e, OpaqueError): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes.Error_Opaque( + obj=e.obj + ) + + if isinstance(e, OpaqueWithTextError): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes.Error_OpaqueWithText( + obj=e.obj, objMessage=e.obj_message + ) + + else: + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes.Error_Opaque( + obj=e + ) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/models.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/models.py new file mode 100644 index 000000000..3e7f1b86b --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/models.py @@ -0,0 +1,2565 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +from typing import Any, Dict + + +class ResolveAttributesOutput: + virtual_fields: dict[str, str] + compound_beacons: dict[str, str] + + def __init__( + self, + *, + virtual_fields: dict[str, str], + compound_beacons: dict[str, str], + ): + """ + :param virtual_fields: Full plaintext of all calculable virtual fields. + :param compound_beacons: Full plaintext of all calculable compound beacons. 
+ """ + self.virtual_fields = virtual_fields + self.compound_beacons = compound_beacons + + def as_dict(self) -> Dict[str, Any]: + """Converts the ResolveAttributesOutput to a dictionary.""" + return { + "virtual_fields": self.virtual_fields, + "compound_beacons": self.compound_beacons, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ResolveAttributesOutput": + """Creates a ResolveAttributesOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "virtual_fields": d["virtual_fields"], + "compound_beacons": d["compound_beacons"], + } + + return ResolveAttributesOutput(**kwargs) + + def __repr__(self) -> str: + result = "ResolveAttributesOutput(" + if self.virtual_fields is not None: + result += f"virtual_fields={repr(self.virtual_fields)}, " + + if self.compound_beacons is not None: + result += f"compound_beacons={repr(self.compound_beacons)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ResolveAttributesOutput): + return False + attributes: list[str] = [ + "virtual_fields", + "compound_beacons", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ResolveAttributesInput: + table_name: str + item: "dict[str, dict[str, Any]]" + version: int + + def __init__( + self, + *, + table_name: str, + item: "dict[str, dict[str, Any]]", + version: int = 0, + ): + """ + :param table_name: Use the config for this Table. + :param item: The Item to be examined. + :param version: The beacon version to use. Defaults to 'writeVersion'. + """ + if (table_name is not None) and (len(table_name) < 3): + raise ValueError("The size of table_name must be greater than or equal to 3") + + if (table_name is not None) and (len(table_name) > 255): + raise ValueError("The size of table_name must be less than or equal to 255") + + self.table_name = table_name + self.item = item + if (version is not None) and (version < 1): + raise ValueError("version must be greater than or equal to 1") + + self.version = version + + def as_dict(self) -> Dict[str, Any]: + """Converts the ResolveAttributesInput to a dictionary.""" + d: Dict[str, Any] = { + "table_name": self.table_name, + "item": self.item, + } + + if self.version is not None: + d["version"] = self.version + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ResolveAttributesInput": + """Creates a ResolveAttributesInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "table_name": d["table_name"], + "item": d["item"], + } + + if "version" in d: + kwargs["version"] = d["version"] + + return ResolveAttributesInput(**kwargs) + + def __repr__(self) -> str: + result = "ResolveAttributesInput(" + if self.table_name is not None: + result += f"table_name={repr(self.table_name)}, " + + if self.item is not None: + result += f"item={repr(self.item)}, " + + if self.version is not None: + result += f"version={repr(self.version)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ResolveAttributesInput): + return False + attributes: list[str] = [ + "table_name", + "item", + "version", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ExecuteStatementInputTransformInput: + sdk_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_input: "dict[str, Any]", + ): + self.sdk_input = sdk_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the ExecuteStatementInputTransformInput to a dictionary.""" + return { + "sdk_input": self.sdk_input.as_dict(), + } + + @staticmethod + def 
from_dict(d: Dict[str, Any]) -> "ExecuteStatementInputTransformInput": + """Creates a ExecuteStatementInputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_input": d["sdk_input"], + } + + return ExecuteStatementInputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "ExecuteStatementInputTransformInput(" + if self.sdk_input is not None: + result += f"sdk_input={repr(self.sdk_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ExecuteStatementInputTransformInput): + return False + attributes: list[str] = [ + "sdk_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ExecuteStatementInputTransformOutput: + transformed_input: "dict[str, Any]" + + def __init__( + self, + *, + transformed_input: "dict[str, Any]", + ): + self.transformed_input = transformed_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the ExecuteStatementInputTransformOutput to a + dictionary.""" + return { + "transformed_input": self.transformed_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ExecuteStatementInputTransformOutput": + """Creates a ExecuteStatementInputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_input": d["transformed_input"], + } + + return ExecuteStatementInputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "ExecuteStatementInputTransformOutput(" + if self.transformed_input is not None: + result += f"transformed_input={repr(self.transformed_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ExecuteStatementInputTransformOutput): + return False + attributes: list[str] = [ + "transformed_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class GetItemInputTransformInput: + sdk_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_input: "dict[str, Any]", + ): + """ + :param sdk_input:

Represents the input of a GetItem + operation.

+ """ + self.sdk_input = sdk_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the GetItemInputTransformInput to a dictionary.""" + return { + "sdk_input": self.sdk_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "GetItemInputTransformInput": + """Creates a GetItemInputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_input": d["sdk_input"], + } + + return GetItemInputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "GetItemInputTransformInput(" + if self.sdk_input is not None: + result += f"sdk_input={repr(self.sdk_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, GetItemInputTransformInput): + return False + attributes: list[str] = [ + "sdk_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class GetItemInputTransformOutput: + transformed_input: "dict[str, Any]" + + def __init__( + self, + *, + transformed_input: "dict[str, Any]", + ): + """ + :param transformed_input:

Represents the input of a GetItem + operation.

+ """ + self.transformed_input = transformed_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the GetItemInputTransformOutput to a dictionary.""" + return { + "transformed_input": self.transformed_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "GetItemInputTransformOutput": + """Creates a GetItemInputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_input": d["transformed_input"], + } + + return GetItemInputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "GetItemInputTransformOutput(" + if self.transformed_input is not None: + result += f"transformed_input={repr(self.transformed_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, GetItemInputTransformOutput): + return False + attributes: list[str] = [ + "transformed_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class GetItemOutputTransformOutput: + transformed_output: "dict[str, Any]" + + def __init__( + self, + *, + transformed_output: "dict[str, Any]", + ): + """ + :param transformed_output:

Represents the output of a GetItem + operation.

+ """ + self.transformed_output = transformed_output + + def as_dict(self) -> Dict[str, Any]: + """Converts the GetItemOutputTransformOutput to a dictionary.""" + return { + "transformed_output": self.transformed_output.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "GetItemOutputTransformOutput": + """Creates a GetItemOutputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_output": d["transformed_output"], + } + + return GetItemOutputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "GetItemOutputTransformOutput(" + if self.transformed_output is not None: + result += f"transformed_output={repr(self.transformed_output)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, GetItemOutputTransformOutput): + return False + attributes: list[str] = [ + "transformed_output", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class BatchExecuteStatementInputTransformInput: + sdk_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_input: "dict[str, Any]", + ): + self.sdk_input = sdk_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the BatchExecuteStatementInputTransformInput to a + dictionary.""" + return { + "sdk_input": self.sdk_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BatchExecuteStatementInputTransformInput": + """Creates a BatchExecuteStatementInputTransformInput from a + dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_input": d["sdk_input"], + } + + return BatchExecuteStatementInputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "BatchExecuteStatementInputTransformInput(" + if self.sdk_input is not None: + result += f"sdk_input={repr(self.sdk_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BatchExecuteStatementInputTransformInput): + return False + attributes: list[str] = [ + "sdk_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class BatchExecuteStatementInputTransformOutput: + transformed_input: "dict[str, Any]" + + def __init__( + self, + *, + transformed_input: "dict[str, Any]", + ): + self.transformed_input = transformed_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the BatchExecuteStatementInputTransformOutput to a + dictionary.""" + return { + "transformed_input": self.transformed_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BatchExecuteStatementInputTransformOutput": + """Creates a BatchExecuteStatementInputTransformOutput from a + dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_input": d["transformed_input"], + } + + return BatchExecuteStatementInputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "BatchExecuteStatementInputTransformOutput(" + if self.transformed_input is not None: + result += f"transformed_input={repr(self.transformed_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BatchExecuteStatementInputTransformOutput): + return False + attributes: list[str] = [ + "transformed_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ExecuteTransactionInputTransformInput: + sdk_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_input: "dict[str, Any]", + ): + self.sdk_input = sdk_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the ExecuteTransactionInputTransformInput to a + 
dictionary.""" + return { + "sdk_input": self.sdk_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ExecuteTransactionInputTransformInput": + """Creates a ExecuteTransactionInputTransformInput from a + dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_input": d["sdk_input"], + } + + return ExecuteTransactionInputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "ExecuteTransactionInputTransformInput(" + if self.sdk_input is not None: + result += f"sdk_input={repr(self.sdk_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ExecuteTransactionInputTransformInput): + return False + attributes: list[str] = [ + "sdk_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ExecuteTransactionInputTransformOutput: + transformed_input: "dict[str, Any]" + + def __init__( + self, + *, + transformed_input: "dict[str, Any]", + ): + self.transformed_input = transformed_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the ExecuteTransactionInputTransformOutput to a + dictionary.""" + return { + "transformed_input": self.transformed_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ExecuteTransactionInputTransformOutput": + """Creates a ExecuteTransactionInputTransformOutput from a + dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_input": d["transformed_input"], + } + + return ExecuteTransactionInputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "ExecuteTransactionInputTransformOutput(" + if self.transformed_input is not None: + result += f"transformed_input={repr(self.transformed_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ExecuteTransactionInputTransformOutput): + return False + attributes: list[str] = [ + "transformed_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ExecuteTransactionOutputTransformOutput: + transformed_output: "dict[str, Any]" + + def __init__( + self, + *, + transformed_output: "dict[str, Any]", + ): + self.transformed_output = transformed_output + + def as_dict(self) -> Dict[str, Any]: + """Converts the ExecuteTransactionOutputTransformOutput to a + dictionary.""" + return { + "transformed_output": self.transformed_output.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ExecuteTransactionOutputTransformOutput": + """Creates a ExecuteTransactionOutputTransformOutput from a + dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_output": d["transformed_output"], + } + + return ExecuteTransactionOutputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "ExecuteTransactionOutputTransformOutput(" + if self.transformed_output is not None: + result += f"transformed_output={repr(self.transformed_output)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ExecuteTransactionOutputTransformOutput): + return False + attributes: list[str] = [ + "transformed_output", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class TransactGetItemsOutputTransformOutput: + transformed_output: "dict[str, Any]" + + def __init__( + self, + *, + transformed_output: "dict[str, Any]", + ): + self.transformed_output = transformed_output + + def as_dict(self) -> Dict[str, Any]: + """Converts the TransactGetItemsOutputTransformOutput to a + dictionary.""" + return { + "transformed_output": 
self.transformed_output.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "TransactGetItemsOutputTransformOutput": + """Creates a TransactGetItemsOutputTransformOutput from a + dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_output": d["transformed_output"], + } + + return TransactGetItemsOutputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "TransactGetItemsOutputTransformOutput(" + if self.transformed_output is not None: + result += f"transformed_output={repr(self.transformed_output)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, TransactGetItemsOutputTransformOutput): + return False + attributes: list[str] = [ + "transformed_output", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class BatchGetItemInputTransformInput: + sdk_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_input: "dict[str, Any]", + ): + """ + :param sdk_input:

Represents the input of a BatchGetItem + operation.

+ """ + self.sdk_input = sdk_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the BatchGetItemInputTransformInput to a dictionary.""" + return { + "sdk_input": self.sdk_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BatchGetItemInputTransformInput": + """Creates a BatchGetItemInputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_input": d["sdk_input"], + } + + return BatchGetItemInputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "BatchGetItemInputTransformInput(" + if self.sdk_input is not None: + result += f"sdk_input={repr(self.sdk_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BatchGetItemInputTransformInput): + return False + attributes: list[str] = [ + "sdk_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class BatchGetItemInputTransformOutput: + transformed_input: "dict[str, Any]" + + def __init__( + self, + *, + transformed_input: "dict[str, Any]", + ): + """ + :param transformed_input:

Represents the input of a BatchGetItem + operation.

+ """ + self.transformed_input = transformed_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the BatchGetItemInputTransformOutput to a dictionary.""" + return { + "transformed_input": self.transformed_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BatchGetItemInputTransformOutput": + """Creates a BatchGetItemInputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_input": d["transformed_input"], + } + + return BatchGetItemInputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "BatchGetItemInputTransformOutput(" + if self.transformed_input is not None: + result += f"transformed_input={repr(self.transformed_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BatchGetItemInputTransformOutput): + return False + attributes: list[str] = [ + "transformed_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class TransactGetItemsInputTransformInput: + sdk_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_input: "dict[str, Any]", + ): + self.sdk_input = sdk_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the TransactGetItemsInputTransformInput to a dictionary.""" + return { + "sdk_input": self.sdk_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "TransactGetItemsInputTransformInput": + """Creates a TransactGetItemsInputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_input": d["sdk_input"], + } + + return TransactGetItemsInputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "TransactGetItemsInputTransformInput(" + if self.sdk_input is not None: + result += f"sdk_input={repr(self.sdk_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, TransactGetItemsInputTransformInput): + return False + attributes: list[str] = [ + "sdk_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class TransactGetItemsInputTransformOutput: + transformed_input: "dict[str, Any]" + + def __init__( + self, + *, + transformed_input: "dict[str, Any]", + ): + self.transformed_input = transformed_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the TransactGetItemsInputTransformOutput to a + dictionary.""" + return { + "transformed_input": self.transformed_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "TransactGetItemsInputTransformOutput": + """Creates a TransactGetItemsInputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_input": d["transformed_input"], + } + + return TransactGetItemsInputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "TransactGetItemsInputTransformOutput(" + if self.transformed_input is not None: + result += f"transformed_input={repr(self.transformed_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, TransactGetItemsInputTransformOutput): + return False + attributes: list[str] = [ + "transformed_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class TransactWriteItemsOutputTransformOutput: + transformed_output: "dict[str, Any]" + + def __init__( + self, + *, + transformed_output: "dict[str, Any]", + ): + self.transformed_output = transformed_output + + def as_dict(self) -> Dict[str, Any]: + """Converts the TransactWriteItemsOutputTransformOutput to a + dictionary.""" + return 
{ + "transformed_output": self.transformed_output.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "TransactWriteItemsOutputTransformOutput": + """Creates a TransactWriteItemsOutputTransformOutput from a + dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_output": d["transformed_output"], + } + + return TransactWriteItemsOutputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "TransactWriteItemsOutputTransformOutput(" + if self.transformed_output is not None: + result += f"transformed_output={repr(self.transformed_output)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, TransactWriteItemsOutputTransformOutput): + return False + attributes: list[str] = [ + "transformed_output", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class GetItemOutputTransformInput: + sdk_output: "dict[str, Any]" + original_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_output: "dict[str, Any]", + original_input: "dict[str, Any]", + ): + """ + :param sdk_output:

Represents the output of a GetItem + operation.

+ :param original_input:

Represents the input of a GetItem + operation.

+ """ + self.sdk_output = sdk_output + self.original_input = original_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the GetItemOutputTransformInput to a dictionary.""" + return { + "sdk_output": self.sdk_output.as_dict(), + "original_input": self.original_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "GetItemOutputTransformInput": + """Creates a GetItemOutputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_output": d["sdk_output"], + "original_input": d["original_input"], + } + + return GetItemOutputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "GetItemOutputTransformInput(" + if self.sdk_output is not None: + result += f"sdk_output={repr(self.sdk_output)}, " + + if self.original_input is not None: + result += f"original_input={repr(self.original_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, GetItemOutputTransformInput): + return False + attributes: list[str] = [ + "sdk_output", + "original_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class DeleteItemOutputTransformOutput: + transformed_output: "dict[str, Any]" + + def __init__( + self, + *, + transformed_output: "dict[str, Any]", + ): + """ + :param transformed_output:

Represents the output of a DeleteItem + operation.

+ """ + self.transformed_output = transformed_output + + def as_dict(self) -> Dict[str, Any]: + """Converts the DeleteItemOutputTransformOutput to a dictionary.""" + return { + "transformed_output": self.transformed_output.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DeleteItemOutputTransformOutput": + """Creates a DeleteItemOutputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_output": d["transformed_output"], + } + + return DeleteItemOutputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "DeleteItemOutputTransformOutput(" + if self.transformed_output is not None: + result += f"transformed_output={repr(self.transformed_output)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DeleteItemOutputTransformOutput): + return False + attributes: list[str] = [ + "transformed_output", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ExecuteStatementOutputTransformOutput: + transformed_output: "dict[str, Any]" + + def __init__( + self, + *, + transformed_output: "dict[str, Any]", + ): + self.transformed_output = transformed_output + + def as_dict(self) -> Dict[str, Any]: + """Converts the ExecuteStatementOutputTransformOutput to a + dictionary.""" + return { + "transformed_output": self.transformed_output.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ExecuteStatementOutputTransformOutput": + """Creates a ExecuteStatementOutputTransformOutput from a + dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_output": d["transformed_output"], + } + + return ExecuteStatementOutputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "ExecuteStatementOutputTransformOutput(" + if self.transformed_output is not None: + result += f"transformed_output={repr(self.transformed_output)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ExecuteStatementOutputTransformOutput): + return False + attributes: list[str] = [ + "transformed_output", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class PutItemOutputTransformOutput: + transformed_output: "dict[str, Any]" + + def __init__( + self, + *, + transformed_output: "dict[str, Any]", + ): + """ + :param transformed_output:

Represents the output of a PutItem + operation.

+ """ + self.transformed_output = transformed_output + + def as_dict(self) -> Dict[str, Any]: + """Converts the PutItemOutputTransformOutput to a dictionary.""" + return { + "transformed_output": self.transformed_output.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "PutItemOutputTransformOutput": + """Creates a PutItemOutputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_output": d["transformed_output"], + } + + return PutItemOutputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "PutItemOutputTransformOutput(" + if self.transformed_output is not None: + result += f"transformed_output={repr(self.transformed_output)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, PutItemOutputTransformOutput): + return False + attributes: list[str] = [ + "transformed_output", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class QueryOutputTransformOutput: + transformed_output: "dict[str, Any]" + + def __init__( + self, + *, + transformed_output: "dict[str, Any]", + ): + """ + :param transformed_output:

Represents the output of a Query + operation.

+ """ + self.transformed_output = transformed_output + + def as_dict(self) -> Dict[str, Any]: + """Converts the QueryOutputTransformOutput to a dictionary.""" + return { + "transformed_output": self.transformed_output.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "QueryOutputTransformOutput": + """Creates a QueryOutputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_output": d["transformed_output"], + } + + return QueryOutputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "QueryOutputTransformOutput(" + if self.transformed_output is not None: + result += f"transformed_output={repr(self.transformed_output)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, QueryOutputTransformOutput): + return False + attributes: list[str] = [ + "transformed_output", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ScanOutputTransformOutput: + transformed_output: "dict[str, Any]" + + def __init__( + self, + *, + transformed_output: "dict[str, Any]", + ): + """ + :param transformed_output:

Represents the output of a Scan + operation.

+ """ + self.transformed_output = transformed_output + + def as_dict(self) -> Dict[str, Any]: + """Converts the ScanOutputTransformOutput to a dictionary.""" + return { + "transformed_output": self.transformed_output.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ScanOutputTransformOutput": + """Creates a ScanOutputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_output": d["transformed_output"], + } + + return ScanOutputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "ScanOutputTransformOutput(" + if self.transformed_output is not None: + result += f"transformed_output={repr(self.transformed_output)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ScanOutputTransformOutput): + return False + attributes: list[str] = [ + "transformed_output", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class UpdateItemOutputTransformOutput: + transformed_output: "dict[str, Any]" + + def __init__( + self, + *, + transformed_output: "dict[str, Any]", + ): + """ + :param transformed_output:

Represents the output of an + UpdateItem operation.

+ """ + self.transformed_output = transformed_output + + def as_dict(self) -> Dict[str, Any]: + """Converts the UpdateItemOutputTransformOutput to a dictionary.""" + return { + "transformed_output": self.transformed_output.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "UpdateItemOutputTransformOutput": + """Creates a UpdateItemOutputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_output": d["transformed_output"], + } + + return UpdateItemOutputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "UpdateItemOutputTransformOutput(" + if self.transformed_output is not None: + result += f"transformed_output={repr(self.transformed_output)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, UpdateItemOutputTransformOutput): + return False + attributes: list[str] = [ + "transformed_output", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ExecuteTransactionOutputTransformInput: + sdk_output: "dict[str, Any]" + original_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_output: "dict[str, Any]", + original_input: "dict[str, Any]", + ): + self.sdk_output = sdk_output + self.original_input = original_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the ExecuteTransactionOutputTransformInput to a + dictionary.""" + return { + "sdk_output": self.sdk_output.as_dict(), + "original_input": self.original_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ExecuteTransactionOutputTransformInput": + """Creates a ExecuteTransactionOutputTransformInput from a + dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_output": d["sdk_output"], + "original_input": d["original_input"], + } + + return ExecuteTransactionOutputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "ExecuteTransactionOutputTransformInput(" + if self.sdk_output is not None: + result += f"sdk_output={repr(self.sdk_output)}, " + + if self.original_input is not None: + result += f"original_input={repr(self.original_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ExecuteTransactionOutputTransformInput): + return False + attributes: list[str] = [ + "sdk_output", + "original_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class BatchExecuteStatementOutputTransformOutput: + transformed_output: "dict[str, Any]" + + def __init__( + self, + *, + transformed_output: "dict[str, Any]", + ): + self.transformed_output = transformed_output + + def as_dict(self) -> Dict[str, Any]: + """Converts the BatchExecuteStatementOutputTransformOutput to a + dictionary.""" + return { + "transformed_output": self.transformed_output.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BatchExecuteStatementOutputTransformOutput": + """Creates a BatchExecuteStatementOutputTransformOutput from a + dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_output": d["transformed_output"], + } + + return BatchExecuteStatementOutputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "BatchExecuteStatementOutputTransformOutput(" + if self.transformed_output is not None: + result += f"transformed_output={repr(self.transformed_output)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BatchExecuteStatementOutputTransformOutput): + return False + attributes: list[str] = [ + "transformed_output", 
+ ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class BatchGetItemOutputTransformOutput: + transformed_output: "dict[str, Any]" + + def __init__( + self, + *, + transformed_output: "dict[str, Any]", + ): + """ + :param transformed_output:

Represents the output of a + BatchGetItem operation.

+ """ + self.transformed_output = transformed_output + + def as_dict(self) -> Dict[str, Any]: + """Converts the BatchGetItemOutputTransformOutput to a dictionary.""" + return { + "transformed_output": self.transformed_output.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BatchGetItemOutputTransformOutput": + """Creates a BatchGetItemOutputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_output": d["transformed_output"], + } + + return BatchGetItemOutputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "BatchGetItemOutputTransformOutput(" + if self.transformed_output is not None: + result += f"transformed_output={repr(self.transformed_output)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BatchGetItemOutputTransformOutput): + return False + attributes: list[str] = [ + "transformed_output", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class TransactGetItemsOutputTransformInput: + sdk_output: "dict[str, Any]" + original_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_output: "dict[str, Any]", + original_input: "dict[str, Any]", + ): + self.sdk_output = sdk_output + self.original_input = original_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the TransactGetItemsOutputTransformInput to a + dictionary.""" + return { + "sdk_output": self.sdk_output.as_dict(), + "original_input": self.original_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "TransactGetItemsOutputTransformInput": + """Creates a TransactGetItemsOutputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_output": d["sdk_output"], + "original_input": d["original_input"], + } + + return TransactGetItemsOutputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "TransactGetItemsOutputTransformInput(" + if self.sdk_output is not None: + result += f"sdk_output={repr(self.sdk_output)}, " + + if self.original_input is not None: + result += f"original_input={repr(self.original_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, TransactGetItemsOutputTransformInput): + return False + attributes: list[str] = [ + "sdk_output", + "original_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ExecuteStatementOutputTransformInput: + sdk_output: "dict[str, Any]" + original_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_output: "dict[str, Any]", + original_input: "dict[str, Any]", + ): + self.sdk_output = sdk_output + self.original_input = original_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the ExecuteStatementOutputTransformInput to a + dictionary.""" + return { + "sdk_output": self.sdk_output.as_dict(), + "original_input": self.original_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ExecuteStatementOutputTransformInput": + """Creates a ExecuteStatementOutputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_output": d["sdk_output"], + "original_input": d["original_input"], + } + + return ExecuteStatementOutputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "ExecuteStatementOutputTransformInput(" + if self.sdk_output is not None: + result += f"sdk_output={repr(self.sdk_output)}, " + + if self.original_input is not None: + result += f"original_input={repr(self.original_input)}" + + return result + ")" + + def 
__eq__(self, other: Any) -> bool: + if not isinstance(other, ExecuteStatementOutputTransformInput): + return False + attributes: list[str] = [ + "sdk_output", + "original_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ScanInputTransformInput: + sdk_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_input: "dict[str, Any]", + ): + """ + :param sdk_input:

Represents the input of a Scan operation.

+ """ + self.sdk_input = sdk_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the ScanInputTransformInput to a dictionary.""" + return { + "sdk_input": self.sdk_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ScanInputTransformInput": + """Creates a ScanInputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_input": d["sdk_input"], + } + + return ScanInputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "ScanInputTransformInput(" + if self.sdk_input is not None: + result += f"sdk_input={repr(self.sdk_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ScanInputTransformInput): + return False + attributes: list[str] = [ + "sdk_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ScanInputTransformOutput: + transformed_input: "dict[str, Any]" + + def __init__( + self, + *, + transformed_input: "dict[str, Any]", + ): + """ + :param transformed_input:

Represents the input of a Scan + operation.

+ """ + self.transformed_input = transformed_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the ScanInputTransformOutput to a dictionary.""" + return { + "transformed_input": self.transformed_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ScanInputTransformOutput": + """Creates a ScanInputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_input": d["transformed_input"], + } + + return ScanInputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "ScanInputTransformOutput(" + if self.transformed_input is not None: + result += f"transformed_input={repr(self.transformed_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ScanInputTransformOutput): + return False + attributes: list[str] = [ + "transformed_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class BatchWriteItemInputTransformInput: + sdk_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_input: "dict[str, Any]", + ): + """ + :param sdk_input:

Represents the input of a BatchWriteItem + operation.

+ """ + self.sdk_input = sdk_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the BatchWriteItemInputTransformInput to a dictionary.""" + return { + "sdk_input": self.sdk_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BatchWriteItemInputTransformInput": + """Creates a BatchWriteItemInputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_input": d["sdk_input"], + } + + return BatchWriteItemInputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "BatchWriteItemInputTransformInput(" + if self.sdk_input is not None: + result += f"sdk_input={repr(self.sdk_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BatchWriteItemInputTransformInput): + return False + attributes: list[str] = [ + "sdk_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class BatchWriteItemInputTransformOutput: + transformed_input: "dict[str, Any]" + + def __init__( + self, + *, + transformed_input: "dict[str, Any]", + ): + """ + :param transformed_input:

Represents the input of a + BatchWriteItem operation.

+ """ + self.transformed_input = transformed_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the BatchWriteItemInputTransformOutput to a dictionary.""" + return { + "transformed_input": self.transformed_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BatchWriteItemInputTransformOutput": + """Creates a BatchWriteItemInputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_input": d["transformed_input"], + } + + return BatchWriteItemInputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "BatchWriteItemInputTransformOutput(" + if self.transformed_input is not None: + result += f"transformed_input={repr(self.transformed_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BatchWriteItemInputTransformOutput): + return False + attributes: list[str] = [ + "transformed_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class BatchExecuteStatementOutputTransformInput: + sdk_output: "dict[str, Any]" + original_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_output: "dict[str, Any]", + original_input: "dict[str, Any]", + ): + self.sdk_output = sdk_output + self.original_input = original_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the BatchExecuteStatementOutputTransformInput to a + dictionary.""" + return { + "sdk_output": self.sdk_output.as_dict(), + "original_input": self.original_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BatchExecuteStatementOutputTransformInput": + """Creates a BatchExecuteStatementOutputTransformInput from a + dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_output": d["sdk_output"], + "original_input": d["original_input"], + } + + return BatchExecuteStatementOutputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "BatchExecuteStatementOutputTransformInput(" + if self.sdk_output is not None: + result += f"sdk_output={repr(self.sdk_output)}, " + + if self.original_input is not None: + result += f"original_input={repr(self.original_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BatchExecuteStatementOutputTransformInput): + return False + attributes: list[str] = [ + "sdk_output", + "original_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class BatchGetItemOutputTransformInput: + sdk_output: "dict[str, Any]" + original_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_output: "dict[str, Any]", + original_input: "dict[str, Any]", + ): + """ + :param sdk_output:

Represents the output of a BatchGetItem + operation.

+ :param original_input:

Represents the input of a BatchGetItem + operation.

+ """ + self.sdk_output = sdk_output + self.original_input = original_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the BatchGetItemOutputTransformInput to a dictionary.""" + return { + "sdk_output": self.sdk_output.as_dict(), + "original_input": self.original_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BatchGetItemOutputTransformInput": + """Creates a BatchGetItemOutputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_output": d["sdk_output"], + "original_input": d["original_input"], + } + + return BatchGetItemOutputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "BatchGetItemOutputTransformInput(" + if self.sdk_output is not None: + result += f"sdk_output={repr(self.sdk_output)}, " + + if self.original_input is not None: + result += f"original_input={repr(self.original_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BatchGetItemOutputTransformInput): + return False + attributes: list[str] = [ + "sdk_output", + "original_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class DeleteItemInputTransformInput: + sdk_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_input: "dict[str, Any]", + ): + """ + :param sdk_input:

Represents the input of a DeleteItem + operation.

+ """ + self.sdk_input = sdk_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the DeleteItemInputTransformInput to a dictionary.""" + return { + "sdk_input": self.sdk_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DeleteItemInputTransformInput": + """Creates a DeleteItemInputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_input": d["sdk_input"], + } + + return DeleteItemInputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "DeleteItemInputTransformInput(" + if self.sdk_input is not None: + result += f"sdk_input={repr(self.sdk_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DeleteItemInputTransformInput): + return False + attributes: list[str] = [ + "sdk_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class DeleteItemInputTransformOutput: + transformed_input: "dict[str, Any]" + + def __init__( + self, + *, + transformed_input: "dict[str, Any]", + ): + """ + :param transformed_input:

Represents the input of a DeleteItem + operation.

+ """ + self.transformed_input = transformed_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the DeleteItemInputTransformOutput to a dictionary.""" + return { + "transformed_input": self.transformed_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DeleteItemInputTransformOutput": + """Creates a DeleteItemInputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_input": d["transformed_input"], + } + + return DeleteItemInputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "DeleteItemInputTransformOutput(" + if self.transformed_input is not None: + result += f"transformed_input={repr(self.transformed_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DeleteItemInputTransformOutput): + return False + attributes: list[str] = [ + "transformed_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class PutItemInputTransformInput: + sdk_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_input: "dict[str, Any]", + ): + """ + :param sdk_input:

Represents the input of a PutItem + operation.

+ """ + self.sdk_input = sdk_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the PutItemInputTransformInput to a dictionary.""" + return { + "sdk_input": self.sdk_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "PutItemInputTransformInput": + """Creates a PutItemInputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_input": d["sdk_input"], + } + + return PutItemInputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "PutItemInputTransformInput(" + if self.sdk_input is not None: + result += f"sdk_input={repr(self.sdk_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, PutItemInputTransformInput): + return False + attributes: list[str] = [ + "sdk_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class PutItemInputTransformOutput: + transformed_input: "dict[str, Any]" + + def __init__( + self, + *, + transformed_input: "dict[str, Any]", + ): + """ + :param transformed_input:

Represents the input of a PutItem + operation.

+ """ + self.transformed_input = transformed_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the PutItemInputTransformOutput to a dictionary.""" + return { + "transformed_input": self.transformed_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "PutItemInputTransformOutput": + """Creates a PutItemInputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_input": d["transformed_input"], + } + + return PutItemInputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "PutItemInputTransformOutput(" + if self.transformed_input is not None: + result += f"transformed_input={repr(self.transformed_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, PutItemInputTransformOutput): + return False + attributes: list[str] = [ + "transformed_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class QueryInputTransformInput: + sdk_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_input: "dict[str, Any]", + ): + """ + :param sdk_input:

Represents the input of a Query operation.

+ """ + self.sdk_input = sdk_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the QueryInputTransformInput to a dictionary.""" + return { + "sdk_input": self.sdk_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "QueryInputTransformInput": + """Creates a QueryInputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_input": d["sdk_input"], + } + + return QueryInputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "QueryInputTransformInput(" + if self.sdk_input is not None: + result += f"sdk_input={repr(self.sdk_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, QueryInputTransformInput): + return False + attributes: list[str] = [ + "sdk_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class QueryInputTransformOutput: + transformed_input: "dict[str, Any]" + + def __init__( + self, + *, + transformed_input: "dict[str, Any]", + ): + """ + :param transformed_input:

Represents the input of a Query + operation.

+ """ + self.transformed_input = transformed_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the QueryInputTransformOutput to a dictionary.""" + return { + "transformed_input": self.transformed_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "QueryInputTransformOutput": + """Creates a QueryInputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_input": d["transformed_input"], + } + + return QueryInputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "QueryInputTransformOutput(" + if self.transformed_input is not None: + result += f"transformed_input={repr(self.transformed_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, QueryInputTransformOutput): + return False + attributes: list[str] = [ + "transformed_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class BatchWriteItemOutputTransformOutput: + transformed_output: "dict[str, Any]" + + def __init__( + self, + *, + transformed_output: "dict[str, Any]", + ): + """ + :param transformed_output:

Represents the output of a + BatchWriteItem operation.

+ """ + self.transformed_output = transformed_output + + def as_dict(self) -> Dict[str, Any]: + """Converts the BatchWriteItemOutputTransformOutput to a dictionary.""" + return { + "transformed_output": self.transformed_output.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BatchWriteItemOutputTransformOutput": + """Creates a BatchWriteItemOutputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_output": d["transformed_output"], + } + + return BatchWriteItemOutputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "BatchWriteItemOutputTransformOutput(" + if self.transformed_output is not None: + result += f"transformed_output={repr(self.transformed_output)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BatchWriteItemOutputTransformOutput): + return False + attributes: list[str] = [ + "transformed_output", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ScanOutputTransformInput: + sdk_output: "dict[str, Any]" + original_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_output: "dict[str, Any]", + original_input: "dict[str, Any]", + ): + """ + :param sdk_output:

Represents the output of a Scan + operation.

+ :param original_input:

Represents the input of a Scan + operation.

+ """ + self.sdk_output = sdk_output + self.original_input = original_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the ScanOutputTransformInput to a dictionary.""" + return { + "sdk_output": self.sdk_output.as_dict(), + "original_input": self.original_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ScanOutputTransformInput": + """Creates a ScanOutputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_output": d["sdk_output"], + "original_input": d["original_input"], + } + + return ScanOutputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "ScanOutputTransformInput(" + if self.sdk_output is not None: + result += f"sdk_output={repr(self.sdk_output)}, " + + if self.original_input is not None: + result += f"original_input={repr(self.original_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ScanOutputTransformInput): + return False + attributes: list[str] = [ + "sdk_output", + "original_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class UpdateItemInputTransformInput: + sdk_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_input: "dict[str, Any]", + ): + """ + :param sdk_input:

Represents the input of an UpdateItem + operation.

+ """ + self.sdk_input = sdk_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the UpdateItemInputTransformInput to a dictionary.""" + return { + "sdk_input": self.sdk_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "UpdateItemInputTransformInput": + """Creates a UpdateItemInputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_input": d["sdk_input"], + } + + return UpdateItemInputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "UpdateItemInputTransformInput(" + if self.sdk_input is not None: + result += f"sdk_input={repr(self.sdk_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, UpdateItemInputTransformInput): + return False + attributes: list[str] = [ + "sdk_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class UpdateItemInputTransformOutput: + transformed_input: "dict[str, Any]" + + def __init__( + self, + *, + transformed_input: "dict[str, Any]", + ): + """ + :param transformed_input:

Represents the input of an UpdateItem + operation.

+ """ + self.transformed_input = transformed_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the UpdateItemInputTransformOutput to a dictionary.""" + return { + "transformed_input": self.transformed_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "UpdateItemInputTransformOutput": + """Creates a UpdateItemInputTransformOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_input": d["transformed_input"], + } + + return UpdateItemInputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "UpdateItemInputTransformOutput(" + if self.transformed_input is not None: + result += f"transformed_input={repr(self.transformed_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, UpdateItemInputTransformOutput): + return False + attributes: list[str] = [ + "transformed_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class DeleteItemOutputTransformInput: + sdk_output: "dict[str, Any]" + original_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_output: "dict[str, Any]", + original_input: "dict[str, Any]", + ): + """ + :param sdk_output:

Represents the output of a DeleteItem + operation.

+ :param original_input:

Represents the input of a DeleteItem + operation.

+ """ + self.sdk_output = sdk_output + self.original_input = original_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the DeleteItemOutputTransformInput to a dictionary.""" + return { + "sdk_output": self.sdk_output.as_dict(), + "original_input": self.original_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DeleteItemOutputTransformInput": + """Creates a DeleteItemOutputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_output": d["sdk_output"], + "original_input": d["original_input"], + } + + return DeleteItemOutputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "DeleteItemOutputTransformInput(" + if self.sdk_output is not None: + result += f"sdk_output={repr(self.sdk_output)}, " + + if self.original_input is not None: + result += f"original_input={repr(self.original_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DeleteItemOutputTransformInput): + return False + attributes: list[str] = [ + "sdk_output", + "original_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class PutItemOutputTransformInput: + sdk_output: "dict[str, Any]" + original_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_output: "dict[str, Any]", + original_input: "dict[str, Any]", + ): + """ + :param sdk_output:

Represents the output of a PutItem + operation.

+ :param original_input:

Represents the input of a PutItem + operation.

+ """ + self.sdk_output = sdk_output + self.original_input = original_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the PutItemOutputTransformInput to a dictionary.""" + return { + "sdk_output": self.sdk_output.as_dict(), + "original_input": self.original_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "PutItemOutputTransformInput": + """Creates a PutItemOutputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_output": d["sdk_output"], + "original_input": d["original_input"], + } + + return PutItemOutputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "PutItemOutputTransformInput(" + if self.sdk_output is not None: + result += f"sdk_output={repr(self.sdk_output)}, " + + if self.original_input is not None: + result += f"original_input={repr(self.original_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, PutItemOutputTransformInput): + return False + attributes: list[str] = [ + "sdk_output", + "original_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class QueryOutputTransformInput: + sdk_output: "dict[str, Any]" + original_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_output: "dict[str, Any]", + original_input: "dict[str, Any]", + ): + """ + :param sdk_output:

Represents the output of a Query + operation.

+ :param original_input:

Represents the input of a Query + operation.

+ """ + self.sdk_output = sdk_output + self.original_input = original_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the QueryOutputTransformInput to a dictionary.""" + return { + "sdk_output": self.sdk_output.as_dict(), + "original_input": self.original_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "QueryOutputTransformInput": + """Creates a QueryOutputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_output": d["sdk_output"], + "original_input": d["original_input"], + } + + return QueryOutputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "QueryOutputTransformInput(" + if self.sdk_output is not None: + result += f"sdk_output={repr(self.sdk_output)}, " + + if self.original_input is not None: + result += f"original_input={repr(self.original_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, QueryOutputTransformInput): + return False + attributes: list[str] = [ + "sdk_output", + "original_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class UpdateItemOutputTransformInput: + sdk_output: "dict[str, Any]" + original_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_output: "dict[str, Any]", + original_input: "dict[str, Any]", + ): + """ + :param sdk_output:

Represents the output of an UpdateItem operation.

+ :param original_input:

Represents the input of an UpdateItem operation.

+ """ + self.sdk_output = sdk_output + self.original_input = original_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the UpdateItemOutputTransformInput to a dictionary.""" + return { + "sdk_output": self.sdk_output.as_dict(), + "original_input": self.original_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "UpdateItemOutputTransformInput": + """Creates a UpdateItemOutputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_output": d["sdk_output"], + "original_input": d["original_input"], + } + + return UpdateItemOutputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "UpdateItemOutputTransformInput(" + if self.sdk_output is not None: + result += f"sdk_output={repr(self.sdk_output)}, " + + if self.original_input is not None: + result += f"original_input={repr(self.original_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, UpdateItemOutputTransformInput): + return False + attributes: list[str] = [ + "sdk_output", + "original_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class BatchWriteItemOutputTransformInput: + sdk_output: "dict[str, Any]" + original_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_output: "dict[str, Any]", + original_input: "dict[str, Any]", + ): + """ + :param sdk_output:

Represents the output of a BatchWriteItem operation.

+ :param original_input:

Represents the input of a BatchWriteItem operation.

+ """ + self.sdk_output = sdk_output + self.original_input = original_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the BatchWriteItemOutputTransformInput to a dictionary.""" + return { + "sdk_output": self.sdk_output.as_dict(), + "original_input": self.original_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "BatchWriteItemOutputTransformInput": + """Creates a BatchWriteItemOutputTransformInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_output": d["sdk_output"], + "original_input": d["original_input"], + } + + return BatchWriteItemOutputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "BatchWriteItemOutputTransformInput(" + if self.sdk_output is not None: + result += f"sdk_output={repr(self.sdk_output)}, " + + if self.original_input is not None: + result += f"original_input={repr(self.original_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, BatchWriteItemOutputTransformInput): + return False + attributes: list[str] = [ + "sdk_output", + "original_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class TransactWriteItemsInputTransformInput: + sdk_input: "dict[str, Any]" + + def __init__( + self, + *, + sdk_input: "dict[str, Any]", + ): + self.sdk_input = sdk_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the TransactWriteItemsInputTransformInput to a + dictionary.""" + return { + "sdk_input": self.sdk_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "TransactWriteItemsInputTransformInput": + """Creates a TransactWriteItemsInputTransformInput from a + dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_input": d["sdk_input"], + } + + return TransactWriteItemsInputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "TransactWriteItemsInputTransformInput(" + if self.sdk_input is not None: + result += f"sdk_input={repr(self.sdk_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, TransactWriteItemsInputTransformInput): + return False + attributes: list[str] = [ + "sdk_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class TransactWriteItemsInputTransformOutput: + transformed_input: "dict[str, Any]" + + def __init__( + self, + *, + transformed_input: "dict[str, Any]", + ): + self.transformed_input = transformed_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the TransactWriteItemsInputTransformOutput to a + dictionary.""" + return { + "transformed_input": self.transformed_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "TransactWriteItemsInputTransformOutput": + """Creates a TransactWriteItemsInputTransformOutput from a + dictionary.""" + kwargs: Dict[str, Any] = { + "transformed_input": d["transformed_input"], + } + + return TransactWriteItemsInputTransformOutput(**kwargs) + + def __repr__(self) -> str: + result = "TransactWriteItemsInputTransformOutput(" + if self.transformed_input is not None: + result += f"transformed_input={repr(self.transformed_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, TransactWriteItemsInputTransformOutput): + return False + attributes: list[str] = [ + "transformed_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class TransactWriteItemsOutputTransformInput: + sdk_output: "dict[str, Any]" + original_input: "dict[str, Any]" 
+ + def __init__( + self, + *, + sdk_output: "dict[str, Any]", + original_input: "dict[str, Any]", + ): + self.sdk_output = sdk_output + self.original_input = original_input + + def as_dict(self) -> Dict[str, Any]: + """Converts the TransactWriteItemsOutputTransformInput to a + dictionary.""" + return { + "sdk_output": self.sdk_output.as_dict(), + "original_input": self.original_input.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "TransactWriteItemsOutputTransformInput": + """Creates a TransactWriteItemsOutputTransformInput from a + dictionary.""" + kwargs: Dict[str, Any] = { + "sdk_output": d["sdk_output"], + "original_input": d["original_input"], + } + + return TransactWriteItemsOutputTransformInput(**kwargs) + + def __repr__(self) -> str: + result = "TransactWriteItemsOutputTransformInput(" + if self.sdk_output is not None: + result += f"sdk_output={repr(self.sdk_output)}, " + + if self.original_input is not None: + result += f"original_input={repr(self.original_input)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, TransactWriteItemsOutputTransformInput): + return False + attributes: list[str] = [ + "sdk_output", + "original_input", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class Unit: + pass diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/plugin.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/plugin.py new file mode 100644 index 000000000..fb35aa7dd --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/plugin.py @@ -0,0 +1,51 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +from .config import ( + Config, + Plugin, + smithy_config_to_dafny_config, + DynamoDbTablesEncryptionConfig, +) +from smithy_python.interfaces.retries import RetryStrategy +from smithy_python.exceptions import SmithyRetryException +from .dafnyImplInterface import DafnyImplInterface + + +def set_config_impl(config: Config): + """Set the Dafny-compiled implementation in the Smithy-Python client Config + and load our custom NoRetriesStrategy.""" + config.dafnyImplInterface = DafnyImplInterface() + if isinstance(config, DynamoDbTablesEncryptionConfig): + from aws_dbesdk_dynamodb.internaldafny.generated.DynamoDbEncryptionTransforms import ( + default__, + ) + + config.dafnyImplInterface.impl = default__.DynamoDbEncryptionTransforms( + smithy_config_to_dafny_config(config) + ).value + config.retry_strategy = NoRetriesStrategy() + + +class ZeroRetryDelayToken: + """Placeholder class required by Smithy-Python client implementation. + + Do not wait to retry. + """ + + retry_delay = 0 + + +class NoRetriesStrategy(RetryStrategy): + """Placeholder class required by Smithy-Python client implementation. + + Do not retry calling Dafny code. 
+ """ + + def acquire_initial_retry_token(self): + return ZeroRetryDelayToken() + + def refresh_retry_token_for_retry(self, token_to_renew, error_info): + # Do not retry + raise SmithyRetryException() diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/serialize.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/serialize.py new file mode 100644 index 000000000..b9d2cd1f1 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/serialize.py @@ -0,0 +1,252 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny + +from .dafny_protocol import DafnyRequest + +from .config import Config + + +def _serialize_put_item_input_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="PutItemInputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_PutItemInputTransformInput( + input + ), + ) + + +def _serialize_put_item_output_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="PutItemOutputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_PutItemOutputTransformInput( + input + ), + ) + + +def _serialize_get_item_input_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="GetItemInputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_GetItemInputTransformInput( + input + ), + ) + + +def _serialize_get_item_output_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="GetItemOutputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_GetItemOutputTransformInput( + input + ), + ) + + +def _serialize_batch_write_item_input_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="BatchWriteItemInputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchWriteItemInputTransformInput( + input + ), + ) + + +def _serialize_batch_write_item_output_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="BatchWriteItemOutputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchWriteItemOutputTransformInput( + input + ), + ) + + +def _serialize_batch_get_item_input_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( 
+ operation_name="BatchGetItemInputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchGetItemInputTransformInput( + input + ), + ) + + +def _serialize_batch_get_item_output_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="BatchGetItemOutputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchGetItemOutputTransformInput( + input + ), + ) + + +def _serialize_scan_input_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="ScanInputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_ScanInputTransformInput( + input + ), + ) + + +def _serialize_scan_output_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="ScanOutputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_ScanOutputTransformInput( + input + ), + ) + + +def _serialize_query_input_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="QueryInputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_QueryInputTransformInput( + input + ), + ) + + +def _serialize_query_output_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="QueryOutputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_QueryOutputTransformInput( + input + ), + ) + + +def _serialize_transact_write_items_input_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="TransactWriteItemsInputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactWriteItemsInputTransformInput( + input + ), + ) + + +def _serialize_transact_write_items_output_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="TransactWriteItemsOutputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactWriteItemsOutputTransformInput( + input + ), + ) + + +def _serialize_update_item_input_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="UpdateItemInputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_UpdateItemInputTransformInput( + input + ), + ) + + +def _serialize_update_item_output_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="UpdateItemOutputTransform", + 
dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_UpdateItemOutputTransformInput( + input + ), + ) + + +def _serialize_delete_item_input_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="DeleteItemInputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_DeleteItemInputTransformInput( + input + ), + ) + + +def _serialize_delete_item_output_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="DeleteItemOutputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_DeleteItemOutputTransformInput( + input + ), + ) + + +def _serialize_transact_get_items_input_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="TransactGetItemsInputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactGetItemsInputTransformInput( + input + ), + ) + + +def _serialize_transact_get_items_output_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="TransactGetItemsOutputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactGetItemsOutputTransformInput( + input + ), + ) + + +def _serialize_execute_statement_input_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="ExecuteStatementInputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteStatementInputTransformInput( + input + ), + ) + + +def _serialize_execute_statement_output_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="ExecuteStatementOutputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteStatementOutputTransformInput( + input + ), + ) + + +def _serialize_batch_execute_statement_input_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="BatchExecuteStatementInputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchExecuteStatementInputTransformInput( + input + ), + ) + + +def _serialize_batch_execute_statement_output_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="BatchExecuteStatementOutputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchExecuteStatementOutputTransformInput( + input + ), + ) + + +def _serialize_execute_transaction_input_transform(input, config: Config) -> 
DafnyRequest: + return DafnyRequest( + operation_name="ExecuteTransactionInputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteTransactionInputTransformInput( + input + ), + ) + + +def _serialize_execute_transaction_output_transform(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="ExecuteTransactionOutputTransform", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteTransactionOutputTransformInput( + input + ), + ) + + +def _serialize_resolve_attributes(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="ResolveAttributes", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_transforms_ResolveAttributesInput( + input + ), + ) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/smithy_to_dafny.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/smithy_to_dafny.py new file mode 100644 index 000000000..74a1601e9 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_transforms/smithy_to_dafny.py @@ -0,0 +1,703 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
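# A minimal, self-contained sketch of the serializer pattern used throughout
# serialize.py above: each operation gets a _serialize_* function that wraps the
# native Smithy model in a DafnyRequest naming the Dafny operation and carrying the
# converted input. DafnyRequestSketch, _to_dafny_sketch, and _serialize_example_transform
# are stand-in names for illustration only; the real types and converters live in the
# generated dafny_protocol and smithy_to_dafny modules.
from dataclasses import dataclass
from typing import Any


@dataclass
class DafnyRequestSketch:
    operation_name: str
    dafny_operation_input: Any


def _to_dafny_sketch(native_input: Any) -> Any:
    # Stand-in for a smithy_to_dafny converter; the generated ones build Dafny datatypes.
    return native_input


def _serialize_example_transform(input: Any, config: Any) -> DafnyRequestSketch:
    return DafnyRequestSketch(
        operation_name="ExampleTransform",
        dafny_operation_input=_to_dafny_sketch(input),
    )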
+ +from _dafny import Map, Seq +import aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTransformsTypes import ( + BatchExecuteStatementInputTransformInput_BatchExecuteStatementInputTransformInput as DafnyBatchExecuteStatementInputTransformInput, + BatchExecuteStatementInputTransformOutput_BatchExecuteStatementInputTransformOutput as DafnyBatchExecuteStatementInputTransformOutput, + BatchExecuteStatementOutputTransformInput_BatchExecuteStatementOutputTransformInput as DafnyBatchExecuteStatementOutputTransformInput, + BatchExecuteStatementOutputTransformOutput_BatchExecuteStatementOutputTransformOutput as DafnyBatchExecuteStatementOutputTransformOutput, + BatchGetItemInputTransformInput_BatchGetItemInputTransformInput as DafnyBatchGetItemInputTransformInput, + BatchGetItemInputTransformOutput_BatchGetItemInputTransformOutput as DafnyBatchGetItemInputTransformOutput, + BatchGetItemOutputTransformInput_BatchGetItemOutputTransformInput as DafnyBatchGetItemOutputTransformInput, + BatchGetItemOutputTransformOutput_BatchGetItemOutputTransformOutput as DafnyBatchGetItemOutputTransformOutput, + BatchWriteItemInputTransformInput_BatchWriteItemInputTransformInput as DafnyBatchWriteItemInputTransformInput, + BatchWriteItemInputTransformOutput_BatchWriteItemInputTransformOutput as DafnyBatchWriteItemInputTransformOutput, + BatchWriteItemOutputTransformInput_BatchWriteItemOutputTransformInput as DafnyBatchWriteItemOutputTransformInput, + BatchWriteItemOutputTransformOutput_BatchWriteItemOutputTransformOutput as DafnyBatchWriteItemOutputTransformOutput, + DeleteItemInputTransformInput_DeleteItemInputTransformInput as DafnyDeleteItemInputTransformInput, + DeleteItemInputTransformOutput_DeleteItemInputTransformOutput as DafnyDeleteItemInputTransformOutput, + DeleteItemOutputTransformInput_DeleteItemOutputTransformInput as DafnyDeleteItemOutputTransformInput, + DeleteItemOutputTransformOutput_DeleteItemOutputTransformOutput as DafnyDeleteItemOutputTransformOutput, + ExecuteStatementInputTransformInput_ExecuteStatementInputTransformInput as DafnyExecuteStatementInputTransformInput, + ExecuteStatementInputTransformOutput_ExecuteStatementInputTransformOutput as DafnyExecuteStatementInputTransformOutput, + ExecuteStatementOutputTransformInput_ExecuteStatementOutputTransformInput as DafnyExecuteStatementOutputTransformInput, + ExecuteStatementOutputTransformOutput_ExecuteStatementOutputTransformOutput as DafnyExecuteStatementOutputTransformOutput, + ExecuteTransactionInputTransformInput_ExecuteTransactionInputTransformInput as DafnyExecuteTransactionInputTransformInput, + ExecuteTransactionInputTransformOutput_ExecuteTransactionInputTransformOutput as DafnyExecuteTransactionInputTransformOutput, + ExecuteTransactionOutputTransformInput_ExecuteTransactionOutputTransformInput as DafnyExecuteTransactionOutputTransformInput, + ExecuteTransactionOutputTransformOutput_ExecuteTransactionOutputTransformOutput as DafnyExecuteTransactionOutputTransformOutput, + GetItemInputTransformInput_GetItemInputTransformInput as DafnyGetItemInputTransformInput, + GetItemInputTransformOutput_GetItemInputTransformOutput as DafnyGetItemInputTransformOutput, + GetItemOutputTransformInput_GetItemOutputTransformInput as DafnyGetItemOutputTransformInput, + GetItemOutputTransformOutput_GetItemOutputTransformOutput as DafnyGetItemOutputTransformOutput, + PutItemInputTransformInput_PutItemInputTransformInput as 
DafnyPutItemInputTransformInput, + PutItemInputTransformOutput_PutItemInputTransformOutput as DafnyPutItemInputTransformOutput, + PutItemOutputTransformInput_PutItemOutputTransformInput as DafnyPutItemOutputTransformInput, + PutItemOutputTransformOutput_PutItemOutputTransformOutput as DafnyPutItemOutputTransformOutput, + QueryInputTransformInput_QueryInputTransformInput as DafnyQueryInputTransformInput, + QueryInputTransformOutput_QueryInputTransformOutput as DafnyQueryInputTransformOutput, + QueryOutputTransformInput_QueryOutputTransformInput as DafnyQueryOutputTransformInput, + QueryOutputTransformOutput_QueryOutputTransformOutput as DafnyQueryOutputTransformOutput, + ResolveAttributesInput_ResolveAttributesInput as DafnyResolveAttributesInput, + ResolveAttributesOutput_ResolveAttributesOutput as DafnyResolveAttributesOutput, + ScanInputTransformInput_ScanInputTransformInput as DafnyScanInputTransformInput, + ScanInputTransformOutput_ScanInputTransformOutput as DafnyScanInputTransformOutput, + ScanOutputTransformInput_ScanOutputTransformInput as DafnyScanOutputTransformInput, + ScanOutputTransformOutput_ScanOutputTransformOutput as DafnyScanOutputTransformOutput, + TransactGetItemsInputTransformInput_TransactGetItemsInputTransformInput as DafnyTransactGetItemsInputTransformInput, + TransactGetItemsInputTransformOutput_TransactGetItemsInputTransformOutput as DafnyTransactGetItemsInputTransformOutput, + TransactGetItemsOutputTransformInput_TransactGetItemsOutputTransformInput as DafnyTransactGetItemsOutputTransformInput, + TransactGetItemsOutputTransformOutput_TransactGetItemsOutputTransformOutput as DafnyTransactGetItemsOutputTransformOutput, + TransactWriteItemsInputTransformInput_TransactWriteItemsInputTransformInput as DafnyTransactWriteItemsInputTransformInput, + TransactWriteItemsInputTransformOutput_TransactWriteItemsInputTransformOutput as DafnyTransactWriteItemsInputTransformOutput, + TransactWriteItemsOutputTransformInput_TransactWriteItemsOutputTransformInput as DafnyTransactWriteItemsOutputTransformInput, + TransactWriteItemsOutputTransformOutput_TransactWriteItemsOutputTransformOutput as DafnyTransactWriteItemsOutputTransformOutput, + UpdateItemInputTransformInput_UpdateItemInputTransformInput as DafnyUpdateItemInputTransformInput, + UpdateItemInputTransformOutput_UpdateItemInputTransformOutput as DafnyUpdateItemInputTransformOutput, + UpdateItemOutputTransformInput_UpdateItemOutputTransformInput as DafnyUpdateItemOutputTransformInput, + UpdateItemOutputTransformOutput_UpdateItemOutputTransformOutput as DafnyUpdateItemOutputTransformOutput, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +from smithy_dafny_standard_library.internaldafny.generated.Wrappers import ( + Option_None, + Option_Some, +) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_PutItemInputTransformInput( + native_input, +): + return DafnyPutItemInputTransformInput( + sdkInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_PutItemInput( + native_input.sdk_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_PutItemOutputTransformInput( + native_input, +): + return DafnyPutItemOutputTransformInput( + sdkOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_PutItemOutput( + native_input.sdk_output + ), + 
originalInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_PutItemInput( + native_input.original_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_GetItemInputTransformInput( + native_input, +): + return DafnyGetItemInputTransformInput( + sdkInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_GetItemInput( + native_input.sdk_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_GetItemOutputTransformInput( + native_input, +): + return DafnyGetItemOutputTransformInput( + sdkOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_GetItemOutput( + native_input.sdk_output + ), + originalInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_GetItemInput( + native_input.original_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchWriteItemInputTransformInput( + native_input, +): + return DafnyBatchWriteItemInputTransformInput( + sdkInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchWriteItemInput( + native_input.sdk_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchWriteItemOutputTransformInput( + native_input, +): + return DafnyBatchWriteItemOutputTransformInput( + sdkOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchWriteItemOutput( + native_input.sdk_output + ), + originalInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchWriteItemInput( + native_input.original_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchGetItemInputTransformInput( + native_input, +): + return DafnyBatchGetItemInputTransformInput( + sdkInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchGetItemInput( + native_input.sdk_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchGetItemOutputTransformInput( + native_input, +): + return DafnyBatchGetItemOutputTransformInput( + sdkOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchGetItemOutput( + native_input.sdk_output + ), + originalInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchGetItemInput( + native_input.original_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ScanInputTransformInput( + native_input, +): + return DafnyScanInputTransformInput( + sdkInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ScanInput( + native_input.sdk_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ScanOutputTransformInput( + native_input, +): + return DafnyScanOutputTransformInput( + sdkOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ScanOutput( + native_input.sdk_output + ), + originalInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ScanInput( + 
native_input.original_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_QueryInputTransformInput( + native_input, +): + return DafnyQueryInputTransformInput( + sdkInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_QueryInput( + native_input.sdk_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_QueryOutputTransformInput( + native_input, +): + return DafnyQueryOutputTransformInput( + sdkOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_QueryOutput( + native_input.sdk_output + ), + originalInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_QueryInput( + native_input.original_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactWriteItemsInputTransformInput( + native_input, +): + return DafnyTransactWriteItemsInputTransformInput( + sdkInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_TransactWriteItemsInput( + native_input.sdk_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactWriteItemsOutputTransformInput( + native_input, +): + return DafnyTransactWriteItemsOutputTransformInput( + sdkOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_TransactWriteItemsOutput( + native_input.sdk_output + ), + originalInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_TransactWriteItemsInput( + native_input.original_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_UpdateItemInputTransformInput( + native_input, +): + return DafnyUpdateItemInputTransformInput( + sdkInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_UpdateItemInput( + native_input.sdk_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_UpdateItemOutputTransformInput( + native_input, +): + return DafnyUpdateItemOutputTransformInput( + sdkOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_UpdateItemOutput( + native_input.sdk_output + ), + originalInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_UpdateItemInput( + native_input.original_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_DeleteItemInputTransformInput( + native_input, +): + return DafnyDeleteItemInputTransformInput( + sdkInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_DeleteItemInput( + native_input.sdk_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_DeleteItemOutputTransformInput( + native_input, +): + return DafnyDeleteItemOutputTransformInput( + sdkOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_DeleteItemOutput( + native_input.sdk_output + ), + originalInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_DeleteItemInput( + native_input.original_input + ), + ) + + +def 
aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactGetItemsInputTransformInput( + native_input, +): + return DafnyTransactGetItemsInputTransformInput( + sdkInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_TransactGetItemsInput( + native_input.sdk_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactGetItemsOutputTransformInput( + native_input, +): + return DafnyTransactGetItemsOutputTransformInput( + sdkOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_TransactGetItemsOutput( + native_input.sdk_output + ), + originalInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_TransactGetItemsInput( + native_input.original_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteStatementInputTransformInput( + native_input, +): + return DafnyExecuteStatementInputTransformInput( + sdkInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ExecuteStatementInput( + native_input.sdk_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteStatementOutputTransformInput( + native_input, +): + return DafnyExecuteStatementOutputTransformInput( + sdkOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ExecuteStatementOutput( + native_input.sdk_output + ), + originalInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ExecuteStatementInput( + native_input.original_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchExecuteStatementInputTransformInput( + native_input, +): + return DafnyBatchExecuteStatementInputTransformInput( + sdkInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchExecuteStatementInput( + native_input.sdk_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchExecuteStatementOutputTransformInput( + native_input, +): + return DafnyBatchExecuteStatementOutputTransformInput( + sdkOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchExecuteStatementOutput( + native_input.sdk_output + ), + originalInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchExecuteStatementInput( + native_input.original_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteTransactionInputTransformInput( + native_input, +): + return DafnyExecuteTransactionInputTransformInput( + sdkInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ExecuteTransactionInput( + native_input.sdk_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteTransactionOutputTransformInput( + native_input, +): + return DafnyExecuteTransactionOutputTransformInput( + sdkOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ExecuteTransactionOutput( + native_input.sdk_output + ), + 
originalInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ExecuteTransactionInput( + native_input.original_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ResolveAttributesInput( + native_input, +): + return DafnyResolveAttributesInput( + TableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.table_name.encode("utf-16-be"))] * 2) + ] + ) + ), + Item=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_AttributeValue( + value + ) + for (key, value) in native_input.item.items() + } + ), + Version=((Option_Some(native_input.version)) if (native_input.version is not None) else (Option_None())), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_PutItemInputTransformOutput( + native_input, +): + return DafnyPutItemInputTransformOutput( + transformedInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_PutItemInput( + native_input.transformed_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_PutItemOutputTransformOutput( + native_input, +): + return DafnyPutItemOutputTransformOutput( + transformedOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_PutItemOutput( + native_input.transformed_output + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_GetItemInputTransformOutput( + native_input, +): + return DafnyGetItemInputTransformOutput( + transformedInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_GetItemInput( + native_input.transformed_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_GetItemOutputTransformOutput( + native_input, +): + return DafnyGetItemOutputTransformOutput( + transformedOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_GetItemOutput( + native_input.transformed_output + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchWriteItemInputTransformOutput( + native_input, +): + return DafnyBatchWriteItemInputTransformOutput( + transformedInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchWriteItemInput( + native_input.transformed_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchWriteItemOutputTransformOutput( + native_input, +): + return DafnyBatchWriteItemOutputTransformOutput( + transformedOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchWriteItemOutput( + native_input.transformed_output + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchGetItemInputTransformOutput( + native_input, +): + return DafnyBatchGetItemInputTransformOutput( + transformedInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchGetItemInput( + native_input.transformed_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchGetItemOutputTransformOutput( + native_input, +): + return 
DafnyBatchGetItemOutputTransformOutput( + transformedOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchGetItemOutput( + native_input.transformed_output + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ScanInputTransformOutput( + native_input, +): + return DafnyScanInputTransformOutput( + transformedInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ScanInput( + native_input.transformed_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ScanOutputTransformOutput( + native_input, +): + return DafnyScanOutputTransformOutput( + transformedOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ScanOutput( + native_input.transformed_output + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_QueryInputTransformOutput( + native_input, +): + return DafnyQueryInputTransformOutput( + transformedInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_QueryInput( + native_input.transformed_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_QueryOutputTransformOutput( + native_input, +): + return DafnyQueryOutputTransformOutput( + transformedOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_QueryOutput( + native_input.transformed_output + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactWriteItemsInputTransformOutput( + native_input, +): + return DafnyTransactWriteItemsInputTransformOutput( + transformedInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_TransactWriteItemsInput( + native_input.transformed_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactWriteItemsOutputTransformOutput( + native_input, +): + return DafnyTransactWriteItemsOutputTransformOutput( + transformedOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_TransactWriteItemsOutput( + native_input.transformed_output + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_UpdateItemInputTransformOutput( + native_input, +): + return DafnyUpdateItemInputTransformOutput( + transformedInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_UpdateItemInput( + native_input.transformed_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_UpdateItemOutputTransformOutput( + native_input, +): + return DafnyUpdateItemOutputTransformOutput( + transformedOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_UpdateItemOutput( + native_input.transformed_output + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_DeleteItemInputTransformOutput( + native_input, +): + return DafnyDeleteItemInputTransformOutput( + transformedInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_DeleteItemInput( + native_input.transformed_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_DeleteItemOutputTransformOutput( + native_input, +): + return 
DafnyDeleteItemOutputTransformOutput( + transformedOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_DeleteItemOutput( + native_input.transformed_output + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactGetItemsInputTransformOutput( + native_input, +): + return DafnyTransactGetItemsInputTransformOutput( + transformedInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_TransactGetItemsInput( + native_input.transformed_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_TransactGetItemsOutputTransformOutput( + native_input, +): + return DafnyTransactGetItemsOutputTransformOutput( + transformedOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_TransactGetItemsOutput( + native_input.transformed_output + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteStatementInputTransformOutput( + native_input, +): + return DafnyExecuteStatementInputTransformOutput( + transformedInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ExecuteStatementInput( + native_input.transformed_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteStatementOutputTransformOutput( + native_input, +): + return DafnyExecuteStatementOutputTransformOutput( + transformedOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ExecuteStatementOutput( + native_input.transformed_output + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchExecuteStatementInputTransformOutput( + native_input, +): + return DafnyBatchExecuteStatementInputTransformOutput( + transformedInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchExecuteStatementInput( + native_input.transformed_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_BatchExecuteStatementOutputTransformOutput( + native_input, +): + return DafnyBatchExecuteStatementOutputTransformOutput( + transformedOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_BatchExecuteStatementOutput( + native_input.transformed_output + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteTransactionInputTransformOutput( + native_input, +): + return DafnyExecuteTransactionInputTransformOutput( + transformedInput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ExecuteTransactionInput( + native_input.transformed_input + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ExecuteTransactionOutputTransformOutput( + native_input, +): + return DafnyExecuteTransactionOutputTransformOutput( + transformedOutput=aws_cryptography_internal_dynamodb.smithygenerated.com_amazonaws_dynamodb.aws_sdk_to_dafny.com_amazonaws_dynamodb_ExecuteTransactionOutput( + native_input.transformed_output + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_ResolveAttributesOutput( + native_input, +): + return DafnyResolveAttributesOutput( + VirtualFields=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): Seq( + 
"".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(value.encode("utf-16-be"))] * 2)]) + ) + for (key, value) in native_input.virtual_fields.items() + } + ), + CompoundBeacons=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(value.encode("utf-16-be"))] * 2)]) + ) + for (key, value) in native_input.compound_beacons.items() + } + ), + ) + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_AwsCryptographicMaterialProvidersReference( + native_input, +): + return native_input._config.dafnyImplInterface.impl + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_DynamoDbEncryptionReference( + native_input, +): + return native_input._config.dafnyImplInterface.impl + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_DynamoDbItemEncryptorReference( + native_input, +): + return native_input._config.dafnyImplInterface.impl + + +def aws_cryptography_dbencryptionsdk_dynamodb_transforms_StructuredEncryptionReference( + native_input, +): + return native_input._config.dafnyImplInterface.impl diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/__init__.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/__init__.py new file mode 100644 index 000000000..09be6133b --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/client.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/client.py new file mode 100644 index 000000000..8164df31e --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/client.py @@ -0,0 +1,395 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
+ +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes import ( + IStructuredEncryptionClient, +) +from typing import Callable, TypeVar, cast + +from .config import Config, StructuredEncryptionConfig +from .dafny_protocol import DafnyRequest, DafnyResponse +from .plugin import set_config_impl +from smithy_python.exceptions import SmithyRetryException +from smithy_python.interfaces.interceptor import Interceptor, InterceptorContext +from smithy_python.interfaces.retries import RetryErrorInfo, RetryErrorType + +from .config import Plugin +from .deserialize import ( + _deserialize_decrypt_path_structure, + _deserialize_decrypt_structure, + _deserialize_encrypt_path_structure, + _deserialize_encrypt_structure, + _deserialize_resolve_auth_actions, +) +from .errors import ServiceError +from .models import ( + DecryptPathStructureInput, + DecryptPathStructureOutput, + DecryptStructureInput, + DecryptStructureOutput, + EncryptPathStructureInput, + EncryptPathStructureOutput, + EncryptStructureInput, + EncryptStructureOutput, + ResolveAuthActionsInput, + ResolveAuthActionsOutput, +) +from .serialize import ( + _serialize_decrypt_path_structure, + _serialize_decrypt_structure, + _serialize_encrypt_path_structure, + _serialize_encrypt_structure, + _serialize_resolve_auth_actions, +) + + +Input = TypeVar("Input") +Output = TypeVar("Output") + + +class StructuredEncryption: + """Client for StructuredEncryption. + + :param config: Configuration for the client. + """ + + def __init__( + self, + config: StructuredEncryptionConfig | None = None, + dafny_client: IStructuredEncryptionClient | None = None, + ): + if config is None: + self._config = Config() + else: + self._config = config + + client_plugins: list[Plugin] = [ + set_config_impl, + ] + + for plugin in client_plugins: + plugin(self._config) + + if dafny_client is not None: + self._config.dafnyImplInterface.impl = dafny_client + + def encrypt_structure(self, input: EncryptStructureInput) -> EncryptStructureOutput: + """Invokes the EncryptStructure operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_encrypt_structure, + deserialize=_deserialize_encrypt_structure, + config=self._config, + operation_name="EncryptStructure", + ) + + def decrypt_structure(self, input: DecryptStructureInput) -> DecryptStructureOutput: + """Invokes the DecryptStructure operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_decrypt_structure, + deserialize=_deserialize_decrypt_structure, + config=self._config, + operation_name="DecryptStructure", + ) + + def encrypt_path_structure(self, input: EncryptPathStructureInput) -> EncryptPathStructureOutput: + """Invokes the EncryptPathStructure operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_encrypt_path_structure, + deserialize=_deserialize_encrypt_path_structure, + config=self._config, + operation_name="EncryptPathStructure", + ) + + def decrypt_path_structure(self, input: DecryptPathStructureInput) -> DecryptPathStructureOutput: + """Invokes the DecryptPathStructure operation. + + :param input: The operation's input. 
+ """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_decrypt_path_structure, + deserialize=_deserialize_decrypt_path_structure, + config=self._config, + operation_name="DecryptPathStructure", + ) + + def resolve_auth_actions(self, input: ResolveAuthActionsInput) -> ResolveAuthActionsOutput: + """Invokes the ResolveAuthActions operation. + + :param input: The operation's input. + """ + return self._execute_operation( + input=input, + plugins=[], + serialize=_serialize_resolve_auth_actions, + deserialize=_deserialize_resolve_auth_actions, + config=self._config, + operation_name="ResolveAuthActions", + ) + + def _execute_operation( + self, + input: Input, + plugins: list[Plugin], + serialize: Callable[[Input, Config], DafnyRequest], + deserialize: Callable[[DafnyResponse, Config], Output], + config: Config, + operation_name: str, + ) -> Output: + try: + return self._handle_execution(input, plugins, serialize, deserialize, config, operation_name) + except Exception as e: + # Make sure every exception that we throw is an instance of ServiceError so + # customers can reliably catch everything we throw. + if not isinstance(e, ServiceError): + raise ServiceError(e) from e + raise e + + def _handle_execution( + self, + input: Input, + plugins: list[Plugin], + serialize: Callable[[Input, Config], DafnyRequest], + deserialize: Callable[[DafnyResponse, Config], Output], + config: Config, + operation_name: str, + ) -> Output: + context: InterceptorContext[Input, None, None, None] = InterceptorContext( + request=input, + response=None, + transport_request=None, + transport_response=None, + ) + try: + _client_interceptors = config.interceptors + except AttributeError: + config.interceptors = [] + _client_interceptors = config.interceptors + client_interceptors = cast( + list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + _client_interceptors, + ) + interceptors = client_interceptors + + try: + # Step 1a: Invoke read_before_execution on client-level interceptors + for interceptor in client_interceptors: + interceptor.read_before_execution(context) + + # Step 1b: Run operation-level plugins + for plugin in plugins: + plugin(config) + + _client_interceptors = config.interceptors + interceptors = cast( + list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + _client_interceptors, + ) + + # Step 1c: Invoke the read_before_execution hooks on newly added + # interceptors. + for interceptor in interceptors: + if interceptor not in client_interceptors: + interceptor.read_before_execution(context) + + # Step 2: Invoke the modify_before_serialization hooks + for interceptor in interceptors: + context._request = interceptor.modify_before_serialization(context) + + # Step 3: Invoke the read_before_serialization hooks + for interceptor in interceptors: + interceptor.read_before_serialization(context) + + # Step 4: Serialize the request + context_with_transport_request = cast(InterceptorContext[Input, None, DafnyRequest, None], context) + context_with_transport_request._transport_request = serialize( + context_with_transport_request.request, config + ) + + # Step 5: Invoke read_after_serialization + for interceptor in interceptors: + interceptor.read_after_serialization(context_with_transport_request) + + # Step 6: Invoke modify_before_retry_loop + for interceptor in interceptors: + context_with_transport_request._transport_request = interceptor.modify_before_retry_loop( + context_with_transport_request + ) + + # Step 7: Acquire the retry token. 
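            # For these generated clients the configured strategy is expected to be a
            # no-retry placeholder (see the NoRetriesStrategy installed by the transforms
            # plugin above): acquiring the initial token is a no-op, and
            # refresh_retry_token_for_retry raises SmithyRetryException, so a failed
            # attempt is re-raised immediately instead of being retried.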
+ retry_strategy = config.retry_strategy + retry_token = retry_strategy.acquire_initial_retry_token() + + while True: + # Make an attempt, creating a copy of the context so we don't pass + # around old data. + context_with_response = self._handle_attempt( + deserialize, + interceptors, + context_with_transport_request.copy(), + config, + operation_name, + ) + + # We perform this type-ignored re-assignment because `context` needs + # to point at the latest context so it can be generically handled + # later on. This is only an issue here because we've created a copy, + # so we're no longer simply pointing at the same object in memory + # with different names and type hints. It is possible to address this + # without having to fall back to the type ignore, but it would impose + # unnecessary runtime costs. + context = context_with_response # type: ignore + + if isinstance(context_with_response.response, Exception): + # Step 7u: Reacquire retry token if the attempt failed + try: + retry_token = retry_strategy.refresh_retry_token_for_retry( + token_to_renew=retry_token, + error_info=RetryErrorInfo( + # TODO: Determine the error type. + error_type=RetryErrorType.CLIENT_ERROR, + ), + ) + except SmithyRetryException: + raise context_with_response.response + else: + # Step 8: Invoke record_success + retry_strategy.record_success(token=retry_token) + break + except Exception as e: + context._response = e + + # At this point, the context's request will have been definitively set, and + # The response will be set either with the modeled output or an exception. The + # transport_request and transport_response may be set or None. + execution_context = cast( + InterceptorContext[Input, Output, DafnyRequest | None, DafnyResponse | None], + context, + ) + return self._finalize_execution(interceptors, execution_context) + + def _handle_attempt( + self, + deserialize: Callable[[DafnyResponse, Config], Output], + interceptors: list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + context: InterceptorContext[Input, None, DafnyRequest, None], + config: Config, + operation_name: str, + ) -> InterceptorContext[Input, Output, DafnyRequest, DafnyResponse | None]: + try: + # Step 7a: Invoke read_before_attempt + for interceptor in interceptors: + interceptor.read_before_attempt(context) + + # Step 7m: Involve client Dafny impl + if config.dafnyImplInterface.impl is None: + raise Exception("No impl found on the operation config.") + + context_with_response = cast(InterceptorContext[Input, None, DafnyRequest, DafnyResponse], context) + + context_with_response._transport_response = config.dafnyImplInterface.handle_request( + input=context_with_response.transport_request + ) + + # Step 7n: Invoke read_after_transmit + for interceptor in interceptors: + interceptor.read_after_transmit(context_with_response) + + # Step 7o: Invoke modify_before_deserialization + for interceptor in interceptors: + context_with_response._transport_response = interceptor.modify_before_deserialization( + context_with_response + ) + + # Step 7p: Invoke read_before_deserialization + for interceptor in interceptors: + interceptor.read_before_deserialization(context_with_response) + + # Step 7q: deserialize + context_with_output = cast( + InterceptorContext[Input, Output, DafnyRequest, DafnyResponse], + context_with_response, + ) + context_with_output._response = deserialize(context_with_output._transport_response, config) + + # Step 7r: Invoke read_after_deserialization + for interceptor in interceptors: + 
interceptor.read_after_deserialization(context_with_output) + except Exception as e: + context._response = e + + # At this point, the context's request and transport_request have definitively been set, + # the response is either set or an exception, and the transport_resposne is either set or + # None. This will also be true after _finalize_attempt because there is no opportunity + # there to set the transport_response. + attempt_context = cast( + InterceptorContext[Input, Output, DafnyRequest, DafnyResponse | None], + context, + ) + return self._finalize_attempt(interceptors, attempt_context) + + def _finalize_attempt( + self, + interceptors: list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + context: InterceptorContext[Input, Output, DafnyRequest, DafnyResponse | None], + ) -> InterceptorContext[Input, Output, DafnyRequest, DafnyResponse | None]: + # Step 7s: Invoke modify_before_attempt_completion + try: + for interceptor in interceptors: + context._response = interceptor.modify_before_attempt_completion(context) + except Exception as e: + context._response = e + + # Step 7t: Invoke read_after_attempt + for interceptor in interceptors: + try: + interceptor.read_after_attempt(context) + except Exception as e: + context._response = e + + return context + + def _finalize_execution( + self, + interceptors: list[Interceptor[Input, Output, DafnyRequest, DafnyResponse]], + context: InterceptorContext[Input, Output, DafnyRequest | None, DafnyResponse | None], + ) -> Output: + try: + # Step 9: Invoke modify_before_completion + for interceptor in interceptors: + context._response = interceptor.modify_before_completion(context) + + except Exception as e: + context._response = e + + # Step 11: Invoke read_after_execution + for interceptor in interceptors: + try: + interceptor.read_after_execution(context) + except Exception as e: + context._response = e + + # Step 12: Return / throw + if isinstance(context.response, Exception): + raise context.response + + # We may want to add some aspects of this context to the output types so we can + # return it to the end-users. + return context.response diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/config.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/config.py new file mode 100644 index 000000000..7e3a3db13 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/config.py @@ -0,0 +1,92 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
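# A minimal usage sketch of the StructuredEncryption client defined above; this is not generated code.
# It assumes the package layout shown in this diff (the client module name is assumed to be `client`),
# that the default `set_config_impl` plugin wires up the underlying Dafny implementation, and that
# `cmm` is a CryptographicMaterialsManager already built with the AWS Cryptographic Material Providers
# Library. Table and attribute names, and the two-byte type id, are placeholders.
from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.client import (
    StructuredEncryption,
)
from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.config import (
    StructuredEncryptionConfig,
)
from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models import (
    CryptoAction,
    EncryptStructureInput,
    StructuredDataTerminal,
)

client = StructuredEncryption(config=StructuredEncryptionConfig())
output = client.encrypt_structure(
    EncryptStructureInput(
        table_name="example-table",
        plaintext_structure={
            "secret_attribute": StructuredDataTerminal(value=b"plaintext value", type_id=b"\x00\x01"),
        },
        crypto_schema={"secret_attribute": CryptoAction.ENCRYPT_AND_SIGN},
        cmm=cmm,  # placeholder: a CryptographicMaterialsManager created elsewhere
    )
)
encrypted_terminal = output.encrypted_structure["secret_attribute"]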
+ +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes import ( + StructuredEncryptionConfig_StructuredEncryptionConfig as DafnyStructuredEncryptionConfig, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny +from dataclasses import dataclass +from typing import Any, Callable, Dict, TypeAlias + +from .dafnyImplInterface import DafnyImplInterface +from smithy_python._private.retries import SimpleRetryStrategy +from smithy_python.interfaces.retries import RetryStrategy + + +_ServiceInterceptor = Any + + +@dataclass(init=False) +class Config: + """Configuration for StructuredEncryption.""" + + interceptors: list[_ServiceInterceptor] + retry_strategy: RetryStrategy + dafnyImplInterface: DafnyImplInterface | None + + def __init__( + self, + *, + interceptors: list[_ServiceInterceptor] | None = None, + retry_strategy: RetryStrategy | None = None, + dafnyImplInterface: DafnyImplInterface | None = None, + ): + """Constructor. + + :param interceptors: The list of interceptors, which are hooks + that are called during the execution of a request. + :param retry_strategy: The retry strategy for issuing retry + tokens and computing retry delays. + :param dafnyImplInterface: + """ + self.interceptors = interceptors or [] + self.retry_strategy = retry_strategy or SimpleRetryStrategy() + self.dafnyImplInterface = dafnyImplInterface + + +# A callable that allows customizing the config object on each request. +Plugin: TypeAlias = Callable[[Config], None] + + +class StructuredEncryptionConfig(Config): + def __init__( + self, + ): + """Constructor for StructuredEncryptionConfig.""" + super().__init__() + + def as_dict(self) -> Dict[str, Any]: + """Converts the StructuredEncryptionConfig to a dictionary.""" + return {} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "StructuredEncryptionConfig": + """Creates a StructuredEncryptionConfig from a dictionary.""" + return StructuredEncryptionConfig() + + def __repr__(self) -> str: + result = "StructuredEncryptionConfig(" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + return isinstance(other, StructuredEncryptionConfig) + + +def dafny_config_to_smithy_config(dafny_config) -> StructuredEncryptionConfig: + """Converts the provided Dafny shape for this localService's config into + the corresponding Smithy-modelled shape.""" + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_StructuredEncryptionConfig( + dafny_config + ) + + +def smithy_config_to_dafny_config(smithy_config) -> DafnyStructuredEncryptionConfig: + """Converts the provided Smithy-modelled shape for this localService's + config into the corresponding Dafny shape.""" + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_StructuredEncryptionConfig( + smithy_config + ) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/dafnyImplInterface.py 
b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/dafnyImplInterface.py new file mode 100644 index 000000000..95cbd13d5 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/dafnyImplInterface.py @@ -0,0 +1,37 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +from aws_dbesdk_dynamodb.internaldafny.generated.StructuredEncryption import ( + StructuredEncryptionClient, +) +from .dafny_protocol import DafnyRequest + + +class DafnyImplInterface: + impl: StructuredEncryptionClient | None = None + + # operation_map cannot be created at dafnyImplInterface create time, + # as the map's values reference values inside `self.impl`, + # and impl is only populated at runtime. + # Accessing these before impl is populated results in an error. + # At runtime, the map is populated once and cached. + operation_map = None + + def handle_request(self, input: DafnyRequest): + if self.operation_map is None: + self.operation_map = { + "EncryptStructure": self.impl.EncryptStructure, + "DecryptStructure": self.impl.DecryptStructure, + "EncryptPathStructure": self.impl.EncryptPathStructure, + "DecryptPathStructure": self.impl.DecryptPathStructure, + "ResolveAuthActions": self.impl.ResolveAuthActions, + } + + # This logic is where a typical Smithy client would expect the "server" to be. + # This code can be thought of as logic our Dafny "server" uses + # to route incoming client requests to the correct request handler code. + if input.dafny_operation_input is None: + return self.operation_map[input.operation_name]() + else: + return self.operation_map[input.operation_name](input.dafny_operation_input) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/dafny_protocol.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/dafny_protocol.py new file mode 100644 index 000000000..be268714d --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/dafny_protocol.py @@ -0,0 +1,39 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
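# A self-contained toy sketch (not generated code) of the lazy dispatch pattern DafnyImplInterface uses
# above: the operation-name-to-method map is only built on first use, because `impl` is attached at
# runtime by the client plugin and its bound methods cannot be referenced before then.
class _ToyDafnyImpl:
    def EncryptStructure(self, dafny_operation_input):
        return f"handled EncryptStructure({dafny_operation_input!r})"


class _ToyImplInterface:
    impl = None
    operation_map = None

    def handle_request(self, operation_name, dafny_operation_input):
        if self.operation_map is None:  # built lazily on the first request, then cached
            self.operation_map = {"EncryptStructure": self.impl.EncryptStructure}
        return self.operation_map[operation_name](dafny_operation_input)


interface = _ToyImplInterface()
interface.impl = _ToyDafnyImpl()  # populated at runtime, as the generated plugin does with the Dafny client
print(interface.handle_request("EncryptStructure", "payload"))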
+ +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes import ( + DecryptPathStructureInput_DecryptPathStructureInput as DafnyDecryptPathStructureInput, + DecryptStructureInput_DecryptStructureInput as DafnyDecryptStructureInput, + EncryptPathStructureInput_EncryptPathStructureInput as DafnyEncryptPathStructureInput, + EncryptStructureInput_EncryptStructureInput as DafnyEncryptStructureInput, + ResolveAuthActionsInput_ResolveAuthActionsInput as DafnyResolveAuthActionsInput, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ + + +import smithy_dafny_standard_library.internaldafny.generated.Wrappers as Wrappers +from typing import Union + + +class DafnyRequest: + operation_name: str + + # dafny_operation_input can take on any one of the types + # of the input values passed to the Dafny implementation + dafny_operation_input: Union[ + DafnyDecryptStructureInput, + DafnyDecryptPathStructureInput, + DafnyEncryptPathStructureInput, + DafnyEncryptStructureInput, + DafnyResolveAuthActionsInput, + ] + + def __init__(self, operation_name, dafny_operation_input): + self.operation_name = operation_name + self.dafny_operation_input = dafny_operation_input + + +class DafnyResponse(Wrappers.Result): + def __init__(self): + super().__init__(self) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/dafny_to_smithy.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/dafny_to_smithy.py new file mode 100644 index 000000000..32e97a286 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/dafny_to_smithy.py @@ -0,0 +1,435 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
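# A small, self-contained illustration (not generated code) of the string-conversion idiom the
# dafny_to_smithy converters below use repeatedly: a Dafny string arrives as a sequence of characters
# treated as UTF-16 code units, so each code unit is re-encoded as two big-endian bytes and the whole
# buffer is decoded back into a Python str.
dafny_table_name = list("example-table")  # stand-in for a Dafny character sequence
table_name = b"".join(ord(c).to_bytes(2, "big") for c in dafny_table_name).decode("utf-16-be")
assert table_name == "example-table"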
+ +import aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes import ( + AuthenticateAction_DO__NOT__SIGN, + AuthenticateAction_SIGN, + CryptoAction_DO__NOTHING, + CryptoAction_ENCRYPT__AND__SIGN, + CryptoAction_SIGN__AND__INCLUDE__IN__ENCRYPTION__CONTEXT, + CryptoAction_SIGN__ONLY, + PathSegment_member, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models + + +def aws_cryptography_dbencryptionsdk_structuredencryption_StructuredDataTerminal( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.StructuredDataTerminal( + value=bytes(dafny_input.value), + type_id=bytes(dafny_input.typeId), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction(dafny_input): + if isinstance(dafny_input, CryptoAction_ENCRYPT__AND__SIGN): + return "ENCRYPT_AND_SIGN" + + elif isinstance(dafny_input, CryptoAction_SIGN__AND__INCLUDE__IN__ENCRYPTION__CONTEXT): + return "SIGN_AND_INCLUDE_IN_ENCRYPTION_CONTEXT" + + elif isinstance(dafny_input, CryptoAction_SIGN__ONLY): + return "SIGN_ONLY" + + elif isinstance(dafny_input, CryptoAction_DO__NOTHING): + return "DO_NOTHING" + + else: + raise ValueError(f"No recognized enum value in enum type: {dafny_input=}") + + +def aws_cryptography_dbencryptionsdk_structuredencryption_EncryptStructureInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.EncryptStructureInput( + table_name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.tableName).decode("utf-16-be"), + plaintext_structure={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_StructuredDataTerminal( + value + ) + for (key, value) in dafny_input.plaintextStructure.items + }, + crypto_schema={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + value + ) + for (key, value) in dafny_input.cryptoSchema.items + }, + cmm=( + ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_CryptographicMaterialsManagerReference( + dafny_input.cmm + ) + ) + if (dafny_input.cmm is not None) + else None + ), + algorithm_suite_id=( + ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_DBEAlgorithmSuiteId( + dafny_input.algorithmSuiteId.value + ) + ) + if (dafny_input.algorithmSuiteId.is_Some) + else None + ), + encryption_context=( + ( + { + bytes(key.Elements).decode("utf-8"): bytes(value.Elements).decode("utf-8") + for (key, value) in dafny_input.encryptionContext.value.items + } + ) + if (dafny_input.encryptionContext.is_Some) + else None + ), + ) + + +def 
aws_cryptography_dbencryptionsdk_structuredencryption_AuthenticateAction( + dafny_input, +): + if isinstance(dafny_input, AuthenticateAction_SIGN): + return "SIGN" + + elif isinstance(dafny_input, AuthenticateAction_DO__NOT__SIGN): + return "DO_NOT_SIGN" + + else: + raise ValueError(f"No recognized enum value in enum type: {dafny_input=}") + + +def aws_cryptography_dbencryptionsdk_structuredencryption_DecryptStructureInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.DecryptStructureInput( + table_name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.tableName).decode("utf-16-be"), + encrypted_structure={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_StructuredDataTerminal( + value + ) + for (key, value) in dafny_input.encryptedStructure.items + }, + authenticate_schema={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_AuthenticateAction( + value + ) + for (key, value) in dafny_input.authenticateSchema.items + }, + cmm=( + ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_CryptographicMaterialsManagerReference( + dafny_input.cmm + ) + ) + if (dafny_input.cmm is not None) + else None + ), + encryption_context=( + ( + { + bytes(key.Elements).decode("utf-8"): bytes(value.Elements).decode("utf-8") + for (key, value) in dafny_input.encryptionContext.value.items + } + ) + if (dafny_input.encryptionContext.is_Some) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_CryptoItem(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.CryptoItem( + key=[ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_PathSegment( + list_element + ) + for list_element in dafny_input.key + ], + data=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_StructuredDataTerminal( + dafny_input.data + ), + action=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + dafny_input.action + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_PathSegment(dafny_input): + # Convert PathSegment + if isinstance(dafny_input, PathSegment_member): + PathSegment_union_value = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.PathSegmentMember( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_StructureSegment( + dafny_input.member + ) + ) + else: + raise ValueError("No recognized union value in union type: " + str(dafny_input)) + + return PathSegment_union_value + + +def aws_cryptography_dbencryptionsdk_structuredencryption_StructureSegment(dafny_input): + return 
aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.StructureSegment( + key=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.key).decode("utf-16-be"), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_EncryptPathStructureInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.EncryptPathStructureInput( + table_name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.tableName).decode("utf-16-be"), + plaintext_structure=[ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoItem( + list_element + ) + for list_element in dafny_input.plaintextStructure + ], + cmm=( + ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_CryptographicMaterialsManagerReference( + dafny_input.cmm + ) + ) + if (dafny_input.cmm is not None) + else None + ), + algorithm_suite_id=( + ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_DBEAlgorithmSuiteId( + dafny_input.algorithmSuiteId.value + ) + ) + if (dafny_input.algorithmSuiteId.is_Some) + else None + ), + encryption_context=( + ( + { + bytes(key.Elements).decode("utf-8"): bytes(value.Elements).decode("utf-8") + for (key, value) in dafny_input.encryptionContext.value.items + } + ) + if (dafny_input.encryptionContext.is_Some) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_AuthItem(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.AuthItem( + key=[ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_PathSegment( + list_element + ) + for list_element in dafny_input.key + ], + data=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_StructuredDataTerminal( + dafny_input.data + ), + action=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_AuthenticateAction( + dafny_input.action + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_DecryptPathStructureInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.DecryptPathStructureInput( + table_name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.tableName).decode("utf-16-be"), + encrypted_structure=[ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_AuthItem( + list_element + ) + for list_element in dafny_input.encryptedStructure + ], + cmm=( + ( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_CryptographicMaterialsManagerReference( + dafny_input.cmm + ) + ) + if (dafny_input.cmm is not None) + else None + ), + encryption_context=( + ( + { + bytes(key.Elements).decode("utf-8"): bytes(value.Elements).decode("utf-8") + for (key, value) in 
dafny_input.encryptionContext.value.items + } + ) + if (dafny_input.encryptionContext.is_Some) + else None + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_ResolveAuthActionsInput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.ResolveAuthActionsInput( + table_name=b"".join(ord(c).to_bytes(2, "big") for c in dafny_input.tableName).decode("utf-16-be"), + auth_actions=[ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_AuthItem( + list_element + ) + for list_element in dafny_input.authActions + ], + header_bytes=bytes(dafny_input.headerBytes), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_ParsedHeader(dafny_input): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.ParsedHeader( + algorithm_suite_id=aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_DBEAlgorithmSuiteId( + dafny_input.algorithmSuiteId + ), + encrypted_data_keys=[ + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.dafny_to_smithy.aws_cryptography_materialproviders_EncryptedDataKey( + list_element + ) + for list_element in dafny_input.encryptedDataKeys + ], + stored_encryption_context={ + bytes(key.Elements).decode("utf-8"): bytes(value.Elements).decode("utf-8") + for (key, value) in dafny_input.storedEncryptionContext.items + }, + encryption_context={ + bytes(key.Elements).decode("utf-8"): bytes(value.Elements).decode("utf-8") + for (key, value) in dafny_input.encryptionContext.items + }, + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_EncryptStructureOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.EncryptStructureOutput( + encrypted_structure={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_StructuredDataTerminal( + value + ) + for (key, value) in dafny_input.encryptedStructure.items + }, + crypto_schema={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + value + ) + for (key, value) in dafny_input.cryptoSchema.items + }, + parsed_header=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_ParsedHeader( + dafny_input.parsedHeader + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_DecryptStructureOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.DecryptStructureOutput( + plaintext_structure={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_StructuredDataTerminal( + value + ) + for (key, value) in dafny_input.plaintextStructure.items + 
}, + crypto_schema={ + b"".join(ord(c).to_bytes(2, "big") for c in key).decode( + "utf-16-be" + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + value + ) + for (key, value) in dafny_input.cryptoSchema.items + }, + parsed_header=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_ParsedHeader( + dafny_input.parsedHeader + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_EncryptPathStructureOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.EncryptPathStructureOutput( + encrypted_structure=[ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoItem( + list_element + ) + for list_element in dafny_input.encryptedStructure + ], + parsed_header=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_ParsedHeader( + dafny_input.parsedHeader + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_DecryptPathStructureOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.DecryptPathStructureOutput( + plaintext_structure=[ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoItem( + list_element + ) + for list_element in dafny_input.plaintextStructure + ], + parsed_header=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_ParsedHeader( + dafny_input.parsedHeader + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_ResolveAuthActionsOutput( + dafny_input, +): + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.ResolveAuthActionsOutput( + crypto_actions=[ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoItem( + list_element + ) + for list_element in dafny_input.cryptoActions + ], + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_AtomicPrimitivesReference( + dafny_input, +): + from aws_cryptography_primitives.smithygenerated.aws_cryptography_primitives.client import ( + AwsCryptographicPrimitives, + ) + + return AwsCryptographicPrimitives(config=None, dafny_client=dafny_input) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_StructuredEncryptionConfig( + dafny_input, +): + # Deferred import of .config to avoid circular dependency + import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.config + + return ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.config.StructuredEncryptionConfig() + ) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/deserialize.py 
b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/deserialize.py new file mode 100644 index 000000000..b29eb6b13 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/deserialize.py @@ -0,0 +1,104 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +import _dafny +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes import ( + DecryptPathStructureOutput_DecryptPathStructureOutput as DafnyDecryptPathStructureOutput, + DecryptStructureOutput_DecryptStructureOutput as DafnyDecryptStructureOutput, + EncryptPathStructureOutput_EncryptPathStructureOutput as DafnyEncryptPathStructureOutput, + EncryptStructureOutput_EncryptStructureOutput as DafnyEncryptStructureOutput, + Error, + Error_StructuredEncryptionException, + ResolveAuthActionsOutput_ResolveAuthActionsOutput as DafnyResolveAuthActionsOutput, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy +from typing import Any + +from .dafny_protocol import DafnyResponse +from .errors import ( + AwsCryptographicMaterialProviders, + AwsCryptographicPrimitives, + CollectionOfErrors, + OpaqueError, + OpaqueWithTextError, + ServiceError, + StructuredEncryptionException, +) +from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.deserialize import ( + _deserialize_error as aws_cryptography_materialproviders_deserialize_error, +) +from aws_cryptography_primitives.smithygenerated.aws_cryptography_primitives.deserialize import ( + _deserialize_error as aws_cryptography_primitives_deserialize_error, +) + +from .config import Config + + +def _deserialize_encrypt_structure(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_EncryptStructureOutput( + input.value + ) + + +def _deserialize_decrypt_structure(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_DecryptStructureOutput( + input.value + ) + + +def _deserialize_encrypt_path_structure(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_EncryptPathStructureOutput( + input.value + ) + + +def _deserialize_decrypt_path_structure(input: DafnyResponse, config: Config): + + if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_DecryptPathStructureOutput( + input.value + ) + + +def _deserialize_resolve_auth_actions(input: DafnyResponse, config: Config): + +
if input.IsFailure(): + return _deserialize_error(input.error) + return aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.dafny_to_smithy.aws_cryptography_dbencryptionsdk_structuredencryption_ResolveAuthActionsOutput( + input.value + ) + + +def _deserialize_error(error: Error) -> ServiceError: + if error.is_Opaque: + return OpaqueError(obj=error.obj) + elif error.is_OpaqueWithText: + return OpaqueWithTextError(obj=error.obj, obj_message=error.objMessage) + elif error.is_CollectionOfErrors: + return CollectionOfErrors( + message=_dafny.string_of(error.message), + list=[_deserialize_error(dafny_e) for dafny_e in error.list], + ) + elif error.is_StructuredEncryptionException: + return StructuredEncryptionException(message=_dafny.string_of(error.message)) + elif error.is_AwsCryptographyPrimitives: + return AwsCryptographicPrimitives( + aws_cryptography_primitives_deserialize_error(error.AwsCryptographyPrimitives) + ) + elif error.is_AwsCryptographyMaterialProviders: + return AwsCryptographicMaterialProviders( + aws_cryptography_materialproviders_deserialize_error(error.AwsCryptographyMaterialProviders) + ) + else: + return OpaqueError(obj=error) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/errors.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/errors.py new file mode 100644 index 000000000..dc77dce9f --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/errors.py @@ -0,0 +1,299 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten.
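# A short sketch (not generated code) of how the error mapping above surfaces to callers: everything
# raised by the client derives from ServiceError, and Dafny-level failures that carry a message arrive
# as StructuredEncryptionException. `client` and `encrypt_input` are placeholders for objects built as
# in the earlier client sketch.
from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.errors import (
    ServiceError,
    StructuredEncryptionException,
)

try:
    client.encrypt_structure(encrypt_input)
except StructuredEncryptionException as e:
    print(f"structured encryption rejected the request: {e.message}")
except ServiceError as e:
    print(f"other modeled, collection, or wrapped error: {e!r}")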
+ +import _dafny +from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.errors import ( + _smithy_error_to_dafny_error as aws_cryptography_materialproviders_smithy_error_to_dafny_error, +) +from aws_cryptography_primitives.smithygenerated.aws_cryptography_primitives.errors import ( + _smithy_error_to_dafny_error as aws_cryptography_primitives_smithy_error_to_dafny_error, +) +import aws_dbesdk_dynamodb.internaldafny.generated +import aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.errors +from typing import Any, Dict, Generic, List, Literal, TypeVar + + +class ServiceError(Exception): + """Base error for all errors in the service.""" + + pass + + +T = TypeVar("T") + + +class ApiError(ServiceError, Generic[T]): + """Base error for all api errors in the service.""" + + code: T + + def __init__(self, message: str): + super().__init__(message) + self.message = message + + +class UnknownApiError(ApiError[Literal["Unknown"]]): + """Error representing any unknown api errors.""" + + code: Literal["Unknown"] = "Unknown" + + +class StructuredEncryptionException(ApiError[Literal["StructuredEncryptionException"]]): + code: Literal["StructuredEncryptionException"] = "StructuredEncryptionException" + message: str + + def __init__( + self, + *, + message: str, + ): + super().__init__(message) + + def as_dict(self) -> Dict[str, Any]: + """Converts the StructuredEncryptionException to a dictionary.""" + return { + "message": self.message, + "code": self.code, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "StructuredEncryptionException": + """Creates a StructuredEncryptionException from a dictionary.""" + kwargs: Dict[str, Any] = { + "message": d["message"], + } + + return StructuredEncryptionException(**kwargs) + + def __repr__(self) -> str: + result = "StructuredEncryptionException(" + if self.message is not None: + result += f"message={repr(self.message)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, StructuredEncryptionException): + return False + attributes: list[str] = [ + "message", + "message", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class StructuredEncryptionException(ApiError[Literal["StructuredEncryptionException"]]): + code: Literal["StructuredEncryptionException"] = "StructuredEncryptionException" + message: str + + +class AwsCryptographicPrimitives(ApiError[Literal["AwsCryptographicPrimitives"]]): + AwsCryptographicPrimitives: Any + + +class AwsCryptographicMaterialProviders(ApiError[Literal["AwsCryptographicMaterialProviders"]]): + AwsCryptographicMaterialProviders: Any + + +class CollectionOfErrors(ApiError[Literal["CollectionOfErrors"]]): + code: Literal["CollectionOfErrors"] = "CollectionOfErrors" + message: str + list: List[ServiceError] + + def __init__(self, *, message: str, list): + super().__init__(message) + self.list = list + + def as_dict(self) -> Dict[str, Any]: + """Converts the CollectionOfErrors to a dictionary. + + The dictionary uses the modeled shape names rather than the + parameter names as keys to be mostly compatible with boto3. + """ + return { + "message": self.message, + "code": self.code, + "list": self.list, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "CollectionOfErrors": + """Creates a CollectionOfErrors from a dictionary. 
+ + The dictionary is expected to use the modeled shape names rather + than the parameter names as keys to be mostly compatible with + boto3. + """ + kwargs: Dict[str, Any] = {"message": d["message"], "list": d["list"]} + + return CollectionOfErrors(**kwargs) + + def __repr__(self) -> str: + result = "CollectionOfErrors(" + result += f"message={self.message}," + if self.message is not None: + result += f"message={repr(self.message)}" + result += f"list={self.list}" + result += ")" + return result + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, CollectionOfErrors): + return False + if not (self.list == other.list): + return False + attributes: list[str] = ["message", "message"] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class OpaqueError(ApiError[Literal["OpaqueError"]]): + code: Literal["OpaqueError"] = "OpaqueError" + obj: Any # As an OpaqueError, type of obj is unknown + + def __init__(self, *, obj): + super().__init__("") + self.obj = obj + + def as_dict(self) -> Dict[str, Any]: + """Converts the OpaqueError to a dictionary. + + The dictionary uses the modeled shape names rather than the + parameter names as keys to be mostly compatible with boto3. + """ + return { + "message": self.message, + "code": self.code, + "obj": self.obj, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "OpaqueError": + """Creates a OpaqueError from a dictionary. + + The dictionary is expected to use the modeled shape names rather + than the parameter names as keys to be mostly compatible with + boto3. + """ + kwargs: Dict[str, Any] = {"message": d["message"], "obj": d["obj"]} + + return OpaqueError(**kwargs) + + def __repr__(self) -> str: + result = "OpaqueError(" + result += f"message={self.message}," + if self.message is not None: + result += f"message={repr(self.message)}" + result += f"obj={self.obj}" + result += ")" + return result + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, OpaqueError): + return False + if not (self.obj == other.obj): + return False + attributes: list[str] = ["message", "message"] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class OpaqueWithTextError(ApiError[Literal["OpaqueWithTextError"]]): + code: Literal["OpaqueWithTextError"] = "OpaqueWithTextError" + obj: Any # As an OpaqueWithTextError, type of obj is unknown + obj_message: str # obj_message is a message representing the details of obj + + def __init__(self, *, obj, obj_message): + super().__init__("") + self.obj = obj + self.obj_message = obj_message + + def as_dict(self) -> Dict[str, Any]: + """Converts the OpaqueWithTextError to a dictionary. + + The dictionary uses the modeled shape names rather than the + parameter names as keys to be mostly compatible with boto3. + """ + return { + "message": self.message, + "code": self.code, + "obj": self.obj, + "obj_message": self.obj_message, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "OpaqueWithTextError": + """Creates a OpaqueWithTextError from a dictionary. + + The dictionary is expected to use the modeled shape names rather + than the parameter names as keys to be mostly compatible with + boto3. 
+ """ + kwargs: Dict[str, Any] = { + "message": d["message"], + "obj": d["obj"], + "obj_message": d["obj_message"], + } + + return OpaqueWithTextError(**kwargs) + + def __repr__(self) -> str: + result = "OpaqueWithTextError(" + result += f"message={self.message}," + if self.message is not None: + result += f"message={repr(self.message)}" + result += f"obj={self.obj}" + result += f"obj_message={self.obj_message}" + result += ")" + return result + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, OpaqueWithTextError): + return False + if not (self.obj == other.obj): + return False + attributes: list[str] = ["message", "message"] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +def _smithy_error_to_dafny_error(e: ServiceError): + """Converts the provided native Smithy-modeled error into the corresponding + Dafny error.""" + if isinstance( + e, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.errors.StructuredEncryptionException, + ): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes.Error_StructuredEncryptionException( + message=_dafny.Seq(e.message) + ) + + if isinstance(e, AwsCryptographicPrimitives): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes.Error_AwsCryptographyPrimitives( + aws_cryptography_primitives_smithy_error_to_dafny_error(e.message) + ) + + if isinstance(e, AwsCryptographicMaterialProviders): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes.Error_AwsCryptographyMaterialProviders( + aws_cryptography_materialproviders_smithy_error_to_dafny_error(e.message) + ) + + if isinstance(e, CollectionOfErrors): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes.Error_CollectionOfErrors( + message=_dafny.Seq(e.message), + list=_dafny.Seq(_smithy_error_to_dafny_error(native_err) for native_err in e.list), + ) + + if isinstance(e, OpaqueError): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes.Error_Opaque( + obj=e.obj + ) + + if isinstance(e, OpaqueWithTextError): + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes.Error_OpaqueWithText( + obj=e.obj, objMessage=e.obj_message + ) + + else: + return aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes.Error_Opaque( + obj=e + ) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/models.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/models.py new file mode 100644 index 000000000..4ba2c8b40 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/models.py @@ -0,0 +1,1096 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
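# A brief sketch (not generated code) of working with the modeled shapes defined in models.py below:
# StructuredDataTerminal pairs a value with a two-byte type id, CryptoAction members are plain strings,
# and the shapes round-trip through as_dict()/from_dict(). The type id bytes here are a placeholder,
# not a statement about the DynamoDB type encoding.
from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models import (
    CryptoAction,
    StructuredDataTerminal,
)

terminal = StructuredDataTerminal(value=b"1999-08-01", type_id=b"\x00\x01")
assert StructuredDataTerminal.from_dict(terminal.as_dict()) == terminal

crypto_schema = {
    "sensitive_attribute": CryptoAction.ENCRYPT_AND_SIGN,
    "signed_attribute": CryptoAction.SIGN_ONLY,
    "ignored_attribute": CryptoAction.DO_NOTHING,
}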
+ +import aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references +from typing import Any, Dict, List, Optional, Union + +from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.models import ( + EncryptedDataKey, +) + + +class AuthenticateAction: + SIGN = "SIGN" + + DO_NOT_SIGN = "DO_NOT_SIGN" + + # This set contains every possible value known at the time this was generated. New + # values may be added in the future. + values = frozenset({"SIGN", "DO_NOT_SIGN"}) + + +class StructuredDataTerminal: + value: bytes | bytearray + type_id: bytes | bytearray + + def __init__( + self, + *, + value: bytes | bytearray, + type_id: bytes | bytearray, + ): + self.value = value + if (type_id is not None) and (len(type_id) < 2): + raise ValueError("The size of type_id must be greater than or equal to 2") + + if (type_id is not None) and (len(type_id) > 2): + raise ValueError("The size of type_id must be less than or equal to 2") + + self.type_id = type_id + + def as_dict(self) -> Dict[str, Any]: + """Converts the StructuredDataTerminal to a dictionary.""" + return { + "value": self.value, + "type_id": self.type_id, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "StructuredDataTerminal": + """Creates a StructuredDataTerminal from a dictionary.""" + kwargs: Dict[str, Any] = { + "value": d["value"], + "type_id": d["type_id"], + } + + return StructuredDataTerminal(**kwargs) + + def __repr__(self) -> str: + result = "StructuredDataTerminal(" + if self.value is not None: + result += f"value={repr(self.value)}, " + + if self.type_id is not None: + result += f"type_id={repr(self.type_id)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, StructuredDataTerminal): + return False + attributes: list[str] = [ + "value", + "type_id", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class StructureSegment: + key: str + + def __init__( + self, + *, + key: str, + ): + self.key = key + + def as_dict(self) -> Dict[str, Any]: + """Converts the StructureSegment to a dictionary.""" + return { + "key": self.key, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "StructureSegment": + """Creates a StructureSegment from a dictionary.""" + kwargs: Dict[str, Any] = { + "key": d["key"], + } + + return StructureSegment(**kwargs) + + def __repr__(self) -> str: + result = "StructureSegment(" + if self.key is not None: + result += f"key={repr(self.key)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, StructureSegment): + return False + attributes: list[str] = [ + "key", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class PathSegmentMember: + def __init__(self, value: StructureSegment): + self.value = value + + def as_dict(self) -> Dict[str, Any]: + return {"member": self.value.as_dict()} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "PathSegmentMember": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + return PathSegmentMember(StructureSegment.from_dict(d["member"])) + + def __repr__(self) -> str: + return f"PathSegmentMember(value=repr(self.value))" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, PathSegmentMember): + return False + return self.value == other.value + + +class PathSegmentUnknown: + """Represents an unknown variant. 
+ + If you receive this value, you will need to update your library to + receive the parsed value. + + This value may not be deliberately sent. + """ + + def __init__(self, tag: str): + self.tag = tag + + def as_dict(self) -> Dict[str, Any]: + return {"SDK_UNKNOWN_MEMBER": {"name": self.tag}} + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "PathSegmentUnknown": + if len(d) != 1: + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + return PathSegmentUnknown(d["SDK_UNKNOWN_MEMBER"]["name"]) + + def __repr__(self) -> str: + return f"PathSegmentUnknown(tag={self.tag})" + + +PathSegment = Union[PathSegmentMember, PathSegmentUnknown] + + +def _path_segment_from_dict(d: Dict[str, Any]) -> PathSegment: + if "member" in d: + return PathSegmentMember.from_dict(d) + + raise TypeError(f"Unions may have exactly 1 value, but found {len(d)}") + + +class AuthItem: + key: list[PathSegment] + data: StructuredDataTerminal + action: str + + def __init__( + self, + *, + key: list[PathSegment], + data: StructuredDataTerminal, + action: str, + ): + self.key = key + self.data = data + self.action = action + + def as_dict(self) -> Dict[str, Any]: + """Converts the AuthItem to a dictionary.""" + return { + "key": _path_as_dict(self.key), + "data": self.data.as_dict(), + "action": self.action, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "AuthItem": + """Creates a AuthItem from a dictionary.""" + kwargs: Dict[str, Any] = { + "key": _path_from_dict(d["key"]), + "data": StructuredDataTerminal.from_dict(d["data"]), + "action": d["action"], + } + + return AuthItem(**kwargs) + + def __repr__(self) -> str: + result = "AuthItem(" + if self.key is not None: + result += f"key={repr(self.key)}, " + + if self.data is not None: + result += f"data={repr(self.data)}, " + + if self.action is not None: + result += f"action={repr(self.action)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, AuthItem): + return False + attributes: list[str] = [ + "key", + "data", + "action", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class CryptoAction: + ENCRYPT_AND_SIGN = "ENCRYPT_AND_SIGN" + + SIGN_AND_INCLUDE_IN_ENCRYPTION_CONTEXT = "SIGN_AND_INCLUDE_IN_ENCRYPTION_CONTEXT" + + SIGN_ONLY = "SIGN_ONLY" + + DO_NOTHING = "DO_NOTHING" + + # This set contains every possible value known at the time this was generated. New + # values may be added in the future. 
+ values = frozenset( + { + "ENCRYPT_AND_SIGN", + "SIGN_AND_INCLUDE_IN_ENCRYPTION_CONTEXT", + "SIGN_ONLY", + "DO_NOTHING", + } + ) + + +class CryptoItem: + key: list[PathSegment] + data: StructuredDataTerminal + action: str + + def __init__( + self, + *, + key: list[PathSegment], + data: StructuredDataTerminal, + action: str, + ): + self.key = key + self.data = data + self.action = action + + def as_dict(self) -> Dict[str, Any]: + """Converts the CryptoItem to a dictionary.""" + return { + "key": _path_as_dict(self.key), + "data": self.data.as_dict(), + "action": self.action, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "CryptoItem": + """Creates a CryptoItem from a dictionary.""" + kwargs: Dict[str, Any] = { + "key": _path_from_dict(d["key"]), + "data": StructuredDataTerminal.from_dict(d["data"]), + "action": d["action"], + } + + return CryptoItem(**kwargs) + + def __repr__(self) -> str: + result = "CryptoItem(" + if self.key is not None: + result += f"key={repr(self.key)}, " + + if self.data is not None: + result += f"data={repr(self.data)}, " + + if self.action is not None: + result += f"action={repr(self.action)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, CryptoItem): + return False + attributes: list[str] = [ + "key", + "data", + "action", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class DecryptPathStructureInput: + table_name: str + encrypted_structure: list[AuthItem] + cmm: "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.CryptographicMaterialsManager" + encryption_context: Optional[dict[str, str]] + + def __init__( + self, + *, + table_name: str, + encrypted_structure: list[AuthItem], + cmm: "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.CryptographicMaterialsManager", + encryption_context: Optional[dict[str, str]] = None, + ): + self.table_name = table_name + self.encrypted_structure = encrypted_structure + self.cmm = cmm + self.encryption_context = encryption_context + + def as_dict(self) -> Dict[str, Any]: + """Converts the DecryptPathStructureInput to a dictionary.""" + d: Dict[str, Any] = { + "table_name": self.table_name, + "encrypted_structure": _auth_list_as_dict(self.encrypted_structure), + "cmm": self.cmm.as_dict(), + } + + if self.encryption_context is not None: + d["encryption_context"] = self.encryption_context + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DecryptPathStructureInput": + """Creates a DecryptPathStructureInput from a dictionary.""" + from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references import ( + CryptographicMaterialsManager, + ) + + kwargs: Dict[str, Any] = { + "table_name": d["table_name"], + "encrypted_structure": _auth_list_from_dict(d["encrypted_structure"]), + "cmm": CryptographicMaterialsManager.from_dict(d["cmm"]), + } + + if "encryption_context" in d: + kwargs["encryption_context"] = d["encryption_context"] + + return DecryptPathStructureInput(**kwargs) + + def __repr__(self) -> str: + result = "DecryptPathStructureInput(" + if self.table_name is not None: + result += f"table_name={repr(self.table_name)}, " + + if self.encrypted_structure is not None: + result += f"encrypted_structure={repr(self.encrypted_structure)}, " + + if self.cmm is not None: + result += f"cmm={repr(self.cmm)}, " + + if self.encryption_context is not None: + result += 
f"encryption_context={repr(self.encryption_context)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DecryptPathStructureInput): + return False + attributes: list[str] = [ + "table_name", + "encrypted_structure", + "cmm", + "encryption_context", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ParsedHeader: + algorithm_suite_id: str + encrypted_data_keys: list[EncryptedDataKey] + stored_encryption_context: dict[str, str] + encryption_context: dict[str, str] + + def __init__( + self, + *, + algorithm_suite_id: str, + encrypted_data_keys: list[EncryptedDataKey], + stored_encryption_context: dict[str, str], + encryption_context: dict[str, str], + ): + self.algorithm_suite_id = algorithm_suite_id + self.encrypted_data_keys = encrypted_data_keys + self.stored_encryption_context = stored_encryption_context + self.encryption_context = encryption_context + + def as_dict(self) -> Dict[str, Any]: + """Converts the ParsedHeader to a dictionary.""" + return { + "algorithm_suite_id": self.algorithm_suite_id, + "encrypted_data_keys": self.encrypted_data_keys, + "stored_encryption_context": self.stored_encryption_context, + "encryption_context": self.encryption_context, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ParsedHeader": + """Creates a ParsedHeader from a dictionary.""" + kwargs: Dict[str, Any] = { + "algorithm_suite_id": d["algorithm_suite_id"], + "encrypted_data_keys": d["encrypted_data_keys"], + "stored_encryption_context": d["stored_encryption_context"], + "encryption_context": d["encryption_context"], + } + + return ParsedHeader(**kwargs) + + def __repr__(self) -> str: + result = "ParsedHeader(" + if self.algorithm_suite_id is not None: + result += f"algorithm_suite_id={repr(self.algorithm_suite_id)}, " + + if self.encrypted_data_keys is not None: + result += f"encrypted_data_keys={repr(self.encrypted_data_keys)}, " + + if self.stored_encryption_context is not None: + result += f"stored_encryption_context={repr(self.stored_encryption_context)}, " + + if self.encryption_context is not None: + result += f"encryption_context={repr(self.encryption_context)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ParsedHeader): + return False + attributes: list[str] = [ + "algorithm_suite_id", + "encrypted_data_keys", + "stored_encryption_context", + "encryption_context", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class DecryptPathStructureOutput: + plaintext_structure: list[CryptoItem] + parsed_header: ParsedHeader + + def __init__( + self, + *, + plaintext_structure: list[CryptoItem], + parsed_header: ParsedHeader, + ): + self.plaintext_structure = plaintext_structure + self.parsed_header = parsed_header + + def as_dict(self) -> Dict[str, Any]: + """Converts the DecryptPathStructureOutput to a dictionary.""" + return { + "plaintext_structure": _crypto_list_as_dict(self.plaintext_structure), + "parsed_header": self.parsed_header.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DecryptPathStructureOutput": + """Creates a DecryptPathStructureOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "plaintext_structure": _crypto_list_from_dict(d["plaintext_structure"]), + "parsed_header": ParsedHeader.from_dict(d["parsed_header"]), + } + + return DecryptPathStructureOutput(**kwargs) + + def __repr__(self) -> str: + result = "DecryptPathStructureOutput(" + if self.plaintext_structure is not None: + 
result += f"plaintext_structure={repr(self.plaintext_structure)}, " + + if self.parsed_header is not None: + result += f"parsed_header={repr(self.parsed_header)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DecryptPathStructureOutput): + return False + attributes: list[str] = [ + "plaintext_structure", + "parsed_header", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class DecryptStructureInput: + table_name: str + encrypted_structure: dict[str, StructuredDataTerminal] + authenticate_schema: dict[str, str] + cmm: "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.CryptographicMaterialsManager" + encryption_context: Optional[dict[str, str]] + + def __init__( + self, + *, + table_name: str, + encrypted_structure: dict[str, StructuredDataTerminal], + authenticate_schema: dict[str, str], + cmm: "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.CryptographicMaterialsManager", + encryption_context: Optional[dict[str, str]] = None, + ): + self.table_name = table_name + self.encrypted_structure = encrypted_structure + self.authenticate_schema = authenticate_schema + self.cmm = cmm + self.encryption_context = encryption_context + + def as_dict(self) -> Dict[str, Any]: + """Converts the DecryptStructureInput to a dictionary.""" + d: Dict[str, Any] = { + "table_name": self.table_name, + "encrypted_structure": _structured_data_map_as_dict(self.encrypted_structure), + "authenticate_schema": self.authenticate_schema, + "cmm": self.cmm.as_dict(), + } + + if self.encryption_context is not None: + d["encryption_context"] = self.encryption_context + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DecryptStructureInput": + """Creates a DecryptStructureInput from a dictionary.""" + from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references import ( + CryptographicMaterialsManager, + ) + + kwargs: Dict[str, Any] = { + "table_name": d["table_name"], + "encrypted_structure": _structured_data_map_from_dict(d["encrypted_structure"]), + "authenticate_schema": d["authenticate_schema"], + "cmm": CryptographicMaterialsManager.from_dict(d["cmm"]), + } + + if "encryption_context" in d: + kwargs["encryption_context"] = d["encryption_context"] + + return DecryptStructureInput(**kwargs) + + def __repr__(self) -> str: + result = "DecryptStructureInput(" + if self.table_name is not None: + result += f"table_name={repr(self.table_name)}, " + + if self.encrypted_structure is not None: + result += f"encrypted_structure={repr(self.encrypted_structure)}, " + + if self.authenticate_schema is not None: + result += f"authenticate_schema={repr(self.authenticate_schema)}, " + + if self.cmm is not None: + result += f"cmm={repr(self.cmm)}, " + + if self.encryption_context is not None: + result += f"encryption_context={repr(self.encryption_context)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DecryptStructureInput): + return False + attributes: list[str] = [ + "table_name", + "encrypted_structure", + "authenticate_schema", + "cmm", + "encryption_context", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class DecryptStructureOutput: + plaintext_structure: dict[str, StructuredDataTerminal] + crypto_schema: dict[str, str] + parsed_header: ParsedHeader + + def __init__( + self, + *, + plaintext_structure: dict[str, 
StructuredDataTerminal], + crypto_schema: dict[str, str], + parsed_header: ParsedHeader, + ): + self.plaintext_structure = plaintext_structure + self.crypto_schema = crypto_schema + self.parsed_header = parsed_header + + def as_dict(self) -> Dict[str, Any]: + """Converts the DecryptStructureOutput to a dictionary.""" + return { + "plaintext_structure": _structured_data_map_as_dict(self.plaintext_structure), + "crypto_schema": self.crypto_schema, + "parsed_header": self.parsed_header.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "DecryptStructureOutput": + """Creates a DecryptStructureOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "plaintext_structure": _structured_data_map_from_dict(d["plaintext_structure"]), + "crypto_schema": d["crypto_schema"], + "parsed_header": ParsedHeader.from_dict(d["parsed_header"]), + } + + return DecryptStructureOutput(**kwargs) + + def __repr__(self) -> str: + result = "DecryptStructureOutput(" + if self.plaintext_structure is not None: + result += f"plaintext_structure={repr(self.plaintext_structure)}, " + + if self.crypto_schema is not None: + result += f"crypto_schema={repr(self.crypto_schema)}, " + + if self.parsed_header is not None: + result += f"parsed_header={repr(self.parsed_header)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, DecryptStructureOutput): + return False + attributes: list[str] = [ + "plaintext_structure", + "crypto_schema", + "parsed_header", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class EncryptPathStructureInput: + table_name: str + plaintext_structure: list[CryptoItem] + cmm: "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.CryptographicMaterialsManager" + algorithm_suite_id: Optional[str] + encryption_context: Optional[dict[str, str]] + + def __init__( + self, + *, + table_name: str, + plaintext_structure: list[CryptoItem], + cmm: "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.CryptographicMaterialsManager", + algorithm_suite_id: Optional[str] = None, + encryption_context: Optional[dict[str, str]] = None, + ): + self.table_name = table_name + self.plaintext_structure = plaintext_structure + self.cmm = cmm + self.algorithm_suite_id = algorithm_suite_id + self.encryption_context = encryption_context + + def as_dict(self) -> Dict[str, Any]: + """Converts the EncryptPathStructureInput to a dictionary.""" + d: Dict[str, Any] = { + "table_name": self.table_name, + "plaintext_structure": _crypto_list_as_dict(self.plaintext_structure), + "cmm": self.cmm.as_dict(), + } + + if self.algorithm_suite_id is not None: + d["algorithm_suite_id"] = self.algorithm_suite_id + + if self.encryption_context is not None: + d["encryption_context"] = self.encryption_context + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "EncryptPathStructureInput": + """Creates a EncryptPathStructureInput from a dictionary.""" + from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references import ( + CryptographicMaterialsManager, + ) + + kwargs: Dict[str, Any] = { + "table_name": d["table_name"], + "plaintext_structure": _crypto_list_from_dict(d["plaintext_structure"]), + "cmm": CryptographicMaterialsManager.from_dict(d["cmm"]), + } + + if "algorithm_suite_id" in d: + kwargs["algorithm_suite_id"] = d["algorithm_suite_id"] + + if "encryption_context" in d: + kwargs["encryption_context"] 
= d["encryption_context"] + + return EncryptPathStructureInput(**kwargs) + + def __repr__(self) -> str: + result = "EncryptPathStructureInput(" + if self.table_name is not None: + result += f"table_name={repr(self.table_name)}, " + + if self.plaintext_structure is not None: + result += f"plaintext_structure={repr(self.plaintext_structure)}, " + + if self.cmm is not None: + result += f"cmm={repr(self.cmm)}, " + + if self.algorithm_suite_id is not None: + result += f"algorithm_suite_id={repr(self.algorithm_suite_id)}, " + + if self.encryption_context is not None: + result += f"encryption_context={repr(self.encryption_context)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, EncryptPathStructureInput): + return False + attributes: list[str] = [ + "table_name", + "plaintext_structure", + "cmm", + "algorithm_suite_id", + "encryption_context", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class EncryptPathStructureOutput: + encrypted_structure: list[CryptoItem] + parsed_header: ParsedHeader + + def __init__( + self, + *, + encrypted_structure: list[CryptoItem], + parsed_header: ParsedHeader, + ): + self.encrypted_structure = encrypted_structure + self.parsed_header = parsed_header + + def as_dict(self) -> Dict[str, Any]: + """Converts the EncryptPathStructureOutput to a dictionary.""" + return { + "encrypted_structure": _crypto_list_as_dict(self.encrypted_structure), + "parsed_header": self.parsed_header.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "EncryptPathStructureOutput": + """Creates a EncryptPathStructureOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "encrypted_structure": _crypto_list_from_dict(d["encrypted_structure"]), + "parsed_header": ParsedHeader.from_dict(d["parsed_header"]), + } + + return EncryptPathStructureOutput(**kwargs) + + def __repr__(self) -> str: + result = "EncryptPathStructureOutput(" + if self.encrypted_structure is not None: + result += f"encrypted_structure={repr(self.encrypted_structure)}, " + + if self.parsed_header is not None: + result += f"parsed_header={repr(self.parsed_header)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, EncryptPathStructureOutput): + return False + attributes: list[str] = [ + "encrypted_structure", + "parsed_header", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class EncryptStructureInput: + table_name: str + plaintext_structure: dict[str, StructuredDataTerminal] + crypto_schema: dict[str, str] + cmm: "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.CryptographicMaterialsManager" + algorithm_suite_id: Optional[str] + encryption_context: Optional[dict[str, str]] + + def __init__( + self, + *, + table_name: str, + plaintext_structure: dict[str, StructuredDataTerminal], + crypto_schema: dict[str, str], + cmm: "aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references.CryptographicMaterialsManager", + algorithm_suite_id: Optional[str] = None, + encryption_context: Optional[dict[str, str]] = None, + ): + self.table_name = table_name + self.plaintext_structure = plaintext_structure + self.crypto_schema = crypto_schema + self.cmm = cmm + self.algorithm_suite_id = algorithm_suite_id + self.encryption_context = encryption_context + + def as_dict(self) -> Dict[str, Any]: + """Converts the EncryptStructureInput to a dictionary.""" + d: Dict[str, Any] = { + 
"table_name": self.table_name, + "plaintext_structure": _structured_data_map_as_dict(self.plaintext_structure), + "crypto_schema": self.crypto_schema, + "cmm": self.cmm.as_dict(), + } + + if self.algorithm_suite_id is not None: + d["algorithm_suite_id"] = self.algorithm_suite_id + + if self.encryption_context is not None: + d["encryption_context"] = self.encryption_context + + return d + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "EncryptStructureInput": + """Creates a EncryptStructureInput from a dictionary.""" + from aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.references import ( + CryptographicMaterialsManager, + ) + + kwargs: Dict[str, Any] = { + "table_name": d["table_name"], + "plaintext_structure": _structured_data_map_from_dict(d["plaintext_structure"]), + "crypto_schema": d["crypto_schema"], + "cmm": CryptographicMaterialsManager.from_dict(d["cmm"]), + } + + if "algorithm_suite_id" in d: + kwargs["algorithm_suite_id"] = d["algorithm_suite_id"] + + if "encryption_context" in d: + kwargs["encryption_context"] = d["encryption_context"] + + return EncryptStructureInput(**kwargs) + + def __repr__(self) -> str: + result = "EncryptStructureInput(" + if self.table_name is not None: + result += f"table_name={repr(self.table_name)}, " + + if self.plaintext_structure is not None: + result += f"plaintext_structure={repr(self.plaintext_structure)}, " + + if self.crypto_schema is not None: + result += f"crypto_schema={repr(self.crypto_schema)}, " + + if self.cmm is not None: + result += f"cmm={repr(self.cmm)}, " + + if self.algorithm_suite_id is not None: + result += f"algorithm_suite_id={repr(self.algorithm_suite_id)}, " + + if self.encryption_context is not None: + result += f"encryption_context={repr(self.encryption_context)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, EncryptStructureInput): + return False + attributes: list[str] = [ + "table_name", + "plaintext_structure", + "crypto_schema", + "cmm", + "algorithm_suite_id", + "encryption_context", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class EncryptStructureOutput: + encrypted_structure: dict[str, StructuredDataTerminal] + crypto_schema: dict[str, str] + parsed_header: ParsedHeader + + def __init__( + self, + *, + encrypted_structure: dict[str, StructuredDataTerminal], + crypto_schema: dict[str, str], + parsed_header: ParsedHeader, + ): + self.encrypted_structure = encrypted_structure + self.crypto_schema = crypto_schema + self.parsed_header = parsed_header + + def as_dict(self) -> Dict[str, Any]: + """Converts the EncryptStructureOutput to a dictionary.""" + return { + "encrypted_structure": _structured_data_map_as_dict(self.encrypted_structure), + "crypto_schema": self.crypto_schema, + "parsed_header": self.parsed_header.as_dict(), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "EncryptStructureOutput": + """Creates a EncryptStructureOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "encrypted_structure": _structured_data_map_from_dict(d["encrypted_structure"]), + "crypto_schema": d["crypto_schema"], + "parsed_header": ParsedHeader.from_dict(d["parsed_header"]), + } + + return EncryptStructureOutput(**kwargs) + + def __repr__(self) -> str: + result = "EncryptStructureOutput(" + if self.encrypted_structure is not None: + result += f"encrypted_structure={repr(self.encrypted_structure)}, " + + if self.crypto_schema is not None: + result += 
f"crypto_schema={repr(self.crypto_schema)}, " + + if self.parsed_header is not None: + result += f"parsed_header={repr(self.parsed_header)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, EncryptStructureOutput): + return False + attributes: list[str] = [ + "encrypted_structure", + "crypto_schema", + "parsed_header", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ResolveAuthActionsInput: + table_name: str + auth_actions: list[AuthItem] + header_bytes: bytes | bytearray + + def __init__( + self, + *, + table_name: str, + auth_actions: list[AuthItem], + header_bytes: bytes | bytearray, + ): + self.table_name = table_name + self.auth_actions = auth_actions + self.header_bytes = header_bytes + + def as_dict(self) -> Dict[str, Any]: + """Converts the ResolveAuthActionsInput to a dictionary.""" + return { + "table_name": self.table_name, + "auth_actions": _auth_list_as_dict(self.auth_actions), + "header_bytes": self.header_bytes, + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ResolveAuthActionsInput": + """Creates a ResolveAuthActionsInput from a dictionary.""" + kwargs: Dict[str, Any] = { + "table_name": d["table_name"], + "auth_actions": _auth_list_from_dict(d["auth_actions"]), + "header_bytes": d["header_bytes"], + } + + return ResolveAuthActionsInput(**kwargs) + + def __repr__(self) -> str: + result = "ResolveAuthActionsInput(" + if self.table_name is not None: + result += f"table_name={repr(self.table_name)}, " + + if self.auth_actions is not None: + result += f"auth_actions={repr(self.auth_actions)}, " + + if self.header_bytes is not None: + result += f"header_bytes={repr(self.header_bytes)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ResolveAuthActionsInput): + return False + attributes: list[str] = [ + "table_name", + "auth_actions", + "header_bytes", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +class ResolveAuthActionsOutput: + crypto_actions: list[CryptoItem] + + def __init__( + self, + *, + crypto_actions: list[CryptoItem], + ): + self.crypto_actions = crypto_actions + + def as_dict(self) -> Dict[str, Any]: + """Converts the ResolveAuthActionsOutput to a dictionary.""" + return { + "crypto_actions": _crypto_list_as_dict(self.crypto_actions), + } + + @staticmethod + def from_dict(d: Dict[str, Any]) -> "ResolveAuthActionsOutput": + """Creates a ResolveAuthActionsOutput from a dictionary.""" + kwargs: Dict[str, Any] = { + "crypto_actions": _crypto_list_from_dict(d["crypto_actions"]), + } + + return ResolveAuthActionsOutput(**kwargs) + + def __repr__(self) -> str: + result = "ResolveAuthActionsOutput(" + if self.crypto_actions is not None: + result += f"crypto_actions={repr(self.crypto_actions)}" + + return result + ")" + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, ResolveAuthActionsOutput): + return False + attributes: list[str] = [ + "crypto_actions", + ] + return all(getattr(self, a) == getattr(other, a) for a in attributes) + + +def _crypto_list_as_dict(given: list[CryptoItem]) -> List[Any]: + return [v.as_dict() for v in given] + + +def _crypto_list_from_dict(given: List[Any]) -> list[CryptoItem]: + return [CryptoItem.from_dict(v) for v in given] + + +def _path_as_dict(given: list[PathSegment]) -> List[Any]: + return [v.as_dict() for v in given] + + +def _path_from_dict(given: List[Any]) -> list[PathSegment]: + return [PathSegment.from_dict(v) for v in given] + + +def 
_auth_list_as_dict(given: list[AuthItem]) -> List[Any]: + return [v.as_dict() for v in given] + + +def _auth_list_from_dict(given: List[Any]) -> list[AuthItem]: + return [AuthItem.from_dict(v) for v in given] + + +def _structured_data_map_as_dict( + given: dict[str, StructuredDataTerminal], +) -> Dict[str, Any]: + return {k: v.as_dict() for k, v in given.items()} + + +def _structured_data_map_from_dict( + given: Dict[str, Any], +) -> dict[str, StructuredDataTerminal]: + return {k: StructuredDataTerminal.from_dict(v) for k, v in given.items()} + + +class Unit: + pass diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/plugin.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/plugin.py new file mode 100644 index 000000000..f7ff755c3 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/plugin.py @@ -0,0 +1,49 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +from .config import ( + Config, + Plugin, + smithy_config_to_dafny_config, + StructuredEncryptionConfig, +) +from smithy_python.interfaces.retries import RetryStrategy +from smithy_python.exceptions import SmithyRetryException +from .dafnyImplInterface import DafnyImplInterface + + +def set_config_impl(config: Config): + """Set the Dafny-compiled implementation in the Smithy-Python client Config + and load our custom NoRetriesStrategy.""" + config.dafnyImplInterface = DafnyImplInterface() + if isinstance(config, StructuredEncryptionConfig): + from aws_dbesdk_dynamodb.internaldafny.generated.StructuredEncryption import ( + default__, + ) + + config.dafnyImplInterface.impl = default__.StructuredEncryption(smithy_config_to_dafny_config(config)).value + config.retry_strategy = NoRetriesStrategy() + + +class ZeroRetryDelayToken: + """Placeholder class required by Smithy-Python client implementation. + + Do not wait to retry. + """ + + retry_delay = 0 + + +class NoRetriesStrategy(RetryStrategy): + """Placeholder class required by Smithy-Python client implementation. + + Do not retry calling Dafny code. + """ + + def acquire_initial_retry_token(self): + return ZeroRetryDelayToken() + + def refresh_retry_token_for_retry(self, token_to_renew, error_info): + # Do not retry + raise SmithyRetryException() diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/serialize.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/serialize.py new file mode 100644 index 000000000..6b0353e08 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/serialize.py @@ -0,0 +1,54 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
+ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny + +from .dafny_protocol import DafnyRequest + +from .config import Config + + +def _serialize_encrypt_structure(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="EncryptStructure", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_EncryptStructureInput( + input + ), + ) + + +def _serialize_decrypt_structure(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="DecryptStructure", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_DecryptStructureInput( + input + ), + ) + + +def _serialize_encrypt_path_structure(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="EncryptPathStructure", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_EncryptPathStructureInput( + input + ), + ) + + +def _serialize_decrypt_path_structure(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="DecryptPathStructure", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_DecryptPathStructureInput( + input + ), + ) + + +def _serialize_resolve_auth_actions(input, config: Config) -> DafnyRequest: + return DafnyRequest( + operation_name="ResolveAuthActions", + dafny_operation_input=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_ResolveAuthActionsInput( + input + ), + ) diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/smithy_to_dafny.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/smithy_to_dafny.py new file mode 100644 index 000000000..6bf97c976 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/smithygenerated/aws_cryptography_dbencryptionsdk_structuredencryption/smithy_to_dafny.py @@ -0,0 +1,524 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
+ +from _dafny import Map, Seq +import aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes import ( + AuthItem_AuthItem as DafnyAuthItem, + AuthenticateAction_DO__NOT__SIGN, + AuthenticateAction_SIGN, + CryptoAction_DO__NOTHING, + CryptoAction_ENCRYPT__AND__SIGN, + CryptoAction_SIGN__AND__INCLUDE__IN__ENCRYPTION__CONTEXT, + CryptoAction_SIGN__ONLY, + CryptoItem_CryptoItem as DafnyCryptoItem, + DecryptPathStructureInput_DecryptPathStructureInput as DafnyDecryptPathStructureInput, + DecryptPathStructureOutput_DecryptPathStructureOutput as DafnyDecryptPathStructureOutput, + DecryptStructureInput_DecryptStructureInput as DafnyDecryptStructureInput, + DecryptStructureOutput_DecryptStructureOutput as DafnyDecryptStructureOutput, + EncryptPathStructureInput_EncryptPathStructureInput as DafnyEncryptPathStructureInput, + EncryptPathStructureOutput_EncryptPathStructureOutput as DafnyEncryptPathStructureOutput, + EncryptStructureInput_EncryptStructureInput as DafnyEncryptStructureInput, + EncryptStructureOutput_EncryptStructureOutput as DafnyEncryptStructureOutput, + ParsedHeader_ParsedHeader as DafnyParsedHeader, + PathSegment_member, + ResolveAuthActionsInput_ResolveAuthActionsInput as DafnyResolveAuthActionsInput, + ResolveAuthActionsOutput_ResolveAuthActionsOutput as DafnyResolveAuthActionsOutput, + StructureSegment_StructureSegment as DafnyStructureSegment, + StructuredDataTerminal_StructuredDataTerminal as DafnyStructuredDataTerminal, + StructuredEncryptionConfig_StructuredEncryptionConfig as DafnyStructuredEncryptionConfig, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny +from smithy_dafny_standard_library.internaldafny.generated.Wrappers import ( + Option_None, + Option_Some, +) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_EncryptStructureInput( + native_input, +): + return DafnyEncryptStructureInput( + tableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.table_name.encode("utf-16-be"))] * 2) + ] + ) + ), + plaintextStructure=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_StructuredDataTerminal( + value + ) + for (key, value) in native_input.plaintext_structure.items() + } + ), + cryptoSchema=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + value + ) + for (key, value) in native_input.crypto_schema.items() + } + ), + cmm=aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_CryptographicMaterialsManagerReference( + native_input.cmm + ), + algorithmSuiteId=( + ( + Option_Some( + 
aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_DBEAlgorithmSuiteId( + native_input.algorithm_suite_id + ) + ) + ) + if (native_input.algorithm_suite_id is not None) + else (Option_None()) + ), + encryptionContext=( + ( + Option_Some( + Map( + { + Seq(key.encode("utf-8")): Seq(value.encode("utf-8")) + for (key, value) in native_input.encryption_context.items() + } + ) + ) + ) + if (native_input.encryption_context is not None) + else (Option_None()) + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_StructuredDataTerminal( + native_input, +): + return DafnyStructuredDataTerminal( + value=Seq(native_input.value), + typeId=Seq(native_input.type_id), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction(native_input): + if native_input == "ENCRYPT_AND_SIGN": + return CryptoAction_ENCRYPT__AND__SIGN() + + elif native_input == "SIGN_AND_INCLUDE_IN_ENCRYPTION_CONTEXT": + return CryptoAction_SIGN__AND__INCLUDE__IN__ENCRYPTION__CONTEXT() + + elif native_input == "SIGN_ONLY": + return CryptoAction_SIGN__ONLY() + + elif native_input == "DO_NOTHING": + return CryptoAction_DO__NOTHING() + + else: + raise ValueError(f"No recognized enum value in enum type: {native_input=}") + + +def aws_cryptography_dbencryptionsdk_structuredencryption_DecryptStructureInput( + native_input, +): + return DafnyDecryptStructureInput( + tableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.table_name.encode("utf-16-be"))] * 2) + ] + ) + ), + encryptedStructure=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_StructuredDataTerminal( + value + ) + for (key, value) in native_input.encrypted_structure.items() + } + ), + authenticateSchema=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_AuthenticateAction( + value + ) + for (key, value) in native_input.authenticate_schema.items() + } + ), + cmm=aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_CryptographicMaterialsManagerReference( + native_input.cmm + ), + encryptionContext=( + ( + Option_Some( + Map( + { + Seq(key.encode("utf-8")): Seq(value.encode("utf-8")) + for (key, value) in native_input.encryption_context.items() + } + ) + ) + ) + if (native_input.encryption_context is not None) + else (Option_None()) + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_AuthenticateAction( + native_input, +): + if native_input == "SIGN": + return AuthenticateAction_SIGN() + + elif native_input == "DO_NOT_SIGN": + return AuthenticateAction_DO__NOT__SIGN() + + else: + raise ValueError(f"No recognized enum value in enum type: {native_input=}") + + +def aws_cryptography_dbencryptionsdk_structuredencryption_EncryptPathStructureInput( + native_input, +): + return DafnyEncryptPathStructureInput( + tableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.table_name.encode("utf-16-be"))] * 2) + ] + ) + ), 
+ plaintextStructure=Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoItem( + list_element + ) + for list_element in native_input.plaintext_structure + ] + ), + cmm=aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_CryptographicMaterialsManagerReference( + native_input.cmm + ), + algorithmSuiteId=( + ( + Option_Some( + aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_DBEAlgorithmSuiteId( + native_input.algorithm_suite_id + ) + ) + ) + if (native_input.algorithm_suite_id is not None) + else (Option_None()) + ), + encryptionContext=( + ( + Option_Some( + Map( + { + Seq(key.encode("utf-8")): Seq(value.encode("utf-8")) + for (key, value) in native_input.encryption_context.items() + } + ) + ) + ) + if (native_input.encryption_context is not None) + else (Option_None()) + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_CryptoItem(native_input): + return DafnyCryptoItem( + key=Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_PathSegment( + list_element + ) + for list_element in native_input.key + ] + ), + data=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_StructuredDataTerminal( + native_input.data + ), + action=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + native_input.action + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_PathSegment(native_input): + if isinstance( + native_input, + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.PathSegmentMember, + ): + PathSegment_union_value = PathSegment_member( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_StructureSegment( + native_input.value + ) + ) + else: + raise ValueError("No recognized union value in union type: " + str(native_input)) + + return PathSegment_union_value + + +def aws_cryptography_dbencryptionsdk_structuredencryption_StructureSegment( + native_input, +): + return DafnyStructureSegment( + key=Seq( + "".join( + [chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(native_input.key.encode("utf-16-be"))] * 2)] + ) + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_DecryptPathStructureInput( + native_input, +): + return DafnyDecryptPathStructureInput( + tableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.table_name.encode("utf-16-be"))] * 2) + ] + ) + ), + encryptedStructure=Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_AuthItem( + list_element + ) + for list_element in native_input.encrypted_structure + ] + ), + 
cmm=aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_CryptographicMaterialsManagerReference( + native_input.cmm + ), + encryptionContext=( + ( + Option_Some( + Map( + { + Seq(key.encode("utf-8")): Seq(value.encode("utf-8")) + for (key, value) in native_input.encryption_context.items() + } + ) + ) + ) + if (native_input.encryption_context is not None) + else (Option_None()) + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_AuthItem(native_input): + return DafnyAuthItem( + key=Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_PathSegment( + list_element + ) + for list_element in native_input.key + ] + ), + data=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_StructuredDataTerminal( + native_input.data + ), + action=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_AuthenticateAction( + native_input.action + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_ResolveAuthActionsInput( + native_input, +): + return DafnyResolveAuthActionsInput( + tableName=Seq( + "".join( + [ + chr(int.from_bytes(pair, "big")) + for pair in zip(*[iter(native_input.table_name.encode("utf-16-be"))] * 2) + ] + ) + ), + authActions=Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_AuthItem( + list_element + ) + for list_element in native_input.auth_actions + ] + ), + headerBytes=Seq(native_input.header_bytes), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_EncryptStructureOutput( + native_input, +): + return DafnyEncryptStructureOutput( + encryptedStructure=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_StructuredDataTerminal( + value + ) + for (key, value) in native_input.encrypted_structure.items() + } + ), + cryptoSchema=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + value + ) + for (key, value) in native_input.crypto_schema.items() + } + ), + parsedHeader=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_ParsedHeader( + native_input.parsed_header + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_ParsedHeader(native_input): + return DafnyParsedHeader( + algorithmSuiteId=aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_DBEAlgorithmSuiteId( + native_input.algorithm_suite_id + ), + encryptedDataKeys=Seq( + [ + 
aws_cryptographic_material_providers.smithygenerated.aws_cryptography_materialproviders.smithy_to_dafny.aws_cryptography_materialproviders_EncryptedDataKey( + list_element + ) + for list_element in native_input.encrypted_data_keys + ] + ), + storedEncryptionContext=Map( + { + Seq(key.encode("utf-8")): Seq(value.encode("utf-8")) + for (key, value) in native_input.stored_encryption_context.items() + } + ), + encryptionContext=Map( + { + Seq(key.encode("utf-8")): Seq(value.encode("utf-8")) + for (key, value) in native_input.encryption_context.items() + } + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_DecryptStructureOutput( + native_input, +): + return DafnyDecryptStructureOutput( + plaintextStructure=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_StructuredDataTerminal( + value + ) + for (key, value) in native_input.plaintext_structure.items() + } + ), + cryptoSchema=Map( + { + Seq( + "".join([chr(int.from_bytes(pair, "big")) for pair in zip(*[iter(key.encode("utf-16-be"))] * 2)]) + ): aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoAction( + value + ) + for (key, value) in native_input.crypto_schema.items() + } + ), + parsedHeader=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_ParsedHeader( + native_input.parsed_header + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_EncryptPathStructureOutput( + native_input, +): + return DafnyEncryptPathStructureOutput( + encryptedStructure=Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoItem( + list_element + ) + for list_element in native_input.encrypted_structure + ] + ), + parsedHeader=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_ParsedHeader( + native_input.parsed_header + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_DecryptPathStructureOutput( + native_input, +): + return DafnyDecryptPathStructureOutput( + plaintextStructure=Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoItem( + list_element + ) + for list_element in native_input.plaintext_structure + ] + ), + parsedHeader=aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_ParsedHeader( + native_input.parsed_header + ), + ) + + +def aws_cryptography_dbencryptionsdk_structuredencryption_ResolveAuthActionsOutput( + native_input, +): + return DafnyResolveAuthActionsOutput( + cryptoActions=Seq( + [ + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.smithy_to_dafny.aws_cryptography_dbencryptionsdk_structuredencryption_CryptoItem( + list_element + ) + for list_element in native_input.crypto_actions + ] + ), + ) + + +def 
aws_cryptography_dbencryptionsdk_structuredencryption_AtomicPrimitivesReference( + native_input, +): + return native_input._config.dafnyImplInterface.impl + + +def aws_cryptography_dbencryptionsdk_structuredencryption_StructuredEncryptionConfig( + native_input, +): + return DafnyStructuredEncryptionConfig() diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/structures/dynamodb.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/structures/dynamodb.py new file mode 100644 index 000000000..bc852c227 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/structures/dynamodb.py @@ -0,0 +1,20 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Alias for generated models.""" + + +# Alias from: +# "aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.ABC" +# to: +# "aws_dbesdk_dynamodb.structures.dynamodb.ABC" +# ruff: noqa: F403 +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models import * + +# Dynamically define __all__ to reflect everything imported +__all__ = [ + name + for name in dir() + if not name.startswith("_") + and name != "sys" + and name not in ["aws_cryptographic_material_providers", "aws_dbesdk_dynamodb"] +] diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/structures/item_encryptor.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/structures/item_encryptor.py new file mode 100644 index 000000000..bcab62823 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/structures/item_encryptor.py @@ -0,0 +1,24 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Alias for generated models.""" + +# Alias from: +# "aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models.ABC" +# to: +# "aws_dbesdk_dynamodb.structures.item_encryptor.ABC" +# and from: +# "aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.config.ABC" +# to: +# "aws_dbesdk_dynamodb.structures.item_encryptor.ABC" +# ruff: noqa: F403 +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.config import * +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models import * + +# Dynamically define __all__ to reflect everything imported +__all__ = [ + name + for name in dir() + if not name.startswith("_") + and name != "sys" + and name not in ["aws_cryptographic_material_providers", "aws_dbesdk_dynamodb"] +] diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/structures/structured_encryption.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/structures/structured_encryption.py new file mode 100644 index 000000000..145af36b2 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/structures/structured_encryption.py @@ -0,0 +1,19 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Alias for generated models.""" + +# Alias from: +# "aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models.ABC" +# to: +# "aws_dbesdk_dynamodb.structures.structured_encryption.ABC" +# ruff: noqa: F403 +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models import * + +# Dynamically define __all__ to reflect everything imported +__all__ = [ + name + for name in dir() + if not name.startswith("_") + and name != "sys" + and name not in ["aws_cryptographic_material_providers", "aws_dbesdk_dynamodb"] +] diff --git a/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/transform.py b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/transform.py new file mode 100644 index 000000000..e09e20d6c --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/transform.py @@ -0,0 +1,71 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Helper tools for translating between native and DynamoDB items. + +For information on how types are serializes, see: +https://boto3.amazonaws.com/v1/documentation/api/latest/_modules/boto3/dynamodb/types.html +""" +from typing import Any + +from boto3.dynamodb.types import TypeDeserializer, TypeSerializer + +__all__ = ("dict_to_ddb", "ddb_to_dict") + + +def dict_to_ddb(item: dict[str, Any]) -> dict[str, Any]: + """ + Convert a native Python dictionary to a DynamoDB-JSON item. + + Args: + item (Dict[str, Any]): Native Python dictionary. + + Returns: + Dict[str, Any]: DynamoDB-formatted item. + + """ + serializer = TypeSerializer() + return {key: serializer.serialize(value) for key, value in item.items()} + + +def list_of_dict_to_list_of_ddb(items: list[dict[str, Any]]) -> list[dict[str, Any]]: + """ + Convert a list of Python dictionaries into a list of DynamoDB-JSON formatted items. + + Args: + items (List[Dict[str, Any]]): List of native Python dictionaries. + + Returns: + List[Dict[str, Any]]: List of DynamoDB-formatted items. + + """ + return [dict_to_ddb(item) for item in items] + + +def ddb_to_dict(item: dict[str, Any]) -> dict[str, Any]: + """ + Convert a DynamoDB-JSON item to a native Python dictionary. + + Args: + item (Dict[str, Any]): DynamoDB-formatted item. + + Returns: + Dict[str, Any]: Native Python dictionary. + + """ + deserializer = TypeDeserializer() + return {key: deserializer.deserialize(value) for key, value in item.items()} + + +def list_of_ddb_to_list_of_dict(items: list[dict[str, Any]]) -> list[dict[str, Any]]: + """ + Convert a list of DynamoDB-JSON formatted items to a list of Python dictionaries. + + Args: + items (List[Dict[str, Any]]): List of DynamoDB-formatted items. + + Returns: + List[Dict[str, Any]]: List of native Python dictionaries. + + """ + return [ddb_to_dict(item) for item in items] diff --git a/DynamoDbEncryption/runtimes/python/test/__init__.py b/DynamoDbEncryption/runtimes/python/test/__init__.py new file mode 100644 index 000000000..f94fd12a2 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/__init__.py @@ -0,0 +1,2 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 diff --git a/DynamoDbEncryption/runtimes/python/test/constants.py b/DynamoDbEncryption/runtimes/python/test/constants.py new file mode 100644 index 000000000..c46c87099 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/constants.py @@ -0,0 +1,63 @@ +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsMrkMultiKeyringInput, + DBEAlgorithmSuiteId, +) +from aws_cryptographic_material_providers.mpl.references import IKeyring + +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.config import ( + DynamoDbItemEncryptorConfig, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models import ( + CryptoAction, +) + +MPL_CLIENT: AwsCryptographicMaterialProviders = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) +INTEG_TEST_DEFAULT_KMS_KEY_ID = "arn:aws:kms:us-west-2:658956600833:key/b3537ef1-d8dc-4780-9f5a-55776cbb2f7f" +INTEG_TEST_DEFAULT_KEYRING: IKeyring = MPL_CLIENT.create_aws_kms_mrk_multi_keyring( + CreateAwsKmsMrkMultiKeyringInput( + generator=INTEG_TEST_DEFAULT_KMS_KEY_ID, + ) +) +INTEG_TEST_DEFAULT_ATTRIBUTE_ACTIONS_ON_ENCRYPT = { + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, +} +INTEG_TEST_DEFAULT_UNSIGNED_ATTRIBUTE_PREFIX: str = ":" + +INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME = "DynamoDbEncryptionInterceptorTestTable" +INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME_PLAINTEXT = "DynamoDbEncryptionInterceptorTestTableCS" +INTEG_TEST_DEFAULT_PARTITION_KEY_NAME = "partition_key" +INTEG_TEST_DEFAULT_SORT_KEY_NAME = "sort_key" +INTEG_TEST_DEFAULT_ALGORITHM_SUITE_ID = ( + DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384 +) +INTEG_TEST_DEFAULT_TABLE_CONFIG = DynamoDbTableEncryptionConfig( + logical_table_name=INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, + partition_key_name=INTEG_TEST_DEFAULT_PARTITION_KEY_NAME, + sort_key_name=INTEG_TEST_DEFAULT_SORT_KEY_NAME, + attribute_actions_on_encrypt=INTEG_TEST_DEFAULT_ATTRIBUTE_ACTIONS_ON_ENCRYPT, + keyring=INTEG_TEST_DEFAULT_KEYRING, + allowed_unsigned_attribute_prefix=INTEG_TEST_DEFAULT_UNSIGNED_ATTRIBUTE_PREFIX, + algorithm_suite_id=INTEG_TEST_DEFAULT_ALGORITHM_SUITE_ID, +) +INTEG_TEST_DEFAULT_TABLE_CONFIGS = DynamoDbTablesEncryptionConfig( + table_encryption_configs={INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME: INTEG_TEST_DEFAULT_TABLE_CONFIG} +) +INTEG_TEST_DEFAULT_ITEM_ENCRYPTOR_CONFIG = DynamoDbItemEncryptorConfig( + logical_table_name=INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, + partition_key_name=INTEG_TEST_DEFAULT_PARTITION_KEY_NAME, + sort_key_name=INTEG_TEST_DEFAULT_SORT_KEY_NAME, + attribute_actions_on_encrypt=INTEG_TEST_DEFAULT_ATTRIBUTE_ACTIONS_ON_ENCRYPT, + keyring=INTEG_TEST_DEFAULT_KEYRING, + algorithm_suite_id=INTEG_TEST_DEFAULT_ALGORITHM_SUITE_ID, + allowed_unsigned_attribute_prefix=INTEG_TEST_DEFAULT_UNSIGNED_ATTRIBUTE_PREFIX, +) diff --git a/DynamoDbEncryption/runtimes/python/test/integ/__init__.py 
b/DynamoDbEncryption/runtimes/python/test/integ/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/DynamoDbEncryption/runtimes/python/test/integ/encrypted/README.md b/DynamoDbEncryption/runtimes/python/test/integ/encrypted/README.md new file mode 100644 index 000000000..f6a9abf10 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/integ/encrypted/README.md @@ -0,0 +1,10 @@ +Integration tests for encrypted interfaces. + +These integration tests verify that encrypted boto3 interfaces behave as drop-in replacements for plaintext boto3 interfaces. + +Each test runs with both a plaintext client and an encrypted client, using the same request parameters and expecting the same response. + +This validates that encrypted clients expect the same input shapes as plaintext clients +and encrypted clients return the same output shapes as plaintext clients. + +This guarantees that users can substitute encrypted interfaces without modifying their application logic. diff --git a/DynamoDbEncryption/runtimes/python/test/integ/encrypted/__init__.py b/DynamoDbEncryption/runtimes/python/test/integ/encrypted/__init__.py new file mode 100644 index 000000000..37c9c2f01 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/integ/encrypted/__init__.py @@ -0,0 +1,18 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + + +def sort_dynamodb_json_lists(obj): + """ + Utility that recursively sorts all lists in a DynamoDB JSON-like structure. + DynamoDB JSON uses lists to represent sets, so strict equality can fail. + Sort lists to ensure consistent ordering when comparing expected and actual items. + """ + if isinstance(obj, dict): + return {k: sort_dynamodb_json_lists(v) for k, v in obj.items()} + elif isinstance(obj, list): + try: + return sorted(obj) # Sort lists for consistent comparison + except TypeError: + return obj # Not all lists are sortable; ex. complex_item_ddb's "list" attribute + return obj diff --git a/DynamoDbEncryption/runtimes/python/test/integ/encrypted/test_client.py b/DynamoDbEncryption/runtimes/python/test/integ/encrypted/test_client.py new file mode 100644 index 000000000..946c38ff2 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/integ/encrypted/test_client.py @@ -0,0 +1,651 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import uuid +from copy import deepcopy + +import boto3 +import pytest + +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient +from aws_dbesdk_dynamodb.encrypted.paginator import EncryptedPaginator +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.errors import ( + DynamoDbEncryptionTransformsException, +) + +from ...constants import ( + INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, + INTEG_TEST_DEFAULT_TABLE_CONFIGS, +) +from ...items import ( + complex_item_ddb, + complex_item_dict, + complex_key_ddb, + complex_key_dict, + simple_item_ddb, + simple_item_dict, + simple_key_ddb, + simple_key_dict, +) +from ...requests import ( + basic_batch_execute_statement_request_encrypted_table, + basic_batch_execute_statement_request_plaintext_table, + basic_batch_get_item_request_ddb, + basic_batch_get_item_request_dict, + basic_batch_write_item_delete_request_ddb, + basic_batch_write_item_delete_request_dict, + basic_batch_write_item_put_request_ddb, + basic_batch_write_item_put_request_dict, + basic_delete_item_request_ddb, + basic_delete_item_request_dict, + basic_execute_statement_request_encrypted_table, + basic_execute_statement_request_plaintext_table, + basic_execute_transaction_request_encrypted_table, + basic_execute_transaction_request_plaintext_table, + basic_get_item_request_ddb, + basic_get_item_request_dict, + basic_put_item_request_ddb, + basic_put_item_request_dict, + basic_query_request_ddb, + basic_query_request_dict, + basic_scan_request_ddb, + basic_scan_request_dict, + basic_transact_get_item_request_ddb, + basic_transact_get_item_request_dict, + basic_transact_write_item_delete_request_ddb, + basic_transact_write_item_delete_request_dict, + basic_transact_write_item_put_request_ddb, + basic_transact_write_item_put_request_dict, + basic_update_item_request_ddb_signed_attribute, + basic_update_item_request_ddb_unsigned_attribute, + basic_update_item_request_dict_signed_attribute, + basic_update_item_request_dict_unsigned_attribute, +) +from . 
import sort_dynamodb_json_lists + + +# Creates a matrix of tests for each value in param, +# with a user-friendly string for test output: +# expect_standard_dictionaries = True -> "standard_dicts" +# expect_standard_dictionaries = False -> "ddb_json" +@pytest.fixture(params=[True, False], ids=["standard_dicts", "ddb_json"]) +def expect_standard_dictionaries(request): + return request.param + + +def encrypted_client(expect_standard_dictionaries): + return EncryptedClient( + client=plaintext_client(expect_standard_dictionaries), + encryption_config=INTEG_TEST_DEFAULT_TABLE_CONFIGS, + expect_standard_dictionaries=expect_standard_dictionaries, + ) + + +def plaintext_client(expect_standard_dictionaries): + if expect_standard_dictionaries: + client = boto3.resource("dynamodb").Table(INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME).meta.client + else: + client = boto3.client("dynamodb") + return client + + +# Creates a matrix of tests for each value in param, +# with a user-friendly string for test output: +# encrypted = True -> "encrypted" +# encrypted = False -> "plaintext" +@pytest.fixture(params=[True, False], ids=["encrypted", "plaintext"]) +def encrypted(request): + return request.param + + +@pytest.fixture +def client(encrypted, expect_standard_dictionaries): + if encrypted: + return encrypted_client(expect_standard_dictionaries) + else: + return plaintext_client(expect_standard_dictionaries) + + +# Creates a matrix of tests for each value in param, +# with a user-friendly string for test output: +# use_complex_item = True -> "complex_item" +# use_complex_item = False -> "simple_item" +@pytest.fixture(params=[True, False], ids=["complex_item", "simple_item"]) +def use_complex_item(request): + return request.param + + +# Append a suffix to the partition key to avoid collisions between test runs. +@pytest.fixture(scope="module") +def test_run_suffix(): + return "-" + str(uuid.uuid4()) + + +@pytest.fixture +def test_item(expect_standard_dictionaries, use_complex_item, test_run_suffix): + """Get a single test item in the appropriate format for the client.""" + if expect_standard_dictionaries: + if use_complex_item: + item = deepcopy(complex_item_dict) + else: + item = deepcopy(simple_item_dict) + else: + if use_complex_item: + item = deepcopy(complex_item_ddb) + else: + item = deepcopy(simple_item_ddb) + # Add a suffix to the partition key to avoid collisions between test runs. + if isinstance(item["partition_key"], dict): + item["partition_key"]["S"] += test_run_suffix + else: + item["partition_key"] += test_run_suffix + return item + + +@pytest.fixture +def test_key(expect_standard_dictionaries, use_complex_item, test_run_suffix): + """Get a single test item in the appropriate format for the client.""" + if expect_standard_dictionaries: + if use_complex_item: + key = deepcopy(complex_key_dict) + else: + key = deepcopy(simple_key_dict) + else: + if use_complex_item: + key = deepcopy(complex_key_ddb) + else: + key = deepcopy(simple_key_ddb) + # Add a suffix to the partition key to avoid collisions between test runs. 
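+ # DynamoDB-JSON keys wrap values in type descriptors (e.g. {"S": "value"}), while standard dictionaries store the partition key as a plain string.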
+ if isinstance(key["partition_key"], dict): + key["partition_key"]["S"] += test_run_suffix + else: + key["partition_key"] += test_run_suffix + return key + + +@pytest.fixture +def multiple_test_items(expect_standard_dictionaries, test_run_suffix): + """Get two test items in the appropriate format for the client.""" + if expect_standard_dictionaries: + items = [deepcopy(simple_item_dict), deepcopy(complex_item_dict)] + else: + items = [deepcopy(simple_item_ddb), deepcopy(complex_item_ddb)] + for item in items: + if isinstance(item["partition_key"], dict): + item["partition_key"]["S"] += test_run_suffix + else: + item["partition_key"] += test_run_suffix + return items + + +@pytest.fixture +def multiple_test_keys(expect_standard_dictionaries, test_run_suffix): + """Get two test keys in the appropriate format for the client.""" + if expect_standard_dictionaries: + keys = [deepcopy(simple_key_dict), deepcopy(complex_key_dict)] + else: + keys = [deepcopy(simple_key_ddb), deepcopy(complex_key_ddb)] + for key in keys: + if isinstance(key["partition_key"], dict): + key["partition_key"]["S"] += test_run_suffix + else: + key["partition_key"] += test_run_suffix + return keys + + +@pytest.fixture +def put_item_request(expect_standard_dictionaries, test_item): + if expect_standard_dictionaries: + # Client requests with `expect_standard_dictionaries=True` use dict-formatted requests + # with an added "TableName" key. + return {**basic_put_item_request_dict(test_item), "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME} + return basic_put_item_request_ddb(test_item) + + +@pytest.fixture +def get_item_request(expect_standard_dictionaries, test_item): + if expect_standard_dictionaries: + # Client requests with `expect_standard_dictionaries=True` use dict-formatted requests + # with an added "TableName" key. + return {**basic_get_item_request_dict(test_item), "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME} + return basic_get_item_request_ddb(test_item) + + +@pytest.fixture +def delete_item_request(expect_standard_dictionaries, test_item): + if expect_standard_dictionaries: + return {**basic_delete_item_request_dict(test_item), "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME} + return basic_delete_item_request_ddb(test_item) + + +def test_GIVEN_valid_put_and_get_and_delete_requests_WHEN_put_and_get_and_delete_THEN_round_trip_passes( + client, put_item_request, get_item_request, delete_item_request +): + # Given: Valid put_item request + # When: put_item + put_response = client.put_item(**put_item_request) + # Then: put_item succeeds + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Given: Valid get_item request for the same item + # When: get_item + get_response = client.get_item(**get_item_request) + # Then: Response is equal to the original item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + # DynamoDB JSON uses lists to represent sets, so strict equality can fail. + # Sort lists to ensure consistent ordering when comparing expected and actual items.
+ expected_item = sort_dynamodb_json_lists(put_item_request["Item"]) + actual_item = sort_dynamodb_json_lists(get_response["Item"]) + assert expected_item == actual_item + + # Given: Valid delete_item request for the same item + # When: delete_item + delete_response = client.delete_item(**{**delete_item_request, "ReturnValues": "ALL_OLD"}) + # Then: delete_item succeeds and contains the expected response + assert delete_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + # DynamoDB JSON uses lists to represent sets, so strict equality can fail. + # Sort lists to ensure consistent ordering when comparing expected and actual items. + expected_item = sort_dynamodb_json_lists(put_item_request["Item"]) + actual_item = sort_dynamodb_json_lists(delete_response["Attributes"]) + assert expected_item == actual_item + + # Given: Valid get_item request for the same item + # When: get_item + get_response = client.get_item(**get_item_request) + # Then: get_item is empty (i.e. the item was deleted) + assert "Item" not in get_response + + +@pytest.fixture +def batch_write_item_put_request(expect_standard_dictionaries, multiple_test_items): + if expect_standard_dictionaries: + return basic_batch_write_item_put_request_dict(multiple_test_items) + return basic_batch_write_item_put_request_ddb(multiple_test_items) + + +@pytest.fixture +def batch_write_item_delete_request(expect_standard_dictionaries, multiple_test_keys): + if expect_standard_dictionaries: + return basic_batch_write_item_delete_request_dict(multiple_test_keys) + return basic_batch_write_item_delete_request_ddb(multiple_test_keys) + + +@pytest.fixture +def batch_get_item_request(expect_standard_dictionaries, multiple_test_keys): + if expect_standard_dictionaries: + return basic_batch_get_item_request_dict(multiple_test_keys) + return basic_batch_get_item_request_ddb(multiple_test_keys) + + +def test_GIVEN_valid_batch_write_and_get_requests_WHEN_batch_write_and_get_THEN_round_trip_passes( + client, multiple_test_items, batch_write_item_put_request, batch_get_item_request, batch_write_item_delete_request +): + # Given: Valid batch_write_item put request + # When: batch_write_item put + batch_write_response = client.batch_write_item(**batch_write_item_put_request) + # Then: batch_write_item put succeeds + assert batch_write_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Given: Valid batch_get_item request + # When: batch_get_item + batch_get_response = client.batch_get_item(**batch_get_item_request) + # Then: batch_get_item succeeds + assert batch_get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + # Then: All items are encrypted and decrypted correctly + retrieved_items = batch_get_response["Responses"][INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME] + assert len(retrieved_items) > 0 + assert len(retrieved_items) == len(multiple_test_items) + expected_items = [sort_dynamodb_json_lists(expected_item) for expected_item in multiple_test_items] + actual_items = [sort_dynamodb_json_lists(actual_item) for actual_item in retrieved_items] + for actual_item in actual_items: + assert actual_item in expected_items + + # Given: Valid batch_delete_item request + # When: batch_delete_item + batch_delete_response = client.batch_write_item(**batch_write_item_delete_request) + # Then: batch_delete_item succeeds + assert batch_delete_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Given: Valid batch_get_item request + # When: batch_get_item + batch_get_response = client.batch_get_item(**batch_get_item_request) + # Then: batch_get_item 
succeeds + assert batch_get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + # Then: All items are deleted + retrieved_items = batch_get_response["Responses"] + assert len(retrieved_items[INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME]) == 0 + + +@pytest.fixture +def query_request(expect_standard_dictionaries, test_item): + if expect_standard_dictionaries: + return {**basic_query_request_dict(test_item), "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME} + return basic_query_request_ddb(test_item) + + +def test_GIVEN_valid_put_and_query_requests_WHEN_put_and_query_THEN_round_trip_passes( + client, put_item_request, query_request +): + # Given: Valid put_item request + # When: put_item + put_response = client.put_item(**put_item_request) + # Then: put_item succeeds + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Given: Valid query request + # When: query + query_response = client.query(**query_request) + # Then: query succeeds + assert query_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + assert len(query_response["Items"]) == 1 + # DynamoDB JSON uses lists to represent sets, so strict equality can fail. + # Sort lists to ensure consistent ordering when comparing expected and actual items. + expected_item = sort_dynamodb_json_lists(put_item_request["Item"]) + actual_item = sort_dynamodb_json_lists(query_response["Items"][0]) + assert expected_item == actual_item + + +@pytest.fixture +def scan_request(expect_standard_dictionaries, test_item, encrypted): + if expect_standard_dictionaries: + request = {**basic_scan_request_dict(test_item), "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME} + else: + request = basic_scan_request_ddb(test_item) + if encrypted: + # If the encrypted scan encounters a plaintext item, the scan will fail. + # To avoid this, encrypted scans add a filter expression that matches only encrypted items. + request["FilterExpression"] = request["FilterExpression"] + " AND attribute_exists (#sig)" + request["ExpressionAttributeNames"] = {} + request["ExpressionAttributeNames"]["#sig"] = "amzn-ddb-map-sig" + return request + + +def test_GIVEN_valid_put_and_scan_requests_WHEN_put_and_scan_THEN_round_trip_passes( + client, put_item_request, scan_request, get_item_request +): + # Given: Valid put_item request + # When: put_item + put_response = client.put_item(**put_item_request) + # Then: put_item succeeds + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Given: Valid scan request + # When: scan + scan_response = client.scan(**scan_request) + # Then: scan succeeds + assert scan_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + # Can't assert anything about the scan; + # there are too many items. + # The critical assertion is that the scan succeeds.
+ + +@pytest.fixture +def transact_write_item_put_request(expect_standard_dictionaries, multiple_test_items): + if expect_standard_dictionaries: + return basic_transact_write_item_put_request_dict(multiple_test_items) + return basic_transact_write_item_put_request_ddb(multiple_test_items) + + +@pytest.fixture +def transact_write_item_delete_request(expect_standard_dictionaries, multiple_test_keys): + if expect_standard_dictionaries: + return basic_transact_write_item_delete_request_dict(multiple_test_keys) + return basic_transact_write_item_delete_request_ddb(multiple_test_keys) + + +@pytest.fixture +def transact_get_item_request(expect_standard_dictionaries, multiple_test_keys): + if expect_standard_dictionaries: + return basic_transact_get_item_request_dict(multiple_test_keys) + return basic_transact_get_item_request_ddb(multiple_test_keys) + + +def test_GIVEN_valid_transact_write_and_get_requests_WHEN_transact_write_and_get_THEN_round_trip_passes( + client, + multiple_test_items, + transact_write_item_put_request, + transact_write_item_delete_request, + transact_get_item_request, + batch_get_item_request, +): + # Given: Valid transact_write_item put request + # When: transact_write_item put + transact_write_put_response = client.transact_write_items(**transact_write_item_put_request) + # Then: transact_write_item put succeeds + assert transact_write_put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Given: Valid transact_get_item request + # When: transact_get_item + transact_get_response = client.transact_get_items(**transact_get_item_request) + # Then: transact_get_item succeeds + assert transact_get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + # Then: All items are encrypted and decrypted correctly + retrieved_items = transact_get_response["Responses"] + assert len(retrieved_items) > 0 + assert len(retrieved_items) == len(multiple_test_items) + expected_items = [sort_dynamodb_json_lists(expected_item) for expected_item in multiple_test_items] + actual_items = [sort_dynamodb_json_lists(actual_item) for actual_item in retrieved_items] + for actual_item in actual_items: + assert actual_item["Item"] in expected_items + + # Given: Valid transact_write_item delete request + # When: transact_write_item delete + transact_write_delete_response = client.transact_write_items(**transact_write_item_delete_request) + # Then: transact_write_item delete succeeds + assert transact_write_delete_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + +@pytest.fixture +def update_item_request_unsigned_attribute(expect_standard_dictionaries, test_item): + if expect_standard_dictionaries: + return { + **basic_update_item_request_dict_unsigned_attribute(test_item), + "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, + } + return basic_update_item_request_ddb_unsigned_attribute(test_item) + + +def test_WHEN_update_item_with_unsigned_attribute_THEN_passes( + client, update_item_request_unsigned_attribute, encrypted, get_item_request +): + # Given: Valid update_item request + # When: update_item + update_response = client.update_item(**update_item_request_unsigned_attribute) + # Then: update_item succeeds + assert update_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + +@pytest.fixture +def update_item_request_signed_attribute(expect_standard_dictionaries, test_item): + if expect_standard_dictionaries: + return { + **basic_update_item_request_dict_signed_attribute(test_item), + "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, + } + return 
basic_update_item_request_ddb_signed_attribute(test_item) + + +def test_WHEN_update_item_with_signed_attribute_THEN_raises_DynamoDbEncryptionTransformsException( + client, + update_item_request_signed_attribute, + encrypted, +): + if not encrypted: + pytest.skip("Skipping negative test for plaintext client") + + # Given: Encrypted client and update item parameters + # Then: DynamoDbEncryptionTransformsException is raised + with pytest.raises(DynamoDbEncryptionTransformsException): + # When: Calling update_item + client.update_item(**update_item_request_signed_attribute) + + +# Create a matrix of tests for each value in param, +# with a user-friendly string for test output: +# execute_uses_encrypted_table = True -> "encrypted_table" +# execute_uses_encrypted_table = False -> "plaintext_table" +# This indicates whether an execute_(statement,transaction,etc.) operation should be on an encrypted table +@pytest.fixture(params=[True, False], ids=["encrypted_table", "plaintext_table"]) +def execute_uses_encrypted_table(request): + return request.param + + +@pytest.fixture +def execute_statement_request(execute_uses_encrypted_table, test_item): + if execute_uses_encrypted_table: + return basic_execute_statement_request_encrypted_table(test_item) + return basic_execute_statement_request_plaintext_table(test_item) + + +def test_WHEN_execute_statement_for_encrypted_table_THEN_raises_DynamoDbEncryptionTransformsException( + client, + execute_statement_request, + encrypted, + execute_uses_encrypted_table, +): + if not encrypted: + pytest.skip("Skipping negative test for plaintext client") + + if execute_uses_encrypted_table: + # Given: Encrypted client and execute_statement request on encrypted table + # Then: DynamoDbEncryptionTransformsException is raised + with pytest.raises(DynamoDbEncryptionTransformsException): + # When: Calling execute_statement + client.execute_statement(**execute_statement_request) + else: + pytest.skip("Skipping test for plaintext table; this test is only for encrypted tables") + + +def test_WHEN_execute_statement_for_plaintext_table_THEN_passes( + client, + execute_statement_request, + execute_uses_encrypted_table, +): + if execute_uses_encrypted_table: + pytest.skip("Skipping test for encrypted table; this test is only for plaintext tables") + + # Given: Client calls execute_statement on plaintext table + # When: Calling execute_statement + response = client.execute_statement(**execute_statement_request) + # Then: Success + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + +@pytest.fixture +def execute_transaction_request(execute_uses_encrypted_table, test_item): + if execute_uses_encrypted_table: + return basic_execute_transaction_request_encrypted_table(test_item) + return basic_execute_transaction_request_plaintext_table(test_item) + + +def test_WHEN_execute_transaction_for_encrypted_table_THEN_raises_DynamoDbEncryptionTransformsException( + client, + execute_transaction_request, + encrypted, + execute_uses_encrypted_table, +): + if not encrypted: + pytest.skip("Skipping negative test for plaintext client") + + if execute_uses_encrypted_table: + # Given: Encrypted client and execute_transaction request on encrypted table + # Then: DynamoDbEncryptionTransformsException is raised + with pytest.raises(DynamoDbEncryptionTransformsException): + # When: Calling execute_transaction + client.execute_transaction(**execute_transaction_request) + else: + pytest.skip("Skipping test for plaintext table; this test is only for encrypted tables") + + +def 
test_WHEN_execute_transaction_for_plaintext_table_THEN_passes( + client, + execute_transaction_request, + execute_uses_encrypted_table, + put_item_request, +): + if execute_uses_encrypted_table: + pytest.skip("Skipping test for encrypted table; this test is only for plaintext tables") + + put_response = client.put_item(**put_item_request) + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Given: Client calls execute_transaction on plaintext table + # When: Calling execute_transaction + response = client.execute_transaction(**execute_transaction_request) + # Then: Success + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + +@pytest.fixture +def batch_execute_statement_request(execute_uses_encrypted_table): + if execute_uses_encrypted_table: + return basic_batch_execute_statement_request_encrypted_table() + return basic_batch_execute_statement_request_plaintext_table() + + +def test_WHEN_batch_execute_statement_for_encrypted_table_THEN_raises_DynamoDbEncryptionTransformsException( + client, + batch_execute_statement_request, + encrypted, + execute_uses_encrypted_table, +): + if not encrypted: + pytest.skip("Skipping negative test for plaintext client") + + if execute_uses_encrypted_table: + # Given: Encrypted client and batch_execute_statement request on encrypted table + # Then: DynamoDbEncryptionTransformsException is raised + with pytest.raises(DynamoDbEncryptionTransformsException): + # When: Calling batch_execute_statement + client.batch_execute_statement(**batch_execute_statement_request) + else: + pytest.skip("Skipping test for plaintext table; this test is only for encrypted tables") + + +def test_WHEN_batch_execute_statement_for_plaintext_table_THEN_passes( + client, + batch_execute_statement_request, + execute_uses_encrypted_table, +): + if execute_uses_encrypted_table: + pytest.skip("Skipping test for encrypted table; this test is only for plaintext tables") + + # Given: Client calls batch_execute_statement on plaintext table + # When: Calling batch_execute_statement + response = client.batch_execute_statement(**batch_execute_statement_request) + # Then: Success + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + +def test_WHEN_get_paginator_THEN_correct_paginator_is_returned(): + """Test that get_paginator returns the correct paginator type.""" + # Given: Encrypted client + + # When: Getting paginator for some encrypted operation + query_paginator = encrypted_client(expect_standard_dictionaries=False).get_paginator("query") + # Then: EncryptedPaginator is returned + assert isinstance(query_paginator, EncryptedPaginator) + + # When: Getting paginator for some non-encrypted operation + list_backups_paginator = encrypted_client(expect_standard_dictionaries=False).get_paginator("list_backups") + # Then: Standard (non-encrypted) paginator is returned + assert not isinstance(list_backups_paginator, EncryptedPaginator) + + +def test_WHEN_call_passthrough_method_THEN_correct_response_is_returned(): + """Test that calling a passthrough method returns the correct response.""" + # Given: Encrypted client + # When: Calling some passthrough method that does not explicitly exist on EncryptedClient, + # but exists on the underlying boto3 client + response = encrypted_client(expect_standard_dictionaries=False).list_backups() + # Then: Correct response is returned, i.e.
EncryptedClient forwards the call to the underlying boto3 client + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + +# Delete the items in the table after the module runs +@pytest.fixture(scope="module", autouse=True) +def cleanup_after_module(test_run_suffix): + yield + table = boto3.client("dynamodb") + items = [deepcopy(simple_item_ddb), deepcopy(complex_item_ddb)] + for item in items: + item["partition_key"]["S"] += test_run_suffix + table.delete_item(**basic_delete_item_request_ddb(item)) diff --git a/DynamoDbEncryption/runtimes/python/test/integ/encrypted/test_item.py b/DynamoDbEncryption/runtimes/python/test/integ/encrypted/test_item.py new file mode 100644 index 000000000..18f594c90 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/integ/encrypted/test_item.py @@ -0,0 +1,79 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Integration tests for the ItemEncryptor.""" +import pytest + +from aws_dbesdk_dynamodb.encrypted.item import ItemEncryptor +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models import ( + DecryptItemInput, + EncryptItemInput, +) + +from ...constants import INTEG_TEST_DEFAULT_ITEM_ENCRYPTOR_CONFIG +from ...items import complex_item_ddb, complex_item_dict, simple_item_ddb, simple_item_dict + + +# Creates a matrix of tests for each value in param, +# with a user-friendly string for test output: +# use_complex_item = True -> "complex_item" +# use_complex_item = False -> "simple_item" +@pytest.fixture(params=[True, False], ids=["complex_item", "simple_item"]) +def use_complex_item(request): + return request.param + + +@pytest.fixture +def test_dict_item(use_complex_item): + if use_complex_item: + return complex_item_dict + return simple_item_dict + + +@pytest.fixture +def test_ddb_item(use_complex_item): + if use_complex_item: + return complex_item_ddb + return simple_item_ddb + + +item_encryptor = ItemEncryptor(INTEG_TEST_DEFAULT_ITEM_ENCRYPTOR_CONFIG) + + +def test_GIVEN_valid_dict_item_WHEN_encrypt_python_item_AND_decrypt_python_item_THEN_round_trip_passes(test_dict_item): + # Given: Valid dict item + # When: encrypt_python_item + encrypted_dict_item = item_encryptor.encrypt_python_item(test_dict_item).encrypted_item + # Then: Encrypted dict item is returned + assert encrypted_dict_item != test_dict_item + # When: decrypt_python_item + decrypted_dict_item = item_encryptor.decrypt_python_item(encrypted_dict_item).plaintext_item + # Then: Decrypted dict item is returned and matches the original item + assert decrypted_dict_item == test_dict_item + + +def test_GIVEN_valid_ddb_item_WHEN_encrypt_dynamodb_item_AND_decrypt_dynamodb_item_THEN_round_trip_passes( + test_ddb_item, +): + # Given: Valid ddb item + # When: encrypt_dynamodb_item + encrypted_ddb_item = item_encryptor.encrypt_dynamodb_item(test_ddb_item).encrypted_item + # Then: Encrypted ddb item is returned + assert encrypted_ddb_item != test_ddb_item + # When: decrypt_dynamodb_item + decrypted_ddb_item = item_encryptor.decrypt_dynamodb_item(encrypted_ddb_item).plaintext_item + # Then: Decrypted ddb item is returned and matches the original item + assert decrypted_ddb_item == test_ddb_item + + +def test_GIVEN_valid_encrypt_item_input_WHEN_encrypt_item_AND_decrypt_item_THEN_round_trip_passes(test_ddb_item): + # Given: Valid encrypt_item_input + encrypt_item_input = EncryptItemInput(plaintext_item=test_ddb_item) + # When: encrypt_item + encrypted_item = 
item_encryptor.encrypt_item(encrypt_item_input).encrypted_item + # Then: Encrypted item is returned + assert encrypted_item != test_ddb_item + # When: decrypt_item + decrypt_item_input = DecryptItemInput(encrypted_item=encrypted_item) + decrypted_item = item_encryptor.decrypt_item(decrypt_item_input).plaintext_item + # Then: Decrypted item is returned and matches the original item + assert decrypted_item == test_ddb_item diff --git a/DynamoDbEncryption/runtimes/python/test/integ/encrypted/test_paginator.py b/DynamoDbEncryption/runtimes/python/test/integ/encrypted/test_paginator.py new file mode 100644 index 000000000..723fb6fac --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/integ/encrypted/test_paginator.py @@ -0,0 +1,239 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import uuid +from copy import deepcopy + +import boto3 +import pytest + +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient + +from ...constants import ( + INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, + INTEG_TEST_DEFAULT_TABLE_CONFIGS, +) +from ...items import ( + complex_item_ddb, + complex_item_dict, + complex_key_ddb, + complex_key_dict, + simple_item_ddb, + simple_item_dict, + simple_key_ddb, + simple_key_dict, +) +from ...requests import ( + basic_delete_item_request_ddb, + basic_put_item_request_ddb, + basic_put_item_request_dict, + basic_query_paginator_request, + basic_scan_paginator_request, +) +from . import sort_dynamodb_json_lists + + +# Creates a matrix of tests for each value in param, +# with a user-friendly string for test output: +# expect_standard_dictionaries = True -> "standard_dicts" +# expect_standard_dictionaries = False -> "ddb_json" +@pytest.fixture(params=[True, False], ids=["standard_dicts", "ddb_json"]) +def expect_standard_dictionaries(request): + return request.param + + +def encrypted_client(expect_standard_dictionaries): + return EncryptedClient( + client=plaintext_client(expect_standard_dictionaries), + encryption_config=INTEG_TEST_DEFAULT_TABLE_CONFIGS, + expect_standard_dictionaries=expect_standard_dictionaries, + ) + + +def plaintext_client(expect_standard_dictionaries): + if expect_standard_dictionaries: + client = boto3.resource("dynamodb").Table(INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME).meta.client + else: + client = boto3.client("dynamodb") + return client + + +# Creates a matrix of tests for each value in param, +# with a user-friendly string for test output: +# encrypted = True -> "encrypted" +# encrypted = False -> "plaintext" +@pytest.fixture(params=[True, False], ids=["encrypted", "plaintext"]) +def encrypted(request): + return request.param + + +@pytest.fixture +def client(encrypted, expect_standard_dictionaries): + if encrypted: + return encrypted_client(expect_standard_dictionaries) + else: + return plaintext_client(expect_standard_dictionaries) + + +@pytest.fixture +def query_paginator(client): + return client.get_paginator("query") + + +@pytest.fixture +def scan_paginator(client): + return client.get_paginator("scan") + + +# Creates a matrix of tests for each value in param, +# with a user-friendly string for test output: +# use_complex_item = True -> "complex_item" +# use_complex_item = False -> "simple_item" +@pytest.fixture(params=[True, False], ids=["complex_item", "simple_item"]) +def use_complex_item(request): + return request.param + + +# Append a suffix to the partition key to avoid collisions between test runs. 
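+# The module scope keeps this suffix constant across every test in this module, so the autouse cleanup fixture can find and delete the items the tests created.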
+@pytest.fixture(scope="module") +def test_run_suffix(): + return "-" + str(uuid.uuid4()) + + +@pytest.fixture +def test_key(expect_standard_dictionaries, use_complex_item, test_run_suffix): + """Get a single test item in the appropriate format for the client.""" + if expect_standard_dictionaries: + if use_complex_item: + key = deepcopy(complex_key_dict) + else: + key = deepcopy(simple_key_dict) + else: + if use_complex_item: + key = deepcopy(complex_key_ddb) + else: + key = deepcopy(simple_key_ddb) + # Add a suffix to the partition key to avoid collisions between test runs. + if isinstance(key["partition_key"], dict): + key["partition_key"]["S"] += test_run_suffix + else: + key["partition_key"] += test_run_suffix + return key + + +@pytest.fixture +def multiple_test_keys(expect_standard_dictionaries, test_run_suffix): + """Get two test keys in the appropriate format for the client.""" + if expect_standard_dictionaries: + keys = [deepcopy(simple_key_dict), deepcopy(complex_key_dict)] + else: + keys = [deepcopy(simple_key_ddb), deepcopy(complex_key_ddb)] + # Add a suffix to the partition key to avoid collisions between test runs. + for key in keys: + if isinstance(key["partition_key"], dict): + key["partition_key"]["S"] += test_run_suffix + else: + key["partition_key"] += test_run_suffix + return keys + + +@pytest.fixture +def test_item(expect_standard_dictionaries, use_complex_item, test_run_suffix): + """Get a single test item in the appropriate format for the client.""" + if expect_standard_dictionaries: + if use_complex_item: + item = deepcopy(complex_item_dict) + else: + item = deepcopy(simple_item_dict) + else: + if use_complex_item: + item = deepcopy(complex_item_ddb) + else: + item = deepcopy(simple_item_ddb) + # Add a suffix to the partition key to avoid collisions between test runs. + if isinstance(item["partition_key"], dict): + item["partition_key"]["S"] += test_run_suffix + else: + item["partition_key"] += test_run_suffix + return item + + +@pytest.fixture +def paginate_query_request(expect_standard_dictionaries, test_key): + if expect_standard_dictionaries: + return {**basic_query_paginator_request(test_key), "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME} + return basic_query_paginator_request(test_key) + + +@pytest.fixture +def put_item_request(expect_standard_dictionaries, test_item): + if expect_standard_dictionaries: + # Client requests with `expect_standard_dictionaries=True` use dict-formatted requests + # with an added "TableName" key. + return {**basic_put_item_request_dict(test_item), "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME} + return basic_put_item_request_ddb(test_item) + + +def test_GIVEN_query_paginator_WHEN_paginate_THEN_returns_expected_items( + client, query_paginator, paginate_query_request, put_item_request, test_item +): + # Given: item in table + client.put_item(**put_item_request) + # Given: Query paginator + # When: Paginate + response = query_paginator.paginate(**paginate_query_request) + # Then: Returns encrypted items + items = [] + for page in response: + if "Items" in page: + for item in page["Items"]: + items.append(item) + assert len(items) == 1 + # DynamoDB JSON uses lists to represent sets, so strict equality can fail. + # Sort lists to ensure consistent ordering when comparing expected and actual items. 
+ expected_item = sort_dynamodb_json_lists(test_item) + actual_item = sort_dynamodb_json_lists(items[0]) + # Then: Items are equal + assert expected_item == actual_item + + +@pytest.fixture +def paginate_scan_request(expect_standard_dictionaries, test_item): + if expect_standard_dictionaries: + request = {**basic_scan_paginator_request(test_item), "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME} + else: + request = basic_scan_paginator_request(test_item) + return request + + +def test_GIVEN_scan_paginator_WHEN_paginate_THEN_returns_expected_items( + client, scan_paginator, paginate_scan_request, put_item_request, test_item +): + # Given: item in table + client.put_item(**put_item_request) + # Given: Scan paginator + # When: Paginate + response = scan_paginator.paginate(**paginate_scan_request) + # Then: Returns encrypted items + items = [] + for page in response: + if "Items" in page: + for item in page["Items"]: + items.append(item) + assert len(items) == 1 + # DynamoDB JSON uses lists to represent sets, so strict equality can fail. + # Sort lists to ensure consistent ordering when comparing expected and actual items. + expected_item = sort_dynamodb_json_lists(test_item) + actual_item = sort_dynamodb_json_lists(items[0]) + # Then: Items are equal + assert expected_item == actual_item + + +# Delete the items in the table after the module runs +@pytest.fixture(scope="module", autouse=True) +def cleanup_after_module(test_run_suffix): + yield + table = boto3.client("dynamodb") + items = [deepcopy(simple_item_ddb), deepcopy(complex_item_ddb)] + for item in items: + item["partition_key"]["S"] += test_run_suffix + table.delete_item(**basic_delete_item_request_ddb(item)) diff --git a/DynamoDbEncryption/runtimes/python/test/integ/encrypted/test_resource.py b/DynamoDbEncryption/runtimes/python/test/integ/encrypted/test_resource.py new file mode 100644 index 000000000..22ddc3d1a --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/integ/encrypted/test_resource.py @@ -0,0 +1,201 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import uuid +from copy import deepcopy + +import boto3 +import pytest + +from aws_dbesdk_dynamodb.encrypted.resource import EncryptedResource, EncryptedTablesCollectionManager +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable + +from ...constants import INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, INTEG_TEST_DEFAULT_TABLE_CONFIGS +from ...items import ( + complex_item_ddb, + complex_item_dict, + complex_key_dict, + simple_item_ddb, + simple_item_dict, + simple_key_dict, +) +from ...requests import ( + basic_batch_get_item_request_dict, + basic_batch_write_item_delete_request_dict, + basic_batch_write_item_put_request_dict, + basic_delete_item_request_ddb, +) + + +@pytest.fixture(params=[True, False], ids=["encrypted", "plaintext"]) +def encrypted(request): + return request.param + + +def plaintext_resource(): + return boto3.resource("dynamodb") + + +def encrypted_resource(): + return EncryptedResource( + resource=plaintext_resource(), + encryption_config=INTEG_TEST_DEFAULT_TABLE_CONFIGS, + ) + + +@pytest.fixture +def resource(encrypted): + if encrypted: + return encrypted_resource() + else: + return plaintext_resource() + + +@pytest.fixture +def tables(resource): + return resource.tables + + +@pytest.fixture(scope="module") +def test_run_suffix(): + return "-" + str(uuid.uuid4()) + + +@pytest.fixture +def test_items(test_run_suffix): + items = [deepcopy(complex_item_dict), deepcopy(simple_item_dict)] + for item in items: + item["partition_key"] += test_run_suffix + return items + + +@pytest.fixture +def test_keys(test_run_suffix): + keys = [deepcopy(complex_key_dict), deepcopy(simple_key_dict)] + for key in keys: + key["partition_key"] += test_run_suffix + return keys + + +def test_GIVEN_items_WHEN_batch_write_and_get_THEN_round_trip_passes( + resource, + test_items, + test_keys, +): + batch_write_item_put_request = basic_batch_write_item_put_request_dict(test_items) + batch_write_response = resource.batch_write_item(**batch_write_item_put_request) + assert batch_write_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + batch_get_item_request = basic_batch_get_item_request_dict(test_keys) + batch_get_response = resource.batch_get_item(**batch_get_item_request) + assert batch_get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + responses = batch_get_response["Responses"][INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME] + assert len(responses) == 2 + for response in responses: + assert response in test_items + + batch_write_item_delete_request = basic_batch_write_item_delete_request_dict(test_keys) + batch_write_response = resource.batch_write_item(**batch_write_item_delete_request) + assert batch_write_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + batch_get_response = resource.batch_get_item(**batch_get_item_request) + assert batch_get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + assert len(batch_get_response["Responses"][INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME]) == 0 + + +def test_GIVEN_encrypted_resource_WHEN_Table_THEN_returns_encrypted_table_with_correct_arguments(): + # Given: Encrypted resource + resource = encrypted_resource() + # When: Table + table = resource.Table(INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME) + # Then: Returns encrypted table + assert isinstance(table, EncryptedTable) + assert table.name == INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME + assert table._encryption_config == resource._encryption_config + + +def test_GIVEN_encrypted_resource_WHEN_tables_THEN_returns_encrypted_tables_collection_manager(): + 
# Given: Encrypted resource + resource = encrypted_resource() + # When: tables + tables = resource.tables + # Then: Returns encrypted tables collection manager + assert isinstance(tables, EncryptedTablesCollectionManager) + # Given: Encrypted tables collection manager + tables = resource.tables + # When: all + iter = tables.all() + tables_list = [] + for table in iter: + tables_list.append(table) + assert len(tables_list) > 0 + for table in tables_list: + assert isinstance(table, EncryptedTable) + assert table._encryption_config == resource._encryption_config + + +def test_GIVEN_tables_WHEN_all_THEN_returns_tables( + tables, +): + # Given: Tables + # When: all + tables_list = [] + for table in tables.all(): + tables_list.append(table) + # Then: Returns tables + assert len(tables_list) > 0 + table_names = [table.name for table in tables_list] + # "All tables" includes the integ test table + assert INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME in table_names + + +def test_GIVEN_tables_WHEN_filter_THEN_returns_tables( + tables, +): + # Given: Tables + # When: filter + tables_list = [] + for table in tables.filter( + ExclusiveStartTableName=INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, + Limit=1, + ): + tables_list.append(table) + # Then: Returns tables + assert len(tables_list) > 0 + table_names = [table.name for table in tables_list] + # The filter request started from the integ test table, not inclusive; it should not be in the list + assert INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME not in table_names + + +def test_GIVEN_tables_WHEN_limit_THEN_returns_tables( + tables, +): + # Given: Tables + # When: limit + tables_list = [] + for table in tables.limit(count=1): + tables_list.append(table) + # Then: Returns tables + assert len(tables_list) == 1 + + +def test_GIVEN_tables_WHEN_page_size_THEN_returns_tables( + tables, +): + # Given: Tables + # When: page_size + tables_list = [] + for table in tables.page_size(count=1): + tables_list.append(table) + # Then: Returns tables + assert len(tables_list) > 0 + + +# Delete the items in the table after the module runs +@pytest.fixture(scope="module", autouse=True) +def cleanup_after_module(test_run_suffix): + yield + table = boto3.client("dynamodb") + items = [deepcopy(simple_item_ddb), deepcopy(complex_item_ddb)] + for item in items: + item["partition_key"]["S"] += test_run_suffix + table.delete_item(**basic_delete_item_request_ddb(item)) diff --git a/DynamoDbEncryption/runtimes/python/test/integ/encrypted/test_table.py b/DynamoDbEncryption/runtimes/python/test/integ/encrypted/test_table.py new file mode 100644 index 000000000..46436e468 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/integ/encrypted/test_table.py @@ -0,0 +1,262 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import uuid +from copy import deepcopy + +import boto3 +import pytest + +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.errors import ( + DynamoDbEncryptionTransformsException, +) + +from ...constants import ( + INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, + INTEG_TEST_DEFAULT_TABLE_CONFIGS, +) +from ...items import complex_item_dict, complex_key_dict, simple_item_dict, simple_key_dict +from ...requests import ( + basic_delete_item_request_dict, + basic_get_item_request_dict, + basic_put_item_request_dict, + basic_query_request_dict, + basic_scan_request_dict, + basic_update_item_request_dict_signed_attribute, + basic_update_item_request_dict_unsigned_attribute, +) + + +def encrypted_table(): + """Create an encrypted table.""" + table = boto3.resource("dynamodb").Table(INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME) + return EncryptedTable( + table=table, + encryption_config=INTEG_TEST_DEFAULT_TABLE_CONFIGS, + ) + + +def plaintext_table(): + """Create a plaintext table.""" + table = boto3.resource("dynamodb").Table(INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME) + return table + + +# Creates a matrix of tests for each value in param, +# with a user-friendly string for test output: +# encrypted = True -> "encrypted" +# encrypted = False -> "plaintext" +@pytest.fixture(params=[True, False], ids=["encrypted", "plaintext"]) +def encrypted(request): + return request.param + + +@pytest.fixture +def table(encrypted): + """ + Create a table client. + Both variants are used to test that the same input can be provided to both boto3 and the EncryptedTable. + """ + if encrypted: + return encrypted_table() + else: + return plaintext_table() + + +@pytest.fixture(scope="module") +def test_run_suffix(): + return "-" + str(uuid.uuid4()) + + +# Creates a matrix of tests for each value in param, +# with a user-friendly string for test output: +# simple_item_dict -> "simple_item" +# complex_item_dict -> "complex_item" +@pytest.fixture(params=[simple_item_dict, complex_item_dict], ids=["simple_item", "complex_item"]) +def test_item(request, test_run_suffix): + item = deepcopy(request.param) + item["partition_key"] += test_run_suffix + return item + + +def test_GIVEN_item_WHEN_basic_put_AND_basic_get_AND_basic_delete_THEN_round_trip_passes(table, test_item): + """Test put_item, get_item, and delete_item operations.""" + # Given: Valid put_item request + put_item_request_dict = basic_put_item_request_dict(test_item) + # When: put_item + put_response = table.put_item(**put_item_request_dict) + # Then: put_item succeeds + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Given: Valid get_item request for the same item + get_item_request_dict = basic_get_item_request_dict(test_item) + # When: get_item + get_response = table.get_item(**get_item_request_dict) + # Then: Item is encrypted and decrypted correctly + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + assert get_response["Item"] == put_item_request_dict["Item"] + + # Given: Valid delete_item request for the same item + delete_item_request_dict = basic_delete_item_request_dict(test_item) + # When: delete_item + delete_response = table.delete_item(**delete_item_request_dict) + # Then: delete_item succeeds + assert delete_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Given: Valid get_item request for the same item + get_item_request_dict = 
basic_get_item_request_dict(test_item) + # When: get_item + get_response = table.get_item(**get_item_request_dict) + # Then: get_item is empty (i.e. the item was deleted) + assert "Item" not in get_response + + +@pytest.fixture +def multiple_test_items(test_run_suffix): + """Get two test items in the appropriate format for the client.""" + items = [deepcopy(simple_item_dict), deepcopy(complex_item_dict)] + for item in items: + item["partition_key"] += test_run_suffix + return items + + +@pytest.fixture +def multiple_test_keys(test_run_suffix): + """Get two test keys in the appropriate format for the client.""" + keys = [deepcopy(simple_key_dict), deepcopy(complex_key_dict)] + for key in keys: + key["partition_key"] += test_run_suffix + return keys + + +def test_GIVEN_items_WHEN_batch_write_and_get_THEN_round_trip_passes( + table, + multiple_test_items, + multiple_test_keys, +): + # Given: Simple and complex items in appropriate format for client + # When: Batch put items + with table.batch_writer() as batch_writer: + # boto3 documentation for batch_writer.put_item() is incorrect; + # the method accepts the item directly, not the item inside an "Item" key. + for item in multiple_test_items: + batch_writer.put_item(item) + + # When: Get items + for item in multiple_test_items: + get_item_request_dict = basic_get_item_request_dict(item) + get_response = table.get_item(**get_item_request_dict) + # Then: All items are encrypted and decrypted correctly + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + assert get_response["Item"] == item + + # When: Batch delete items + with table.batch_writer() as batch_writer: + for key in multiple_test_keys: + batch_writer.delete_item(key) + + # When: Get items + for item in multiple_test_items: + get_item_request_dict = basic_get_item_request_dict(item) + get_response = table.get_item(**get_item_request_dict) + # Then: All items are encrypted and decrypted correctly + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + assert "Item" not in get_response + + +def test_GIVEN_items_in_table_WHEN_query_THEN_items_are_decrypted_correctly(table, test_item): + """Test query and scan operations.""" + # Given: Simple and complex items in appropriate format for client + # When: Putting items into table + put_item_request_dict = basic_put_item_request_dict(test_item) + table.put_item(**put_item_request_dict) + + # When: Querying items by partition key + query_request_dict = basic_query_request_dict(test_item) + query_response = table.query(**query_request_dict) + # Then: Query returns correct items + assert query_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + assert len(query_response["Items"]) == 1 + assert query_response["Items"][0] == put_item_request_dict["Item"] + + +@pytest.fixture +def scan_request(encrypted, test_item): + if encrypted: + request = basic_scan_request_dict(test_item) + # If the encrypted scan encounters a plaintext item, the scan will fail. + # To avoid this, encrypted scans add a filter expression that matches only encrypted items. 
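+ # "#sig" is bound below to an attribute name that only encrypted items carry, so attribute_exists(#sig) filters out plaintext items.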
+ request["FilterExpression"] = request["FilterExpression"] + " AND attribute_exists (#sig)" + request["ExpressionAttributeNames"] = {} + request["ExpressionAttributeNames"]["#sig"] = "amzn-ddb-map-sig" + return request + return basic_scan_request_dict(test_item) + + +def test_GIVEN_valid_put_and_scan_requests_WHEN_put_and_scan_THEN_round_trip_passes(table, test_item, scan_request): + # Given: Simple and complex items in appropriate format for client + put_item_request_dict = basic_put_item_request_dict(test_item) + table.put_item(**put_item_request_dict) + + # When: Scanning items + scan_request_dict = scan_request + scan_response = table.scan(**scan_request_dict) + # Then: Scan succeeds + # Can't assert anything about the scan; + # there are too many items. + # The critical assertion is that the scan succeeds. + assert scan_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + +def test_GIVEN_update_for_unsigned_attribute_WHEN_update_item_THEN_passes(table, test_item): + # Given: some item is already in the table + put_response = table.put_item(**basic_put_item_request_dict(test_item)) + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Given: Valid update item request for unsigned attribute + update_item_request = basic_update_item_request_dict_unsigned_attribute(test_item) + + # When: Calling update_item + update_response = table.update_item(**update_item_request) + # Then: update_item succeeds + assert update_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + +def test_GIVEN_update_for_signed_attribute_WHEN_update_item_THEN_raises_DynamoDbEncryptionTransformsException( + table, test_item, encrypted +): + if not encrypted: + pytest.skip("Skipping negative test for plaintext client") + + # Given: some item is already in the table + put_response = table.put_item(**basic_put_item_request_dict(test_item)) + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Given: Valid update item request for signed attribute + update_item_request = basic_update_item_request_dict_signed_attribute(test_item) + + # Then: raises DynamoDbEncryptionTransformsException + with pytest.raises(DynamoDbEncryptionTransformsException): + # When: Calling update_item + table.update_item(**update_item_request) + + +def test_WHEN_call_passthrough_method_THEN_correct_response_is_returned(): + """Test that calling a passthrough method returns the correct response.""" + # Given: Encrypted or plaintext table + # When: Calling some passthrough method that does not explicitly exist on EncryptedTable, + # but exists on the underlying boto3 table + response = encrypted_table().table_name + # Then: Correct response is returned, i.e. 
EncryptedTable forwards the call to the underlying boto3 table + assert response == INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME + + +# Delete the items in the table after the module runs +@pytest.fixture(scope="module", autouse=True) +def cleanup_after_module(test_run_suffix): + yield + table = boto3.resource("dynamodb").Table(INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME) + items = [deepcopy(simple_item_dict), deepcopy(complex_item_dict)] + for item in items: + item["partition_key"] = item["partition_key"] + test_run_suffix + table.delete_item(**basic_delete_item_request_dict(item)) diff --git a/DynamoDbEncryption/runtimes/python/test/integ/legacy/__init__.py b/DynamoDbEncryption/runtimes/python/test/integ/legacy/__init__.py new file mode 100644 index 000000000..37c9c2f01 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/integ/legacy/__init__.py @@ -0,0 +1,18 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + + +def sort_dynamodb_json_lists(obj): + """ + Utility that recursively sorts all lists in a DynamoDB JSON-like structure. + DynamoDB JSON uses lists to represent sets, so strict equality can fail. + Sort lists to ensure consistent ordering when comparing expected and actual items. + """ + if isinstance(obj, dict): + return {k: sort_dynamodb_json_lists(v) for k, v in obj.items()} + elif isinstance(obj, list): + try: + return sorted(obj) # Sort lists for consistent comparison + except TypeError: + return obj # Not all lists are sortable; ex. complex_item_ddb's "list" attribute + return obj diff --git a/DynamoDbEncryption/runtimes/python/test/integ/legacy/test_client.py b/DynamoDbEncryption/runtimes/python/test/integ/legacy/test_client.py new file mode 100644 index 000000000..0900b5b5e --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/integ/legacy/test_client.py @@ -0,0 +1,256 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import uuid +from copy import deepcopy + +import boto3 +import pytest +from dynamodb_encryption_sdk.exceptions import DecryptionError + +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.errors import ( + DynamoDbItemEncryptor, +) +from aws_dbesdk_dynamodb.structures.dynamodb import LegacyPolicy + +from ...constants import ( + INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, +) +from ...items import ( + complex_item_ddb, + complex_item_dict, + complex_key_ddb, + complex_key_dict, + simple_item_ddb, + simple_item_dict, + simple_key_ddb, + simple_key_dict, +) +from ...requests import ( + basic_delete_item_request_ddb, + basic_delete_item_request_dict, + basic_get_item_request_ddb, + basic_get_item_request_dict, + basic_put_item_request_ddb, + basic_put_item_request_dict, +) +from . 
import sort_dynamodb_json_lists +from .utils import ( + create_legacy_encrypted_client, + create_legacy_encrypted_resource, + create_legacy_encrypted_table, + encrypted_client_with_legacy_override, + legacy_actions, +) + + +# Creates a matrix of tests for each value in param, +# with a user-friendly string for test output: +# expect_standard_dictionaries = True -> "standard_dicts" +# expect_standard_dictionaries = False -> "ddb_json" +@pytest.fixture(params=[True, False], ids=["standard_dicts", "ddb_json"]) +def expect_standard_dictionaries(request): + return request.param + + +# Creates a matrix of tests for each value in param, +# with a user-friendly string for test output: +# use_complex_item = True -> "complex_item" +# use_complex_item = False -> "simple_item" +@pytest.fixture(params=[True, False], ids=["complex_item", "simple_item"]) +def use_complex_item(request): + return request.param + + +# Append a suffix to the partition key to avoid collisions between test runs. +@pytest.fixture(scope="module") +def test_run_suffix(): + return "-" + str(uuid.uuid4()) + + +@pytest.fixture +def test_item(expect_standard_dictionaries, use_complex_item, test_run_suffix): + """Get a single test item in the appropriate format for the client.""" + if expect_standard_dictionaries: + if use_complex_item: + item = deepcopy(complex_item_dict) + else: + item = deepcopy(simple_item_dict) + else: + if use_complex_item: + item = deepcopy(complex_item_ddb) + else: + item = deepcopy(simple_item_ddb) + # Add a suffix to the partition key to avoid collisions between test runs. + if isinstance(item["partition_key"], dict): + item["partition_key"]["S"] += test_run_suffix + else: + item["partition_key"] += test_run_suffix + return item + + +@pytest.fixture +def test_key(expect_standard_dictionaries, use_complex_item, test_run_suffix): + """Get a single test item in the appropriate format for the client.""" + if expect_standard_dictionaries: + if use_complex_item: + key = deepcopy(complex_key_dict) + else: + key = deepcopy(simple_key_dict) + else: + if use_complex_item: + key = deepcopy(complex_key_ddb) + else: + key = deepcopy(simple_key_ddb) + # Add a suffix to the partition key to avoid collisions between test runs. + if isinstance(key["partition_key"], dict): + key["partition_key"]["S"] += test_run_suffix + else: + key["partition_key"] += test_run_suffix + return key + + +@pytest.fixture +def put_item_request(expect_standard_dictionaries, test_item): + if expect_standard_dictionaries: + # Client requests with `expect_standard_dictionaries=True` use dict-formatted requests + # with an added "TableName" key. + return {**basic_put_item_request_dict(test_item), "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME} + return basic_put_item_request_ddb(test_item) + + +@pytest.fixture +def get_item_request(expect_standard_dictionaries, test_item): + if expect_standard_dictionaries: + # Client requests with `expect_standard_dictionaries=True` use dict-formatted requests + # with an added "TableName" key. 
+ return {**basic_get_item_request_dict(test_item), "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME} + return basic_get_item_request_ddb(test_item) + + +@pytest.fixture +def delete_item_request(expect_standard_dictionaries, test_item): + if expect_standard_dictionaries: + return {**basic_delete_item_request_dict(test_item), "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME} + return basic_delete_item_request_ddb(test_item) + + +# Fixtures for legacy encryptors and clients + + +@pytest.fixture(params=["client", "table", "resource"], ids=["legacy_client", "legacy_table", "legacy_resource"]) +def legacy_encryptor(request): + """Create a legacy encryptor of the specified type.""" + if request.param == "client": + return create_legacy_encrypted_client() + elif request.param == "table": + return create_legacy_encrypted_table() + elif request.param == "resource": + return create_legacy_encrypted_resource() + + +@pytest.fixture( + params=[ + LegacyPolicy.FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT, + LegacyPolicy.FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT, + LegacyPolicy.FORBID_LEGACY_ENCRYPT_FORBID_LEGACY_DECRYPT, + ] +) +def legacy_policy(request): + return request.param + + +@pytest.fixture +def encrypted_client(legacy_encryptor, legacy_policy, expect_standard_dictionaries): + return encrypted_client_with_legacy_override( + legacy_encryptor=legacy_encryptor, + legacy_policy=legacy_policy, + expect_standard_dictionaries=expect_standard_dictionaries, + ) + + +def test_GIVEN_awsdbe_encrypted_item_WHEN_get_with_legacy_client( + encrypted_client, + put_item_request, + get_item_request, + delete_item_request, + expect_standard_dictionaries, + legacy_policy, +): + # Given: Valid put_item request + # When: put_item + put_response = encrypted_client.put_item(**put_item_request) + # Then: put_item succeeds + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Given: Fresh legacy encryptor of the same type as used in the fixture + legacy_encrypted_client = create_legacy_encrypted_client( + attribute_actions=legacy_actions(), + expect_standard_dictionaries=expect_standard_dictionaries, + ) + + if legacy_policy == LegacyPolicy.FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT: + # Given: Valid get_item request for the same item using legacy encryptor with FORCE_LEGACY_ENCRYPT policy + # When: get_item with legacy encryptor + get_response = legacy_encrypted_client.get_item(**get_item_request) + # Then: Response is equal to the original item (legacy encryptor can decrypt item written by AWS DB-ESDK) + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + # DynamoDB JSON uses lists to represent sets, so strict equality can fail. + # Sort lists to ensure consistent ordering when comparing expected and actual items. + expected_item = sort_dynamodb_json_lists(put_item_request["Item"]) + legacy_actual_item = sort_dynamodb_json_lists(get_response["Item"]) + assert expected_item == legacy_actual_item + else: + # Given: Valid get_item request for the same item using legacy encryptor with FORBID_LEGACY_ENCRYPT policy + # When: get_item with legacy encryptor + # Then: throws DecryptionError Exception (i.e. 
legacy encryptor cannot read values in new format) + with pytest.raises(DecryptionError): # The exact exception may vary in Python implementation + # Try to read the item with the legacy encryptor + legacy_encrypted_client.get_item(**get_item_request) + + +def test_GIVEN_legacy_encrypted_item_WHEN_get_with_awsdbe( + encrypted_client, + put_item_request, + get_item_request, + delete_item_request, + expect_standard_dictionaries, + legacy_policy, +): + # Given: Fresh legacy encryptor and valid put_item request + legacy_encrypted_client = create_legacy_encrypted_client( + attribute_actions=legacy_actions(), + expect_standard_dictionaries=expect_standard_dictionaries, + ) + # When: put_item using legacy encryptor + put_response = legacy_encrypted_client.put_item(**put_item_request) + # Then: put_item succeeds (item is written using legacy format) + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + if not legacy_policy == LegacyPolicy.FORBID_LEGACY_ENCRYPT_FORBID_LEGACY_DECRYPT: + # Given: Valid get_item request for the same item with ALLOW_LEGACY_DECRYPT policy + # When: get_item using AWS DB-ESDK client + get_response = encrypted_client.get_item(**get_item_request) + # Then: Response is equal to the original item (AWS DB ESDK can decrypt legacy items) + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + # DynamoDB JSON uses lists to represent sets, so strict equality can fail. + # Sort lists to ensure consistent ordering when comparing expected and actual items. + expected_item = sort_dynamodb_json_lists(put_item_request["Item"]) + actual_item = sort_dynamodb_json_lists(get_response["Item"]) + assert expected_item == actual_item + else: + # Given: Valid get_item request for the same item with FORBID_LEGACY_DECRYPT policy + # When: get_item using AWS DBE SDK client + # Then: Throws a DynamoDbItemEncryptor exception (AWS DB-ESDK with FORBID policy cannot decrypt legacy items) + with pytest.raises(DynamoDbItemEncryptor): + encrypted_client.get_item(**get_item_request) + + +# Delete the items in the table after the module runs +@pytest.fixture(scope="module", autouse=True) +def cleanup_after_module(test_run_suffix): + yield + table = boto3.client("dynamodb") + items = [deepcopy(simple_item_ddb), deepcopy(complex_item_ddb)] + for item in items: + item["partition_key"]["S"] += test_run_suffix + table.delete_item(**basic_delete_item_request_ddb(item)) diff --git a/DynamoDbEncryption/runtimes/python/test/integ/legacy/test_paginator.py b/DynamoDbEncryption/runtimes/python/test/integ/legacy/test_paginator.py new file mode 100644 index 000000000..6f170ac52 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/integ/legacy/test_paginator.py @@ -0,0 +1,365 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import uuid +from copy import deepcopy + +import boto3 +import pytest +from dynamodb_encryption_sdk.exceptions import DecryptionError + +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.errors import ( + DynamoDbItemEncryptor, +) +from aws_dbesdk_dynamodb.structures.dynamodb import LegacyPolicy + +from ...constants import ( + INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, +) +from ...items import ( + complex_item_ddb, + complex_item_dict, + complex_key_ddb, + complex_key_dict, + simple_item_ddb, + simple_item_dict, + simple_key_ddb, + simple_key_dict, +) +from ...requests import ( + basic_delete_item_request_ddb, + basic_put_item_request_ddb, + basic_put_item_request_dict, + basic_query_paginator_request, + basic_scan_paginator_request, +) +from . import sort_dynamodb_json_lists +from .utils import ( + create_legacy_encrypted_client, + create_legacy_encrypted_resource, + create_legacy_encrypted_table, + encrypted_client_with_legacy_override, + legacy_actions, +) + + +# Creates a matrix of tests for each value in param, +# with a user-friendly string for test output: +# expect_standard_dictionaries = True -> "standard_dicts" +# expect_standard_dictionaries = False -> "ddb_json" +@pytest.fixture(params=[True, False], ids=["standard_dicts", "ddb_json"]) +def expect_standard_dictionaries(request): + return request.param + + +# Creates a matrix of tests for each value in param, +# with a user-friendly string for test output: +# use_complex_item = True -> "complex_item" +# use_complex_item = False -> "simple_item" +@pytest.fixture(params=[True, False], ids=["complex_item", "simple_item"]) +def use_complex_item(request): + return request.param + + +# Append a suffix to the partition key to avoid collisions between test runs. +@pytest.fixture(scope="module") +def test_run_suffix(): + return "-" + str(uuid.uuid4()) + + +@pytest.fixture +def test_key(expect_standard_dictionaries, use_complex_item, test_run_suffix): + """Get a single test item in the appropriate format for the client.""" + if expect_standard_dictionaries: + if use_complex_item: + key = deepcopy(complex_key_dict) + else: + key = deepcopy(simple_key_dict) + else: + if use_complex_item: + key = deepcopy(complex_key_ddb) + else: + key = deepcopy(simple_key_ddb) + # Add a suffix to the partition key to avoid collisions between test runs. + if isinstance(key["partition_key"], dict): + key["partition_key"]["S"] += test_run_suffix + else: + key["partition_key"] += test_run_suffix + return key + + +@pytest.fixture +def test_item(expect_standard_dictionaries, use_complex_item, test_run_suffix): + """Get a single test item in the appropriate format for the client.""" + if expect_standard_dictionaries: + if use_complex_item: + item = deepcopy(complex_item_dict) + else: + item = deepcopy(simple_item_dict) + else: + if use_complex_item: + item = deepcopy(complex_item_ddb) + else: + item = deepcopy(simple_item_ddb) + # Add a suffix to the partition key to avoid collisions between test runs. 
+ if isinstance(item["partition_key"], dict): + item["partition_key"]["S"] += test_run_suffix + else: + item["partition_key"] += test_run_suffix + return item + + +@pytest.fixture +def paginate_query_request(expect_standard_dictionaries, test_key): + if expect_standard_dictionaries: + return {**basic_query_paginator_request(test_key), "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME} + return basic_query_paginator_request(test_key) + + +@pytest.fixture +def put_item_request(expect_standard_dictionaries, test_item): + if expect_standard_dictionaries: + # Client requests with `expect_standard_dictionaries=True` use dict-formatted requests + # with an added "TableName" key. + return {**basic_put_item_request_dict(test_item), "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME} + return basic_put_item_request_ddb(test_item) + + +@pytest.fixture +def paginate_scan_request(expect_standard_dictionaries, test_item): + """Get a scan paginator request in the appropriate format for the client.""" + if expect_standard_dictionaries: + request = {**basic_scan_paginator_request(test_item), "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME} + else: + request = basic_scan_paginator_request(test_item) + return request + + +# Fixtures for legacy encryptors and clients + + +@pytest.fixture(params=["client", "table", "resource"], ids=["legacy_client", "legacy_table", "legacy_resource"]) +def legacy_encryptor(request): + """ + Create a legacy encryptor of the specified type. + + This fixture creates legacy encryptors of three types: + - client: DynamoDB Encryption Client's EncryptedClient + - table: DynamoDB Encryption Client's EncryptedTable + - resource: DynamoDB Encryption Client's EncryptedResource + """ + if request.param == "client": + return create_legacy_encrypted_client() + elif request.param == "table": + return create_legacy_encrypted_table() + elif request.param == "resource": + return create_legacy_encrypted_resource() + + +# Fixtures for each legacy policy +@pytest.fixture( + params=[ + LegacyPolicy.FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT, + LegacyPolicy.FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT, + LegacyPolicy.FORBID_LEGACY_ENCRYPT_FORBID_LEGACY_DECRYPT, + ] +) +def legacy_policy(request): + return request.param + + +@pytest.fixture +def encrypted_client(legacy_encryptor, legacy_policy, expect_standard_dictionaries): + return encrypted_client_with_legacy_override( + legacy_encryptor=legacy_encryptor, + legacy_policy=legacy_policy, + expect_standard_dictionaries=expect_standard_dictionaries, + ) + + +@pytest.fixture +def client_legacy_force_encrypt_allow_decrypt(legacy_encryptor, expect_standard_dictionaries): + """Create AWS DBE SDK client with FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT policy.""" + return encrypted_client_with_legacy_override( + legacy_encryptor=legacy_encryptor, + legacy_policy=LegacyPolicy.FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT, + expect_standard_dictionaries=expect_standard_dictionaries, + ) + + +def test_GIVEN_awsdbe_encrypted_item_WHEN_paginate_with_legacy_query_paginator( + encrypted_client, put_item_request, paginate_query_request, test_item, legacy_policy, expect_standard_dictionaries +): + # Given: Valid put_item request + # When: put_item using AWS DB-ESDK client + put_response = encrypted_client.put_item(**put_item_request) + # Then: Item is stored in the table + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Given: Fresh legacy encrypted client and query paginator + legacy_encrypted_client = create_legacy_encrypted_client( + 
attribute_actions=legacy_actions(), + expect_standard_dictionaries=expect_standard_dictionaries, + ) + legacy_query_paginator = legacy_encrypted_client.get_paginator("query") + + if legacy_policy == LegacyPolicy.FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT: + # When: Paginate with legacy query paginator using FORCE_LEGACY_ENCRYPT policy + # Then: Legacy paginator can read and decrypt items + response = legacy_query_paginator.paginate(**paginate_query_request) + items = [] + for page in response: + if "Items" in page: + for item in page["Items"]: + items.append(item) + + assert len(items) == 1 + expected_item = sort_dynamodb_json_lists(test_item) + legacy_actual_item = sort_dynamodb_json_lists(items[0]) + assert expected_item == legacy_actual_item + else: + # When: Paginate with legacy query paginator using FORBID policies + # Then: Legacy paginator cannot decrypt items created with FORBID_LEGACY_ENCRYPT policy + with pytest.raises(DecryptionError): + response = legacy_query_paginator.paginate(**paginate_query_request) + items = [] + for page in response: + if "Items" in page: + for item in page["Items"]: + items.append(item) + + +def test_GIVEN_awsdbe_encrypted_item_WHEN_paginate_with_legacy_scan_paginator( + encrypted_client, put_item_request, paginate_scan_request, test_item, legacy_policy, expect_standard_dictionaries +): + # Given: Valid put_item request + # When: put_item using AWS DB-ESDK client + put_response = encrypted_client.put_item(**put_item_request) + # Then: Item is stored in the table + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Given: Fresh legacy encrypted client and scan paginator + legacy_encrypted_client = create_legacy_encrypted_client( + attribute_actions=legacy_actions(), + expect_standard_dictionaries=expect_standard_dictionaries, + ) + legacy_scan_paginator = legacy_encrypted_client.get_paginator("scan") + + if legacy_policy == LegacyPolicy.FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT: + # When: Paginate with legacy scan paginator using FORCE_LEGACY_ENCRYPT policy + # Then: Legacy paginator can read and decrypt items + response = legacy_scan_paginator.paginate(**paginate_scan_request) + items = [] + for page in response: + if "Items" in page: + for item in page["Items"]: + items.append(item) + + assert len(items) == 1 + expected_item = sort_dynamodb_json_lists(test_item) + legacy_actual_item = sort_dynamodb_json_lists(items[0]) + assert expected_item == legacy_actual_item + else: + # When: Paginate with legacy scan paginator using FORBID policies + # Then: Legacy paginator cannot decrypt items created with FORBID_LEGACY_ENCRYPT policy + with pytest.raises(DecryptionError): + response = legacy_scan_paginator.paginate(**paginate_scan_request) + items = [] + for page in response: + if "Items" in page: + for item in page["Items"]: + items.append(item) + + +def test_GIVEN_legacy_encrypted_item_WHEN_paginate_with_awsdbe_query_paginator( + encrypted_client, put_item_request, paginate_query_request, test_item, legacy_policy, expect_standard_dictionaries +): + # Given: Fresh legacy encrypted client and valid put_item request + legacy_encrypted_client = create_legacy_encrypted_client( + attribute_actions=legacy_actions(), + expect_standard_dictionaries=expect_standard_dictionaries, + ) + # When: put_item using legacy client + legacy_encrypted_client.put_item(**put_item_request) + # Then: Item is stored in the table + + # Given: Query paginator with AWS DB-ESDK client + query_paginator = encrypted_client.get_paginator("query") + + if not legacy_policy 
== LegacyPolicy.FORBID_LEGACY_ENCRYPT_FORBID_LEGACY_DECRYPT: + # When: Paginate with AWS DB-ESDK query paginator using ALLOW_LEGACY_DECRYPT policies + # Then: AWS DB-ESDK paginator can read the legacy-encrypted item + response = query_paginator.paginate(**paginate_query_request) + items = [] + for page in response: + if "Items" in page: + for item in page["Items"]: + items.append(item) + + assert len(items) == 1 + expected_item = sort_dynamodb_json_lists(test_item) + actual_item = sort_dynamodb_json_lists(items[0]) + assert expected_item == actual_item + else: + # Given: Valid paginate request with FORBID_LEGACY_DECRYPT policy + # When: Paginate with AWS DB-ESDK client + # Then: Throws a DynamoDbItemEncryptor exception (AWS DB-ESDK with FORBID policy cannot decrypt legacy items) + with pytest.raises(DynamoDbItemEncryptor): + response = query_paginator.paginate(**paginate_query_request) + items = [] + for page in response: + if "Items" in page: + for item in page["Items"]: + items.append(item) + + +def test_GIVEN_legacy_encrypted_item_WHEN_paginate_with_awsdbe_scan_paginator( + encrypted_client, put_item_request, paginate_scan_request, test_item, legacy_policy, expect_standard_dictionaries +): + # Given: Fresh legacy encrypted client and valid put_item request + legacy_encrypted_client = create_legacy_encrypted_client( + attribute_actions=legacy_actions(), + expect_standard_dictionaries=expect_standard_dictionaries, + ) + # When: put_item using legacy client + legacy_encrypted_client.put_item(**put_item_request) + # Then: Item is stored in the table + + # Given: Scan paginator with AWS DB-ESDK client + scan_paginator = encrypted_client.get_paginator("scan") + + if not legacy_policy == LegacyPolicy.FORBID_LEGACY_ENCRYPT_FORBID_LEGACY_DECRYPT: + # When: Paginate with AWS DB-ESDK scan paginator using ALLOW_LEGACY_DECRYPT policies + # Then: AWS DB-ESDK paginator can read the legacy-encrypted item + response = scan_paginator.paginate(**paginate_scan_request) + items = [] + for page in response: + if "Items" in page: + for item in page["Items"]: + items.append(item) + + assert len(items) == 1 + expected_item = sort_dynamodb_json_lists(test_item) + actual_item = sort_dynamodb_json_lists(items[0]) + assert expected_item == actual_item + else: + # Given: Valid paginate request with FORBID_LEGACY_DECRYPT policy + # When: Paginate with AWS DB-ESDK client + # Then: Throws a DynamoDbItemEncryptor exception (AWS DB-ESDK with FORBID policy cannot decrypt legacy items) + with pytest.raises(DynamoDbItemEncryptor): + response = scan_paginator.paginate(**paginate_scan_request) + items = [] + for page in response: + if "Items" in page: + for item in page["Items"]: + items.append(item) + + +# Delete the items in the table after the module runs +@pytest.fixture(scope="module", autouse=True) +def cleanup_after_module(test_run_suffix): + yield + table = boto3.client("dynamodb") + items = [deepcopy(simple_item_ddb), deepcopy(complex_item_ddb)] + for item in items: + item["partition_key"]["S"] += test_run_suffix + table.delete_item(**basic_delete_item_request_ddb(item)) diff --git a/DynamoDbEncryption/runtimes/python/test/integ/legacy/test_resource.py b/DynamoDbEncryption/runtimes/python/test/integ/legacy/test_resource.py new file mode 100644 index 000000000..037ac9299 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/integ/legacy/test_resource.py @@ -0,0 +1,182 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import uuid +from copy import deepcopy + +import boto3 +import pytest +from dynamodb_encryption_sdk.exceptions import DecryptionError + +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.errors import ( + DynamoDbItemEncryptor, +) +from aws_dbesdk_dynamodb.structures.dynamodb import LegacyPolicy + +from ...constants import ( + INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, +) +from ...items import ( + complex_item_ddb, + complex_item_dict, + complex_key_dict, + simple_item_ddb, + simple_item_dict, + simple_key_dict, +) +from ...requests import ( + basic_batch_get_item_request_dict, + basic_batch_write_item_put_request_dict, + basic_delete_item_request_ddb, +) +from .utils import ( + create_legacy_encrypted_client, + create_legacy_encrypted_resource, + create_legacy_encrypted_table, + encrypted_resource_with_legacy_override, + legacy_actions, +) + + +@pytest.fixture +def tables(resource): + return resource.tables + + +@pytest.fixture(scope="module") +def test_run_suffix(): + return "-" + str(uuid.uuid4()) + + +@pytest.fixture +def test_items(test_run_suffix): + items = [deepcopy(complex_item_dict), deepcopy(simple_item_dict)] + for item in items: + item["partition_key"] += test_run_suffix + return items + + +@pytest.fixture +def test_keys(test_run_suffix): + keys = [deepcopy(complex_key_dict), deepcopy(simple_key_dict)] + for key in keys: + key["partition_key"] += test_run_suffix + return keys + + +@pytest.fixture(params=["client", "table", "resource"], ids=["legacy_client", "legacy_table", "legacy_resource"]) +def legacy_encryptor(request): + """ + Create a legacy encryptor of the specified type. + + This fixture creates legacy encryptors of three types: + - client: DynamoDB Encryption Client's EncryptedClient + - table: DynamoDB Encryption Client's EncryptedTable + - resource: DynamoDB Encryption Client's EncryptedResource + """ + if request.param == "client": + return create_legacy_encrypted_client() + elif request.param == "table": + return create_legacy_encrypted_table() + elif request.param == "resource": + return create_legacy_encrypted_resource() + + +@pytest.fixture( + params=[ + LegacyPolicy.FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT, + LegacyPolicy.FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT, + LegacyPolicy.FORBID_LEGACY_ENCRYPT_FORBID_LEGACY_DECRYPT, + ] +) +def legacy_policy(request): + """Fixture providing different legacy policies to test.""" + return request.param + + +@pytest.fixture +def encrypted_resource(legacy_encryptor, legacy_policy): + """Create AWS DBE SDK resource with specified legacy policy.""" + return encrypted_resource_with_legacy_override( + legacy_encryptor=legacy_encryptor, + legacy_policy=legacy_policy, + ) + + +def test_GIVEN_awsdbe_encrypted_item_WHEN_get_with_legacy_resource( + encrypted_resource, + test_items, + test_keys, + legacy_policy, +): + # Given: Valid batch_write_item request with items to put + batch_write_item_put_request = basic_batch_write_item_put_request_dict(test_items) + # When: batch_write_item using AWS DB-ESDK resource + batch_write_response = encrypted_resource.batch_write_item(**batch_write_item_put_request) + # Then: batch_write_item succeeds + assert batch_write_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Given: Fresh legacy encrypted resource and valid batch_get request + legacy_resource = create_legacy_encrypted_resource(attribute_actions=legacy_actions()) + batch_get_item_request = basic_batch_get_item_request_dict(test_keys) + + if 
legacy_policy == LegacyPolicy.FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT: + # When: batch_get_item with legacy resource using FORCE_LEGACY_ENCRYPT policy + # Then: Items can be decrypted by legacy resource + batch_get_response = legacy_resource.batch_get_item(**batch_get_item_request) + assert batch_get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + responses = batch_get_response["Responses"][INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME] + assert len(responses) == 2 + for response in responses: + assert response in test_items + else: + # When: batch_get_item with legacy resource using FORBID policies + # Then: Legacy resource cannot decrypt items created with FORBID_LEGACY_ENCRYPT policy + with pytest.raises(DecryptionError): + legacy_resource.batch_get_item(**batch_get_item_request) + + +def test_GIVEN_legacy_encrypted_item_WHEN_get_with_awsdbe_resource( + encrypted_resource, + test_items, + test_keys, + legacy_policy, +): + # Given: Fresh legacy encrypted resource and valid batch_write request + legacy_resource = create_legacy_encrypted_resource(attribute_actions=legacy_actions()) + + # When: batch_write_item using legacy resource + batch_write_item_put_request = basic_batch_write_item_put_request_dict(test_items) + batch_write_response = legacy_resource.batch_write_item(**batch_write_item_put_request) + # Then: batch_write_item succeeds + assert batch_write_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Create batch_get request + batch_get_item_request = basic_batch_get_item_request_dict(test_keys) + + if not legacy_policy == LegacyPolicy.FORBID_LEGACY_ENCRYPT_FORBID_LEGACY_DECRYPT: + # Given: Valid batch_get_item request for the same items with ALLOW_LEGACY_DECRYPT policy + # When: batch_get_item using AWS DB-ESDK resource + batch_get_response = encrypted_resource.batch_get_item(**batch_get_item_request) + # Then: Legacy resource can decrypt items created with FORCE_LEGACY_ENCRYPT policy + assert batch_get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + responses = batch_get_response["Responses"][INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME] + assert len(responses) == 2 + for response in responses: + assert response in test_items + else: + # Given: Valid get_item request for the same item with FORBID_LEGACY_DECRYPT policy + # When: get_item using AWS DB-ESDK client + # Then: Throws DynamoDbItemEncryptor exception (AWS DB-ESDK with FORBID policy cannot decrypt legacy items) + with pytest.raises(DynamoDbItemEncryptor): + encrypted_resource.batch_get_item(**batch_get_item_request) + + +# Delete the items in the table after the module runs +@pytest.fixture(scope="module", autouse=True) +def cleanup_after_module(test_run_suffix): + yield + table = boto3.client("dynamodb") + items = [deepcopy(simple_item_ddb), deepcopy(complex_item_ddb)] + for item in items: + item["partition_key"]["S"] += test_run_suffix + table.delete_item(**basic_delete_item_request_ddb(item)) diff --git a/DynamoDbEncryption/runtimes/python/test/integ/legacy/test_table.py b/DynamoDbEncryption/runtimes/python/test/integ/legacy/test_table.py new file mode 100644 index 000000000..0cc4fedef --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/integ/legacy/test_table.py @@ -0,0 +1,168 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import uuid +from copy import deepcopy + +import boto3 +import pytest +from dynamodb_encryption_sdk.exceptions import DecryptionError + +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.errors import ( + DynamoDbItemEncryptor, +) +from aws_dbesdk_dynamodb.structures.dynamodb import LegacyPolicy + +from ...constants import INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME +from ...items import ( + complex_item_dict, + simple_item_dict, +) +from ...requests import basic_delete_item_request_dict, basic_get_item_request_dict, basic_put_item_request_dict +from .utils import ( + create_legacy_encrypted_client, + create_legacy_encrypted_resource, + create_legacy_encrypted_table, + encrypted_table_with_legacy_override, + legacy_actions, +) + + +@pytest.fixture(scope="module") +def test_run_suffix(): + return "-" + str(uuid.uuid4()) + + +# Creates a matrix of tests for each value in param, +# with a user-friendly string for test output: +# use_complex_item = True -> "complex_item" +# use_complex_item = False -> "simple_item" +@pytest.fixture(params=[simple_item_dict, complex_item_dict], ids=["simple_item", "complex_item"]) +def test_item(request, test_run_suffix): + item = deepcopy(request.param) + item["partition_key"] += test_run_suffix + return item + + +# Fixtures for legacy encryptors and tables + + +@pytest.fixture(params=["client", "table", "resource"], ids=["legacy_client", "legacy_table", "legacy_resource"]) +def legacy_encryptor(request): + """ + Create a legacy encryptor of the specified type. + + This fixture creates legacy encryptors of three types: + - client: DynamoDB Encryption Client's EncryptedClient + - table: DynamoDB Encryption Client's EncryptedTable + - resource: DynamoDB Encryption Client's EncryptedResource + """ + if request.param == "client": + return create_legacy_encrypted_client() + elif request.param == "table": + return create_legacy_encrypted_table() + elif request.param == "resource": + return create_legacy_encrypted_resource() + + +# Fixtures for each legacy policy + + +@pytest.fixture( + params=[ + LegacyPolicy.FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT, + LegacyPolicy.FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT, + LegacyPolicy.FORBID_LEGACY_ENCRYPT_FORBID_LEGACY_DECRYPT, + ] +) +def legacy_policy(request): + return request.param + + +@pytest.fixture +def encrypted_table(legacy_encryptor, legacy_policy): + """Create AWS DBE SDK table with specified legacy policy.""" + return encrypted_table_with_legacy_override( + legacy_encryptor=legacy_encryptor, + legacy_policy=legacy_policy, + ) + + +def test_GIVEN_awsdbe_encrypted_item_WHEN_get_with_legacy_table( + encrypted_table, + test_item, + legacy_policy, +): + # Given: Valid put_item request + put_item_request_dict = basic_put_item_request_dict(test_item) + # When: put_item + put_response = encrypted_table.put_item(**put_item_request_dict) + # Then: put_item succeeds + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Given: Fresh legacy encryptor of the same type as used in the fixture + legacy_encrypted_table = create_legacy_encrypted_table( + attribute_actions=legacy_actions(), + ) + + # Get item request + get_item_request_dict = basic_get_item_request_dict(test_item) + + if legacy_policy == LegacyPolicy.FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT: + # Given: Valid get_item request for the same item using legacy encryptor with FORCE_LEGACY_ENCRYPT policy + # When: get_item with legacy encryptor + get_response = 
legacy_encrypted_table.get_item(**get_item_request_dict) + # Then: Response is equal to the original item (legacy encryptor can decrypt item written by AWS DB-ESDK) + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + assert get_response["Item"] == put_item_request_dict["Item"] + else: + # Given: Valid get_item request for the same item using legacy encryptor with FORBID_LEGACY_ENCRYPT policy + # When: get_item with legacy encryptor + # Then: throws DecryptionError Exception (i.e. legacy encryptor cannot read values in new format) + with pytest.raises(DecryptionError): # The exact exception may vary in Python implementation + # Try to read the item with the legacy encryptor + legacy_encrypted_table.get_item(**get_item_request_dict) + + +def test_GIVEN_legacy_encrypted_item_WHEN_get_with_awsdbe( + encrypted_table, + test_item, + legacy_policy, +): + # Given: Fresh legacy encryptor and valid put_item request + legacy_encrypted_table = create_legacy_encrypted_table( + attribute_actions=legacy_actions(), + ) + # Given: Valid put_item request + put_item_request_dict = basic_put_item_request_dict(test_item) + # When: put_item using legacy encryptor + put_response = legacy_encrypted_table.put_item(**put_item_request_dict) + # Then: put_item succeeds (item is written using legacy format) + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Get item request + get_item_request_dict = basic_get_item_request_dict(test_item) + + if not legacy_policy == LegacyPolicy.FORBID_LEGACY_ENCRYPT_FORBID_LEGACY_DECRYPT: + # Given: Valid get_item request for the same item with ALLOW_LEGACY_DECRYPT policy + # When: get_item using AWS DB-ESDK client + get_response = encrypted_table.get_item(**get_item_request_dict) + # Then: Table can read the legacy-encrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + assert get_response["Item"] == put_item_request_dict["Item"] + else: + # Given: Valid get_item request for the same item with FORBID_LEGACY_DECRYPT policy + # When: get_item using AWS DB-ESDK client + # Then: Throws a DynamoDbItemEncryptor exception (AWS DB-ESDK with FORBID policy cannot decrypt legacy items) + with pytest.raises(DynamoDbItemEncryptor): + encrypted_table.get_item(**get_item_request_dict) + + +# Delete the items in the table after the module runs +@pytest.fixture(scope="module", autouse=True) +def cleanup_after_module(test_run_suffix): + yield + table = boto3.resource("dynamodb").Table(INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME) + items = [deepcopy(simple_item_dict), deepcopy(complex_item_dict)] + for item in items: + item["partition_key"] = item["partition_key"] + test_run_suffix + table.delete_item(**basic_delete_item_request_dict(item)) diff --git a/DynamoDbEncryption/runtimes/python/test/integ/legacy/utils.py b/DynamoDbEncryption/runtimes/python/test/integ/legacy/utils.py new file mode 100644 index 000000000..8e12cc935 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/integ/legacy/utils.py @@ -0,0 +1,158 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import uuid + +import boto3 +from dynamodb_encryption_sdk.encrypted.client import EncryptedClient as LegacyEncryptedClient +from dynamodb_encryption_sdk.encrypted.resource import EncryptedResource as LegacyEncryptedResource +from dynamodb_encryption_sdk.encrypted.table import EncryptedTable as LegacyEncryptedTable +from dynamodb_encryption_sdk.identifiers import CryptoAction +from dynamodb_encryption_sdk.material_providers.aws_kms import AwsKmsCryptographicMaterialsProvider +from dynamodb_encryption_sdk.structures import AttributeActions + +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient +from aws_dbesdk_dynamodb.encrypted.resource import EncryptedResource +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, + LegacyOverride, +) + +from ...constants import ( + INTEG_TEST_DEFAULT_ALGORITHM_SUITE_ID, + INTEG_TEST_DEFAULT_ATTRIBUTE_ACTIONS_ON_ENCRYPT, + INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, + INTEG_TEST_DEFAULT_KEYRING, + INTEG_TEST_DEFAULT_KMS_KEY_ID, + INTEG_TEST_DEFAULT_UNSIGNED_ATTRIBUTE_PREFIX, +) + + +def generate_unique_suffix(): + """Generate a unique suffix for test items.""" + return "-" + str(uuid.uuid4()) + + +# Legacy Attribute Actions +def legacy_actions(): + return AttributeActions( + default_action=CryptoAction.ENCRYPT_AND_SIGN, + attribute_actions={ + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + }, + ) + + +# Legacy interface creation functions + + +def create_legacy_encrypted_client(attribute_actions=None, expect_standard_dictionaries=False): + """Create a legacy DynamoDB encrypted client.""" + cmp = AwsKmsCryptographicMaterialsProvider(key_id=INTEG_TEST_DEFAULT_KMS_KEY_ID) + return LegacyEncryptedClient( + client=plaintext_client(expect_standard_dictionaries), + materials_provider=cmp, + attribute_actions=attribute_actions, + expect_standard_dictionaries=expect_standard_dictionaries, + ) + + +def create_legacy_encrypted_table(attribute_actions=None): + """Create a legacy DynamoDB encrypted table.""" + plaintext_table = boto3.resource("dynamodb").Table(INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME) + cmp = AwsKmsCryptographicMaterialsProvider(key_id=INTEG_TEST_DEFAULT_KMS_KEY_ID) + return LegacyEncryptedTable( + table=plaintext_table, + materials_provider=cmp, + attribute_actions=attribute_actions, + ) + + +def create_legacy_encrypted_resource(attribute_actions=None): + """Create a legacy DynamoDB encrypted resource.""" + plaintext_resource = boto3.resource("dynamodb") + cmp = AwsKmsCryptographicMaterialsProvider(key_id=INTEG_TEST_DEFAULT_KMS_KEY_ID) + return LegacyEncryptedResource( + resource=plaintext_resource, + materials_provider=cmp, + attribute_actions=attribute_actions, + ) + + +# AWS DBE SDK interface creation functions with legacy override + + +def create_encryption_config(legacy_encryptor, legacy_policy): + """Create a DynamoDbTableEncryptionConfig with optional legacy override.""" + # Configure legacy behavior + legacy_override = LegacyOverride( + encryptor=legacy_encryptor, + attribute_actions_on_encrypt=INTEG_TEST_DEFAULT_ATTRIBUTE_ACTIONS_ON_ENCRYPT, + policy=legacy_policy, + ) + + # Create the table config with legacy override + table_config = DynamoDbTableEncryptionConfig( + 
logical_table_name=INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=INTEG_TEST_DEFAULT_ATTRIBUTE_ACTIONS_ON_ENCRYPT, + keyring=INTEG_TEST_DEFAULT_KEYRING, + legacy_override=legacy_override, + allowed_unsigned_attribute_prefix=INTEG_TEST_DEFAULT_UNSIGNED_ATTRIBUTE_PREFIX, + algorithm_suite_id=INTEG_TEST_DEFAULT_ALGORITHM_SUITE_ID, + ) + + # Create the tables config + table_configs = {INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME: table_config} + return DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + +def plaintext_client(expect_standard_dictionaries): + if expect_standard_dictionaries: + client = boto3.resource("dynamodb").Table(INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME).meta.client + else: + client = boto3.client("dynamodb") + return client + + +def encrypted_client_with_legacy_override( + legacy_encryptor=None, legacy_policy=None, expect_standard_dictionaries=False +): + """Create an AWS Database Encryption SDK client with optional legacy override.""" + tables_config = create_encryption_config(legacy_encryptor=legacy_encryptor, legacy_policy=legacy_policy) + + # Create the EncryptedClient + return EncryptedClient( + client=plaintext_client(expect_standard_dictionaries), + encryption_config=tables_config, + expect_standard_dictionaries=expect_standard_dictionaries, + ) + + +def encrypted_table_with_legacy_override(legacy_encryptor=None, legacy_policy=None): + """Create an AWS Database Encryption SDK table from a client.""" + tables_config = create_encryption_config(legacy_encryptor=legacy_encryptor, legacy_policy=legacy_policy) + + # Create the EncryptedTable + table = boto3.resource("dynamodb").Table(INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME) + return EncryptedTable( + table=table, + encryption_config=tables_config, + ) + + +def encrypted_resource_with_legacy_override(legacy_encryptor=None, legacy_policy=None): + """Create an AWS Database Encryption SDK resource with optional legacy override.""" + tables_config = create_encryption_config(legacy_encryptor=legacy_encryptor, legacy_policy=legacy_policy) + + # Create the EncryptedResource + return EncryptedResource( + resource=boto3.resource("dynamodb"), + encryption_config=tables_config, + ) diff --git a/DynamoDbEncryption/runtimes/python/test/internaldafny/__init__.py b/DynamoDbEncryption/runtimes/python/test/internaldafny/__init__.py new file mode 100644 index 000000000..f94fd12a2 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/internaldafny/__init__.py @@ -0,0 +1,2 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 diff --git a/DynamoDbEncryption/runtimes/python/test/internaldafny/test_dafny_wrapper.py b/DynamoDbEncryption/runtimes/python/test/internaldafny/test_dafny_wrapper.py new file mode 100644 index 000000000..3f98d441c --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/internaldafny/test_dafny_wrapper.py @@ -0,0 +1,20 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Wrapper file for executing Dafny tests from pytest. +This allows us to import modules required by Dafny-generated tests +before executing Dafny-generated tests. +pytest will find and execute the `test_dafny` method below, +which will execute the `internaldafny_test_executor.py` file in the `dafny` directory. 
+""" + +import sys + +internaldafny_dir = "/".join(__file__.split("/")[:-1]) + +sys.path.append(internaldafny_dir + "/extern") +sys.path.append(internaldafny_dir + "/generated") + + +def test_dafny(): + from .generated import __main__ diff --git a/DynamoDbEncryption/runtimes/python/test/items.py b/DynamoDbEncryption/runtimes/python/test/items.py new file mode 100644 index 000000000..2383585d6 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/items.py @@ -0,0 +1,63 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +from decimal import Decimal + +simple_item_ddb = { + "partition_key": {"S": "test-key"}, + "sort_key": {"N": "1"}, + "attribute1": {"S": "encrypted value"}, + "attribute2": {"S": "signed value"}, + ":attribute3": {"S": "unsigned value"}, +} + +simple_key_ddb = {"partition_key": simple_item_ddb["partition_key"], "sort_key": simple_item_ddb["sort_key"]} + +simple_item_dict = { + "partition_key": "test-key", + "sort_key": 1, + "attribute1": "encrypted value", + "attribute2": "signed value", + ":attribute3": "unsigned value", +} + +simple_key_dict = {"partition_key": simple_item_dict["partition_key"], "sort_key": simple_item_dict["sort_key"]} + +complex_item_ddb = { + "partition_key": {"S": "all-types-test"}, + "sort_key": {"N": "1"}, + "attribute1": { + "M": { + "string": {"S": "string value"}, + "number": {"N": "123.45"}, + "binary": {"B": b"binary data"}, + "string_set": {"SS": ["value1", "value2"]}, + "number_set": {"NS": ["1", "2", "3"]}, + "binary_set": {"BS": [b"binary1", b"binary2"]}, + "list": {"L": [{"S": "list item 1"}, {"N": "42"}, {"B": b"list binary"}]}, + "map": {"M": {"nested_string": {"S": "nested value"}, "nested_number": {"N": "42"}}}, + } + }, + "attribute2": {"S": "signed value"}, + ":attribute3": {"S": "unsigned value"}, +} + +complex_key_ddb = {"partition_key": complex_item_ddb["partition_key"], "sort_key": complex_item_ddb["sort_key"]} + +complex_item_dict = { + "partition_key": "all-types-test", + "sort_key": 1, + "attribute1": { + "string": "string value", + "number": Decimal("123.45"), + "binary": b"binary data", + "string_set": {"value1", "value2"}, + "number_set": {Decimal("1"), 2, Decimal("3")}, + "binary_set": {b"binary1", b"binary2"}, + "list": ["list item 1", 42, b"list binary"], + "map": {"nested_string": "nested value", "nested_number": 42}, + }, + "attribute2": "signed value", + ":attribute3": "unsigned value", +} + +complex_key_dict = {"partition_key": complex_item_dict["partition_key"], "sort_key": complex_item_dict["sort_key"]} diff --git a/DynamoDbEncryption/runtimes/python/test/requests.py b/DynamoDbEncryption/runtimes/python/test/requests.py new file mode 100644 index 000000000..900af6cf4 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/requests.py @@ -0,0 +1,585 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Request constants for DynamoDB operations used for testing.""" + +from boto3.dynamodb.conditions import Attr, Key + +from .constants import ( + INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, + INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME_PLAINTEXT, +) + +# Base request structures that are shared between DDB and dict formats +# Use ConsistentRead: True for all requests; +# many of these are used in integ tests, where consistent reads reduce test flakiness. 
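The fixtures above keep every test item in two parallel shapes: a native Python dict (`simple_item_dict`, `complex_item_dict`), used when `expect_standard_dictionaries=True`, and the equivalent DynamoDB JSON (`simple_item_ddb`, `complex_item_ddb`), used when the client speaks the low-level wire format. The sketch below is illustrative only and is not part of this change set; it uses boto3's `TypeSerializer`/`TypeDeserializer` (standard boto3 utilities that these tests do not themselves import) with a cut-down stand-in item to show how the two representations map onto each other.

```python
# Illustrative sketch (not part of the diff): relate the dict-format and
# DDB-JSON-format items kept side by side in items.py.
from decimal import Decimal

from boto3.dynamodb.types import TypeDeserializer, TypeSerializer

# Native-dict shape, as used when expect_standard_dictionaries=True.
item_dict = {"partition_key": "test-key", "sort_key": 1, "attribute2": "signed value"}

serializer = TypeSerializer()
item_ddb = {k: serializer.serialize(v) for k, v in item_dict.items()}
# item_ddb == {"partition_key": {"S": "test-key"},
#              "sort_key": {"N": "1"},
#              "attribute2": {"S": "signed value"}}

deserializer = TypeDeserializer()
round_tripped = {k: deserializer.deserialize(v) for k, v in item_ddb.items()}
# Numbers come back as Decimal, which is why complex_item_dict stores Decimal values.
assert round_tripped == {"partition_key": "test-key", "sort_key": Decimal("1"), "attribute2": "signed value"}
```

Only the attribute values change shape between the two representations; the surrounding request structure stays the same, which is why the base request builders below can be shared between the DDB and dict formats.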
+ + +def base_put_item_request(item): + """Base structure for put_item requests.""" + return {"Item": item} + + +def base_get_item_request(item): + """Base structure for get_item requests.""" + return {"Key": {"partition_key": item["partition_key"], "sort_key": item["sort_key"]}, "ConsistentRead": True} + + +def base_delete_item_request(item): + """Base structure for delete_item requests.""" + return {"Key": {"partition_key": item["partition_key"], "sort_key": item["sort_key"]}} + + +def base_query_request(item): + """Base structure for query requests.""" + return { + "KeyConditionExpression": "partition_key = :pk", + "ExpressionAttributeValues": {":pk": item["partition_key"]}, + "ConsistentRead": True, + } + + +def base_scan_request(item): + """Base structure for scan requests.""" + return { + "FilterExpression": "attribute2 = :a2", + "ExpressionAttributeValues": {":a2": item["attribute2"]}, + "ConsistentRead": True, + } + + +def base_batch_write_item_request(actions_with_items): + """Base structure for batch_write_item requests.""" + return {"RequestItems": {INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME: actions_with_items}} + + +def base_batch_get_item_request(keys): + """Base structure for batch_get_item requests.""" + return {"RequestItems": {INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME: {"Keys": keys, "ConsistentRead": True}}} + + +def base_transact_write_item_request(actions_with_items): + """Base structure for transact_write_item requests.""" + return {"TransactItems": actions_with_items} + + +def base_transact_get_item_request(keys): + """Base structure for transact_get_item requests.""" + return { + "TransactItems": [{"Get": {"TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, "Key": key}} for key in keys] + } + + +def base_update_item_request_signed_attribute(item): + """Base structure for update_item requests.""" + return { + "Key": {"partition_key": item["partition_key"], "sort_key": item["sort_key"]}, + "UpdateExpression": "SET attribute1 = :val", + "ExpressionAttributeValues": {":val": item["attribute1"]}, + } + + +def base_update_item_request_unsigned_attribute(item): + """Base structure for update_item requests.""" + return { + "Key": {"partition_key": item["partition_key"], "sort_key": item["sort_key"]}, + "UpdateExpression": "SET #attr3 = :val", + "ExpressionAttributeValues": {":val": item[":attribute3"]}, + "ExpressionAttributeNames": {"#attr3": ":attribute3"}, + } + + +def basic_execute_statement_request_encrypted_table(item): + """Base structure for execute_statement requests for an encrypted table.""" + return { + "Statement": f"""SELECT * FROM {INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME} + WHERE partition_key=? AND sort_key=?""", + "Parameters": [item["partition_key"], item["sort_key"]], + } + + +def basic_execute_statement_request_plaintext_table(item): + """Base structure for execute_statement requests for a plaintext table.""" + return { + "Statement": f"""SELECT * FROM {INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME_PLAINTEXT} + WHERE partition_key=? AND sort_key=?""", + "Parameters": [item["partition_key"], item["sort_key"]], + } + + +def basic_execute_transaction_request_encrypted_table(item): + """Base structure for execute_transaction requests for an encrypted table.""" + return { + "TransactStatements": [ + { + "Statement": f"""SELECT * FROM {INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME} + WHERE partition_key=? 
AND sort_key=?""", + "Parameters": [item["partition_key"], item["sort_key"]], + } + ] + } + + +def basic_execute_transaction_request_plaintext_table(item): + """Base structure for execute_transaction requests for a plaintext table.""" + return { + "TransactStatements": [ + { + "Statement": f"""SELECT * FROM {INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME_PLAINTEXT} + WHERE partition_key=? AND sort_key=?""", + "Parameters": [item["partition_key"], item["sort_key"]], + } + ] + } + + +def basic_batch_execute_statement_request_encrypted_table(): + """Base structure for batch_execute_statement requests.""" + return {"Statements": [{"Statement": "SELECT * FROM " + INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME}]} + + +def basic_batch_execute_statement_request_plaintext_table(): + """Base structure for batch_execute_statement requests for a plaintext table.""" + return {"Statements": [{"Statement": "SELECT * FROM " + INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME_PLAINTEXT}]} + + +# Base exhaustive request structures that are shared between DDB and dict formats + +# No exhaustive requests are intended to be able to be used as real requests. +# Some parameters conflict with each other when sent to DynamoDB. +# These are only intended to test the conversion of the structure between client and resource formats. + + +def base_exhaustive_put_item_request(item): + """ + Base structure for exhaustive put_item requests. + This is not intended to be able to be used as a real request. + Some parameters conflict with each other when sent to DynamoDB. + This is only intended to test the conversion of the request between client and resource formats. + """ + return { + # Expected is legacy, but still in the boto3 docs. + "Expected": { + "partition_key": { + "Value": item["partition_key"], + }, + "sort_key": {"AttributeValueList": [item["sort_key"]], "ComparisonOperator": "EQ"}, + }, + # "ExpressionAttributeNames": {"#pk": "partition_key", "#sk": "sort_key"}, + # "ExpressionAttributeValues": {":pk": item["partition_key"], ":sk": item["sort_key"]}, + "ReturnConsumedCapacity": "TOTAL", + "ReturnItemCollectionMetrics": "SIZE", + "ReturnValues": "ALL_OLD", + "ReturnValuesOnConditionCheckFailure": "ALL_OLD", + } + + +def base_exhaustive_get_item_request(item): + """ + Base structure for exhaustive get_item requests. + This is not intended to be able to be used as a real request. + Some parameters conflict with each other when sent to DynamoDB. + This is only intended to test the conversion of the request between client and resource formats. + """ + return { + "ReturnConsumedCapacity": "TOTAL", + "ReturnItemCollectionMetrics": "SIZE", + "ProjectionExpression": "partition_key, sort_key, attribute1, attribute2", + "ExpressionAttributeNames": { + "#pk": "partition_key", + "#sk": "sort_key", + "#a1": "attribute1", + "#a2": "attribute2", + }, + "ConsistentRead": True, + "AttributesToGet": ["partition_key", "sort_key", "attribute1", "attribute2"], + } + + +def base_exhaustive_delete_item_request(item): + """ + Base structure for exhaustive delete_item requests. + This is not intended to be able to be used as a real request. + Some parameters conflict with each other when sent to DynamoDB. + This is only intended to test the conversion of the request between client and resource formats. 
+ """ + return { + "ReturnConsumedCapacity": "TOTAL", + "ReturnItemCollectionMetrics": "SIZE", + "ReturnValues": "ALL_OLD", + "ReturnValuesOnConditionCheckFailure": "ALL_OLD", + } + + +def base_exhaustive_query_request(item): + """ + Base structure for exhaustive query requests. + This is not intended to be able to be used as a real request. + Some parameters conflict with each other when sent to DynamoDB. + This is only intended to test the conversion of the request between client and resource formats. + """ + return { + "IndexName": "index_name", + "Select": "SPECIFIC_ATTRIBUTES", + "AttributesToGet": ["partition_key", "sort_key", "attribute1", "attribute2"], + "KeyConditions": {"partition_key": {"AttributeValueList": [item["partition_key"]], "ComparisonOperator": "EQ"}}, + "QueryFilter": {"attribute1": {"AttributeValueList": [item["attribute1"]], "ComparisonOperator": "EQ"}}, + "ConditionalOperator": "AND", + "ScanIndexForward": True, + "ExclusiveStartKey": {"partition_key": item["partition_key"], "sort_key": item["sort_key"]}, + "ReturnConsumedCapacity": "TOTAL", + "ProjectionExpression": "partition_key, sort_key, attribute1, attribute2", + "FilterExpression": "attribute1 = :a1", + "ExpressionAttributeNames": { + "#pk": "partition_key", + "#sk": "sort_key", + "#a1": "attribute1", + "#a2": "attribute2", + }, + "ExpressionAttributeValues": {":pk": item["partition_key"], ":a1": item["attribute1"]}, + } + + +def base_exhaustive_scan_request(item): + """ + Base structure for exhaustive scan requests. + This is not intended to be able to be used as a real request. + Some parameters conflict with each other when sent to DynamoDB. + This is only intended to test the conversion of the request between client and resource formats. + """ + return { + "IndexName": "index_name", + "AttributesToGet": ["partition_key", "sort_key", "attribute1", "attribute2"], + "Select": "SPECIFIC_ATTRIBUTES", + "ScanFilter": {"attribute1": {"AttributeValueList": [item["attribute1"]], "ComparisonOperator": "EQ"}}, + "ConditionalOperator": "AND", + "ReturnConsumedCapacity": "TOTAL", + "ReturnItemCollectionMetrics": "SIZE", + "ExpressionAttributeNames": {"#a1": "attribute1"}, + "ExpressionAttributeValues": {":a1": item["attribute1"]}, + "ExclusiveStartKey": {"partition_key": item["partition_key"], "sort_key": item["sort_key"]}, + } + + +# No exhaustive requests for: +# - transact_write_items +# - transact_get_items +# - batch_write_item +# - batch_get_item +# - batch_execute_statement +# - execute_statement +# - execute_transaction +# The base requests sufficiently test the conversion of the request between client and resource formats +# for items. 
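As the comment above notes, the exhaustive requests exist only to exercise request conversion and are built by overlaying extra parameters on a basic request. The snippet below is an illustrative sketch, not part of the change set, of that dict-merge composition pattern (later keys win on merge); the table name and item in it are hypothetical stand-ins, not constants from this test suite.

```python
# Illustrative sketch (not part of the diff): the composition pattern used by
# the request builders that follow. A shared base request is built once, then
# format-specific and "exhaustive" variants layer keys on top of it; on a dict
# merge, later keys overwrite earlier ones.
item = {"partition_key": "example-pk", "sort_key": 1, "attribute1": "v1"}  # hypothetical item

base = {
    "Key": {"partition_key": item["partition_key"], "sort_key": item["sort_key"]},
    "ConsistentRead": True,
}

# DDB-format variant adds the physical table name (hypothetical name here).
ddb_request = {"TableName": "ExampleTable", **base}

# "Exhaustive" variant overlays additional, possibly conflicting, parameters.
additional = {"ReturnConsumedCapacity": "TOTAL", "ProjectionExpression": "partition_key, sort_key"}
exhaustive_request = {**ddb_request, **additional}

assert exhaustive_request["TableName"] == "ExampleTable"
assert exhaustive_request["ConsistentRead"] is True
assert exhaustive_request["ReturnConsumedCapacity"] == "TOTAL"
```

Because later keys win, a builder that intends to override a key already present in its base (for example, swapping a string `KeyConditionExpression` for a `boto3.dynamodb.conditions` object) must spread the base first and set the override afterwards; spreading the base last silently discards the override.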
+ +# DDB format request functions + + +def basic_put_item_request_ddb(item): + """Get a put_item request in DDB format for any item.""" + base = base_put_item_request(item) + return {"TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, **base} + + +def exhaustive_put_item_request_ddb(item): + """Get a put_item request in DDB format for any item.""" + base = basic_put_item_request_ddb(item) + additional_keys = base_exhaustive_put_item_request(item) + additional_keys["ConditionExpression"] = "attribute_not_exists(#pk) AND attribute_not_exists(#sk)" + return {**base, **additional_keys} + + +def basic_get_item_request_ddb(item): + """Get a get_item request in DDB format for any item.""" + base = base_get_item_request(item) + return {"TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, **base} + + +def exhaustive_get_item_request_ddb(item): + """Get a get_item request in DDB format for any item.""" + base = basic_get_item_request_ddb(item) + additional_keys = base_exhaustive_get_item_request(item) + return {**base, **additional_keys} + + +def basic_delete_item_request_ddb(item): + """Get a delete_item request in DDB format for any item.""" + base = base_delete_item_request(item) + return {"TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, **base} + + +def exhaustive_delete_item_request_ddb(item): + """Get a delete_item request in DDB format for any item.""" + base = basic_delete_item_request_ddb(item) + additional_keys = base_exhaustive_delete_item_request(item) + return {**base, **additional_keys} + + +def basic_query_request_ddb(item): + """Get a query request in DDB format for any item.""" + base = base_query_request(item) + return {"TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, **base} + + +def exhaustive_query_request_ddb(item): + """ + Query request with all possible parameters. + This is not intended to be able to be used as a real request. + Some parameters conflict with each other when sent to DynamoDB. + This is only intended to test the conversion of the request between client and resource formats. 
+ """ + base = basic_query_request_ddb(item) + additional_keys = base_exhaustive_query_request(item) + return {**base, **additional_keys} + + +def basic_scan_request_ddb(item): + """Get a scan request in DDB format for any item.""" + base = base_scan_request(item) + return {"TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, **base} + + +def exhaustive_scan_request_ddb(item): + """Get a scan request in DDB format for any item.""" + base = basic_scan_request_ddb(item) + additional_keys = base_exhaustive_scan_request(item) + return {**base, **additional_keys} + + +def basic_batch_write_item_request_ddb(actions_with_items): + """Get a batch_write_item request in DDB format for any items.""" + return base_batch_write_item_request(actions_with_items) + + +def basic_batch_write_item_put_request_ddb(items): + """Get a batch_write_item put request in DDB format for any items.""" + actions_with_items = [{"PutRequest": {"Item": item}} for item in items] + return basic_batch_write_item_request_ddb(actions_with_items) + + +def basic_batch_write_item_delete_request_ddb(keys): + """Get a batch_write_item delete request in DDB format for any keys.""" + actions_with_keys = [{"DeleteRequest": {"Key": key}} for key in keys] + return basic_batch_write_item_request_ddb(actions_with_keys) + + +def basic_batch_get_item_request_ddb(keys): + """Get a batch_get_item request in DDB format for any keys.""" + return base_batch_get_item_request(keys) + + +def basic_transact_write_item_request_ddb(actions_with_items): + """Get a transact_write_item request in DDB format for any items.""" + return base_transact_write_item_request(actions_with_items) + + +def basic_transact_write_item_put_request_ddb(items): + """Get a transact_write_item put request in DDB format for any items.""" + actions_with_items = [ + {"Put": {"TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, "Item": item}} for item in items + ] + return basic_transact_write_item_request_ddb(actions_with_items) + + +def basic_transact_write_item_delete_request_ddb(keys): + """Get a transact_write_item delete request in DDB format for any keys.""" + actions_with_keys = [{"Delete": {"TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, "Key": key}} for key in keys] + return basic_transact_write_item_request_ddb(actions_with_keys) + + +def basic_transact_write_item_condition_check_request_ddb(keys): + """Get a transact_write_item condition check request in DDB format for any keys.""" + actions_with_keys = [ + {"ConditionCheck": {"TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, "Key": key}} for key in keys + ] + return basic_transact_write_item_request_ddb(actions_with_keys) + + +def basic_transact_get_item_request_ddb(keys): + """Get a transact_get_item request in DDB format for any keys.""" + return base_transact_get_item_request(keys) + + +def basic_query_paginator_request(key): + """Get a query paginator request in DDB format for any item.""" + return { + "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, + "KeyConditionExpression": "partition_key = :pk AND sort_key = :sk", + "ExpressionAttributeValues": {":pk": key["partition_key"], ":sk": key["sort_key"]}, + "ConsistentRead": True, + } + + +def basic_scan_paginator_request(item): + """Get a scan paginator request in DDB format for any item.""" + return { + "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, + "FilterExpression": "partition_key = :pk AND sort_key = :sk", + "ExpressionAttributeValues": {":pk": item["partition_key"], ":sk": item["sort_key"]}, + "ConsistentRead": True, + } + + +def 
basic_update_item_request_ddb_signed_attribute(item):
+    """Get an update_item request in DDB format for any item."""
+    base = base_update_item_request_signed_attribute(item)
+    return {"TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, **base}
+
+
+def basic_update_item_request_ddb_unsigned_attribute(item):
+    """Get an update_item request in DDB format for any item."""
+    base = base_update_item_request_unsigned_attribute(item)
+    return {"TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, **base}
+
+
+# Dict format request functions
+
+
+def basic_put_item_request_dict(item):
+    """Get a put_item request in dict format for any item."""
+    return base_put_item_request(item)
+
+
+def exhaustive_put_item_request_dict(item):
+    """
+    Get a put_item request in dict format for any item.
+    This is not intended to be able to be used as a real request.
+    Some parameters conflict with each other when sent to DynamoDB.
+    This is only intended to test the conversion of the request between client and resource formats.
+    """
+    base = basic_put_item_request_dict(item)
+    # Replace the default ConditionExpression string with a ConditionExpression object
+    # to increase test coverage.
+    additional_keys = base_exhaustive_put_item_request(item)
+    additional_keys["ConditionExpression"] = Attr("#pk").not_exists() & Attr("#sk").not_exists()
+    return {**base, **additional_keys}
+
+
+def basic_get_item_request_dict(item):
+    """Get a get_item request in dict format for any item."""
+    return base_get_item_request(item)
+
+
+def basic_delete_item_request_dict(item):
+    """Get a delete_item request in dict format for any item."""
+    return base_delete_item_request(item)
+
+
+def exhaustive_get_item_request_dict(item):
+    """
+    Get a get_item request in dict format for any item.
+    This is not intended to be able to be used as a real request.
+    Some parameters conflict with each other when sent to DynamoDB.
+    This is only intended to test the conversion of the request between client and resource formats.
+    """
+    base = basic_get_item_request_dict(item)
+    additional_keys = base_exhaustive_get_item_request(item)
+    return {**base, **additional_keys}
+
+
+def basic_query_request_dict(item):
+    """Get a query request in dict format for any item."""
+    base = base_query_request(item)
+    return base
+
+
+def basic_query_request_dict_condition_expression(item):
+    """Get a query request in dict format for any item."""
+    base = base_query_request(item)
+    # Replace the default KeyConditionExpression string with a ConditionExpression object
+    # to increase test coverage.
+    # (The override is spread after `base` so it actually replaces the default string.)
+    return {**base, "KeyConditionExpression": Key("partition_key").eq(item["partition_key"])}
+
+
+def exhaustive_query_request_dict(item):
+    """
+    Get a query request in dict format for any item.
+    This is not intended to be able to be used as a real request.
+    Some parameters conflict with each other when sent to DynamoDB.
+    This is only intended to test the conversion of the request between client and resource formats.
+    """
+    base = basic_query_request_dict(item)
+    additional_keys = base_exhaustive_query_request(item)
+    return {**base, **additional_keys}
+
+
+def basic_scan_request_dict(item):
+    """Get a scan request in dict format for any item."""
+    return base_scan_request(item)
+
+
+def exhaustive_scan_request_dict(item):
+    """
+    Get a scan request in dict format for any item.
+    This is not intended to be able to be used as a real request.
+    Some parameters conflict with each other when sent to DynamoDB.
+ This is only intended to test the conversion of the request between client and resource formats. + """ + base = basic_scan_request_dict(item) + additional_keys = base_exhaustive_scan_request(item) + return {**base, **additional_keys} + + +def basic_batch_write_item_request_dict(actions_with_items): + """Get a batch_write_item request in dict format for any items.""" + return base_batch_write_item_request(actions_with_items) + + +def basic_batch_write_item_put_request_dict(items): + """Get a batch_put_item request in dict format for any items.""" + actions_with_items = [{"PutRequest": {"Item": item}} for item in items] + return basic_batch_write_item_request_dict(actions_with_items) + + +def basic_batch_write_item_delete_request_dict(keys): + """Get a batch_write_item delete request in dict format for any keys.""" + actions_with_keys = [{"DeleteRequest": {"Key": key}} for key in keys] + return basic_batch_write_item_request_dict(actions_with_keys) + + +def basic_batch_get_item_request_dict(keys): + """Get a batch_get_item request in dict format for any keys.""" + return base_batch_get_item_request(keys) + + +def basic_transact_write_item_request_dict(actions_with_items): + """Get a transact_write_item request in dict format for any items.""" + return base_transact_write_item_request(actions_with_items) + + +def basic_transact_write_item_put_request_dict(items): + """Get a transact_write_item put request in dict format for any items.""" + actions_with_items = [ + {"Put": {"TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, "Item": item}} for item in items + ] + return basic_transact_write_item_request_dict(actions_with_items) + + +def basic_transact_write_item_delete_request_dict(keys): + """Get a transact_write_item delete request in dict format for any keys.""" + actions_with_keys = [{"Delete": {"TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, "Key": key}} for key in keys] + return basic_transact_write_item_request_dict(actions_with_keys) + + +def basic_transact_write_item_condition_check_request_dict(keys): + """Get a transact_write_item condition check request in dict format for any keys.""" + actions_with_keys = [ + {"ConditionCheck": {"TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, "Key": key}} for key in keys + ] + return basic_transact_write_item_request_dict(actions_with_keys) + + +def basic_transact_get_item_request_dict(keys): + """Get a transact_get_item request in dict format for any keys.""" + return base_transact_get_item_request(keys) + + +def basic_update_item_request_dict_signed_attribute(item): + """Get an update_item request in dict format for any item.""" + base = base_update_item_request_signed_attribute(item) + return base + + +def basic_update_item_request_dict_unsigned_attribute(item): + """Get an update_item request in dict format for any item.""" + base = base_update_item_request_unsigned_attribute(item) + return base diff --git a/DynamoDbEncryption/runtimes/python/test/responses.py b/DynamoDbEncryption/runtimes/python/test/responses.py new file mode 100644 index 000000000..b765a0c09 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/responses.py @@ -0,0 +1,262 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +from test.integ.encrypted.test_resource import INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME + + +def basic_put_item_response(item): + """Get a put_item response in resource (ddb) format for any item.""" + return {"Attributes": item} + + +def exhaustive_put_item_response(item): + """ + Get a put_item response in resource (ddb) format for any item. + This is not intended to be a real response that DynamoDB would return, + but the response should contain additional attributes that DynamoDB could return. + This is only intended to exhaustively test the conversion of the request between client and resource formats. + """ + base = basic_put_item_response(item) + additional_keys = { + "ConsumedCapacity": {"CapacityUnits": 1, "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME}, + "ItemCollectionMetrics": { + "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME, + "ItemCollectionKey": {"partition_key": item["partition_key"]}, + }, + "SequenceNumber": "1234567890", + "SizeEstimateRangeGB": [0.5, 1.0], + } + return {**base, **additional_keys} + + +def basic_get_item_response(item): + """Get a get_item response in resource (ddb) format for any item.""" + return {"Item": item} + + +def exhaustive_get_item_response(item): + """ + Get a get_item response in resource (ddb) format for any item. + This is not intended to be a real response that DynamoDB would return, + but the response should contain additional attributes that DynamoDB could return. + This is only intended to exhaustively test the conversion of the request between client and resource formats. + """ + base = basic_get_item_response(item) + additional_keys = { + "ConsumedCapacity": {"CapacityUnits": 1, "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME}, + } + return {**base, **additional_keys} + + +def basic_query_response(items): + """Get a query response in resource (ddb) format for any items.""" + return { + "Items": items, + "Count": len(items), + "ScannedCount": len(items), + "ConsumedCapacity": {"CapacityUnits": 1, "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME}, + } + + +def exhaustive_query_response(items): + """ + Get a query response in resource (ddb) format for any items. + This is not intended to be a real response that DynamoDB would return, + but the response should contain additional attributes that DynamoDB could return. + This is only intended to exhaustively test the conversion of the request between client and resource formats. + """ + base = basic_query_response(items) + additional_keys = { + "LastEvaluatedKey": {"partition_key": items[-1]["partition_key"]}, + } + return {**base, **additional_keys} + + +def basic_scan_response(items, keys): + """Get a scan response in resource (ddb) format for any items.""" + return { + "Items": items, + } + + +def exhaustive_scan_response(items, keys): + """ + Get a scan response in resource (ddb) format for any items. + This is not intended to be a real response that DynamoDB would return, + but the response should contain additional attributes that DynamoDB could return. + This is only intended to exhaustively test the conversion of the request between client and resource formats. 
+ """ + base = basic_scan_response(items, keys) + additional_keys = { + "ConsumedCapacity": {"CapacityUnits": 1, "TableName": INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME}, + "Count": len(items), + "ScannedCount": len(items), + "LastEvaluatedKey": keys[-1], + } + return {**base, **additional_keys} + + +def basic_batch_get_item_response(items): + """Get a batch_get_item response in resource (ddb) format for any items.""" + return {"Responses": {INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME: items}} + + +def exhaustive_batch_get_item_response(items): + """ + Get a batch_get_item response in resource (ddb) format for any items. + This is not intended to be a real response that DynamoDB would return, + but the response should contain additional attributes that DynamoDB could return. + This is only intended to exhaustively test the conversion of the request between client and resource formats. + """ + base = basic_batch_get_item_response(items) + additional_keys = { + "UnprocessedKeys": { + INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME: { + "Keys": [{"partition_key": item["partition_key"]} for item in items] + } + }, + } + return {**base, **additional_keys} + + +def basic_batch_write_item_put_response(items): + """Get a batch_write_item response in resource (ddb) format for any items.""" + return { + "UnprocessedItems": {INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME: [{"PutRequest": {"Item": item}} for item in items]} + } + + +def exhaustive_batch_write_item_put_response(items): + """ + Get a batch_write_item response in resource (ddb) format for any items. + This is not intended to be a real response that DynamoDB would return, + but the response should contain additional attributes that DynamoDB could return. + This is only intended to exhaustively test the conversion of the request between client and resource formats. + """ + base = basic_batch_write_item_put_response(items) + additional_keys = { + "ItemCollectionMetrics": { + INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME: [ + {"ItemCollectionKey": {"partition_key": items[-1]["partition_key"]}} + ] + }, + } + return {**base, **additional_keys} + + +def basic_transact_write_items_response(items): + """Get a transact_write_items response in resource (ddb) format for any items.""" + return { + "ItemCollectionMetrics": { + INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME: [ + {"ItemCollectionKey": {"partition_key": items[-1]["partition_key"]}} + ] + }, + } + + +# No exhaustive response for transact_write_items; +# The basic_transact_write_items_response is sufficient + + +def basic_transact_get_items_response(items): + """Get a transact_get_items response in resource (ddb) format for any items.""" + return {"Responses": [{"Item": item} for item in items]} + + +# No exhaustive response for transact_get_items; +# The basic_transact_get_items_response is sufficient + + +def basic_update_item_response(item): + """Get an update_item response in resource (ddb) format for any item.""" + return {"Attributes": item} + + +def exhaustive_update_item_response(item): + """ + Get an update_item response in resource (ddb) format for any item. + This is not intended to be a real response that DynamoDB would return, + but the response should contain additional attributes that DynamoDB could return. + This is only intended to exhaustively test the conversion of the request between client and resource formats. 
+ """ + base = basic_update_item_response(item) + additional_keys = { + "ItemCollectionMetrics": { + "ItemCollectionKey": {"partition_key": item["partition_key"]}, + }, + } + return {**base, **additional_keys} + + +def basic_delete_item_response(item): + """Get a delete_item response in resource (ddb) format for any item.""" + return {"Attributes": item} + + +def exhaustive_delete_item_response(item): + """ + Get a delete_item response in resource (ddb) format for any item. + This is not intended to be a real response that DynamoDB would return, + but the response should contain additional attributes that DynamoDB could return. + This is only intended to exhaustively test the conversion of the request between client and resource formats. + """ + base = basic_delete_item_response(item) + additional_keys = { + "ItemCollectionMetrics": { + "ItemCollectionKey": {"partition_key": item["partition_key"]}, + }, + } + return {**base, **additional_keys} + + +def basic_execute_statement_response(items): + """Get an execute_statement response in resource (ddb) format for any items.""" + return {"Items": items} + + +def exhaustive_execute_statement_response(items): + """ + Get an execute_statement response in resource (ddb) format for any items. + This is not intended to be a real response that DynamoDB would return, + but the response should contain additional attributes that DynamoDB could return. + This is only intended to exhaustively test the conversion of the request between client and resource formats. + """ + base = basic_execute_statement_response(items) + additional_keys = { + "LastEvaluatedKey": { + "partition_key": items[-1]["partition_key"], + "sort_key": items[-1]["sort_key"], + }, + } + return {**base, **additional_keys} + + +def basic_execute_transaction_response(items): + """Get an execute_transaction response in resource (ddb) format for any items.""" + return {"Responses": [{"Item": item} for item in items]} + + +# No exhaustive response for execute_transaction; +# The basic_execute_transaction_response is sufficient + + +def basic_batch_execute_statement_response(items): + """Get a batch_execute_statement response in resource (ddb) format for any items.""" + return {"Responses": [{"Item": item} for item in items]} + + +def exhaustive_batch_execute_statement_response(items): + """ + Get a batch_execute_statement response in resource (ddb) format for any items. + This is not intended to be a real response that DynamoDB would return, + but the response should contain additional attributes that DynamoDB could return. + This is only intended to exhaustively test the conversion of the request between client and resource formats. + """ + base = basic_batch_execute_statement_response(items) + base["Responses"][0]["Error"] = { + "Item": { + "partition_key": items[0]["partition_key"], + "sort_key": items[0]["sort_key"], + } + } + return base diff --git a/DynamoDbEncryption/runtimes/python/test/unit/__init__.py b/DynamoDbEncryption/runtimes/python/test/unit/__init__.py new file mode 100644 index 000000000..f94fd12a2 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/unit/__init__.py @@ -0,0 +1,2 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0
diff --git a/DynamoDbEncryption/runtimes/python/test/unit/encrypted/test_client.py b/DynamoDbEncryption/runtimes/python/test/unit/encrypted/test_client.py
new file mode 100644
index 000000000..9b2275c5c
--- /dev/null
+++ b/DynamoDbEncryption/runtimes/python/test/unit/encrypted/test_client.py
@@ -0,0 +1,41 @@
+# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+"""Unit tests for EncryptedClient, the encrypting wrapper for boto3 DynamoDB clients."""
+import pytest
+from botocore.client import BaseClient
+from mock import MagicMock
+
+from aws_dbesdk_dynamodb.encrypted.client import (
+    EncryptedClient,
+)
+from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models import (
+    DynamoDbTablesEncryptionConfig,
+)
+
+mock_boto3_dynamodb_client = MagicMock(__class__=BaseClient)
+mock_tables_encryption_config = MagicMock(__class__=DynamoDbTablesEncryptionConfig)
+
+
+def test_GIVEN_valid_inputs_WHEN_create_EncryptedClient_THEN_success():
+    # Given: Valid EncryptedClient inputs
+    # When: Create EncryptedClient
+    EncryptedClient(
+        client=mock_boto3_dynamodb_client,
+        encryption_config=mock_tables_encryption_config,
+    )
+    # Then: Success
+
+
+def test_GIVEN_invalid_class_attribute_WHEN_getattr_THEN_raise_error():
+    # Create a mock with a specific spec that excludes our unknown attribute
+    mock_boto3_dynamodb_client = MagicMock(spec=["put_item", "get_item", "query", "scan"])
+    encrypted_client = EncryptedClient(
+        client=mock_boto3_dynamodb_client,
+        encryption_config=mock_tables_encryption_config,
+    )
+
+    # Then: AttributeError is raised
+    with pytest.raises(AttributeError):
+        # Given: Invalid class attribute: not_a_valid_attribute_on_EncryptedClient_nor_boto3_client
+        # When: getattr is called
+        encrypted_client.not_a_valid_attribute_on_EncryptedClient_nor_boto3_client()
diff --git a/DynamoDbEncryption/runtimes/python/test/unit/encrypted/test_paginator.py b/DynamoDbEncryption/runtimes/python/test/unit/encrypted/test_paginator.py
new file mode 100644
index 000000000..566008bb0
--- /dev/null
+++ b/DynamoDbEncryption/runtimes/python/test/unit/encrypted/test_paginator.py
@@ -0,0 +1,81 @@
+# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0 +import pytest +from botocore.client import BaseClient +from botocore.paginate import Paginator +from mock import MagicMock + +from aws_dbesdk_dynamodb.encrypted.client import ( + EncryptedPaginator, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models import ( + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models import ( + QueryInputTransformInput, + QueryInputTransformOutput, +) + +pytestmark = [pytest.mark.unit, pytest.mark.local] + +mock_boto3_dynamodb_client = MagicMock(__class__=BaseClient) +mock_tables_encryption_config = MagicMock(__class__=DynamoDbTablesEncryptionConfig) + + +def test_GIVEN_paginator_not_query_nor_scan_WHEN_paginate_THEN_defers_to_underlying_paginator(): + # Given: A paginator that is not a Query or Scan paginator + # Mock an underlying paginator to spy on its call pattern + underlying_paginator = MagicMock(__class__=Paginator) + underlying_paginator._model.name = "NotQueryNorScan" + non_query_scan_paginator = EncryptedPaginator( + paginator=underlying_paginator, + encryption_config=mock_tables_encryption_config, + ) + # When: Call paginate + for _ in non_query_scan_paginator.paginate(): + pass # Drain the generator + # Then: Call goes to underlying paginator + underlying_paginator.paginate.assert_called_once() + + +def test_GIVEN_kwargs_has_PaginationConfig_WHEN_paginate_THEN_PaginationConfig_is_added_back_to_request(): + # Mock an underlying paginator to spy on its call pattern + mock_underlying_paginator = MagicMock(__class__=Paginator) + mock_underlying_paginator._model.name = "Query" + paginator = EncryptedPaginator( + paginator=mock_underlying_paginator, + encryption_config=mock_tables_encryption_config, + ) + # Mock the input transform method to spy on its arguments + mock_input_transform_method = MagicMock() + mock_input_transform_method.return_value = QueryInputTransformOutput(transformed_input={"TableName": "test-table"}) + paginator._transformer.query_input_transform = mock_input_transform_method + # Given: A kwargs that has a PaginationConfig + kwargs_without_pagination_config = { + "TableName": "test-table", + } + kwargs_with_pagination_config = {**kwargs_without_pagination_config, "PaginationConfig": {"MaxItems": 10}} + # When: Call paginate + for _ in paginator.paginate(**kwargs_with_pagination_config): + pass # Drain the generator + # Then: PaginationConfig is added back to the request sent to the SDK + mock_underlying_paginator.paginate.assert_called_once_with(**kwargs_with_pagination_config) + # And: input_transform_method is called with kwargs without PaginationConfig + mock_input_transform_method.assert_called_once_with( + QueryInputTransformInput(sdk_input=kwargs_without_pagination_config) + ) + + +def test_GIVEN_invalid_class_attribute_WHEN_getattr_THEN_raise_error(): + # Create a mock with a specific spec that excludes our unknown attribute + mock_boto3_dynamodb_client = MagicMock(spec=["put_item", "get_item", "query", "scan"]) + encrypted_paginator = EncryptedPaginator( + paginator=mock_boto3_dynamodb_client, + encryption_config=mock_tables_encryption_config, + ) + + # Then: AttributeError is raised + with pytest.raises(AttributeError): + # Given: Invalid class attribute: not_a_valid_attribute_on_EncryptedPaginator_nor_boto3_paginator + # When: getattr is called + encrypted_paginator.not_a_valid_attribute_on_EncryptedPaginator_nor_boto3_paginator() diff --git 
a/DynamoDbEncryption/runtimes/python/test/unit/encrypted/test_resource.py b/DynamoDbEncryption/runtimes/python/test/unit/encrypted/test_resource.py
new file mode 100644
index 000000000..12293dd54
--- /dev/null
+++ b/DynamoDbEncryption/runtimes/python/test/unit/encrypted/test_resource.py
@@ -0,0 +1,15 @@
+# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+from mock import MagicMock
+
+from aws_dbesdk_dynamodb.encrypted.resource import EncryptedResource, EncryptedTablesCollectionManager
+
+
+def test_WHEN_boto3_client_attr_name_THEN_returns_expected_value():
+    # Given: EncryptedResource
+    assert EncryptedResource(resource=MagicMock(), encryption_config=MagicMock())._boto_client_attr_name == "_resource"
+    # And: EncryptedTablesCollectionManager
+    assert (
+        EncryptedTablesCollectionManager(collection=MagicMock(), encryption_config=MagicMock())._boto_client_attr_name
+        == "_collection"
+    )
diff --git a/DynamoDbEncryption/runtimes/python/test/unit/internal/README.md b/DynamoDbEncryption/runtimes/python/test/unit/internal/README.md
new file mode 100644
index 000000000..10b60d584
--- /dev/null
+++ b/DynamoDbEncryption/runtimes/python/test/unit/internal/README.md
@@ -0,0 +1,41 @@
+The `test_client_to_resource.py` and `test_resource_to_client.py` files
+in this directory verify that DBESDK's boto3 resource/client conversion methods
+correctly convert between resource/client shapes for all operations
+supported by DBESDK.
+
+The only shapes that require conversion are:
+
+- `AttributeValue`s (DDB items or keys)
+  - Client format example: `{"S": "some string"}`
+  - Resource format example: `"some string"`
+- ConditionExpressions (`KeyConditionExpression` or `FilterExpression`; only resource-to-client)
+  - Client shape ex.:
+    - KeyConditionExpression: `"attr = :value"`
+    - ExpressionAttributeValues: `{":value" : {"S" : "some value"}}`
+  - Resource shape ex.:
+    - KeyConditionExpression: `Attr("attr").eq("some value")`
+    - (Resources also support the client-style string expression)
+
+The conversion logic recursively traverses input/output shapes to find shapes that require conversion, then converts them.
+For example, for boto3 Table [put_item](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/table/put_item.html),
+the following kwargs MUST be converted from resource to client format:
+
+- `["Item"]`
+- `["Expected"][]["Value"]`
+- `["Expected"][]["AttributeValueList"]`
+- `["ConditionExpression"]`
+- `["ExpressionAttributeValues"]`
+
+The requests, responses, and items in the parent directory are shared between the integ tests and these unit tests.
+The integ tests send the exact request whose client/resource conversion is tested in the unit tests,
+and the integ tests receive the exact response whose conversion is tested in the unit tests.
+
+The integration tests verify that the basic forms of these requests and responses are accepted by DynamoDB, making them authoritative. The unit tests verify that DBESDK's conversion logic exactly transforms one shape format into the other.
+
+Note: The conversion logic is generated by Smithy-Dafny Python
+and the shape traversals are derived from the MPL's DynamoDB Smithy model.
+As a result, the correctness of this conversion logic primarily depends on the correctness of the Smithy codegen logic and the correctness of the DynamoDB Smithy model.
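+
+The snippet below is an illustrative sketch (not the DBESDK conversion code under test) of the same two
+conversions using plain boto3 utilities: `TypeSerializer`/`TypeDeserializer` for `AttributeValue`s and
+`ConditionExpressionBuilder` for condition objects. The placeholder names (`#n0`, `:v0`) are simply whatever
+boto3 happens to generate.
+
+```python
+# Illustrative only: plain boto3 utilities, not the DBESDK converters.
+from boto3.dynamodb.conditions import ConditionExpressionBuilder, Key
+from boto3.dynamodb.types import TypeDeserializer, TypeSerializer
+
+serializer = TypeSerializer()
+deserializer = TypeDeserializer()
+
+# AttributeValue conversion between resource format and client format.
+assert serializer.serialize("some string") == {"S": "some string"}
+assert deserializer.deserialize({"S": "some string"}) == "some string"
+
+# ConditionExpression conversion (resource-to-client only): a Key/Attr condition
+# object becomes an expression string plus name/value placeholder maps.
+built = ConditionExpressionBuilder().build_expression(
+    Key("partition_key").eq("some value"), is_key_condition=True
+)
+built.condition_expression         # e.g. "#n0 = :v0"
+built.attribute_name_placeholders  # e.g. {"#n0": "partition_key"}
+# Placeholder values are still resource-format; serialize them for the client shape.
+client_values = {
+    placeholder: serializer.serialize(value)
+    for placeholder, value in built.attribute_value_placeholders.items()
+}                                  # e.g. {":v0": {"S": "some value"}}
+```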
+ +Originally, the conversion logic was hand-written, +so these tests go beyond smoke testing to provide extra guarantees, +even though basic smoke testing should suffice now that the logic is machine-generated. diff --git a/DynamoDbEncryption/runtimes/python/test/unit/internal/__init__.py b/DynamoDbEncryption/runtimes/python/test/unit/internal/__init__.py new file mode 100644 index 000000000..f94fd12a2 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/unit/internal/__init__.py @@ -0,0 +1,2 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 diff --git a/DynamoDbEncryption/runtimes/python/test/unit/internal/test_client_to_resource.py b/DynamoDbEncryption/runtimes/python/test/unit/internal/test_client_to_resource.py new file mode 100644 index 000000000..c040e5fac --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/unit/internal/test_client_to_resource.py @@ -0,0 +1,745 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import pytest +from boto3.dynamodb.conditions import ConditionExpressionBuilder + +from aws_dbesdk_dynamodb.internal.client_to_resource import ClientShapeToResourceShapeConverter + +from ...items import ( + complex_item_ddb, + complex_item_dict, + complex_key_ddb, + complex_key_dict, + simple_item_ddb, + simple_item_dict, + simple_key_ddb, + simple_key_dict, +) +from ...requests import ( + basic_batch_execute_statement_request_encrypted_table, + basic_batch_get_item_request_ddb, + basic_batch_get_item_request_dict, + basic_batch_write_item_delete_request_ddb, + basic_batch_write_item_delete_request_dict, + basic_batch_write_item_put_request_ddb, + basic_batch_write_item_put_request_dict, + basic_delete_item_request_ddb, + basic_delete_item_request_dict, + basic_execute_statement_request_encrypted_table, + basic_execute_transaction_request_encrypted_table, + basic_get_item_request_ddb, + basic_get_item_request_dict, + basic_put_item_request_ddb, + basic_put_item_request_dict, + basic_query_request_ddb, + basic_query_request_dict, + basic_scan_request_ddb, + basic_scan_request_dict, + basic_transact_get_item_request_ddb, + basic_transact_get_item_request_dict, + basic_transact_write_item_condition_check_request_ddb, + basic_transact_write_item_condition_check_request_dict, + basic_transact_write_item_delete_request_ddb, + basic_transact_write_item_delete_request_dict, + basic_transact_write_item_put_request_ddb, + basic_transact_write_item_put_request_dict, + basic_update_item_request_ddb_unsigned_attribute, + basic_update_item_request_dict_unsigned_attribute, + exhaustive_get_item_request_ddb, + exhaustive_get_item_request_dict, + exhaustive_put_item_request_ddb, + exhaustive_put_item_request_dict, + exhaustive_query_request_ddb, + exhaustive_query_request_dict, + exhaustive_scan_request_ddb, + exhaustive_scan_request_dict, +) +from ...responses import ( + basic_batch_execute_statement_response, + basic_batch_get_item_response, + basic_batch_write_item_put_response, + basic_delete_item_response, + basic_execute_statement_response, + basic_execute_transaction_response, + basic_get_item_response, + basic_put_item_response, + basic_query_response, + basic_scan_response, + basic_transact_get_items_response, + basic_transact_write_items_response, + basic_update_item_response, + exhaustive_batch_execute_statement_response, + exhaustive_batch_get_item_response, + exhaustive_batch_write_item_put_response, + exhaustive_delete_item_response, + 
exhaustive_execute_statement_response, + exhaustive_get_item_response, + exhaustive_put_item_response, + exhaustive_query_response, + exhaustive_scan_response, + exhaustive_update_item_response, +) + +client_to_resource_converter = ClientShapeToResourceShapeConverter() + + +@pytest.fixture(params=[True, False], ids=["complex_item", "simple_item"]) +def use_complex_item(request): + return request.param + + +@pytest.fixture +def test_ddb_item(use_complex_item): + """Get a single test item in the appropriate format for the client.""" + if use_complex_item: + return complex_item_ddb + return simple_item_ddb + + +@pytest.fixture +def test_dict_item(use_complex_item): + """Get a single test item in the appropriate format for the client.""" + if use_complex_item: + return complex_item_dict + return simple_item_dict + + +@pytest.fixture +def test_ddb_key(use_complex_item): + """Get a single test item in the appropriate format for the client.""" + if use_complex_item: + return complex_key_ddb + return simple_key_ddb + + +@pytest.fixture +def test_dict_key(use_complex_item): + """Get a single test item in the appropriate format for the client.""" + if use_complex_item: + return complex_key_dict + return simple_key_dict + + +@pytest.fixture(params=[True, False], ids=["exhaustive_request", "basic_request"]) +def use_exhaustive_request(request): + return request.param + + +@pytest.fixture +def test_put_item_request_ddb(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_put_item_request_ddb + return basic_put_item_request_ddb + + +@pytest.fixture +def test_put_item_request_dict(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_put_item_request_dict + return basic_put_item_request_dict + + +def test_GIVEN_test_put_item_request_WHEN_client_to_resource_THEN_returns_dict_value( + test_put_item_request_ddb, test_put_item_request_dict, test_ddb_item, test_dict_item +): + # Given: Put item request + request = test_put_item_request_ddb(test_ddb_item) + # When: Converting to resource format + dict_item = client_to_resource_converter.put_item_request(request) + # Then: Returns dict value + # For exhaustive requests, we need to handle ConditionExpression separately + # since it keeps the original DDB-formatted string + expected_dict_request = test_put_item_request_dict(test_dict_item) + for key in dict_item.keys(): + if key != "ConditionExpression": + assert dict_item[key] == expected_dict_request[key] + + +@pytest.fixture +def test_put_item_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_put_item_response + return basic_put_item_response + + +def test_GIVEN_test_put_item_response_WHEN_client_to_resource_THEN_returns_dict_value( + test_put_item_response, test_ddb_key, test_dict_key +): + # Given: Put item response + response = test_put_item_response(test_ddb_key) + # When: Converting to resource format + dict_item = client_to_resource_converter.put_item_response(response) + # Then: Returns dict value + assert dict_item == test_put_item_response(test_dict_key) + + +@pytest.fixture +def test_get_item_request_ddb(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_get_item_request_ddb + return basic_get_item_request_ddb + + +@pytest.fixture +def test_get_item_request_dict(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_get_item_request_dict + return basic_get_item_request_dict + + +def test_GIVEN_test_get_item_request_WHEN_client_to_resource_THEN_returns_dict_value( + 
test_get_item_request_ddb, test_get_item_request_dict, test_ddb_item, test_dict_item +): + # Given: Get item request + request = test_get_item_request_ddb(test_ddb_item) + # When: Converting to resource format + dict_item = client_to_resource_converter.get_item_request(request) + # Then: Returns dict value + assert dict_item == test_get_item_request_dict(test_dict_item) + + +@pytest.fixture +def test_get_item_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_get_item_response + return basic_get_item_response + + +def test_GIVEN_test_get_item_response_WHEN_client_to_resource_THEN_returns_dict_value( + test_get_item_response, test_ddb_item, test_dict_item +): + # Given: Get item response + response = test_get_item_response(test_ddb_item) + # When: Converting to resource format + dict_item = client_to_resource_converter.get_item_response(response) + # Then: Returns dict value + assert dict_item == test_get_item_response(test_dict_item) + + +@pytest.fixture +def test_query_request_ddb(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_query_request_ddb + return basic_query_request_ddb + + +@pytest.fixture +def test_query_request_dict(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_query_request_dict + return basic_query_request_dict + + +def test_GIVEN_test_query_request_WHEN_client_to_resource_THEN_returns_dict_value( + test_query_request_ddb, test_query_request_dict, test_ddb_item, test_dict_item +): + # Given: Query request + request = test_query_request_ddb(test_ddb_item) + # When: Converting to resource format + dict_item = client_to_resource_converter.query_request(request) + # Then: Returns dict value + for key in dict_item.keys(): + if key == "KeyConditionExpression": + assert_condition_expressions_are_equal(test_query_request_dict(test_dict_item), dict_item, key) + else: + assert dict_item[key] == test_query_request_dict(test_dict_item)[key] + + +@pytest.fixture +def test_query_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_query_response + return basic_query_response + + +def test_GIVEN_test_query_response_WHEN_client_to_resource_THEN_returns_dict_value( + test_query_response, test_ddb_item, test_dict_item +): + # Given: Query response + response = test_query_response([test_ddb_item]) + # When: Converting to resource format + dict_item = client_to_resource_converter.query_response(response) + # Then: Returns dict value + assert dict_item == test_query_response([test_dict_item]) + + +def get_string_for_key_condition_expression( + key_condition_expression, expression_attribute_names, expression_attribute_values +): + """Get the string for the key condition expression.""" + if not isinstance(key_condition_expression, str): + built_expression = ConditionExpressionBuilder().build_expression( + key_condition_expression, expression_attribute_names, expression_attribute_values + ) + key_condition_expression = built_expression.condition_expression + expression_attribute_names = built_expression.attribute_name_placeholders + expression_attribute_values = built_expression.attribute_value_placeholders + for expression_attribute_name, value in expression_attribute_names.items(): + key_condition_expression = key_condition_expression.replace(expression_attribute_name, str(value)) + for expression_attribute_value, value in expression_attribute_values.items(): + key_condition_expression = key_condition_expression.replace(expression_attribute_value, str(value)) + # Sometimes, the 
generated string has parentheses around the condition expression. + # It doesn't matter for the purposes of this test, so we remove them. + if key_condition_expression.startswith("(") and key_condition_expression.endswith(")"): + key_condition_expression = key_condition_expression[1:-1] + return key_condition_expression + + +def assert_condition_expressions_are_equal(expected_item, actual_item, key): + expected_key_condition_expression = get_string_for_key_condition_expression( + expected_item[key], + expected_item["ExpressionAttributeNames"] if "ExpressionAttributeNames" in expected_item else {}, + expected_item["ExpressionAttributeValues"] if "ExpressionAttributeValues" in expected_item else {}, + ) + actual_key_condition_expression = get_string_for_key_condition_expression( + actual_item[key], + actual_item["ExpressionAttributeNames"] if "ExpressionAttributeNames" in actual_item else {}, + actual_item["ExpressionAttributeValues"] if "ExpressionAttributeValues" in actual_item else {}, + ) + assert expected_key_condition_expression == actual_key_condition_expression + + +@pytest.fixture +def test_scan_request_ddb(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_scan_request_ddb + return basic_scan_request_ddb + + +@pytest.fixture +def test_scan_request_dict(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_scan_request_dict + return basic_scan_request_dict + + +def test_GIVEN_test_scan_request_WHEN_client_to_resource_THEN_returns_dict_value( + test_scan_request_ddb, test_scan_request_dict, test_ddb_item, test_dict_item +): + # Given: Scan request + request = test_scan_request_ddb(test_ddb_item) + # When: Converting to resource format + dict_item = client_to_resource_converter.scan_request(request) + # Then: Returns dict value + assert dict_item == test_scan_request_dict(test_dict_item) + + +@pytest.fixture +def test_scan_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_scan_response + return basic_scan_response + + +def test_GIVEN_test_scan_response_WHEN_client_to_resource_THEN_returns_dict_value( + test_scan_response, test_ddb_item, test_dict_item, test_ddb_key, test_dict_key +): + # Given: Scan response + response = test_scan_response([test_ddb_item], [test_ddb_key]) + # When: Converting to resource format + dict_item = client_to_resource_converter.scan_response(response) + # Then: Returns dict value + assert dict_item == test_scan_response([test_dict_item], [test_dict_key]) + + +@pytest.fixture +def test_batch_get_item_request_ddb(): + return basic_batch_get_item_request_ddb + + +@pytest.fixture +def test_batch_get_item_request_dict(): + return basic_batch_get_item_request_dict + + +def test_GIVEN_test_batch_get_item_request_WHEN_client_to_resource_THEN_returns_dict_value( + test_batch_get_item_request_ddb, test_batch_get_item_request_dict, test_ddb_item, test_dict_item +): + # Given: Batch get item request + request = test_batch_get_item_request_ddb([test_ddb_item]) + # When: Converting to resource format + dict_item = client_to_resource_converter.batch_get_item_request(request) + # Then: Returns dict value + assert dict_item == test_batch_get_item_request_dict([test_dict_item]) + + +@pytest.fixture +def test_batch_get_item_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_batch_get_item_response + return basic_batch_get_item_response + + +def test_GIVEN_test_batch_get_item_response_WHEN_client_to_resource_THEN_returns_dict_value( + test_batch_get_item_response, 
test_ddb_item, test_dict_item +): + # Given: Batch get item response + response = test_batch_get_item_response([test_ddb_item]) + # When: Converting to resource format + dict_item = client_to_resource_converter.batch_get_item_response(response) + # Then: Returns dict value + assert dict_item == test_batch_get_item_response([test_dict_item]) + + +@pytest.fixture +def test_batch_write_item_put_request_ddb(): + return basic_batch_write_item_put_request_ddb + + +@pytest.fixture +def test_batch_write_item_put_request_dict(): + return basic_batch_write_item_put_request_dict + + +def test_GIVEN_test_batch_write_item_put_request_WHEN_client_to_resource_THEN_returns_dict_value( + test_batch_write_item_put_request_ddb, test_batch_write_item_put_request_dict, test_ddb_item, test_dict_item +): + # Given: Batch write item request + request = test_batch_write_item_put_request_ddb([test_ddb_item]) + # When: Converting to resource format + dict_item = client_to_resource_converter.batch_write_item_request(request) + # Then: Returns dict value + assert dict_item == test_batch_write_item_put_request_dict([test_dict_item]) + + +@pytest.fixture +def test_batch_write_item_delete_request_ddb(): + return basic_batch_write_item_delete_request_ddb + + +@pytest.fixture +def test_batch_write_item_delete_request_dict(): + return basic_batch_write_item_delete_request_dict + + +def test_GIVEN_test_batch_write_item_delete_request_WHEN_client_to_resource_THEN_returns_dict_value( + test_batch_write_item_delete_request_ddb, test_batch_write_item_delete_request_dict, test_ddb_key, test_dict_key +): + # Given: Batch write item delete request + request = test_batch_write_item_delete_request_ddb([test_ddb_key]) + # When: Converting to resource format + dict_item = client_to_resource_converter.batch_write_item_request(request) + # Then: Returns dict value + assert dict_item == test_batch_write_item_delete_request_dict([test_dict_key]) + + +@pytest.fixture +def test_batch_write_item_put_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_batch_write_item_put_response + return basic_batch_write_item_put_response + + +def test_GIVEN_test_batch_write_item_put_response_WHEN_client_to_resource_THEN_returns_dict_value( + test_batch_write_item_put_response, test_ddb_item, test_dict_item +): + # Given: Batch write item put response + response = test_batch_write_item_put_response([test_ddb_item]) + # When: Converting to resource format + dict_item = client_to_resource_converter.batch_write_item_response(response) + # Then: Returns dict value + assert dict_item == test_batch_write_item_put_response([test_dict_item]) + + +@pytest.fixture +def test_transact_write_items_put_request_ddb(): + return basic_transact_write_item_put_request_ddb + + +@pytest.fixture +def test_transact_write_items_put_request_dict(): + return basic_transact_write_item_put_request_dict + + +def test_GIVEN_test_transact_write_items_put_request_WHEN_client_to_resource_THEN_returns_dict_value( + test_transact_write_items_put_request_ddb, test_transact_write_items_put_request_dict, test_ddb_item, test_dict_item +): + # Given: Transact write item put request + request = test_transact_write_items_put_request_ddb([test_ddb_item]) + # When: Converting to resource format + dict_item = client_to_resource_converter.transact_write_items_request(request) + # Then: Returns dict value + assert dict_item == test_transact_write_items_put_request_dict([test_dict_item]) + + +@pytest.fixture +def test_transact_write_items_delete_request_ddb(): + return 
basic_transact_write_item_delete_request_ddb + + +@pytest.fixture +def test_transact_write_items_delete_request_dict(): + return basic_transact_write_item_delete_request_dict + + +def test_GIVEN_test_transact_write_items_delete_request_WHEN_client_to_resource_THEN_returns_dict_value( + test_transact_write_items_delete_request_ddb, + test_transact_write_items_delete_request_dict, + test_ddb_key, + test_dict_key, +): + # Given: Transact write item delete request + request = test_transact_write_items_delete_request_ddb([test_ddb_key]) + # When: Converting to resource format + dict_item = client_to_resource_converter.transact_write_items_request(request) + # Then: Returns dict value + assert dict_item == test_transact_write_items_delete_request_dict([test_dict_key]) + + +@pytest.fixture +def test_transact_write_items_condition_check_request_ddb(): + return basic_transact_write_item_condition_check_request_ddb + + +@pytest.fixture +def test_transact_write_items_condition_check_request_dict(): + return basic_transact_write_item_condition_check_request_dict + + +def test_GIVEN_test_transact_write_items_condition_check_request_WHEN_client_to_resource_THEN_returns_dict_value( + test_transact_write_items_condition_check_request_ddb, + test_transact_write_items_condition_check_request_dict, + test_ddb_key, + test_dict_key, +): + # Given: Transact write item condition check request + request = test_transact_write_items_condition_check_request_ddb([test_ddb_key]) + # When: Converting to resource format + dict_item = client_to_resource_converter.transact_write_items_request(request) + # Then: Returns dict value + assert dict_item == test_transact_write_items_condition_check_request_dict([test_dict_key]) + + +@pytest.fixture +def test_transact_write_items_response(): + return basic_transact_write_items_response + + +def test_GIVEN_test_transact_write_items_response_WHEN_client_to_resource_THEN_returns_dict_value( + test_transact_write_items_response, test_ddb_item, test_dict_item +): + # Given: Transact write items response + response = test_transact_write_items_response([test_ddb_item]) + # When: Converting to resource format + dict_item = client_to_resource_converter.transact_write_items_response(response) + # Then: Returns dict value + assert dict_item == test_transact_write_items_response([test_dict_item]) + + +@pytest.fixture +def test_transact_get_items_request_ddb(): + return basic_transact_get_item_request_ddb + + +@pytest.fixture +def test_transact_get_items_request_dict(): + return basic_transact_get_item_request_dict + + +def test_GIVEN_test_transact_get_items_request_WHEN_client_to_resource_THEN_returns_dict_value( + test_transact_get_items_request_ddb, test_transact_get_items_request_dict, test_ddb_key, test_dict_key +): + # Given: Transact get items request + request = test_transact_get_items_request_ddb([test_ddb_key]) + # When: Converting to resource format + dict_item = client_to_resource_converter.transact_get_items_request(request) + # Then: Returns dict value + assert dict_item == test_transact_get_items_request_dict([test_dict_key]) + + +@pytest.fixture +def test_transact_get_items_response(): + return basic_transact_get_items_response + + +def test_GIVEN_test_transact_get_items_response_WHEN_client_to_resource_THEN_returns_dict_value( + test_transact_get_items_response, test_ddb_item, test_dict_item +): + # Given: Transact get items response + response = test_transact_get_items_response([test_ddb_item]) + # When: Converting to resource format + dict_item = 
client_to_resource_converter.transact_get_items_response(response) + # Then: Returns dict value + assert dict_item == test_transact_get_items_response([test_dict_item]) + + +@pytest.fixture +def test_update_item_request_ddb(): + # Select unsigned attribute without loss of generality; + # resource/client logic doesn't care about signed attributes + return basic_update_item_request_ddb_unsigned_attribute + + +@pytest.fixture +def test_update_item_request_dict(): + # Select unsigned attribute without loss of generality; + # resource/client logic doesn't care about signed attributes + return basic_update_item_request_dict_unsigned_attribute + + +def test_GIVEN_test_update_item_request_WHEN_client_to_resource_THEN_returns_dict_value( + test_update_item_request_ddb, test_update_item_request_dict, test_ddb_item, test_dict_item +): + # Given: Update item request + request = test_update_item_request_ddb(test_ddb_item) + # When: Converting to resource format + dict_item = client_to_resource_converter.update_item_request(request) + # Then: Returns dict value + assert dict_item == test_update_item_request_dict(test_dict_item) + + +@pytest.fixture +def test_update_item_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_update_item_response + return basic_update_item_response + + +def test_GIVEN_test_update_item_response_WHEN_client_to_resource_THEN_returns_dict_value( + test_update_item_response, test_ddb_item, test_dict_item +): + # Given: Update item response + response = test_update_item_response(test_ddb_item) + # When: Converting to resource format + dict_item = client_to_resource_converter.update_item_response(response) + # Then: Returns dict value + assert dict_item == test_update_item_response(test_dict_item) + + +@pytest.fixture +def test_execute_statement_request(): + return basic_execute_statement_request_encrypted_table + + +def test_GIVEN_test_execute_statement_request_WHEN_client_to_resource_THEN_returns_dict_value( + test_execute_statement_request, test_ddb_item, test_dict_item +): + # Given: Execute statement request + request = test_execute_statement_request(test_ddb_item) + # When: Converting to resource format + dict_item = client_to_resource_converter.execute_statement_request(request) + # Then: Returns dict value (here, request is not modified) + assert dict_item == test_execute_statement_request(test_dict_item) + + +@pytest.fixture +def test_execute_statement_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_execute_statement_response + return basic_execute_statement_response + + +def test_GIVEN_test_execute_statement_response_WHEN_client_to_resource_THEN_returns_dict_value( + test_execute_statement_response, test_ddb_item, test_dict_item +): + # Given: Execute statement response + ddb_response = test_execute_statement_response([test_ddb_item]) + # When: Converting to resource format + resource_response = client_to_resource_converter.execute_statement_response(ddb_response) + # Then: Returns dict value + assert resource_response == test_execute_statement_response([test_dict_item]) + + +@pytest.fixture +def test_execute_transaction_request(): + return basic_execute_transaction_request_encrypted_table + + +def test_GIVEN_test_execute_transaction_request_WHEN_client_to_resource_THEN_returns_dict_value( + test_execute_transaction_request, test_ddb_item, test_dict_item +): + # Given: Execute transaction request + request = test_execute_transaction_request(test_ddb_item) + # When: Converting to resource format + dict_item 
= client_to_resource_converter.execute_transaction_request(request) + # Then: Returns dict value (here, request is not modified) + assert dict_item == test_execute_transaction_request(test_dict_item) + + +@pytest.fixture +def test_execute_transaction_response(): + return basic_execute_transaction_response + + +def test_GIVEN_test_execute_transaction_response_WHEN_client_to_resource_THEN_returns_dict_value( + test_execute_transaction_response, test_ddb_item, test_dict_item +): + # Given: Execute transaction response + ddb_response = test_execute_transaction_response([test_ddb_item]) + # When: Converting to resource format + resource_response = client_to_resource_converter.execute_transaction_response(ddb_response) + # Then: Returns dict value + assert resource_response == test_execute_transaction_response([test_dict_item]) + + +@pytest.fixture +def test_batch_execute_statement_request(use_exhaustive_request): + return basic_batch_execute_statement_request_encrypted_table + + +def test_GIVEN_test_batch_execute_statement_request_WHEN_client_to_resource_THEN_returns_dict_value( + test_batch_execute_statement_request, test_ddb_item, test_dict_item +): + # Given: Batch execute statement request + request = test_batch_execute_statement_request() + # When: Converting to resource format + dict_item = client_to_resource_converter.batch_execute_statement_request(request) + # Then: Returns dict value (here, request is not modified) + assert dict_item == test_batch_execute_statement_request() + + +@pytest.fixture +def test_batch_execute_statement_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_batch_execute_statement_response + return basic_batch_execute_statement_response + + +def test_GIVEN_test_batch_execute_statement_response_WHEN_client_to_resource_THEN_returns_dict_value( + test_batch_execute_statement_response, test_ddb_item, test_dict_item +): + # Given: Batch execute statement response + ddb_response = test_batch_execute_statement_response([test_ddb_item]) + # When: Converting to resource format + resource_response = client_to_resource_converter.batch_execute_statement_response(ddb_response) + # Then: Returns dict value + assert resource_response == test_batch_execute_statement_response([test_dict_item]) + + +@pytest.fixture +def test_delete_item_request_ddb(): + return basic_delete_item_request_ddb + + +@pytest.fixture +def test_delete_item_request_dict(): + return basic_delete_item_request_dict + + +def test_GIVEN_test_delete_item_request_WHEN_client_to_resource_THEN_returns_dict_value( + test_delete_item_request_ddb, test_delete_item_request_dict, test_ddb_key, test_dict_key +): + # Given: Delete item request + request = test_delete_item_request_ddb(test_ddb_key) + # When: Converting to resource format + dict_item = client_to_resource_converter.delete_item_request(request) + # Then: Returns dict value + assert dict_item == test_delete_item_request_dict(test_dict_key) + + +@pytest.fixture +def test_delete_item_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_delete_item_response + return basic_delete_item_response + + +def test_GIVEN_test_delete_item_response_WHEN_client_to_resource_THEN_returns_dict_value( + test_delete_item_response, test_ddb_item, test_dict_item +): + # Given: Delete item response + response = test_delete_item_response(test_ddb_item) + # When: Converting to resource format + dict_item = client_to_resource_converter.delete_item_response(response) + # Then: Returns dict value + assert dict_item == 
test_delete_item_response(test_dict_item) diff --git a/DynamoDbEncryption/runtimes/python/test/unit/internal/test_resource_to_client.py b/DynamoDbEncryption/runtimes/python/test/unit/internal/test_resource_to_client.py new file mode 100644 index 000000000..c3973adb9 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/test/unit/internal/test_resource_to_client.py @@ -0,0 +1,1081 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +import pytest +from boto3.dynamodb.conditions import ConditionExpressionBuilder + +from aws_dbesdk_dynamodb.internal.resource_to_client import ResourceShapeToClientShapeConverter + +from ...constants import INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME +from ...items import ( + complex_item_ddb, + complex_item_dict, + complex_key_ddb, + complex_key_dict, + simple_item_ddb, + simple_item_dict, + simple_key_ddb, + simple_key_dict, +) +from ...requests import ( + basic_batch_execute_statement_request_encrypted_table, + basic_batch_get_item_request_ddb, + basic_batch_get_item_request_dict, + basic_batch_write_item_delete_request_ddb, + basic_batch_write_item_delete_request_dict, + basic_batch_write_item_put_request_ddb, + basic_batch_write_item_put_request_dict, + basic_delete_item_request_ddb, + basic_delete_item_request_dict, + basic_execute_statement_request_encrypted_table, + basic_execute_transaction_request_encrypted_table, + basic_get_item_request_ddb, + basic_get_item_request_dict, + basic_put_item_request_ddb, + basic_put_item_request_dict, + basic_query_request_ddb, + basic_query_request_dict, + basic_scan_request_ddb, + basic_scan_request_dict, + basic_transact_get_item_request_ddb, + basic_transact_get_item_request_dict, + basic_transact_write_item_condition_check_request_ddb, + basic_transact_write_item_condition_check_request_dict, + basic_transact_write_item_delete_request_ddb, + basic_transact_write_item_delete_request_dict, + basic_transact_write_item_put_request_ddb, + basic_transact_write_item_put_request_dict, + basic_update_item_request_ddb_unsigned_attribute, + basic_update_item_request_dict_unsigned_attribute, + exhaustive_get_item_request_ddb, + exhaustive_get_item_request_dict, + exhaustive_put_item_request_ddb, + exhaustive_put_item_request_dict, + exhaustive_query_request_ddb, + exhaustive_query_request_dict, + exhaustive_scan_request_ddb, + exhaustive_scan_request_dict, +) +from ...responses import ( + basic_batch_execute_statement_response, + basic_batch_get_item_response, + basic_batch_write_item_put_response, + basic_delete_item_response, + basic_execute_statement_response, + basic_execute_transaction_response, + basic_get_item_response, + basic_put_item_response, + basic_query_response, + basic_scan_response, + basic_transact_get_items_response, + basic_transact_write_items_response, + basic_update_item_response, + exhaustive_batch_execute_statement_response, + exhaustive_batch_get_item_response, + exhaustive_batch_write_item_put_response, + exhaustive_delete_item_response, + exhaustive_execute_statement_response, + exhaustive_get_item_response, + exhaustive_put_item_response, + exhaustive_query_response, + exhaustive_scan_response, + exhaustive_update_item_response, +) + +resource_to_client_converter = ResourceShapeToClientShapeConverter(table_name=INTEG_TEST_DEFAULT_DYNAMODB_TABLE_NAME) + + +@pytest.fixture(params=[True, False], ids=["complex_item", "simple_item"]) +def use_complex_item(request): + return request.param + + +@pytest.fixture +def test_ddb_item(use_complex_item): + 
"""Get a single test item in the appropriate format for the client.""" + if use_complex_item: + return complex_item_ddb + return simple_item_ddb + + +@pytest.fixture +def test_dict_item(use_complex_item): + """Get a single test item in the appropriate format for the client.""" + if use_complex_item: + return complex_item_dict + return simple_item_dict + + +@pytest.fixture +def test_ddb_key(use_complex_item): + """Get a single test item in the appropriate format for the client.""" + if use_complex_item: + return complex_key_ddb + return simple_key_ddb + + +@pytest.fixture +def test_dict_key(use_complex_item): + """Get a single test item in the appropriate format for the client.""" + if use_complex_item: + return complex_key_dict + return simple_key_dict + + +@pytest.fixture(params=[True, False], ids=["exhaustive_request", "basic_request"]) +def use_exhaustive_request(request): + return request.param + + +@pytest.fixture +def test_put_item_request_ddb(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_put_item_request_ddb + return basic_put_item_request_ddb + + +@pytest.fixture +def test_put_item_request_dict(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_put_item_request_dict + return basic_put_item_request_dict + + +def sort_dynamodb_json_lists(obj): + """ + Utility that recursively sorts all lists in a DynamoDB JSON-like structure. + DynamoDB JSON uses lists to represent sets, so strict equality can fail. + Sort lists to ensure consistent ordering when comparing expected and actual items. + """ + if isinstance(obj, dict): + return {k: sort_dynamodb_json_lists(v) for k, v in obj.items()} + elif isinstance(obj, list): + try: + a = sorted(obj) # Sort lists for consistent comparison + return a + except TypeError: + return obj # Not all lists are sortable; ex. complex_item_ddb's "list" attribute + return obj + + +def test_GIVEN_test_put_item_request_WHEN_resource_to_client_THEN_returns_ddb_value( + test_put_item_request_ddb, test_put_item_request_dict, test_ddb_item, test_dict_item +): + # Given: Put item request + request = test_put_item_request_dict(test_dict_item) + # When: Converting to resource format + ddb_item = resource_to_client_converter.put_item_request(request) + # Then: Returns dict value + # For exhaustive requests, we need to handle ConditionExpression separately + # since it keeps the original DDB-formatted string + expected_ddb_request = test_put_item_request_ddb(test_ddb_item) + + actual_ddb_request = sort_dynamodb_json_lists(ddb_item) + expected_ddb_request = sort_dynamodb_json_lists(expected_ddb_request) + + for key in actual_ddb_request.keys(): + if key == "ConditionExpression": + assert_condition_expressions_are_equal(expected_ddb_request, actual_ddb_request, key) + elif key == "ExpressionAttributeValues": + # Any values in expected_ddb_request MUST be in actual_ddb_request, + # but not the other way around. + # actual_ddb_request will generate attribute symbols as needed, + # but any values in expected_ddb_request MUST be present in actual_ddb_request. + if key in expected_ddb_request: + for name, value in expected_ddb_request[key].items(): + assert name in actual_ddb_request[key] + assert actual_ddb_request[key][name] == value + else: + # Keys in actual_ddb_request don't need to be in expected_ddb_request. + pass + elif key == "ExpressionAttributeNames": + # Any keys in expected_ddb_request MUST be in actual_ddb_request, + # but not the other way around. 
+ # actual_ddb_request will generate attribute symbols as needed, + # but any keys in expected_ddb_request MUST be present in actual_ddb_request. + if key in expected_ddb_request: + for name, value in expected_ddb_request[key].items(): + assert name in actual_ddb_request[key] + assert actual_ddb_request[key][name] == value + else: + # Keys in actual_ddb_request don't need to be in expected_ddb_request. + pass + else: + assert actual_ddb_request[key] == expected_ddb_request[key] + + +def test_GIVEN_put_item_request_without_table_name_WHEN_resource_to_client_THEN_raises_error( + test_put_item_request_dict, +): + # Given: ResourceShapeToClientShapeConverter without table name + resource_to_client_converter_without_table_name = ResourceShapeToClientShapeConverter(table_name=None) + # Given: Put item request without table name + # Then: Raises ValueError + with pytest.raises(ValueError): + # When: Converting to resource format + resource_to_client_converter_without_table_name.put_item_request(test_put_item_request_dict) + + +@pytest.fixture +def test_put_item_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_put_item_response + return basic_put_item_response + + +def test_GIVEN_test_put_item_response_WHEN_resource_to_client_THEN_returns_ddb_value( + test_put_item_response, test_ddb_key, test_dict_key +): + # Given: Put item response + response = test_put_item_response(test_dict_key) + # When: Converting to resource format + ddb_item = resource_to_client_converter.put_item_response(response) + # Then: Returns dict value + expected_ddb_response = test_put_item_response(test_ddb_key) + assert ddb_item == expected_ddb_response + + +@pytest.fixture +def test_get_item_request_ddb(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_get_item_request_ddb + return basic_get_item_request_ddb + + +@pytest.fixture +def test_get_item_request_dict(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_get_item_request_dict + return basic_get_item_request_dict + + +def test_GIVEN_test_get_item_request_WHEN_resource_to_client_THEN_returns_ddb_value( + test_get_item_request_ddb, test_get_item_request_dict, test_ddb_item, test_dict_item +): + # Given: Get item request + request = test_get_item_request_dict(test_dict_item) + # When: Converting to resource format + ddb_item = resource_to_client_converter.get_item_request(request) + # Then: Returns dict value + expected_ddb_request = test_get_item_request_ddb(test_ddb_item) + assert ddb_item == expected_ddb_request + + +def test_GIVEN_get_item_request_without_table_name_WHEN_resource_to_client_THEN_raises_error( + test_get_item_request_dict, +): + # Given: ResourceShapeToClientShapeConverter without table name + resource_to_client_converter_without_table_name = ResourceShapeToClientShapeConverter(table_name=None) + # Given: Get item request without table name + # Then: Raises ValueError + with pytest.raises(ValueError): + # When: Converting to resource format + resource_to_client_converter_without_table_name.get_item_request(test_get_item_request_dict) + + +@pytest.fixture +def test_get_item_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_get_item_response + return basic_get_item_response + + +def test_GIVEN_test_get_item_response_WHEN_resource_to_client_THEN_returns_ddb_value( + test_get_item_response, test_ddb_item, test_dict_item +): + # Given: Get item response + response = test_get_item_response(test_dict_item) + # When: Converting to resource format + 
ddb_item = resource_to_client_converter.get_item_response(response) + # Then: Returns dict value + expected_ddb_response = test_get_item_response(test_ddb_item) + if "Item" in ddb_item: + ddb_item["Item"] = sort_dynamodb_json_lists(ddb_item["Item"]) + expected_ddb_response["Item"] = sort_dynamodb_json_lists(expected_ddb_response["Item"]) + assert ddb_item == expected_ddb_response + + +@pytest.fixture +def test_query_request_ddb(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_query_request_ddb + return basic_query_request_ddb + + +@pytest.fixture +def test_query_request_dict(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_query_request_dict + return basic_query_request_dict + + +def test_GIVEN_test_query_request_WHEN_resource_to_client_THEN_returns_ddb_value( + test_query_request_ddb, test_query_request_dict, test_ddb_item, test_dict_item +): + # Given: Query request + dict_request = test_query_request_dict(test_dict_item) + # When: Converting to resource format + ddb_request = resource_to_client_converter.query_request(dict_request) + # Then: Returns ddb value + actual_ddb_request = ddb_request + expected_ddb_request = test_query_request_ddb(test_ddb_item) + + try: + for key in actual_ddb_request["ExpressionAttributeValues"].keys(): + actual_ddb_request["ExpressionAttributeValues"][key] = sort_dynamodb_json_lists( + actual_ddb_request["ExpressionAttributeValues"][key] + ) + except KeyError: + pass + + try: + for key in expected_ddb_request["ExpressionAttributeValues"].keys(): + expected_ddb_request["ExpressionAttributeValues"][key] = sort_dynamodb_json_lists( + expected_ddb_request["ExpressionAttributeValues"][key] + ) + except KeyError: + pass + + try: + for key in actual_ddb_request["QueryFilter"].keys(): + actual_ddb_request["QueryFilter"][key]["AttributeValueList"] = [ + sort_dynamodb_json_lists(item) for item in actual_ddb_request["QueryFilter"][key]["AttributeValueList"] + ] + except KeyError: + pass + + try: + for key in expected_ddb_request["QueryFilter"].keys(): + expected_ddb_request["QueryFilter"][key]["AttributeValueList"] = [ + sort_dynamodb_json_lists(item) + for item in expected_ddb_request["QueryFilter"][key]["AttributeValueList"] + ] + except KeyError: + pass + + try: + for key in actual_ddb_request["ExclusiveStartKey"].keys(): + actual_ddb_request["ExclusiveStartKey"][key] = sort_dynamodb_json_lists( + actual_ddb_request["ExclusiveStartKey"][key] + ) + except KeyError: + pass + + try: + for key in expected_ddb_request["ExclusiveStartKey"].keys(): + expected_ddb_request["ExclusiveStartKey"][key] = sort_dynamodb_json_lists( + expected_ddb_request["ExclusiveStartKey"][key] + ) + except KeyError: + pass + + try: + for key in actual_ddb_request["KeyConditions"].keys(): + actual_ddb_request["KeyConditions"][key]["AttributeValueList"] = [ + sort_dynamodb_json_lists(item) + for item in actual_ddb_request["KeyConditions"][key]["AttributeValueList"] + ] + except KeyError: + pass + + try: + for key in expected_ddb_request["KeyConditions"].keys(): + expected_ddb_request["KeyConditions"][key]["AttributeValueList"] = [ + sort_dynamodb_json_lists(item) + for item in expected_ddb_request["KeyConditions"][key]["AttributeValueList"] + ] + except KeyError: + pass + + for key in actual_ddb_request.keys(): + if key == "KeyConditionExpression": + assert_condition_expressions_are_equal(expected_ddb_request, actual_ddb_request, key) + elif key == "ExpressionAttributeValues": + # Any values in expected_ddb_request MUST be in actual_ddb_request, 
+ # but not the other way around. + # actual_ddb_request will generate attribute symbols as needed, + # but any values in expected_ddb_request MUST be present in actual_ddb_request. + if key in expected_ddb_request: + for name, value in expected_ddb_request[key].items(): + assert name in actual_ddb_request[key] + assert actual_ddb_request[key][name] == value + else: + # Keys in actual_ddb_request don't need to be in expected_ddb_request. + pass + elif key == "ExpressionAttributeNames": + # Any keys in expected_ddb_request MUST be in actual_ddb_request, + # but not the other way around. + # actual_ddb_request will generate attribute symbols as needed, + # but any keys in expected_ddb_request MUST be present in actual_ddb_request. + if key in expected_ddb_request: + for name, value in expected_ddb_request[key].items(): + assert name in actual_ddb_request[key] + assert actual_ddb_request[key][name] == value + else: + # Keys in actual_ddb_request don't need to be in expected_ddb_request. + pass + else: + assert actual_ddb_request[key] == expected_ddb_request[key] + + +def test_GIVEN_query_request_without_table_name_WHEN_resource_to_client_THEN_raises_error(test_query_request_dict): + # Given: ResourceShapeToClientShapeConverter without table name + resource_to_client_converter_without_table_name = ResourceShapeToClientShapeConverter(table_name=None) + # Given: Query request without table name + # Then: Raises ValueError + with pytest.raises(ValueError): + # When: Converting to resource format + resource_to_client_converter_without_table_name.query_request(test_query_request_dict) + + +@pytest.fixture +def test_query_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_query_response + return basic_query_response + + +def test_GIVEN_test_query_response_WHEN_resource_to_client_THEN_returns_ddb_value( + test_query_response, test_ddb_item, test_dict_item +): + # Given: Query response + response = test_query_response([test_dict_item]) + # When: Converting to resource format + ddb_item = resource_to_client_converter.query_response(response) + # Then: Returns dict value + actual_ddb_response = ddb_item + actual_ddb_response["Items"] = [sort_dynamodb_json_lists(item) for item in actual_ddb_response["Items"]] + expected_ddb_response = test_query_response([test_ddb_item]) + expected_ddb_response["Items"] = [sort_dynamodb_json_lists(item) for item in expected_ddb_response["Items"]] + + assert actual_ddb_response == expected_ddb_response + + +def get_string_for_key_condition_expression( + key_condition_expression, expression_attribute_names, expression_attribute_values +): + """Get the string for the key condition expression.""" + if not isinstance(key_condition_expression, str): + built_expression = ConditionExpressionBuilder().build_expression( + key_condition_expression, expression_attribute_names, expression_attribute_values + ) + key_condition_expression = built_expression.condition_expression + expression_attribute_names = built_expression.attribute_name_placeholders + expression_attribute_values = built_expression.attribute_value_placeholders + for expression_attribute_name, value in expression_attribute_names.items(): + key_condition_expression = key_condition_expression.replace(expression_attribute_name, str(value)) + for expression_attribute_value, value in expression_attribute_values.items(): + key_condition_expression = key_condition_expression.replace(expression_attribute_value, str(value)) + # Sometimes, the generated string has parentheses around the condition 
expression. + # It doesn't matter for the purposes of this test, so we remove them. + if key_condition_expression.startswith("(") and key_condition_expression.endswith(")"): + key_condition_expression = key_condition_expression[1:-1] + return key_condition_expression + + +def assert_condition_expressions_are_equal(expected_item, actual_item, key): + expected_key_condition_expression = get_string_for_key_condition_expression( + expected_item[key], + expected_item["ExpressionAttributeNames"] if "ExpressionAttributeNames" in expected_item else {}, + expected_item["ExpressionAttributeValues"] if "ExpressionAttributeValues" in expected_item else {}, + ) + actual_key_condition_expression = get_string_for_key_condition_expression( + actual_item[key], + actual_item["ExpressionAttributeNames"] if "ExpressionAttributeNames" in actual_item else {}, + actual_item["ExpressionAttributeValues"] if "ExpressionAttributeValues" in actual_item else {}, + ) + assert expected_key_condition_expression == actual_key_condition_expression + + +@pytest.fixture +def test_scan_request_ddb(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_scan_request_ddb + return basic_scan_request_ddb + + +@pytest.fixture +def test_scan_request_dict(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_scan_request_dict + return basic_scan_request_dict + + +def sort_attribute_dynamodb_json_lists(item, attribute): + if attribute in item: + item[attribute] = sort_dynamodb_json_lists(item[attribute]) + return item + + +def sort_attribute_list_of_dynamodb_json_lists(item, attribute): + if attribute in item: + item[attribute] = [sort_dynamodb_json_lists(item) for item in item[attribute]] + return item + + +def test_GIVEN_test_scan_request_WHEN_resource_to_client_THEN_returns_ddb_value( + test_scan_request_ddb, test_scan_request_dict, test_ddb_item, test_dict_item +): + # Given: Scan request + request = test_scan_request_dict(test_dict_item) + # When: Converting to resource format + actual_ddb_request = resource_to_client_converter.scan_request(request) + # Then: Returns dict value + expected_ddb_request = test_scan_request_ddb(test_ddb_item) + + actual_ddb_request = sort_attribute_list_of_dynamodb_json_lists(actual_ddb_request, "ScanFilter") + expected_ddb_request = sort_attribute_list_of_dynamodb_json_lists(expected_ddb_request, "ScanFilter") + + actual_ddb_request = sort_attribute_dynamodb_json_lists(actual_ddb_request, "ExclusiveStartKey") + expected_ddb_request = sort_attribute_dynamodb_json_lists(expected_ddb_request, "ExclusiveStartKey") + + actual_ddb_request = sort_attribute_dynamodb_json_lists(actual_ddb_request, "ExpressionAttributeValues") + expected_ddb_request = sort_attribute_dynamodb_json_lists(expected_ddb_request, "ExpressionAttributeValues") + + assert actual_ddb_request == expected_ddb_request + + +def test_GIVEN_scan_request_without_table_name_WHEN_resource_to_client_THEN_raises_error(test_scan_request_dict): + # Given: ResourceShapeToClientShapeConverter without table name + resource_to_client_converter_without_table_name = ResourceShapeToClientShapeConverter(table_name=None) + # Given: Scan request without table name + # Then: Raises ValueError + with pytest.raises(ValueError): + # When: Converting to resource format + resource_to_client_converter_without_table_name.scan_request(test_scan_request_dict) + + +@pytest.fixture +def test_scan_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_scan_response + return basic_scan_response + + +def 
test_GIVEN_test_scan_response_WHEN_resource_to_client_THEN_returns_ddb_value( + test_scan_response, test_ddb_item, test_dict_item, test_ddb_key, test_dict_key +): + # Given: Scan response + response = test_scan_response([test_dict_item], [test_dict_key]) + # When: Converting to resource format + actual_ddb_response = resource_to_client_converter.scan_response(response) + # Then: Returns dict value + expected_ddb_response = test_scan_response([test_ddb_item], [test_ddb_key]) + + actual_ddb_response = sort_attribute_list_of_dynamodb_json_lists(actual_ddb_response, "Items") + expected_ddb_response = sort_attribute_list_of_dynamodb_json_lists(expected_ddb_response, "Items") + + assert actual_ddb_response == expected_ddb_response + + +@pytest.fixture +def test_batch_get_item_request_ddb(): + return basic_batch_get_item_request_ddb + + +@pytest.fixture +def test_batch_get_item_request_dict(): + return basic_batch_get_item_request_dict + + +def test_GIVEN_test_batch_get_item_request_WHEN_resource_to_client_THEN_returns_ddb_value( + test_batch_get_item_request_ddb, test_batch_get_item_request_dict, test_ddb_item, test_dict_item +): + # Given: Batch get item request + request = test_batch_get_item_request_dict([test_dict_item]) + # When: Converting to resource format + actual_ddb_request = resource_to_client_converter.batch_get_item_request(request) + # Then: Returns dict value + expected_ddb_request = test_batch_get_item_request_ddb([test_ddb_item]) + + actual_ddb_request = sort_attribute_list_of_dynamodb_json_lists(actual_ddb_request, "RequestItems") + expected_ddb_request = sort_attribute_list_of_dynamodb_json_lists(expected_ddb_request, "RequestItems") + + assert actual_ddb_request == expected_ddb_request + + +@pytest.fixture +def test_batch_get_item_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_batch_get_item_response + return basic_batch_get_item_response + + +def test_GIVEN_test_batch_get_item_response_WHEN_resource_to_client_THEN_returns_ddb_value( + test_batch_get_item_response, test_ddb_item, test_dict_item +): + # Given: Batch get item response + response = test_batch_get_item_response([test_dict_item]) + # When: Converting to resource format + actual_ddb_response = resource_to_client_converter.batch_get_item_response(response) + # Then: Returns dict value + expected_ddb_response = test_batch_get_item_response([test_ddb_item]) + + actual_ddb_response = sort_attribute_list_of_dynamodb_json_lists(actual_ddb_response, "Responses") + expected_ddb_response = sort_attribute_list_of_dynamodb_json_lists(expected_ddb_response, "Responses") + + assert actual_ddb_response == expected_ddb_response + + +@pytest.fixture +def test_batch_write_item_put_request_ddb(): + return basic_batch_write_item_put_request_ddb + + +@pytest.fixture +def test_batch_write_item_put_request_dict(): + return basic_batch_write_item_put_request_dict + + +def test_GIVEN_test_batch_write_item_put_request_WHEN_resource_to_client_THEN_returns_ddb_value( + test_batch_write_item_put_request_ddb, test_batch_write_item_put_request_dict, test_ddb_item, test_dict_item +): + # Given: Batch write item request + request = test_batch_write_item_put_request_dict([test_dict_item]) + # When: Converting to resource format + actual_ddb_request = resource_to_client_converter.batch_write_item_request(request) + # Then: Returns dict value + expected_ddb_request = test_batch_write_item_put_request_ddb([test_ddb_item]) + + actual_ddb_request = sort_attribute_list_of_dynamodb_json_lists(actual_ddb_request, 
"RequestItems") + expected_ddb_request = sort_attribute_list_of_dynamodb_json_lists(expected_ddb_request, "RequestItems") + + assert actual_ddb_request == expected_ddb_request + + +@pytest.fixture +def test_batch_write_item_delete_request_ddb(): + return basic_batch_write_item_delete_request_ddb + + +@pytest.fixture +def test_batch_write_item_delete_request_dict(): + return basic_batch_write_item_delete_request_dict + + +def test_GIVEN_test_batch_write_item_delete_request_WHEN_resource_to_client_THEN_returns_ddb_value( + test_batch_write_item_delete_request_ddb, test_batch_write_item_delete_request_dict, test_ddb_key, test_dict_key +): + # Given: Batch write item delete request + request = test_batch_write_item_delete_request_dict([test_dict_key]) + # When: Converting to resource format + actual_ddb_request = resource_to_client_converter.batch_write_item_request(request) + # Then: Returns dict value + expected_ddb_request = test_batch_write_item_delete_request_ddb([test_ddb_key]) + + actual_ddb_request = sort_attribute_list_of_dynamodb_json_lists(actual_ddb_request, "RequestItems") + expected_ddb_request = sort_attribute_list_of_dynamodb_json_lists(expected_ddb_request, "RequestItems") + + assert actual_ddb_request == expected_ddb_request + + +@pytest.fixture +def test_batch_write_item_put_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_batch_write_item_put_response + return basic_batch_write_item_put_response + + +def test_GIVEN_test_batch_write_item_put_response_WHEN_resource_to_client_THEN_returns_ddb_value( + test_batch_write_item_put_response, test_ddb_item, test_dict_item +): + # Given: Batch write item put response + response = test_batch_write_item_put_response([test_dict_item]) + # When: Converting to resource format + actual_ddb_response = resource_to_client_converter.batch_write_item_response(response) + # Then: Returns dict value + expected_ddb_response = test_batch_write_item_put_response([test_ddb_item]) + + actual_ddb_response = sort_attribute_list_of_dynamodb_json_lists(actual_ddb_response, "UnprocessedItems") + expected_ddb_response = sort_attribute_list_of_dynamodb_json_lists(expected_ddb_response, "UnprocessedItems") + + assert actual_ddb_response == expected_ddb_response + + +@pytest.fixture +def test_transact_write_items_put_request_ddb(): + return basic_transact_write_item_put_request_ddb + + +@pytest.fixture +def test_transact_write_items_put_request_dict(): + return basic_transact_write_item_put_request_dict + + +def test_GIVEN_test_transact_write_items_put_request_WHEN_resource_to_client_THEN_returns_ddb_value( + test_transact_write_items_put_request_ddb, test_transact_write_items_put_request_dict, test_ddb_item, test_dict_item +): + # Given: Transact write item put request + request = test_transact_write_items_put_request_dict([test_dict_item]) + # When: Converting to resource format + actual_ddb_request = resource_to_client_converter.transact_write_items_request(request) + # Then: Returns dict value + expected_ddb_request = test_transact_write_items_put_request_ddb([test_ddb_item]) + + actual_ddb_request = sort_attribute_list_of_dynamodb_json_lists(actual_ddb_request, "TransactItems") + expected_ddb_request = sort_attribute_list_of_dynamodb_json_lists(expected_ddb_request, "TransactItems") + + assert actual_ddb_request == expected_ddb_request + + +@pytest.fixture +def test_transact_write_items_delete_request_ddb(): + return basic_transact_write_item_delete_request_ddb + + +@pytest.fixture +def 
test_transact_write_items_delete_request_dict(): + return basic_transact_write_item_delete_request_dict + + +def test_GIVEN_test_transact_write_items_delete_request_WHEN_resource_to_client_THEN_returns_ddb_value( + test_transact_write_items_delete_request_ddb, + test_transact_write_items_delete_request_dict, + test_ddb_key, + test_dict_key, +): + # Given: Transact write item delete request + request = test_transact_write_items_delete_request_dict([test_dict_key]) + # When: Converting to resource format + actual_ddb_request = resource_to_client_converter.transact_write_items_request(request) + # Then: Returns dict value + expected_ddb_request = test_transact_write_items_delete_request_ddb([test_ddb_key]) + + actual_ddb_request = sort_attribute_list_of_dynamodb_json_lists(actual_ddb_request, "TransactItems") + expected_ddb_request = sort_attribute_list_of_dynamodb_json_lists(expected_ddb_request, "TransactItems") + + assert actual_ddb_request == expected_ddb_request + + +@pytest.fixture +def test_transact_write_items_condition_check_request_ddb(): + return basic_transact_write_item_condition_check_request_ddb + + +@pytest.fixture +def test_transact_write_items_condition_check_request_dict(): + return basic_transact_write_item_condition_check_request_dict + + +def test_GIVEN_test_transact_write_items_condition_check_request_WHEN_resource_to_client_THEN_returns_ddb_value( + test_transact_write_items_condition_check_request_ddb, + test_transact_write_items_condition_check_request_dict, + test_ddb_key, + test_dict_key, +): + # Given: Transact write item condition check request + request = test_transact_write_items_condition_check_request_dict([test_dict_key]) + # When: Converting to resource format + actual_ddb_request = resource_to_client_converter.transact_write_items_request(request) + # Then: Returns dict value + expected_ddb_request = test_transact_write_items_condition_check_request_ddb([test_ddb_key]) + + actual_ddb_request = sort_attribute_list_of_dynamodb_json_lists(actual_ddb_request, "TransactItems") + expected_ddb_request = sort_attribute_list_of_dynamodb_json_lists(expected_ddb_request, "TransactItems") + + assert actual_ddb_request == expected_ddb_request + + +@pytest.fixture +def test_transact_write_items_response(): + return basic_transact_write_items_response + + +def test_GIVEN_test_transact_write_items_response_WHEN_resource_to_client_THEN_returns_ddb_value( + test_transact_write_items_response, test_ddb_item, test_dict_item +): + # Given: Transact write items response + response = test_transact_write_items_response([test_dict_item]) + # When: Converting to resource format + actual_ddb_response = resource_to_client_converter.transact_write_items_response(response) + # Then: Returns dict value + expected_ddb_response = test_transact_write_items_response([test_ddb_item]) + + actual_ddb_response = sort_attribute_list_of_dynamodb_json_lists(actual_ddb_response, "ConsumedCapacity") + expected_ddb_response = sort_attribute_list_of_dynamodb_json_lists(expected_ddb_response, "ConsumedCapacity") + + assert actual_ddb_response == expected_ddb_response + + +@pytest.fixture +def test_transact_get_items_request_ddb(): + return basic_transact_get_item_request_ddb + + +@pytest.fixture +def test_transact_get_items_request_dict(): + return basic_transact_get_item_request_dict + + +def test_GIVEN_test_transact_get_items_request_WHEN_resource_to_client_THEN_returns_ddb_value( + test_transact_get_items_request_ddb, test_transact_get_items_request_dict, test_ddb_key, test_dict_key +): + # Given: 
Transact get items request + request = test_transact_get_items_request_dict([test_dict_key]) + # When: Converting to resource format + actual_ddb_request = resource_to_client_converter.transact_get_items_request(request) + # Then: Returns dict value + expected_ddb_request = test_transact_get_items_request_ddb([test_ddb_key]) + + actual_ddb_request = sort_attribute_list_of_dynamodb_json_lists(actual_ddb_request, "TransactItems") + expected_ddb_request = sort_attribute_list_of_dynamodb_json_lists(expected_ddb_request, "TransactItems") + + assert actual_ddb_request == expected_ddb_request + + +@pytest.fixture +def test_transact_get_items_response(): + return basic_transact_get_items_response + + +def test_GIVEN_test_transact_get_items_response_WHEN_resource_to_client_THEN_returns_ddb_value( + test_transact_get_items_response, test_ddb_item, test_dict_item +): + # Given: Transact get items response + response = test_transact_get_items_response([test_dict_item]) + # When: Converting to resource format + actual_ddb_response = resource_to_client_converter.transact_get_items_response(response) + # Then: Returns dict value + expected_ddb_response = test_transact_get_items_response([test_ddb_item]) + + actual_ddb_response = sort_attribute_list_of_dynamodb_json_lists(actual_ddb_response, "Responses") + expected_ddb_response = sort_attribute_list_of_dynamodb_json_lists(expected_ddb_response, "Responses") + + assert actual_ddb_response == expected_ddb_response + + +@pytest.fixture +def test_update_item_request_ddb(): + # Select unsigned attribute without loss of generality; + # resource/client logic doesn't care about signed attributes + # TODO: Add exhaustive request + return basic_update_item_request_ddb_unsigned_attribute + + +@pytest.fixture +def test_update_item_request_dict(): + # Select unsigned attribute without loss of generality; + # resource/client logic doesn't care about signed attributes + # TODO: Add exhaustive request + return basic_update_item_request_dict_unsigned_attribute + + +def test_GIVEN_test_update_item_request_WHEN_resource_to_client_THEN_returns_ddb_value( + test_update_item_request_ddb, test_update_item_request_dict, test_ddb_item, test_dict_item +): + # Given: Update item request + request = test_update_item_request_dict(test_dict_item) + # When: Converting to resource format + actual_ddb_request = resource_to_client_converter.update_item_request(request) + # Then: Returns dict value + expected_ddb_request = test_update_item_request_ddb(test_ddb_item) + + actual_ddb_request = sort_dynamodb_json_lists(actual_ddb_request) + expected_ddb_request = sort_dynamodb_json_lists(expected_ddb_request) + + assert actual_ddb_request == expected_ddb_request + + +def test_GIVEN_update_item_request_without_table_name_WHEN_resource_to_client_THEN_raises_error( + test_update_item_request_dict, +): + # Given: ResourceShapeToClientShapeConverter without table name + resource_to_client_converter_without_table_name = ResourceShapeToClientShapeConverter(table_name=None) + # Given: Put item request without table name + # Then: Raises ValueError + with pytest.raises(ValueError): + # When: Converting to resource format + resource_to_client_converter_without_table_name.update_item_request(test_update_item_request_dict) + + +@pytest.fixture +def test_update_item_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_update_item_response + return basic_update_item_response + + +def test_GIVEN_update_item_response_WHEN_resource_to_client_THEN_returns_dict_value( + 
test_update_item_response, test_ddb_item, test_dict_item +): + # Given: Update item response + response = test_update_item_response(test_dict_item) + # When: Converting to client format + actual_ddb_response = resource_to_client_converter.update_item_response(response) + # Then: Returns dict value + expected_ddb_response = test_update_item_response(test_ddb_item) + + actual_ddb_response = sort_dynamodb_json_lists(actual_ddb_response["Attributes"]) + expected_ddb_response = sort_dynamodb_json_lists(expected_ddb_response["Attributes"]) + + assert actual_ddb_response == expected_ddb_response + + +@pytest.fixture +def test_execute_statement_request(): + return basic_execute_statement_request_encrypted_table + + +def test_GIVEN_test_execute_statement_request_WHEN_resource_to_client_THEN_returns_ddb_value( + test_execute_statement_request, test_ddb_item, test_dict_item +): + # Given: Execute statement request + request = test_execute_statement_request(test_dict_item) + # When: Converting to client format + actual_ddb_request = resource_to_client_converter.execute_statement_request(request) + # Then: Returns dict value (here, request is not modified) + assert actual_ddb_request == test_execute_statement_request(test_ddb_item) + + +@pytest.fixture +def test_execute_statement_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_execute_statement_response + return basic_execute_statement_response + + +def test_GIVEN_test_execute_statement_response_WHEN_resource_to_client_THEN_returns_dict_value( + test_execute_statement_response, test_ddb_item, test_dict_item +): + # Given: Execute statement response + response = test_execute_statement_response([test_dict_item]) + # When: Converting to client format + actual_ddb_response = resource_to_client_converter.execute_statement_response(response) + # Then: Returns dict value + actual_ddb_response = sort_attribute_list_of_dynamodb_json_lists(actual_ddb_response, "Items") + expected_ddb_response = sort_attribute_list_of_dynamodb_json_lists( + test_execute_statement_response([test_ddb_item]), "Items" + ) + + assert actual_ddb_response == expected_ddb_response + + +@pytest.fixture +def test_execute_transaction_request(): + return basic_execute_transaction_request_encrypted_table + + +def test_GIVEN_test_execute_transaction_request_WHEN_resource_to_client_THEN_returns_ddb_value( + test_execute_transaction_request, test_ddb_item, test_dict_item +): + # Given: Execute transaction request + request = test_execute_transaction_request(test_dict_item) + # When: Converting to resource format + actual_ddb_request = resource_to_client_converter.execute_transaction_request(request) + # Then: Returns dict value (here, request is not modified) + assert actual_ddb_request == test_execute_transaction_request(test_ddb_item) + + +@pytest.fixture +def test_execute_transaction_response(): + return basic_execute_transaction_response + + +def test_GIVEN_test_execute_transaction_response_WHEN_resource_to_client_THEN_returns_dict_value( + test_execute_transaction_response, test_ddb_item, test_dict_item +): + # Given: Execute transaction response + response = test_execute_transaction_response([test_dict_item]) + # When: Converting to resource format + actual_ddb_response = resource_to_client_converter.execute_transaction_response(response) + # Then: Returns dict value + expected_ddb_response = test_execute_transaction_response([test_ddb_item]) + + actual_ddb_response = sort_attribute_list_of_dynamodb_json_lists(actual_ddb_response, "Responses") + 
expected_ddb_response = sort_attribute_list_of_dynamodb_json_lists(expected_ddb_response, "Responses") + + assert actual_ddb_response == expected_ddb_response + + +@pytest.fixture +def test_batch_execute_statement_request(): + return basic_batch_execute_statement_request_encrypted_table + + +def test_GIVEN_test_batch_execute_statement_request_WHEN_resource_to_client_THEN_returns_ddb_value( + test_batch_execute_statement_request, +): + # Given: Batch execute statement request + request = test_batch_execute_statement_request() + # When: Converting to resource format + actual_ddb_request = resource_to_client_converter.batch_execute_statement_request(request) + # Then: Returns dict value (here, request is not modified) + assert actual_ddb_request == test_batch_execute_statement_request() + + +@pytest.fixture +def test_batch_execute_statement_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_batch_execute_statement_response + return basic_batch_execute_statement_response + + +def test_GIVEN_test_batch_execute_statement_response_WHEN_resource_to_client_THEN_returns_dict_value( + test_batch_execute_statement_response, test_ddb_item, test_dict_item +): + # Given: Batch execute statement response + response = test_batch_execute_statement_response([test_dict_item]) + # When: Converting to resource format + actual_ddb_response = resource_to_client_converter.batch_execute_statement_response(response) + # Then: Returns dict value + expected_ddb_response = test_batch_execute_statement_response([test_ddb_item]) + + actual_ddb_response = sort_attribute_list_of_dynamodb_json_lists(actual_ddb_response, "Responses") + expected_ddb_response = sort_attribute_list_of_dynamodb_json_lists(expected_ddb_response, "Responses") + + assert actual_ddb_response == expected_ddb_response + + +@pytest.fixture +def test_delete_item_request_ddb(): + return basic_delete_item_request_ddb + + +@pytest.fixture +def test_delete_item_request_dict(): + return basic_delete_item_request_dict + + +def test_GIVEN_test_delete_item_request_WHEN_resource_to_client_THEN_returns_ddb_value( + test_delete_item_request_ddb, test_delete_item_request_dict, test_ddb_item, test_dict_item +): + # Given: Delete item request + request = test_delete_item_request_dict(test_dict_item) + # When: Converting to resource format + actual_ddb_request = resource_to_client_converter.delete_item_request(request) + # Then: Returns dict value + assert actual_ddb_request == test_delete_item_request_ddb(test_ddb_item) + + +def test_GIVEN_delete_item_request_without_table_name_WHEN_resource_to_client_THEN_raises_error( + test_delete_item_request_dict, +): + # Given: ResourceShapeToClientShapeConverter without table name + resource_to_client_converter_without_table_name = ResourceShapeToClientShapeConverter(table_name=None) + # Given: Delete item request without table name + # Then: Raises ValueError + with pytest.raises(ValueError): + # When: Converting to resource format + resource_to_client_converter_without_table_name.delete_item_request(test_delete_item_request_dict) + + +@pytest.fixture +def test_delete_item_response(use_exhaustive_request): + if use_exhaustive_request: + return exhaustive_delete_item_response + return basic_delete_item_response + + +def test_GIVEN_delete_item_response_WHEN_resource_to_client_THEN_returns_ddb_value( + test_delete_item_response, test_ddb_item, test_dict_item +): + # Given: Delete item response + response = test_delete_item_response(test_dict_item) + # When: Converting to resource format + 
actual_ddb_response = resource_to_client_converter.delete_item_response(response) + # Then: Returns dict value + expected_ddb_response = test_delete_item_response(test_ddb_item) + + actual_ddb_response["Attributes"] = sort_dynamodb_json_lists(actual_ddb_response["Attributes"]) + expected_ddb_response["Attributes"] = sort_dynamodb_json_lists(expected_ddb_response["Attributes"]) + + assert actual_ddb_response == expected_ddb_response diff --git a/DynamoDbEncryption/runtimes/python/tox.ini b/DynamoDbEncryption/runtimes/python/tox.ini new file mode 100644 index 000000000..dbd30d8e4 --- /dev/null +++ b/DynamoDbEncryption/runtimes/python/tox.ini @@ -0,0 +1,117 @@ +[tox] +isolated_build = True +envlist = + py{311,312,313}-{dafnytests,unit,integ,legacyinteg}, + encrypted-interface-coverage, + client-to-resource-conversions-coverage, + resource-to-client-conversions-coverage, + docs, + isort-check, + black-check + +[testenv:base-command] +commands = poetry run pytest -s -v -l {posargs} + +[testenv] +skip_install = true +allowlist_externals = poetry,ruff,black +passenv = AWS_* +commands_pre = + poetry lock + poetry install --with test +commands = + dafnytests: {[testenv:base-command]commands} test/internaldafny/ + unit: {[testenv:base-command]commands} test/unit/ + integ: {[testenv:base-command]commands} test/integ/encrypted/ + +[testenv:legacyinteg] +description = Run integ tests for legacy extern compatibility +commands_pre = + poetry lock + poetry install --with test --extras legacy-ddbec +commands = {[testenv:base-command]commands} test/integ/legacy/ + +[testenv:encrypted-interface-coverage] +description = Run integ + unit tests for encrypted interfaces with coverage +commands = + python -m pytest -s -vv \ + test/integ/encrypted \ + test/unit/encrypted \ + --cov aws_dbesdk_dynamodb.encrypted \ + --cov-report=term-missing \ + --cov-fail-under=100 + +[testenv:client-to-resource-conversions-coverage] +description = Run boto3 conversion tests with coverage +commands = + python -m pytest -s -vv \ + test/unit/internal/test_client_to_resource.py \ + --cov aws_dbesdk_dynamodb.internal.client_to_resource \ + --cov-report=term-missing \ + --cov-fail-under=100 + +[testenv:resource-to-client-conversions-coverage] +description = Run boto3 conversion tests with coverage +commands = + python -m pytest -s -vv \ + test/unit/internal/test_resource_to_client.py \ + --cov aws_dbesdk_dynamodb.internal.resource_to_client \ + --cov-report=term-missing \ + --cov-fail-under=100 + +# Linters +[testenv:ruff] +commands_pre = + poetry install --with linting +deps = + ruff +commands = + ruff check \ + src/aws_dbesdk_dynamodb/ \ + ../../../Examples/runtimes/python/DynamoDBEncryption/ \ + ../../../Examples/runtimes/python/Migration/ \ + test/ \ + {posargs} + +[testenv:blacken] +commands_pre = + poetry install --with linting +deps = + black +basepython = python3 +commands = + black --line-length 120 \ + src/aws_dbesdk_dynamodb/ \ + ../../../Examples/runtimes/python/DynamoDBEncryption/ \ + ../../../Examples/runtimes/python/Migration/ \ + test/ \ + {posargs} + +[testenv:black-check] +commands_pre = + {[testenv:blacken]commands_pre} +basepython = python3 +deps = + {[testenv:blacken]deps} +commands = + {[testenv:blacken]commands} --diff --check + +[testenv:lint] +commands_pre = + poetry install --with linting +deps = + black +basepython = python3 +commands = + {[testenv:blacken]commands} + {[testenv:ruff]commands} --fix + +[testenv:lint-check] +commands_pre = + poetry install --with linting +deps = + black +basepython = python3 
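+# Check-only variant of "lint": runs black in --check/--diff mode and ruff without --fix, so no files are modified.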
+commands = + {[testenv:black-check]commands} + {[testenv:ruff]commands} diff --git a/Examples/runtimes/java/DynamoDbEncryption/src/main/java/software/amazon/cryptography/examples/keyring/SharedCacheAcrossHierarchicalKeyringsExample.java b/Examples/runtimes/java/DynamoDbEncryption/src/main/java/software/amazon/cryptography/examples/keyring/SharedCacheAcrossHierarchicalKeyringsExample.java index dfed4b84d..62fb6d711 100644 --- a/Examples/runtimes/java/DynamoDbEncryption/src/main/java/software/amazon/cryptography/examples/keyring/SharedCacheAcrossHierarchicalKeyringsExample.java +++ b/Examples/runtimes/java/DynamoDbEncryption/src/main/java/software/amazon/cryptography/examples/keyring/SharedCacheAcrossHierarchicalKeyringsExample.java @@ -180,7 +180,7 @@ public static void SharedCacheAcrossHierarchicalKeyringsGetItemPutItem( final IKeyring hierarchicalKeyring1 = matProv.CreateAwsKmsHierarchicalKeyring(keyringInput1); - // 4. Configure which attributes are encrypted and/or signed when writing new items. + // 5. Configure which attributes are encrypted and/or signed when writing new items. // For each attribute that may exist on the items we plan to write to our DynamoDbTable, // we must explicitly configure how they should be treated during item encryption: // - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature @@ -194,14 +194,14 @@ public static void SharedCacheAcrossHierarchicalKeyringsGetItemPutItem( CryptoAction.ENCRYPT_AND_SIGN ); - // 5. Get the DDB Client for Hierarchical Keyring 1. + // 6. Get the DDB Client for Hierarchical Keyring 1. final DynamoDbClient ddbClient1 = GetDdbClient( ddbTableName, hierarchicalKeyring1, attributeActionsOnEncrypt ); - // 6. Encrypt Decrypt roundtrip with ddbClient1 + // 7. Encrypt Decrypt roundtrip with ddbClient1 PutGetItems(ddbTableName, ddbClient1); // Through the above encrypt and decrypt roundtrip, the cache will be populated and @@ -210,7 +210,7 @@ public static void SharedCacheAcrossHierarchicalKeyringsGetItemPutItem( // - Same Logical Key Store Name of the Key Store for the Hierarchical Keyring // - Same Branch Key ID - // 7. Configure your KeyStore resource keystore2. + // 8. Configure your KeyStore resource keystore2. // This SHOULD be the same configuration that you used // to initially create and populate your physical KeyStore. // Note that keyStoreTableName is the physical Key Store, @@ -243,7 +243,7 @@ public static void SharedCacheAcrossHierarchicalKeyringsGetItemPutItem( ) .build(); - // 8. Create the Hierarchical Keyring HK2 with Key Store instance K2, the shared Cache + // 9. Create the Hierarchical Keyring HK2 with Key Store instance K2, the shared Cache // and the same partitionId and BranchKeyId used in HK1 because we want to share cache entries // (and experience cache HITS). @@ -262,14 +262,14 @@ public static void SharedCacheAcrossHierarchicalKeyringsGetItemPutItem( final IKeyring hierarchicalKeyring2 = matProv.CreateAwsKmsHierarchicalKeyring(keyringInput2); - // 9. Get the DDB Client for Hierarchical Keyring 2. + // 10. Get the DDB Client for Hierarchical Keyring 2. final DynamoDbClient ddbClient2 = GetDdbClient( ddbTableName, hierarchicalKeyring2, attributeActionsOnEncrypt ); - // 10. Encrypt Decrypt roundtrip with ddbClient2 + // 11. 
Encrypt Decrypt roundtrip with ddbClient2 PutGetItems(ddbTableName, ddbClient2); } diff --git a/Examples/runtimes/python/DynamoDBEncryption/.gitignore b/Examples/runtimes/python/DynamoDBEncryption/.gitignore new file mode 100644 index 000000000..61d5202d8 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/.gitignore @@ -0,0 +1,17 @@ +# Python build artifacts +__pycache__ +**/__pycache__ +*.pyc +src/**.egg-info/ +build +poetry.lock +**/poetry.lock +dist + +# Dafny-generated Python +**/internaldafny/generated/*.py + +# Python test artifacts +.tox +.pytest_cache + diff --git a/Examples/runtimes/python/DynamoDBEncryption/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/basic_put_get_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/basic_put_get_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/basic_put_get_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/basic_put_get_example/with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/src/basic_put_get_example/with_encrypted_client.py new file mode 100644 index 000000000..04c361e2b --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/basic_put_get_example/with_encrypted_client.py @@ -0,0 +1,160 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example for using an EncryptedClient to put and get an encrypted item. + +Running this example requires access to the DDB Table whose name +is provided in the function arguments. 
+This table must be configured with the following +primary key configuration: +- Partition key is named "partition_key" with type (S) +- Sort key is named "sort_key" with type (N) +""" + +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsMrkMultiKeyringInput, + DBEAlgorithmSuiteId, +) +from aws_cryptographic_material_providers.mpl.references import IKeyring +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def encrypted_client_put_get_example( + kms_key_id: str, + dynamodb_table_name: str, +): + """Use an EncryptedClient to put and get an encrypted item.""" + # 1. Create a Keyring. This Keyring will be responsible for protecting the data keys that protect your data. + # For this example, we will create an AWS KMS Keyring with the AWS KMS Key we want to use. + # We will use the `CreateMrkMultiKeyring` method to create this keyring, + # as it will correctly handle both single region and Multi-Region KMS Keys. + mat_prov: AwsCryptographicMaterialProviders = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + kms_mrk_multi_keyring_input: CreateAwsKmsMrkMultiKeyringInput = CreateAwsKmsMrkMultiKeyringInput( + generator=kms_key_id, + ) + kms_mrk_multi_keyring: IKeyring = mat_prov.create_aws_kms_mrk_multi_keyring(input=kms_mrk_multi_keyring_input) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute is not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowedUnsignedAttributesPrefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by its attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attributeActionsOnEncrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it.
+ # + # - Configure `allowedUnsignedAttributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we have designed our DynamoDb table such that any attribute name with + # the ":" prefix should be considered unauthenticated. + unsignAttrPrefix: str = ":" + + # 4. Create the DynamoDb Encryption configuration for the table we will be writing to. + table_configs = {} + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=dynamodb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=kms_mrk_multi_keyring, + allowed_unsigned_attribute_prefix=unsignAttrPrefix, + # Specifying an algorithm suite is not required, + # but is done here to demonstrate how to do so. + # We suggest using the + # `ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384` suite, + # which includes AES-GCM with key derivation, signing, and key commitment. + # This is also the default algorithm suite if one is not specified in this config. + # For more information on supported algorithm suites, see: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/supported-algorithms.html + algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384, + ) + table_configs[dynamodb_table_name] = table_config + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 5. Create the EncryptedClient + encrypted_client = EncryptedClient( + client=boto3.client("dynamodb"), + encryption_config=tables_config, + ) + + # 6. Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + item_to_encrypt = { + "partition_key": {"S": "BasicPutGetExample"}, + "sort_key": {"N": "0"}, + "attribute1": {"S": "encrypt and sign me!"}, + "attribute2": {"S": "sign me!"}, + ":attribute3": {"S": "ignore me!"}, + } + + put_item_request = { + "TableName": dynamodb_table_name, + "Item": item_to_encrypt, + } + + put_item_response = encrypted_client.put_item(**put_item_request) + + # Demonstrate that PutItem succeeded + assert put_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 7. Get the item back from our table using the same client. + # The client will decrypt the item client-side, and return + # back the original item. + key_to_get = {"partition_key": {"S": "BasicPutGetExample"}, "sort_key": {"N": "0"}} + + get_item_request = {"TableName": dynamodb_table_name, "Key": key_to_get} + + get_item_response = encrypted_client.get_item(**get_item_request) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + assert get_item_response["Item"] == item_to_encrypt + + # 8. Clean up the item we put into the table by deleting it. 
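+ # After the delete, the item should no longer be present in the table; + # the final GetItem below is expected to return a response without an "Item" key.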
+ delete_item_request = {"TableName": dynamodb_table_name, "Key": key_to_get} + delete_item_response = encrypted_client.delete_item(**delete_item_request) + + # Demonstrate that DeleteItem succeeded + assert delete_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + get_item_response = encrypted_client.get_item(**get_item_request) + assert "Item" not in get_item_response diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/basic_put_get_example/with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/src/basic_put_get_example/with_encrypted_table.py new file mode 100644 index 000000000..161f90a16 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/basic_put_get_example/with_encrypted_table.py @@ -0,0 +1,150 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example for using an EncryptedTable to put and get an encrypted item. + +Running this example requires access to the DDB Table whose name +is provided in the function arguments. +This table must be configured with the following +primary key configuration: +- Partition key is named "partition_key" with type (S) +- Sort key is named "sort_key" with type (N) +""" + +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsMrkMultiKeyringInput, + DBEAlgorithmSuiteId, +) +from aws_cryptographic_material_providers.mpl.references import IKeyring +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def encrypted_table_put_get_example( + kms_key_id: str, + dynamodb_table_name: str, +): + """Use an EncryptedTable to put and get an encrypted item.""" + # 1. Create a Keyring. This Keyring will be responsible for protecting the data keys that protect your data. + # For this example, we will create an AWS KMS Keyring with the AWS KMS Key we want to use. + # We will use the `CreateMrkMultiKeyring` method to create this keyring, + # as it will correctly handle both single region and Multi-Region KMS Keys. + mat_prov: AwsCryptographicMaterialProviders = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + kms_mrk_multi_keyring_input: CreateAwsKmsMrkMultiKeyringInput = CreateAwsKmsMrkMultiKeyringInput( + generator=kms_key_id, + ) + kms_mrk_multi_keyring: IKeyring = mat_prov.create_aws_kms_mrk_multi_keyring(input=kms_mrk_multi_keyring_input) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute is not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + } + + # 3.
Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowedUnsignedAttributesPrefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attributeActionsOnEncrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowedUnsignedAttributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we have designed our DynamoDb table such that any attribute name with + # the ":" prefix should be considered unauthenticated. + unsignAttrPrefix: str = ":" + + # 4. Create the DynamoDb Encryption configuration for the table we will be writing to. + table_configs = {} + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=dynamodb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=kms_mrk_multi_keyring, + allowed_unsigned_attribute_prefix=unsignAttrPrefix, + # Specifying an algorithm suite is not required, + # but is done here to demonstrate how to do so. + # We suggest using the + # `ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384` suite, + # which includes AES-GCM with key derivation, signing, and key commitment. + # This is also the default algorithm suite if one is not specified in this config. + # For more information on supported algorithm suites, see: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/supported-algorithms.html + algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384, + ) + table_configs[dynamodb_table_name] = table_config + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 5. Create the EncryptedTable + encrypted_table = EncryptedTable( + table=boto3.resource("dynamodb").Table(dynamodb_table_name), + encryption_config=tables_config, + ) + + # 6. Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. 
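+    # (Added note) Unlike the EncryptedClient example above, which works with
+    # DynamoDB JSON (e.g. {"S": "..."}, {"N": "0"}), the EncryptedTable wraps a
+    # boto3 Table resource and accepts native Python types; boto3 handles the
+    # serialization to DynamoDB's wire format.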
+ item_to_encrypt = { + "partition_key": "BasicPutGetExample", + "sort_key": 0, + "attribute1": "encrypt and sign me!", + "attribute2": "sign me!", + ":attribute3": "ignore me!", + } + + put_item_request = { + "Item": item_to_encrypt, + } + + put_item_response = encrypted_table.put_item(**put_item_request) + + # Demonstrate that PutItem succeeded + assert put_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 7. Get the item back from our table using the same client. + # The client will decrypt the item client-side, and return + # back the original item. + key_to_get = {"partition_key": "BasicPutGetExample", "sort_key": 0} + + get_item_request = {"Key": key_to_get} + + get_item_response = encrypted_table.get_item(**get_item_request) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + assert get_item_response["Item"] == item_to_encrypt diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/client_supplier/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/client_supplier/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/client_supplier/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/client_supplier/client_supplier_example.py b/Examples/runtimes/python/DynamoDBEncryption/src/client_supplier/client_supplier_example.py new file mode 100644 index 000000000..a08a88bbd --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/client_supplier/client_supplier_example.py @@ -0,0 +1,221 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDB Encryption using a custom client supplier. + +A custom client supplier grants users access to more granular configuration aspects +of their authentication details and KMS client. The example creates a simple custom +client supplier that authenticates with a different IAM role based on the region +of the KMS key. + +Creates a MRK multi-keyring configured with a custom client supplier using a single +MRK and puts an encrypted item to the table. Then, creates a MRK discovery +multi-keyring to decrypt the item and retrieves the item from the table. + +Running this example requires access to the DDB Table whose name is provided in +CLI arguments. 
This table must be configured with the following primary key +configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (N) +""" + +from typing import List + +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsMrkDiscoveryMultiKeyringInput, + CreateAwsKmsMrkMultiKeyringInput, + DiscoveryFilter, +) +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + +from .regional_role_client_supplier import RegionalRoleClientSupplier + + +def client_supplier_example(ddb_table_name: str, key_arn: str, account_ids: List[str], regions: List[str]) -> None: + """ + Demonstrate using custom client supplier with AWS KMS MRK keyrings. + + Shows how to use a custom client supplier with AWS KMS MRK multi-keyring and AWS + KMS MRK discovery multi-keyring. + + :param ddb_table_name: The name of the DynamoDB table + :param key_arn: The ARN of the AWS KMS key + :param account_ids: List of AWS account IDs + :param regions: List of AWS regions + """ + # 1. Create a single MRK multi-keyring. + # This can be either a single-region KMS key or an MRK. + # For this example to succeed, the key's region must either + # 1) be in the regions list, or + # 2) the key must be an MRK with a replica defined + # in a region in the regions list, and the client + # must have the correct permissions to access the replica. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + # Create the multi-keyring using our custom client supplier + # defined in the RegionalRoleClientSupplier class in this directory. + create_aws_kms_mrk_multi_keyring_input = CreateAwsKmsMrkMultiKeyringInput( + # Note: RegionalRoleClientSupplier will internally use the keyArn's region + # to retrieve the correct IAM role. + client_supplier=RegionalRoleClientSupplier(), + generator=key_arn, + ) + mrk_keyring_with_client_supplier = mat_prov.create_aws_kms_mrk_multi_keyring( + input=create_aws_kms_mrk_multi_keyring_input + ) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute is not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items. 
There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions_on_encrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # 4. Create the DynamoDb Encryption configuration for the table we will be writing to. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=mrk_keyring_with_client_supplier, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 5. Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # 6. Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side using the MRK multi-keyring. + # The data key protecting this item will be encrypted + # with all the KMS Keys in this keyring, so that it can be + # decrypted with any one of those KMS Keys. + item = { + "partition_key": {"S": "clientSupplierItem"}, + "sort_key": {"N": "0"}, + "sensitive_data": {"S": "encrypt and sign me!"}, + } + + put_response = encrypted_ddb_client.put_item(TableName=ddb_table_name, Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 7. Get the item back from our table using the same keyring. + # The client will decrypt the item client-side using the MRK + # and return the original item. 
+ key_to_get = {"partition_key": {"S": "clientSupplierItem"}, "sort_key": {"N": "0"}} + + get_response = encrypted_ddb_client.get_item(TableName=ddb_table_name, Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"]["S"] == "encrypt and sign me!" + + # 8. Create a MRK discovery multi-keyring with a custom client supplier. + # A discovery MRK multi-keyring will be composed of + # multiple discovery MRK keyrings, one for each region. + # Each component keyring has its own KMS client in a particular region. + # When we provide a client supplier to the multi-keyring, all component + # keyrings will use that client supplier configuration. + # In our tests, we make `key_arn` an MRK with a replica, and + # provide only the replica region in our discovery filter. + discovery_filter = DiscoveryFilter(partition="aws", account_ids=account_ids) + + mrk_discovery_client_supplier_input = CreateAwsKmsMrkDiscoveryMultiKeyringInput( + client_supplier=RegionalRoleClientSupplier(), discovery_filter=discovery_filter, regions=regions + ) + + mrk_discovery_client_supplier_keyring = mat_prov.create_aws_kms_mrk_discovery_multi_keyring( + input=mrk_discovery_client_supplier_input + ) + + # 9. Create a new config and client using the discovery keyring. + # This is the same setup as above, except we provide the discovery keyring to the config. + replica_key_table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + # Provide discovery keyring here + keyring=mrk_discovery_client_supplier_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + replica_key_tables_config = {ddb_table_name: replica_key_table_config} + replica_key_tables_encryption_config = DynamoDbTablesEncryptionConfig( + table_encryption_configs=replica_key_tables_config + ) + + replica_key_encrypted_client = EncryptedClient( + client=ddb_client, encryption_config=replica_key_tables_encryption_config + ) + + # 10. Get the item back from our table using the discovery keyring client. + # The client will decrypt the item client-side using the keyring, + # and return the original item. + # The discovery keyring will only use KMS keys in the provided regions and + # AWS accounts. Since we have provided it with a custom client supplier + # which uses different IAM roles based on the key region, + # the discovery keyring will use a particular IAM role to decrypt + # based on the region of the KMS key it uses to decrypt. + replica_key_key_to_get = {"partition_key": {"S": "awsKmsMrkMultiKeyringItem"}, "sort_key": {"N": "0"}} + + replica_key_get_response = replica_key_encrypted_client.get_item( + TableName=ddb_table_name, Key=replica_key_key_to_get + ) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert replica_key_get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + replica_key_returned_item = replica_key_get_response["Item"] + assert replica_key_returned_item["sensitive_data"]["S"] == "encrypt and sign me!" 
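+
+# A minimal usage sketch (not part of the original example): the table name, key
+# ARN, account ID, and regions below are hypothetical placeholders and must be
+# replaced with resources that exist in your account.
+if __name__ == "__main__":
+    client_supplier_example(
+        ddb_table_name="MyEncryptedTable",  # hypothetical table name
+        key_arn="arn:aws:kms:us-east-1:111122223333:key/mrk-example-id",  # hypothetical MRK ARN
+        account_ids=["111122223333"],  # hypothetical account ID
+        regions=["eu-west-1"],  # hypothetical replica region
+    )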
diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/client_supplier/regional_role_client_supplier.py b/Examples/runtimes/python/DynamoDBEncryption/src/client_supplier/regional_role_client_supplier.py new file mode 100644 index 000000000..ef44d6c49 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/client_supplier/regional_role_client_supplier.py @@ -0,0 +1,69 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Demonstrates implementing a custom client supplier. + +Creates KMS clients with different IAM roles depending on the region passed. +""" + +import logging + +import boto3 +from aws_cryptographic_material_providers.mpl.models import GetClientInput +from aws_cryptographic_material_providers.mpl.references import ClientSupplier +from botocore.exceptions import ClientError + +from .regional_role_client_supplier_config import RegionalRoleClientSupplierConfig + + +class RegionalRoleClientSupplier(ClientSupplier): + """ + Custom client supplier for region-specific IAM roles. + + Creates KMS clients with different IAM roles depending on the region passed. + """ + + def __init__(self): + """Initialize the client supplier with STS client and configuration.""" + self._sts_client = boto3.client("sts") + self._config = RegionalRoleClientSupplierConfig() + self._logger = logging.getLogger(__name__) + + def get_client(self, input_params: GetClientInput) -> boto3.client: + """ + Get a KMS client for the specified region using the configured IAM role. + + In test environments where assuming the role might fail, we fall back to + creating a standard KMS client for the region without assuming a role. + This ensures examples can run in test environments without proper IAM permissions. + + :param input_params: Input parameters containing the region + :return: A boto3 KMS client for the specified region with the appropriate credentials + """ + region = input_params.region + if region not in self._config.region_iam_role_map: + self._logger.warning(f"Missing region in config: {region}. Using default client.") + return boto3.client("kms", region_name=region) + + role_arn = self._config.region_iam_role_map[region] + + try: + # Assume the IAM role for the region + response = self._sts_client.assume_role( + RoleArn=role_arn, + DurationSeconds=900, # 15 minutes is the minimum value + RoleSessionName="Python-Client-Supplier-Example-Session", + ) + + # Create a KMS client with the temporary credentials + return boto3.client( + "kms", + region_name=region, + aws_access_key_id=response["Credentials"]["AccessKeyId"], + aws_secret_access_key=response["Credentials"]["SecretAccessKey"], + aws_session_token=response["Credentials"]["SessionToken"], + ) + except ClientError as e: + # In test environments, fall back to a standard client + self._logger.warning(f"Failed to assume role: {str(e)}. Falling back to default client.") + return boto3.client("kms", region_name=region) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/client_supplier/regional_role_client_supplier_config.py b/Examples/runtimes/python/DynamoDBEncryption/src/client_supplier/regional_role_client_supplier_config.py new file mode 100644 index 000000000..98478fd71 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/client_supplier/regional_role_client_supplier_config.py @@ -0,0 +1,26 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +""" +Configuration for the RegionalRoleClientSupplier. + +Contains hardcoded configuration values for demonstration purposes. In production +code, these values might be loaded from environment variables, AWS AppConfig, or +other external sources. +""" + + +class RegionalRoleClientSupplierConfig: + """ + Configuration class mapping AWS regions to IAM roles. + + Provides a mapping between AWS regions and their corresponding IAM roles for + use in the RegionalRoleClientSupplier. For demonstration purposes, this uses + hardcoded values. + """ + + US_EAST_1_IAM_ROLE = "arn:aws:iam::370957321024:role/GitHub-CI-DDBEC-Dafny-Role-only-us-east-1-KMS-keys" + EU_WEST_1_IAM_ROLE = "arn:aws:iam::370957321024:role/GitHub-CI-DDBEC-Dafny-Role-only-eu-west-1-KMS-keys" + + def __init__(self): + """Initialize the configuration with region to IAM role mapping.""" + self.region_iam_role_map = {"us-east-1": self.US_EAST_1_IAM_ROLE, "eu-west-1": self.EU_WEST_1_IAM_ROLE} diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/create_keystore_key_example.py b/Examples/runtimes/python/DynamoDBEncryption/src/create_keystore_key_example.py new file mode 100644 index 000000000..6957290f8 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/create_keystore_key_example.py @@ -0,0 +1,49 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example for creating a new key in a KeyStore. + +The Hierarchical Keyring Example and Searchable Encryption Examples rely on the +existence of a DDB-backed key store with pre-existing branch key material or +beacon key material. + +See the "Create KeyStore Table Example" for how to first set up the DDB Table +that will back this KeyStore. + +Demonstrates configuring a KeyStore and using a helper method to create a branch +key and beacon key that share the same Id. A new beacon key is always created +alongside a new branch key, even if searchable encryption is not being used. + +Note: This key creation should occur within your control plane. +""" + +import boto3 +from aws_cryptographic_material_providers.keystore.client import KeyStore +from aws_cryptographic_material_providers.keystore.config import KeyStoreConfig +from aws_cryptographic_material_providers.keystore.models import ( + CreateKeyInput, + KMSConfigurationKmsKeyArn, +) + + +def keystore_create_key(key_store_table_name: str, logical_key_store_name: str, kms_key_arn: str) -> str: + """Create a new branch key and beacon key in our KeyStore.""" + # 1. Configure your KeyStore resource. + # This SHOULD be the same configuration that was used to create the DDB table + # in the "Create KeyStore Table Example". + keystore: KeyStore = KeyStore( + KeyStoreConfig( + ddb_table_name=key_store_table_name, + kms_configuration=KMSConfigurationKmsKeyArn(kms_key_arn), + logical_key_store_name=logical_key_store_name, + kms_client=boto3.client("kms"), + ddb_client=boto3.client("dynamodb"), + ) + ) + + # 2. Create a new branch key and beacon key in our KeyStore. + # Both the branch key and the beacon key will share an Id. + # This creation is eventually consistent. 
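+    # (Added note) Because this write is eventually consistent, readers such as a
+    # Hierarchical Keyring in another process may not see the new branch key
+    # immediately; allow for a short propagation delay before relying on it.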
+ branch_key_id = keystore.create_key(CreateKeyInput()).branch_key_identifier + + return branch_key_id diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/create_keystore_table_example.py b/Examples/runtimes/python/DynamoDBEncryption/src/create_keystore_table_example.py new file mode 100644 index 000000000..d154e33c2 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/create_keystore_table_example.py @@ -0,0 +1,59 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example for creating a DynamoDB table for use as a KeyStore. + +The Hierarchical Keyring Example and Searchable Encryption Examples rely on the +existence of a DDB-backed key store with pre-existing branch key material or +beacon key material. + +Shows how to configure a KeyStore and use a helper method to create the DDB table +that will be used to persist branch keys and beacons keys for this KeyStore. + +This table creation should occur within your control plane and only needs to occur +once. While not demonstrated in this example, you should additionally use the +`VersionKey` API on the KeyStore to periodically rotate your branch key material. +""" + +import boto3 +from aws_cryptographic_material_providers.keystore.client import KeyStore +from aws_cryptographic_material_providers.keystore.config import KeyStoreConfig +from aws_cryptographic_material_providers.keystore.models import ( + CreateKeyStoreInput, + KMSConfigurationKmsKeyArn, +) + + +def keystore_create_table(keystore_table_name: str, logical_keystore_name: str, kms_key_arn: str): + """ + Create KeyStore Table Example. + + :param keystore_table_name: The name of the DynamoDB table to create + :param logical_keystore_name: The logical name for this keystore + :param kms_key_arn: The ARN of the KMS key to use for protecting branch keys + """ + # 1. Configure your KeyStore resource. + # `ddb_table_name` is the name you want for the DDB table that + # will back your keystore. + # `kms_key_arn` is the KMS Key that will protect your branch keys and beacon keys + # when they are stored in your DDB table. + keystore = KeyStore( + config=KeyStoreConfig( + ddb_client=boto3.client("dynamodb"), + ddb_table_name=keystore_table_name, + logical_key_store_name=logical_keystore_name, + kms_client=boto3.client("kms"), + kms_configuration=KMSConfigurationKmsKeyArn(kms_key_arn), + ) + ) + + # 2. Create the DynamoDb table that will store the branch keys and beacon keys. + # This checks if the correct table already exists at `ddb_table_name` + # by using the DescribeTable API. If no table exists, + # it will create one. If a table exists, it will verify + # the table's configuration and will error if the configuration is incorrect. + keystore.create_key_store(input=CreateKeyStoreInput()) + # It may take a couple of minutes for the table to become ACTIVE, + # at which point it is ready to store branch and beacon keys. + # See the Create KeyStore Key Example for how to populate + # this table. diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/encrypted_paginator/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/encrypted_paginator/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/encrypted_paginator/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/encrypted_paginator/encrypted_paginator_example.py b/Examples/runtimes/python/DynamoDBEncryption/src/encrypted_paginator/encrypted_paginator_example.py new file mode 100644 index 000000000..9baf46f42 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/encrypted_paginator/encrypted_paginator_example.py @@ -0,0 +1,159 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example for using the EncryptedPaginator provided by EncryptedClient. + +https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/paginator/Query.html + +Running this example requires access to the DDB Table whose name +is provided in the function arguments. +This table must be configured with the following primary key configuration: +- Partition key is named "partition_key" with type (S) +- Sort key is named "sort_key" with type (N) +""" + +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsMrkMultiKeyringInput, + DBEAlgorithmSuiteId, +) +from aws_cryptographic_material_providers.mpl.references import IKeyring +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def encrypted_paginator_example( + kms_key_id: str, + dynamodb_table_name: str, +): + """Use an EncryptedPaginator to paginate through items in a table.""" + # 1. Create a Keyring. This Keyring will be responsible for protecting the data keys that protect your data. + # For this example, we will create a AWS KMS Keyring with the AWS KMS Key we want to use. + # We will use the `CreateMrkMultiKeyring` method to create this keyring, + # as it will correctly handle both single region and Multi-Region KMS Keys. + mat_prov: AwsCryptographicMaterialProviders = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + kms_mrk_multi_keyring_input: CreateAwsKmsMrkMultiKeyringInput = CreateAwsKmsMrkMultiKeyringInput( + generator=kms_key_id, + ) + kms_mrk_multi_keyring: IKeyring = mat_prov.create_aws_kms_mrk_multi_keyring(input=kms_mrk_multi_keyring_input) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items. 
There are two options for configuring this: + # + # - (Recommended) Configure `allowedUnsignedAttributesPrefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attributeActionsOnEncrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowedUnsignedAttributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we have designed our DynamoDb table such that any attribute name with + # the ":" prefix should be considered unauthenticated. + unsignAttrPrefix: str = ":" + + # 4. Create the DynamoDb Encryption configuration for the tables we will be writing to. + # For each table, we create a DynamoDbTableEncryptionConfig and add it to a dictionary. + # This dictionary is then added to a DynamoDbTablesEncryptionConfig, which is used to create the + # EncryptedResource. + table_configs = {} + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=dynamodb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=kms_mrk_multi_keyring, + allowed_unsigned_attribute_prefix=unsignAttrPrefix, + # Specifying an algorithm suite is not required, + # but is done here to demonstrate how to do so. + # We suggest using the + # `ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384` suite, + # which includes AES-GCM with key derivation, signing, and key commitment. + # This is also the default algorithm suite if one is not specified in this config. + # For more information on supported algorithm suites, see: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/supported-algorithms.html + algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384, + ) + table_configs[dynamodb_table_name] = table_config + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 5. Create the EncryptedClient + encrypted_client = EncryptedClient( + client=boto3.client("dynamodb"), + encryption_config=tables_config, + ) + + # 6. Put an item into the table. The EncryptedPaginator will paginate through the items in the table + # to find this item. 
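+    # (Added note) A single item is enough to demonstrate transparent decryption;
+    # to observe pagination across more than one page you could write several items
+    # under the same partition key, or reduce the page size (boto3 paginators accept
+    # a PaginationConfig such as {"PageSize": 1}, assuming the encrypted paginator
+    # forwards it to the underlying paginator).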
+ item = { + "partition_key": {"S": "PythonEncryptedPaginatorExample"}, + "sort_key": {"N": "0"}, + "attribute1": {"S": "encrypt and sign me!"}, + "attribute2": {"S": "sign me!"}, + ":attribute3": {"S": "ignore me!"}, + } + + encrypted_client.put_item( + TableName=dynamodb_table_name, + Item=item, + ) + + # 7. Create the EncryptedPaginator. + # We will use the encrypted `query` paginator, but an encrypted `scan` paginator is also available. + encrypted_paginator = encrypted_client.get_paginator("query") + + # 8. Use the EncryptedPaginator to paginate through the items in the table. + # The `paginate` method returns a generator that yields pages as dictionaries. + # The EncryptedPaginator will transparently decrypt the items in each page as they are returned. + # Once the generator is exhausted, the loop will exit. + items = [] + for page in encrypted_paginator.paginate( + TableName=dynamodb_table_name, + KeyConditionExpression="partition_key = :partition_key", + ExpressionAttributeValues={":partition_key": {"S": "PythonEncryptedPaginatorExample"}}, + ): + for item in page["Items"]: + items.append(item) + + # 9. Assert the items are returned as expected. + assert len(items) == 1 + assert items[0]["attribute1"]["S"] == "encrypt and sign me!" + assert items[0]["attribute2"]["S"] == "sign me!" + assert items[0][":attribute3"]["S"] == "ignore me!" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/encrypted_resource/batch_read_write_example.py b/Examples/runtimes/python/DynamoDBEncryption/src/encrypted_resource/batch_read_write_example.py new file mode 100644 index 000000000..5efb34336 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/encrypted_resource/batch_read_write_example.py @@ -0,0 +1,183 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example for using an EncryptedResource to batch read and write items. + +Running this example requires access to the DDB Table whose name +is provided in the function arguments. +This table must be configured with the following +primary key configuration: +- Partition key is named "partition_key" with type (S) +- Sort key is named "sort_key" with type (N) +""" + +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsMrkMultiKeyringInput, + DBEAlgorithmSuiteId, +) +from aws_cryptographic_material_providers.mpl.references import IKeyring +from aws_dbesdk_dynamodb.encrypted.resource import EncryptedResource +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def encrypted_resource_batch_read_write_example( + kms_key_id: str, + dynamodb_table_name: str, +): + """Use an EncryptedResource to batch read and write items.""" + # 1. Create a Keyring. This Keyring will be responsible for protecting the data keys that protect your data. + # For this example, we will create a AWS KMS Keyring with the AWS KMS Key we want to use. + # We will use the `CreateMrkMultiKeyring` method to create this keyring, + # as it will correctly handle both single region and Multi-Region KMS Keys. 
+ mat_prov: AwsCryptographicMaterialProviders = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + kms_mrk_multi_keyring_input: CreateAwsKmsMrkMultiKeyringInput = CreateAwsKmsMrkMultiKeyringInput( + generator=kms_key_id, + ) + kms_mrk_multi_keyring: IKeyring = mat_prov.create_aws_kms_mrk_multi_keyring(input=kms_mrk_multi_keyring_input) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowedUnsignedAttributesPrefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attributeActionsOnEncrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowedUnsignedAttributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we have designed our DynamoDb table such that any attribute name with + # the ":" prefix should be considered unauthenticated. + unsignAttrPrefix: str = ":" + + # 4. Create the DynamoDb Encryption configuration for the table we will be writing to. + table_configs = {} + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=dynamodb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=kms_mrk_multi_keyring, + allowed_unsigned_attribute_prefix=unsignAttrPrefix, + # Specifying an algorithm suite is not required, + # but is done here to demonstrate how to do so. 
+ # We suggest using the + # `ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384` suite, + # which includes AES-GCM with key derivation, signing, and key commitment. + # This is also the default algorithm suite if one is not specified in this config. + # For more information on supported algorithm suites, see: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/supported-algorithms.html + algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384, + ) + table_configs[dynamodb_table_name] = table_config + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 5. Create the EncryptedResource + encrypted_resource = EncryptedResource( + resource=boto3.resource("dynamodb"), + encryption_config=tables_config, + ) + + # 6. Write a batch of items to the table. + # Before the items get sent to DynamoDb, they will be encrypted + # client-side, according to our configuration. + items = [ + { + "partition_key": "PythonEncryptedResourceBatchReadWriteExample1", + "sort_key": 0, + "attribute1": "encrypt and sign me!", + "attribute2": "sign me!", + ":attribute3": "ignore me!", + }, + { + "partition_key": "PythonEncryptedResourceBatchReadWriteExample2", + "sort_key": 0, + "attribute1": "encrypt and sign me!", + "attribute2": "sign me!", + ":attribute3": "ignore me!", + }, + ] + + batch_write_items_put_request = { + "RequestItems": { + dynamodb_table_name: [{"PutRequest": {"Item": item}} for item in items], + }, + } + + batch_write_items_put_response = encrypted_resource.batch_write_item(**batch_write_items_put_request) + + # Demonstrate that BatchWriteItem succeeded + assert batch_write_items_put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 7. Read the items back from the table. + # After the items are retrieved from DynamoDb, but before the EncryptedResource + # returns them to the caller, they will be decrypted client-side according to our configuration. + batch_get_items_request = { + "RequestItems": { + dynamodb_table_name: { + "Keys": [{"partition_key": item["partition_key"], "sort_key": item["sort_key"]} for item in items], + } + }, + } + + batch_get_items_response = encrypted_resource.batch_get_item(**batch_get_items_request) + + # Demonstrate that BatchGetItem succeeded with the expected result + assert batch_get_items_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + for item in batch_get_items_response["Responses"][dynamodb_table_name]: + assert item["attribute1"] == "encrypt and sign me!" + assert item["attribute2"] == "sign me!" + assert item[":attribute3"] == "ignore me!" + + # 8. Delete the items from the table. 
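+    # (Added note) BatchWriteItem accepts at most 25 put/delete requests per call
+    # and may return "UnprocessedItems"; production code should check that field
+    # and retry any requests that were not processed.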
+ batch_write_items_delete_request = { + "RequestItems": { + dynamodb_table_name: [ + {"DeleteRequest": {"Key": {"partition_key": item["partition_key"], "sort_key": item["sort_key"]}}} + for item in items + ], + }, + } + + batch_write_items_delete_response = encrypted_resource.batch_write_item(**batch_write_items_delete_request) + + # Demonstrate that BatchWriteItem succeeded + assert batch_write_items_delete_response["ResponseMetadata"]["HTTPStatusCode"] == 200 diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/encrypted_resource/encrypted_tables_collection_manager_example.py b/Examples/runtimes/python/DynamoDBEncryption/src/encrypted_resource/encrypted_tables_collection_manager_example.py new file mode 100644 index 000000000..c453ae580 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/encrypted_resource/encrypted_tables_collection_manager_example.py @@ -0,0 +1,175 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example for using the EncryptedTablesCollectionManager provided by EncryptedResource. + +Running this example requires access to the DDB Tables whose names +are provided in the function arguments. +These tables must be configured with the following primary key configuration: +- Partition key is named "partition_key" with type (S) +- Sort key is named "sort_key" with type (N) +""" + +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsMrkMultiKeyringInput, + DBEAlgorithmSuiteId, +) +from aws_cryptographic_material_providers.mpl.references import IKeyring +from aws_dbesdk_dynamodb.encrypted.resource import ( + EncryptedResource, +) +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def encrypted_tables_collection_manager_example( + kms_key_id: str, + dynamodb_table_names: list[str], +): + """Use an EncryptedTablesCollectionManager to write and read to multiple tables.""" + # 1. Create a Keyring. This Keyring will be responsible for protecting the data keys that protect your data. + # For this example, we will create a AWS KMS Keyring with the AWS KMS Key we want to use. + # We will use the `CreateMrkMultiKeyring` method to create this keyring, + # as it will correctly handle both single region and Multi-Region KMS Keys. + mat_prov: AwsCryptographicMaterialProviders = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + kms_mrk_multi_keyring_input: CreateAwsKmsMrkMultiKeyringInput = CreateAwsKmsMrkMultiKeyringInput( + generator=kms_key_id, + ) + kms_mrk_multi_keyring: IKeyring = mat_prov.create_aws_kms_mrk_multi_keyring(input=kms_mrk_multi_keyring_input) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. 
+ # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowedUnsignedAttributesPrefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attributeActionsOnEncrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowedUnsignedAttributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we have designed our DynamoDb table such that any attribute name with + # the ":" prefix should be considered unauthenticated. + unsignAttrPrefix: str = ":" + + # 4. Create the DynamoDb Encryption configuration for the tables we will be writing to. + # For each table, we create a DynamoDbTableEncryptionConfig and add it to a dictionary. + # This dictionary is then added to a DynamoDbTablesEncryptionConfig, which is used to create the + # EncryptedResource. + table_configs = {} + for dynamodb_table_name in dynamodb_table_names: + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=dynamodb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=kms_mrk_multi_keyring, + allowed_unsigned_attribute_prefix=unsignAttrPrefix, + # Specifying an algorithm suite is not required, + # but is done here to demonstrate how to do so. + # We suggest using the + # `ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384` suite, + # which includes AES-GCM with key derivation, signing, and key commitment. + # This is also the default algorithm suite if one is not specified in this config. 
+ # For more information on supported algorithm suites, see: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/supported-algorithms.html + algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384, + ) + table_configs[dynamodb_table_name] = table_config + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 5. Create the EncryptedResource + encrypted_resource = EncryptedResource( + resource=boto3.resource("dynamodb"), + encryption_config=tables_config, + ) + + # 6. Retrieve the EncryptedTablesCollectionManager from the EncryptedResource + encrypted_tables_collection_manager = encrypted_resource.tables + + # 7. Use the EncryptedTablesCollectionManager to get EncryptedTables to write to. + # **IMPORTANT**: This will return all tables in the collection, not just the ones you want to write to. + # This will include all tables that are associated with the current account and endpoint. + # You should consider filtering the tables you write to based on the table name. + encrypted_tables = encrypted_tables_collection_manager.all() + + # 8. Write a batch of items to the table. + # Before the items get sent to DynamoDb, they will be encrypted + # client-side, according to our configuration. + items = [] + for encrypted_table in encrypted_tables: + # Here, you should consider filtering the tables you write. + # If you do not, you will write to all tables in the collection. + # This may include tables with incompatible schemas, or tables that you do not have permission to write to. + if encrypted_table.table_name in dynamodb_table_names: + encrypted_table.put_item( + Item={ + "partition_key": "PythonEncryptedTablesCollectionManagerExample", + "sort_key": 0, + "attribute1": "encrypt and sign me!", + "attribute2": "sign me!", + ":attribute3": "ignore me!", + } + ) + + # 9. Read the items back from the table. + # After the items are retrieved from DynamoDb, but before the EncryptedResource + # returns them to the caller, they will be decrypted client-side according to our configuration. + items = [] + for encrypted_table in encrypted_tables: + # Here, you should consider filtering the tables you read from. + # If you do not, you will read from all tables in the collection. + # This may include tables with incompatible schemas, or tables that you do not have permission to read from. + if encrypted_table.table_name in dynamodb_table_names: + get_item_response = encrypted_table.get_item( + Key={ + "partition_key": "PythonEncryptedTablesCollectionManagerExample", + "sort_key": 0, + } + ) + + item = get_item_response["Item"] + items.append(item) + + # 10. Assert the items are as expected. + for item in items: + assert item["attribute1"] == "encrypt and sign me!" + assert item["attribute2"] == "sign me!" + assert item[":attribute3"] == "ignore me!" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/get_encrypted_data_key_description_example.py b/Examples/runtimes/python/DynamoDBEncryption/src/get_encrypted_data_key_description_example.py new file mode 100644 index 000000000..f023577f2 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/get_encrypted_data_key_description_example.py @@ -0,0 +1,82 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Example demonstrating how to get encrypted data key descriptions from DynamoDB items.""" + +import boto3 +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.client import DynamoDbEncryption +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.config import ( + DynamoDbEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.dynamodb import ( + GetEncryptedDataKeyDescriptionInput, + GetEncryptedDataKeyDescriptionUnionItem, +) + + +def get_encrypted_data_key_description( + table_name: str, + partition_key: str, + partition_key_val: str, + sort_key_name: str, + sort_key_value: str, + expected_key_provider_id: str, + expected_key_provider_info: str, + expected_branch_key_id: str, + expected_branch_key_version: str, +): + """ + Get encrypted data key description from a DynamoDB item. + + :param table_name: The name of the DynamoDB table + :param partition_key: The name of the partition key + :param partition_key_val: The value of the partition key + :param sort_key_name: The name of the sort key + :param sort_key_value: The value of the sort key + :param expected_key_provider_id: The expected key provider ID + :param expected_key_provider_info: The expected key provider info (optional) + :param expected_branch_key_id: The expected branch key ID (optional) + :param expected_branch_key_version: The expected branch key version (optional) + """ + # 1. Create a new AWS SDK DynamoDb client. This client will be used to get item from the DynamoDB table + ddb = boto3.client("dynamodb") + + # 2. Get item from the DynamoDB table. This item will be used to Get Encrypted DataKey Description + key_to_get = {partition_key: {"S": partition_key_val}, sort_key_name: {"N": sort_key_value}} + + response = ddb.get_item(TableName=table_name, Key=key_to_get) + + returned_item = response.get("Item", {}) + if not returned_item: + print(f"No item found with the key {partition_key}!") + return + + # 3. Prepare the input for GetEncryptedDataKeyDescription method. + # This input can be a DynamoDB item or a header. For now, we are giving input as a DynamoDB item + # but users can also extract the header from the attribute "aws_dbe_head" in the DynamoDB table + # and use it for GetEncryptedDataKeyDescription method. + ddb_enc = DynamoDbEncryption(config=DynamoDbEncryptionConfig()) + + input_union = GetEncryptedDataKeyDescriptionUnionItem(returned_item) + + input_obj = GetEncryptedDataKeyDescriptionInput(input=input_union) + + output = ddb_enc.get_encrypted_data_key_description(input=input_obj) + + # In the following code, we are giving input as header instead of a complete DynamoDB item + # This code is provided solely to demo how the alternative approach works. So, it is commented. 
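+    # (Added note) The material description ("header") is stored on the encrypted
+    # item in the reserved binary attribute "aws_dbe_head"; the commented lines
+    # below sketch how that header could be passed instead of the whole item.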
+ + # header_attribute = "aws_dbe_head" + # header = returned_item[header_attribute]["B"] + # input_union = GetEncryptedDataKeyDescriptionUnion( + # header=header + # ) + + # Assert everything + assert output.encrypted_data_key_description_output[0].key_provider_id == expected_key_provider_id + + if expected_key_provider_id.startswith("aws-kms"): + assert output.encrypted_data_key_description_output[0].key_provider_info == expected_key_provider_info + + if output.encrypted_data_key_description_output[0].key_provider_id == "aws-kms-hierarchy": + assert output.encrypted_data_key_description_output[0].branch_key_id == expected_branch_key_id + assert output.encrypted_data_key_description_output[0].branch_key_version == expected_branch_key_version diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/item_encryptor/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/item_encryptor/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/item_encryptor/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/item_encryptor/encrypt_decrypt_example.py b/Examples/runtimes/python/DynamoDBEncryption/src/item_encryptor/encrypt_decrypt_example.py new file mode 100644 index 000000000..daf8082f0 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/item_encryptor/encrypt_decrypt_example.py @@ -0,0 +1,231 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Set up an ItemEncryptor and use its APIs to encrypt and decrypt items in 3 different formats. + +You should use the ItemEncryptor +if you already have an item to encrypt or decrypt, +and do not need to make a Put or Get call to DynamoDb. +For example, if you are using DynamoDb Streams, +you may already be working with an encrypted item obtained from +DynamoDb, and want to directly decrypt the item. + +This example demonstrates the 3 formats the ItemEncryptor can accept: +- Python dictionaries (encrypt_python_item, decrypt_python_item) +- DynamoDB JSON (encrypt_dynamodb_item, decrypt_dynamodb_item) +- DBESDK shapes (encrypt_item, decrypt_item) + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" + +from typing import Any, Dict + +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsMrkMultiKeyringInput, + DBEAlgorithmSuiteId, +) +from aws_cryptographic_material_providers.mpl.references import IKeyring +from aws_dbesdk_dynamodb.encrypted.item import ( + ItemEncryptor, +) +from aws_dbesdk_dynamodb.structures.item_encryptor import ( + DecryptItemInput, + DynamoDbItemEncryptorConfig, + EncryptItemInput, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def encrypt_decrypt_example(kms_key_id: str, ddb_table_name: str): + """Encrypt and decrypt an item with an ItemEncryptor.""" + # 1. Create a Keyring. 
This Keyring will be responsible for protecting the data keys that protect your data. + # For this example, we will create a AWS KMS Keyring with the AWS KMS Key we want to use. + # We will use the `CreateAwsKmsMrkMultiKeyringInput` method to create this keyring, + # as it will correctly handle both single region and Multi-Region KMS Keys. + mat_prov: AwsCryptographicMaterialProviders = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + kms_mrk_multi_keyring_input: CreateAwsKmsMrkMultiKeyringInput = CreateAwsKmsMrkMultiKeyringInput( + generator=kms_key_id, + ) + kms_mrk_multi_keyring: IKeyring = mat_prov.create_aws_kms_mrk_multi_keyring(input=kms_mrk_multi_keyring_input) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt: Dict[str, str] = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowedUnsignedAttributesPrefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attributeActionsOnEncrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowedUnsignedAttributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we have designed our DynamoDb table such that any attribute name with + # the ":" prefix should be considered unauthenticated. + unsign_attr_prefix = ":" + + # 4. 
Create the configuration for the DynamoDb Item Encryptor + config = DynamoDbItemEncryptorConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=kms_mrk_multi_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + # Specifying an algorithm suite is not required, + # but is done here to demonstrate how to do so. + # We suggest using the + # `ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384` suite, + # which includes AES-GCM with key derivation, signing, and key commitment. + # This is also the default algorithm suite if one is not specified in this config. + # For more information on supported algorithm suites, see: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/supported-algorithms.html + algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384, + ) + + # 5. Create the DynamoDb Item Encryptor + item_encryptor = ItemEncryptor(config) + + # 6. Encrypt a Python dictionary using the ItemEncryptor + plaintext_dict_item: Dict[str, Any] = { + "partition_key": "ItemEncryptDecryptExample", + "sort_key": 0, + "attribute1": "encrypt and sign me!", + "attribute2": "sign me!", + ":attribute3": "ignore me!", + } + + encrypt_output = item_encryptor.encrypt_python_item(plaintext_dict_item) + encrypted_dict_item = encrypt_output.encrypted_item + + # Demonstrate that the item has been encrypted according to the configuration + # We do this for demonstration only, and you do not need to do this in your code. + # Our configuration specified that the partition key should be SIGN_ONLY, + # so it should not have been encrypted + assert encrypted_dict_item["partition_key"] == "ItemEncryptDecryptExample" + # Our configuration specified that the sort key should be SIGN_ONLY, + # so it should not have been encrypted + assert encrypted_dict_item["sort_key"] == 0 + # Our configuration specified that attribute1 should be ENCRYPT_AND_SIGN, + # so it should have been encrypted + assert "attribute1" in encrypted_dict_item + assert encrypted_dict_item["attribute1"] != plaintext_dict_item["attribute1"] + + # Here, you could use a standard boto3 DynamoDB Table or Resource to store the item in a DynamoDB Table. + # For this example, we will not do that, but will continue to work with the encrypted item. + + # 7. Decrypt the encrypted item using the DynamoDb Item Encryptor + decrypt_output = item_encryptor.decrypt_python_item(encrypted_dict_item) + decrypted_dict_item = decrypt_output.plaintext_item + + # Demonstrate that GetItem succeeded and returned the decrypted item + # We do this for demonstration only, and you do not need to do this in your code. + assert decrypted_dict_item["partition_key"] == "ItemEncryptDecryptExample" + assert decrypted_dict_item["sort_key"] == 0 + assert decrypted_dict_item["attribute1"] == "encrypt and sign me!" + + # 8. Encrypt a DynamoDB JSON item using the ItemEncryptor + plaintext_dynamodb_item: Dict[str, Any] = { + "partition_key": {"S": "ItemEncryptDecryptExample"}, + "sort_key": {"N": "0"}, + "attribute1": {"S": "encrypt and sign me!"}, + "attribute2": {"S": "sign me!"}, + ":attribute3": {"S": "ignore me!"}, + } + encrypt_output = item_encryptor.encrypt_dynamodb_item(plaintext_dynamodb_item) + encrypted_dynamodb_item = encrypt_output.encrypted_item + + # Here, you could use a standard boto3 DynamoDB Client to store the item in a DynamoDB Table. 
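For instance, a minimal illustrative sketch of that step, reusing the `ddb_table_name` passed into this example (the plain client adds no further protection; the item is already encrypted client-side):

    # Illustrative only: store the already-encrypted DynamoDB-JSON item with a plain boto3 client.
    import boto3  # not imported by this example module

    ddb_client = boto3.client("dynamodb")
    ddb_client.put_item(TableName=ddb_table_name, Item=encrypted_dynamodb_item)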
+ # For this example, we will not do that, but will continue to work with the encrypted item. + + # Demonstrate that the item has been encrypted according to the configuration. + # We do this for demonstration only, and you do not need to do this in your code. + # Our configuration specified that the partition key should be SIGN_ONLY, + # so it should not have been encrypted + assert encrypted_dynamodb_item["partition_key"] == {"S": "ItemEncryptDecryptExample"} + # Our configuration specified that the sort key should be SIGN_ONLY, + # so it should not have been encrypted + assert encrypted_dynamodb_item["sort_key"] == {"N": "0"} + # Our configuration specified that attribute1 should be ENCRYPT_AND_SIGN, + # so it should have been encrypted + assert "attribute1" in encrypted_dynamodb_item + assert encrypted_dynamodb_item["attribute1"] != plaintext_dynamodb_item["attribute1"] + + # 9. Decrypt the encrypted item using the DynamoDb Item Encryptor + decrypt_output = item_encryptor.decrypt_dynamodb_item(encrypted_dynamodb_item) + decrypted_dynamodb_item = decrypt_output.plaintext_item + + # Demonstrate that GetItem succeeded and returned the decrypted item + # We do this for demonstration only, and you do not need to do this in your code. + assert decrypted_dynamodb_item["partition_key"] == {"S": "ItemEncryptDecryptExample"} + assert decrypted_dynamodb_item["sort_key"] == {"N": "0"} + assert decrypted_dynamodb_item["attribute1"] == {"S": "encrypt and sign me!"} + + # 10. Encrypt a DBESDK shape item using the ItemEncryptor + encrypt_item_input: EncryptItemInput = EncryptItemInput(plaintext_item=plaintext_dynamodb_item) + encrypt_item_output = item_encryptor.encrypt_item(encrypt_item_input) + encrypted_item = encrypt_item_output.encrypted_item + + # Here, you could use a standard boto3 DynamoDB Client to store the item in a DynamoDB Table. + # For this example, we will not do that, but will continue to work with the encrypted item. + + # Demonstrate that the item has been encrypted according to the configuration. + # We do this for demonstration only, and you do not need to do this in your code. + # Our configuration specified that the partition key should be SIGN_ONLY, + # so it should not have been encrypted + assert encrypted_item["partition_key"] == {"S": "ItemEncryptDecryptExample"} + # Our configuration specified that the sort key should be SIGN_ONLY, + # so it should not have been encrypted + assert encrypted_item["sort_key"] == {"N": "0"} + # Our configuration specified that attribute1 should be ENCRYPT_AND_SIGN, + # so it should have been encrypted + assert "attribute1" in encrypted_item + assert encrypted_item["attribute1"] != plaintext_dynamodb_item["attribute1"] + + # 11. Decrypt the encrypted item using the DynamoDb Item Encryptor + decrypt_item_input: DecryptItemInput = DecryptItemInput(encrypted_item=encrypted_item) + decrypt_output = item_encryptor.decrypt_item(decrypt_item_input) + decrypted_item = decrypt_output.plaintext_item + + # Demonstrate that GetItem succeeded and returned the decrypted item + # We do this for demonstration only, and you do not need to do this in your code. 
+ assert decrypted_item["partition_key"] == {"S": "ItemEncryptDecryptExample"} + assert decrypted_item["sort_key"] == {"N": "0"} + assert decrypted_item["attribute1"] == {"S": "encrypt and sign me!"} diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/hierarchical_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/hierarchical_keyring_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/hierarchical_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/hierarchical_keyring_example/encryption_config.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/hierarchical_keyring_example/encryption_config.py new file mode 100644 index 000000000..02614d105 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/hierarchical_keyring_example/encryption_config.py @@ -0,0 +1,154 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Configuration module for hierarchical keyring encryption setup. + +This module provides the common encryption configuration used by both +EncryptedClient and EncryptedTable examples. +""" + +import boto3 +from aws_cryptographic_material_providers.keystore.client import KeyStore +from aws_cryptographic_material_providers.keystore.config import KeyStoreConfig +from aws_cryptographic_material_providers.keystore.models import KMSConfigurationKmsKeyArn +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CacheTypeDefault, + CreateAwsKmsHierarchicalKeyringInput, + DefaultCache, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.client import DynamoDbEncryption +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.config import ( + DynamoDbEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.dynamodb import ( + CreateDynamoDbEncryptionBranchKeyIdSupplierInput, + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + +from .example_branch_key_id_supplier import ExampleBranchKeyIdSupplier + + +def create_encryption_config( + ddb_table_name: str, + tenant1_branch_key_id: str, + tenant2_branch_key_id: str, + keystore_table_name: str, + logical_keystore_name: str, + kms_key_id: str, +) -> DynamoDbTablesEncryptionConfig: + """ + Create the encryption configuration for DynamoDB encryption. 
+ + :param ddb_table_name: The name of the DynamoDB table + :param tenant1_branch_key_id: Branch key ID for tenant 1 + :param tenant2_branch_key_id: Branch key ID for tenant 2 + :param keystore_table_name: The name of the KeyStore DynamoDB table + :param logical_keystore_name: The logical name for this keystore + :param kms_key_id: The ARN of the KMS key to use + :return: The DynamoDB tables encryption configuration + """ + # Initial KeyStore Setup: This example requires that you have already + # created your KeyStore, and have populated it with two new branch keys. + # See the "Create KeyStore Table Example" and "Create KeyStore Key Example" + # for an example of how to do this. + + # 1. Configure your KeyStore resource. + # This SHOULD be the same configuration that you used + # to initially create and populate your KeyStore. + keystore = KeyStore( + config=KeyStoreConfig( + ddb_client=boto3.client("dynamodb"), + ddb_table_name=keystore_table_name, + logical_key_store_name=logical_keystore_name, + kms_client=boto3.client("kms"), + kms_configuration=KMSConfigurationKmsKeyArn(kms_key_id), + ) + ) + + # 2. Create a Branch Key ID Supplier. See ExampleBranchKeyIdSupplier in this directory. + ddb_enc = DynamoDbEncryption(config=DynamoDbEncryptionConfig()) + branch_key_id_supplier = ddb_enc.create_dynamo_db_encryption_branch_key_id_supplier( + input=CreateDynamoDbEncryptionBranchKeyIdSupplierInput( + ddb_key_branch_key_id_supplier=ExampleBranchKeyIdSupplier(tenant1_branch_key_id, tenant2_branch_key_id) + ) + ).branch_key_id_supplier + + # 3. Create the Hierarchical Keyring, using the Branch Key ID Supplier above. + # With this configuration, the AWS SDK Client ultimately configured will be capable + # of encrypting or decrypting items for either tenant (assuming correct KMS access). + # If you want to restrict the client to only encrypt or decrypt for a single tenant, + # configure this Hierarchical Keyring using `.branch_key_id=tenant1_branch_key_id` instead + # of `.branch_key_id_supplier=branch_key_id_supplier`. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + keyring_input = CreateAwsKmsHierarchicalKeyringInput( + key_store=keystore, + branch_key_id_supplier=branch_key_id_supplier, + ttl_seconds=600, # This dictates how often we call back to KMS to authorize use of the branch keys + cache=CacheTypeDefault( # This dictates how many branch keys will be held locally + value=DefaultCache(entry_capacity=100) + ), + ) + + hierarchical_keyring = mat_prov.create_aws_kms_hierarchical_keyring(input=keyring_input) + + # 4. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "tenant_sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # 5. Configure which attributes we expect to be included in the signature + # when reading items. 
There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # 6. Create the DynamoDb Encryption configuration for the table we will be writing to. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions, + keyring=hierarchical_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + return DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/hierarchical_keyring_example/example_branch_key_id_supplier.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/hierarchical_keyring_example/example_branch_key_id_supplier.py new file mode 100644 index 000000000..f16218a46 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/hierarchical_keyring_example/example_branch_key_id_supplier.py @@ -0,0 +1,61 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example implementation of a branch key ID supplier. + +Used in the 'HierarchicalKeyringExample'. +In that example, we have a table where we distinguish multiple tenants +by a tenant ID that is stored in our partition attribute. +The expectation is that this does not produce a confused deputy +because the tenants are separated by partition. +In order to create a Hierarchical Keyring that is capable of encrypting or +decrypting data for either tenant, we implement this interface +to map the correct branch key ID to the correct tenant ID. 
+""" +from typing import Dict + +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.references import ( + IDynamoDbKeyBranchKeyIdSupplier, +) +from aws_dbesdk_dynamodb.structures.dynamodb import GetBranchKeyIdFromDdbKeyInput, GetBranchKeyIdFromDdbKeyOutput + + +class ExampleBranchKeyIdSupplier(IDynamoDbKeyBranchKeyIdSupplier): + """Example implementation of a branch key ID supplier.""" + + branch_key_id_for_tenant1: str + branch_key_id_for_tenant2: str + + def __init__(self, tenant1_id: str, tenant2_id: str): + """ + Initialize a branch key ID supplier. + + :param tenant1_id: Branch key ID for tenant 1 + :param tenant2_id: Branch key ID for tenant 2 + """ + self.branch_key_id_for_tenant1 = tenant1_id + self.branch_key_id_for_tenant2 = tenant2_id + + def get_branch_key_id_from_ddb_key(self, param: GetBranchKeyIdFromDdbKeyInput) -> GetBranchKeyIdFromDdbKeyOutput: + """ + Get branch key ID from the tenant ID in input's DDB key. + + :param param: Input containing DDB key + :return: Output containing branch key ID + :raises ValueError: If DDB key is invalid or contains invalid tenant ID + """ + key: Dict[str, Dict] = param.ddb_key + + if "partition_key" not in key: + raise ValueError("Item invalid, does not contain expected partition key attribute.") + + tenant_key_id = key["partition_key"]["S"] + + if tenant_key_id == "tenant1Id": + branch_key_id = self.branch_key_id_for_tenant1 + elif tenant_key_id == "tenant2Id": + branch_key_id = self.branch_key_id_for_tenant2 + else: + raise ValueError("Item does not contain valid tenant ID") + + return GetBranchKeyIdFromDdbKeyOutput(branch_key_id=branch_key_id) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/hierarchical_keyring_example/with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/hierarchical_keyring_example/with_encrypted_client.py new file mode 100644 index 000000000..6132c93d2 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/hierarchical_keyring_example/with_encrypted_client.py @@ -0,0 +1,118 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDb Encryption using a Hierarchical Keyring with EncryptedClient. + +This example sets up DynamoDb Encryption for the AWS SDK client +using the Hierarchical Keyring, which establishes a key hierarchy +where "branch" keys are persisted in DynamoDb. +These branch keys are used to protect your data keys, +and these branch keys are themselves protected by a root KMS Key. + +Establishing a key hierarchy like this has two benefits: + +First, by caching the branch key material, and only calling back +to KMS to re-establish authentication regularly according to your configured TTL, +you limit how often you need to call back to KMS to protect your data. +This is a performance/security tradeoff, where your authentication, audit, and +logging from KMS is no longer one-to-one with every encrypt or decrypt call. +However, the benefit is that you no longer have to make a +network call to KMS for every encrypt or decrypt. + +Second, this key hierarchy makes it easy to hold multi-tenant data +that is isolated per branch key in a single DynamoDb table. +You can create a branch key for each tenant in your table, +and encrypt all that tenant's data under that distinct branch key. 
+On decrypt, you can either statically configure a single branch key +to ensure you are restricting decryption to a single tenant, +or you can implement an interface that lets you map the primary key on your items +to the branch key that should be responsible for decrypting that data. + +This example then demonstrates configuring a Hierarchical Keyring +with a Branch Key ID Supplier to encrypt and decrypt data for +two separate tenants. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) + +This example also requires using a KMS Key whose ARN +is provided in CLI arguments. You need the following access +on this key: + - GenerateDataKeyWithoutPlaintext + - Decrypt +""" + +import boto3 +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient + +from .encryption_config import create_encryption_config + + +def hierarchical_keyring_client_example( + ddb_table_name: str, + tenant1_branch_key_id: str, + tenant2_branch_key_id: str, + keystore_table_name: str, + logical_keystore_name: str, + kms_key_id: str, +): + """ + Demonstrate using a hierarchical keyring with multiple tenants using EncryptedClient. + + :param ddb_table_name: The name of the DynamoDB table + :param tenant1_branch_key_id: Branch key ID for tenant 1 + :param tenant2_branch_key_id: Branch key ID for tenant 2 + :param keystore_table_name: The name of the KeyStore DynamoDB table + :param logical_keystore_name: The logical name for this keystore + :param kms_key_id: The ARN of the KMS key to use + """ + # 1. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See beacon_config.py in this directory for detailed steps on the encryption configuration. + tables_config = create_encryption_config( + ddb_table_name=ddb_table_name, + tenant1_branch_key_id=tenant1_branch_key_id, + tenant2_branch_key_id=tenant2_branch_key_id, + keystore_table_name=keystore_table_name, + logical_keystore_name=logical_keystore_name, + kms_key_id=kms_key_id, + ) + + # 2. Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # 3. Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + # Because the item we are writing uses "tenantId1" as our partition value, + # based on the code we wrote in the ExampleBranchKeySupplier, + # `tenant1_branch_key_id` will be used to encrypt this item. + item = { + "partition_key": {"S": "tenant1Id"}, + "sort_key": {"N": "0"}, + "tenant_sensitive_data": {"S": "encrypt and sign me!"}, + } + + put_response = encrypted_ddb_client.put_item(TableName=ddb_table_name, Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 4. Get the item back from our table using the same client. + # The client will decrypt the item client-side, and return + # back the original item. + # Because the returned item's partition value is "tenantId1", + # based on the code we wrote in the ExampleBranchKeySupplier, + # `tenant1_branch_key_id` will be used to decrypt this item. 
+ key_to_get = {"partition_key": {"S": "tenant1Id"}, "sort_key": {"N": "0"}} + + get_response = encrypted_ddb_client.get_item(TableName=ddb_table_name, Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["tenant_sensitive_data"]["S"] == "encrypt and sign me!" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/hierarchical_keyring_example/with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/hierarchical_keyring_example/with_encrypted_table.py new file mode 100644 index 000000000..3765c4994 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/hierarchical_keyring_example/with_encrypted_table.py @@ -0,0 +1,121 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDb Encryption using a Hierarchical Keyring with EncryptedTable. + +This example sets up DynamoDb Encryption for the AWS SDK Table resource +using the Hierarchical Keyring, which establishes a key hierarchy +where "branch" keys are persisted in DynamoDb. +These branch keys are used to protect your data keys, +and these branch keys are themselves protected by a root KMS Key. + +Establishing a key hierarchy like this has two benefits: + +First, by caching the branch key material, and only calling back +to KMS to re-establish authentication regularly according to your configured TTL, +you limit how often you need to call back to KMS to protect your data. +This is a performance/security tradeoff, where your authentication, audit, and +logging from KMS is no longer one-to-one with every encrypt or decrypt call. +However, the benefit is that you no longer have to make a +network call to KMS for every encrypt or decrypt. + +Second, this key hierarchy makes it easy to hold multi-tenant data +that is isolated per branch key in a single DynamoDb table. +You can create a branch key for each tenant in your table, +and encrypt all that tenant's data under that distinct branch key. +On decrypt, you can either statically configure a single branch key +to ensure you are restricting decryption to a single tenant, +or you can implement an interface that lets you map the primary key on your items +to the branch key that should be responsible for decrypting that data. + +This example then demonstrates configuring a Hierarchical Keyring +with a Branch Key ID Supplier to encrypt and decrypt data for +two separate tenants. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) + +This example also requires using a KMS Key whose ARN +is provided in CLI arguments. You need the following access +on this key: + - GenerateDataKeyWithoutPlaintext + - Decrypt +""" + +import boto3 +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable + +from .encryption_config import create_encryption_config + + +def hierarchical_keyring_table_example( + ddb_table_name: str, + tenant1_branch_key_id: str, + tenant2_branch_key_id: str, + keystore_table_name: str, + logical_keystore_name: str, + kms_key_id: str, +): + """ + Demonstrate using a hierarchical keyring with multiple tenants using EncryptedTable. 
+ + :param ddb_table_name: The name of the DynamoDB table + :param tenant1_branch_key_id: Branch key ID for tenant 1 + :param tenant2_branch_key_id: Branch key ID for tenant 2 + :param keystore_table_name: The name of the KeyStore DynamoDB table + :param logical_keystore_name: The logical name for this keystore + :param kms_key_id: The ARN of the KMS key to use + """ + # 1. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See encryption_config.py in this directory for detailed steps on the encryption configuration. + tables_config = create_encryption_config( + ddb_table_name=ddb_table_name, + tenant1_branch_key_id=tenant1_branch_key_id, + tenant2_branch_key_id=tenant2_branch_key_id, + keystore_table_name=keystore_table_name, + logical_keystore_name=logical_keystore_name, + kms_key_id=kms_key_id, + ) + + # 2. Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_table = EncryptedTable( + table=ddb_table, + encryption_config=tables_config, + ) + + # 3. Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + # Because the item we are writing uses "tenantId1" as our partition value, + # based on the code we wrote in the ExampleBranchKeySupplier, + # `tenant1_branch_key_id` will be used to encrypt this item. + item = { + "partition_key": "tenant1Id", + "sort_key": 0, + "tenant_sensitive_data": "encrypt and sign me!", + } + + put_response = encrypted_table.put_item(Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 4. Get the item back from our table using the same client. + # The client will decrypt the item client-side, and return + # back the original item. + # Because the returned item's partition value is "tenantId1", + # based on the code we wrote in the ExampleBranchKeySupplier, + # `tenant1_branch_key_id` will be used to decrypt this item. + key_to_get = {"partition_key": "tenant1Id", "sort_key": 0} + + get_response = encrypted_table.get_item(Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["tenant_sensitive_data"] == "encrypt and sign me!" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_ecdh_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_ecdh_keyring_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_ecdh_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_ecdh_keyring_example/utility.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_ecdh_keyring_example/utility.py new file mode 100644 index 000000000..85947b4ac --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_ecdh_keyring_example/utility.py @@ -0,0 +1,88 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Utility functions for the KMS ECDH keyring example. 
+
+This module provides functions for handling ECC public keys, including:
+- Loading public key bytes from a PEM file
+- Checking if new public keys should be generated
+- Writing public key PEM files for ECC keys
+"""
+
+import pathlib
+
+import boto3
+from cryptography.hazmat.primitives import serialization
+
+EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME = "KmsEccKeyringKeyringExamplePublicKeySender.pem"
+EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME = "KmsEccKeyringKeyringExamplePublicKeyRecipient.pem"
+
+
+def load_public_key_bytes(ecc_public_key_filename: str) -> bytes:
+    """
+    Load public key bytes from a PEM file.
+
+    :param ecc_public_key_filename: The filename containing the public key
+    :return: The public key bytes
+    """
+    try:
+        with open(ecc_public_key_filename, "rb") as f:
+            public_key_file_bytes = f.read()
+            public_key = serialization.load_pem_public_key(public_key_file_bytes)
+            return public_key.public_bytes(
+                encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo
+            )
+    except IOError as e:
+        raise OSError("IOError while reading public key from file") from e
+
+
+def should_get_new_public_keys() -> bool:
+    """
+    Check if new public keys should be generated.
+
+    :return: True if new keys should be generated, False otherwise
+    :raises FileNotFoundError: If only one of the two public key files exists
+    """
+    # Check if both public keys already exist
+    sender_public_key_file = pathlib.Path(EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME)
+    recipient_public_key_file = pathlib.Path(EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME)
+
+    if sender_public_key_file.exists() and recipient_public_key_file.exists():
+        return False
+
+    if not sender_public_key_file.exists() and recipient_public_key_file.exists():
+        raise FileNotFoundError(f"Missing public key sender file at {EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME}")
+
+    if not recipient_public_key_file.exists() and sender_public_key_file.exists():
+        raise FileNotFoundError(f"Missing public key recipient file at {EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME}")
+
+    return True
+
+
+def write_public_key_pem_for_ecc_key(ecc_key_arn: str, ecc_public_key_filename: str):
+    """
+    Write a public key PEM file for an ECC key.
+
+    :param ecc_key_arn: The ARN of the KMS ECC key
+    :param ecc_public_key_filename: The filename to write the public key to
+    """
+    # Safety check: Validate file is not present
+    public_key_file = pathlib.Path(ecc_public_key_filename)
+    if public_key_file.exists():
+        raise FileExistsError("writePublicKeyPemForEccKey will not overwrite existing PEM files")
+
+    # This code will call KMS to get the public key for the KMS ECC key.
+    # You must have kms:GetPublicKey permissions on the key for this to succeed.
+    # The public key will be written to the file EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME
+    # or EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME.
+ kms_client = boto3.client("kms") + response = kms_client.get_public_key(KeyId=ecc_key_arn) + public_key_bytes = response["PublicKey"] + + # Write the public key to a PEM file + public_key = serialization.load_der_public_key(public_key_bytes) + pem_data = public_key.public_bytes( + encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + + with open(ecc_public_key_filename, "wb") as f: + f.write(pem_data) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_ecdh_keyring_example/with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_ecdh_keyring_example/with_encrypted_client.py new file mode 100644 index 000000000..9965eeb73 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_ecdh_keyring_example/with_encrypted_client.py @@ -0,0 +1,383 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Examples demonstrating DynamoDb Encryption using a AWS KMS ECDH Keyring with EncryptedClient. + +These examples set up DynamoDb Encryption for the AWS SDK client +using the AWS KMS ECDH Keyring. This keyring, depending on its KeyAgreement scheme, +takes in the sender's KMS ECC Key ARN, and the recipient's ECC Public Key to derive a shared secret. +The keyring uses the shared secret to derive a data key to protect the +data keys that encrypt and decrypt DynamoDb table items. + +Running these examples require access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" + + +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsEcdhKeyringInput, + DBEAlgorithmSuiteId, + KmsEcdhStaticConfigurationsKmsPrivateKeyToStaticPublicKey, + KmsEcdhStaticConfigurationsKmsPublicKeyDiscovery, + KmsPrivateKeyToStaticPublicKeyInput, + KmsPublicKeyDiscoveryInput, +) +from aws_cryptographic_material_providers.mpl.references import IKeyring +from aws_cryptography_primitives.smithygenerated.aws_cryptography_primitives.models import ECDHCurveSpec +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + +from .utility import ( + EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME, + EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME, + load_public_key_bytes, +) + + +def kms_ecdh_keyring_client_example( + ddb_table_name: str, + ecc_key_arn: str, + ecc_public_key_sender_filename: str = EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME, + ecc_public_key_recipient_filename: str = EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME, +): + """ + Demonstrate using a KMS ECDH keyring with static keys. + + This example takes in the sender's KMS ECC key ARN, the sender's public key, + the recipient's public key, and the algorithm definition where the ECC keys lie. 
+ The ecc_key_arn parameter takes in the sender's KMS ECC key ARN, + the ecc_public_key_sender_filename parameter takes in the sender's public key that corresponds to the + ecc_key_arn, the ecc_public_key_recipient_filename parameter takes in the recipient's public key, + and the Curve Specification where the keys lie. + + Both public keys MUST be UTF8 PEM-encoded X.509 public key, also known as SubjectPublicKeyInfo (SPKI) + + This example encrypts a test item using the provided ECC keys and puts the + encrypted item to the provided DynamoDb table. Then, it gets the + item from the table and decrypts it. + + Running this example requires access to the DDB Table whose name + is provided in CLI arguments. + This table must be configured with the following + primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) + This example also requires access to a KMS ECC key. + Our tests provide a KMS ECC Key ARN that anyone can use, but you + can also provide your own KMS ECC key. + To use your own KMS ECC key, you must have either: + - Its public key downloaded in a UTF-8 encoded PEM file + - kms:GetPublicKey permissions on that key. + If you do not have the public key downloaded, running this example + through its main method will download the public key for you + by calling kms:GetPublicKey. + You must also have kms:DeriveSharedSecret permissions on the KMS ECC key. + This example also requires a recipient ECC Public Key that lies on the same + curve as the sender public key. This examples uses another distinct + KMS ECC Public Key, it does not have to be a KMS key; it can be a + valid SubjectPublicKeyInfo (SPKI) Public Key. + + :param ddb_table_name: The name of the DynamoDB table + :param ecc_key_arn: The ARN of the KMS ECC key to use + :param ecc_public_key_sender_filename: The filename containing the sender's public key + :param ecc_public_key_recipient_filename: The filename containing the recipient's public key + """ + # Load UTF-8 encoded public key PEM files as DER encoded bytes. + # You may provide your own PEM files to use here. If you provide this, it MUST + # be a key on curve P256. + # If not, the main method in this class will call + # the KMS ECC key, retrieve its public key, and store it + # in a PEM file for example use. + public_key_recipient_bytes = load_public_key_bytes(ecc_public_key_recipient_filename) + public_key_sender_bytes = load_public_key_bytes(ecc_public_key_sender_filename) + + # Create a KMS ECDH keyring. + # This keyring uses the KmsPrivateKeyToStaticPublicKey configuration. This configuration calls for both of + # the keys to be on the same curve (P256, P384, P521). + # On encrypt, the keyring calls AWS KMS to derive the shared secret from the sender's KMS ECC Key ARN + # and the recipient's public key. + # For this example, on decrypt, the keyring calls AWS KMS to derive the shared secret from the + # sender's KMS ECC Key ARN and the recipient's public key; + # however, on decrypt, the recipient can construct a keyring such that the shared secret is calculated with + # the recipient's private key and the sender's public key. In both scenarios the shared secret will be the same. + # For more information on this configuration see: + # https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-kms-ecdh-keyring.html#kms-ecdh-create + # The DynamoDb encryption client uses this keyring to encrypt and decrypt items. 
+    # This keyring takes in:
+    #   - kms_client
+    #   - sender_kms_identifier: Must be an ARN representing a KMS ECC key meant for KeyAgreement
+    #   - curve_spec: The curve name where the public keys lie
+    #   - sender_public_key: The sender's DER-encoded X.509 public key (SubjectPublicKeyInfo),
+    #     i.e. the public half of the key identified by sender_kms_identifier
+    #   - recipient_public_key: The recipient's DER-encoded X.509 public key (SubjectPublicKeyInfo)
+    mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig())
+
+    keyring_input = CreateAwsKmsEcdhKeyringInput(
+        kms_client=boto3.client("kms"),
+        curve_spec=ECDHCurveSpec.ECC_NIST_P256,
+        key_agreement_scheme=KmsEcdhStaticConfigurationsKmsPrivateKeyToStaticPublicKey(
+            KmsPrivateKeyToStaticPublicKeyInput(
+                sender_kms_identifier=ecc_key_arn,
+                # Must be a DER-encoded X.509 public key
+                sender_public_key=public_key_sender_bytes,
+                # Must be a DER-encoded X.509 public key
+                recipient_public_key=public_key_recipient_bytes,
+            )
+        ),
+    )
+
+    kms_ecdh_keyring = mat_prov.create_aws_kms_ecdh_keyring(input=keyring_input)
+
+    put_get_item_with_keyring(kms_ecdh_keyring, ddb_table_name)
+
+
+def kms_ecdh_discovery_client_example(ddb_table_name: str, ecc_recipient_key_arn: str):
+    """
+    Demonstrate using a KMS ECDH keyring with discovery.
+
+    This example takes in the recipient's KMS ECC key ARN via
+    the ecc_recipient_key_arn parameter.
+
+    This example attempts to decrypt a test item using the provided ecc_recipient_key_arn.
+    It does so by checking whether the message header contains the recipient's public key.
+
+    Running this example requires access to the DDB Table whose name
+    is provided in CLI arguments.
+    This table must be configured with the following
+    primary key configuration:
+      - Partition key is named "partition_key" with type (S)
+      - Sort key is named "sort_key" with type (S)
+    This example also requires access to a KMS ECC key.
+    Our tests provide a KMS ECC Key ARN that anyone can use, but you
+    can also provide your own KMS ECC key.
+    To use your own KMS ECC key, you must have:
+      - kms:GetPublicKey permissions on that key.
+    This example will call kms:GetPublicKey on keyring creation.
+    You must also have kms:DeriveSharedSecret permissions on the KMS ECC key.
+
+    :param ddb_table_name: The name of the DynamoDB table
+    :param ecc_recipient_key_arn: The ARN of the recipient's KMS ECC key
+    """
+    # Create a KMS ECDH keyring.
+    # This keyring uses the KmsPublicKeyDiscovery configuration.
+    # On encrypt, the keyring will fail, as it is not allowed to encrypt data under this configuration.
+    # On decrypt, the keyring will check if its corresponding public key is stored in the message header.
+    # It will then call AWS KMS to derive the shared secret from the recipient's KMS ECC Key ARN
+    # and the sender's public key.
+    # For more information on this configuration see:
+    # https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-kms-ecdh-keyring.html#kms-ecdh-discovery
+    # The DynamoDb encryption client uses this keyring to decrypt items.
+ # This keyring takes in: + # - kms_client + # - recipient_kms_identifier: Must be an ARN representing a KMS ECC key meant for KeyAgreement + # - curve_spec: The curve name where the public keys lie + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + keyring_input = CreateAwsKmsEcdhKeyringInput( + kms_client=boto3.client("kms"), + curve_spec=ECDHCurveSpec.ECC_NIST_P256, + key_agreement_scheme=KmsEcdhStaticConfigurationsKmsPublicKeyDiscovery( + KmsPublicKeyDiscoveryInput(recipient_kms_identifier=ecc_recipient_key_arn) + ), + ) + + kms_ecdh_keyring = mat_prov.create_aws_kms_ecdh_keyring(input=keyring_input) + + get_item_with_keyring(kms_ecdh_keyring, ddb_table_name) + + +def get_item_with_keyring(kms_ecdh_keyring: IKeyring, ddb_table_name: str): + """ + Demonstrate get operation with a KMS ECDH keyring. + + :param kms_ecdh_keyring: The KMS ECDH keyring to use + :param ddb_table_name: The name of the DynamoDB table + """ + # Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. 
+    unsign_attr_prefix = ":"
+
+    # Create the DynamoDb Encryption configuration for the table we will be writing to.
+    # Note: To use the KMS ECDH keyring, your table config must specify an algorithmSuite
+    # that does not use asymmetric signing.
+    table_config = DynamoDbTableEncryptionConfig(
+        logical_table_name=ddb_table_name,
+        partition_key_name="partition_key",
+        sort_key_name="sort_key",
+        attribute_actions_on_encrypt=attribute_actions,
+        keyring=kms_ecdh_keyring,
+        allowed_unsigned_attribute_prefix=unsign_attr_prefix,
+        # Specify algorithmSuite without asymmetric signing here
+        # As of v3.0.0, the only supported algorithmSuite without asymmetric signing is
+        # ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_SYMSIG_HMAC_SHA384.
+        algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_SYMSIG_HMAC_SHA384,
+    )
+
+    table_configs = {ddb_table_name: table_config}
+    tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs)
+
+    # Create the EncryptedClient
+    ddb_client = boto3.client("dynamodb")
+    encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config)
+
+    # Get the item back from our table using the client.
+    # The client will decrypt the item client-side using the ECDH keyring
+    # and return the original item.
+    key_to_get = {"partition_key": {"S": "awsKmsEcdhKeyringItem"}, "sort_key": {"N": "0"}}
+
+    get_response = encrypted_ddb_client.get_item(TableName=ddb_table_name, Key=key_to_get)
+
+    # Demonstrate that GetItem succeeded and returned the decrypted item
+    assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200
+    returned_item = get_response["Item"]
+    assert returned_item["sensitive_data"]["S"] == "encrypt and sign me!"
+
+
+def put_get_item_with_keyring(aws_kms_ecdh_keyring: IKeyring, ddb_table_name: str):
+    """
+    Demonstrate put and get operations with a KMS ECDH keyring.
+
+    :param aws_kms_ecdh_keyring: The KMS ECDH keyring to use
+    :param ddb_table_name: The name of the DynamoDB table
+    """
+    # Configure which attributes are encrypted and/or signed when writing new items.
+    # For each attribute that may exist on the items we plan to write to our DynamoDbTable,
+    # we must explicitly configure how they should be treated during item encryption:
+    #   - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature
+    #   - SIGN_ONLY: The attribute is not encrypted, but is still included in the signature
+    #   - DO_NOTHING: The attribute is not encrypted and not included in the signature
+    attribute_actions = {
+        "partition_key": CryptoAction.SIGN_ONLY,  # Our partition attribute must be SIGN_ONLY
+        "sort_key": CryptoAction.SIGN_ONLY,  # Our sort attribute must be SIGN_ONLY
+        "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN,
+    }
+
+    # Configure which attributes we expect to be included in the signature
+    # when reading items. There are two options for configuring this:
+    #
+    # - (Recommended) Configure `allowed_unsigned_attribute_prefix`:
+    #   When defining your DynamoDb schema and deciding on attribute names,
+    #   choose a distinguishing prefix (such as ":") for all attributes that
+    #   you do not want to include in the signature.
+    #   This has two main benefits:
+    #   - It is easier to reason about the security and authenticity of data within your item
+    #     when all unauthenticated data is easily distinguishable by its attribute name.
+ # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # Create the DynamoDb Encryption configuration for the table we will be writing to. + # Note: To use the KMS RSA keyring, your table config must specify an algorithmSuite + # that does not use asymmetric signing. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions, + keyring=aws_kms_ecdh_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + # Specify algorithmSuite without asymmetric signing here + # As of v3.0.0, the only supported algorithmSuite without asymmetric signing is + # ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_SYMSIG_HMAC_SHA384. + algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_SYMSIG_HMAC_SHA384, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + item = { + "partition_key": {"S": "awsKmsEcdhKeyringItem"}, + "sort_key": {"N": "0"}, + "sensitive_data": {"S": "encrypt and sign me!"}, + } + + put_response = encrypted_ddb_client.put_item(TableName=ddb_table_name, Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Get the item back from our table using the client. + # The client will decrypt the item client-side using the RSA keyring + # and return the original item. + key_to_get = {"partition_key": {"S": "awsKmsEcdhKeyringItem"}, "sort_key": {"N": "0"}} + + get_response = encrypted_ddb_client.get_item(TableName=ddb_table_name, Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"]["S"] == "encrypt and sign me!" 
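The docstrings above note that running these examples through a main method can download and cache the public key PEM files first. A minimal sketch of such a driver, using the utility module added in this change (the table name and key ARNs are placeholders, and the absolute import paths assume the examples' src/ directory is on the path):

    # Illustrative driver only; all identifiers below are placeholders.
    from keyring.kms_ecdh_keyring_example.utility import (
        EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME,
        EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME,
        should_get_new_public_keys,
        write_public_key_pem_for_ecc_key,
    )
    from keyring.kms_ecdh_keyring_example.with_encrypted_client import kms_ecdh_keyring_client_example

    ddb_table_name = "my-example-table"  # placeholder table with the partition_key/sort_key schema
    sender_ecc_key_arn = "arn:aws:kms:us-west-2:111122223333:key/sender-key-id"  # placeholder
    recipient_ecc_key_arn = "arn:aws:kms:us-west-2:111122223333:key/recipient-key-id"  # placeholder

    # Fetch and cache the public keys as PEM files if neither file exists yet.
    if should_get_new_public_keys():
        write_public_key_pem_for_ecc_key(sender_ecc_key_arn, EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME)
        write_public_key_pem_for_ecc_key(recipient_ecc_key_arn, EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME)

    # Run the static-configuration example with the cached PEM files.
    kms_ecdh_keyring_client_example(ddb_table_name, sender_ecc_key_arn)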
diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_ecdh_keyring_example/with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_ecdh_keyring_example/with_encrypted_table.py new file mode 100644 index 000000000..3749a2e87 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_ecdh_keyring_example/with_encrypted_table.py @@ -0,0 +1,389 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Examples demonstrating DynamoDb Encryption using an AWS KMS ECDH Keyring with EncryptedTable. + +These examples set up DynamoDb Encryption for the AWS SDK Table resource +using the AWS KMS ECDH Keyring. This keyring, depending on its KeyAgreement scheme, +takes in the sender's KMS ECC Key ARN, and the recipient's ECC Public Key to derive a shared secret. +The keyring uses the shared secret to derive a data key to protect the +data keys that encrypt and decrypt DynamoDb table items. + +Running these examples require access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" + + +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsEcdhKeyringInput, + DBEAlgorithmSuiteId, + KmsEcdhStaticConfigurationsKmsPrivateKeyToStaticPublicKey, + KmsEcdhStaticConfigurationsKmsPublicKeyDiscovery, + KmsPrivateKeyToStaticPublicKeyInput, + KmsPublicKeyDiscoveryInput, +) +from aws_cryptographic_material_providers.mpl.references import IKeyring +from aws_cryptography_primitives.smithygenerated.aws_cryptography_primitives.models import ECDHCurveSpec +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + +from .utility import ( + EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME, + EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME, + load_public_key_bytes, +) + + +def kms_ecdh_keyring_table_example( + ddb_table_name: str, + ecc_key_arn: str, + ecc_public_key_sender_filename: str = EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME, + ecc_public_key_recipient_filename: str = EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME, +): + """ + Demonstrate using a KMS ECDH keyring with static keys. + + This example takes in the sender's KMS ECC key ARN, the sender's public key, + the recipient's public key, and the algorithm definition where the ECC keys lie. + The ecc_key_arn parameter takes in the sender's KMS ECC key ARN, + the ecc_public_key_sender_filename parameter takes in the sender's public key that corresponds to the + ecc_key_arn, the ecc_public_key_recipient_filename parameter takes in the recipient's public key, + and the Curve Specification where the keys lie. + + Both public keys MUST be UTF8 PEM-encoded X.509 public key, also known as SubjectPublicKeyInfo (SPKI) + + This example encrypts a test item using the provided ECC keys and puts the + encrypted item to the provided DynamoDb table. Then, it gets the + item from the table and decrypts it. 
+
+    Running this example requires access to the DDB Table whose name
+    is provided in CLI arguments.
+    This table must be configured with the following
+    primary key configuration:
+      - Partition key is named "partition_key" with type (S)
+      - Sort key is named "sort_key" with type (S)
+    This example also requires access to a KMS ECC key.
+    Our tests provide a KMS ECC Key ARN that anyone can use, but you
+    can also provide your own KMS ECC key.
+    To use your own KMS ECC key, you must have either:
+      - Its public key downloaded in a UTF-8 encoded PEM file
+      - kms:GetPublicKey permissions on that key.
+    If you do not have the public key downloaded, running this example
+    through its main method will download the public key for you
+    by calling kms:GetPublicKey.
+    You must also have kms:DeriveSharedSecret permissions on the KMS ECC key.
+    This example also requires a recipient ECC Public Key that lies on the same
+    curve as the sender public key. This example uses another distinct
+    KMS ECC Public Key, but the recipient's key does not have to be a KMS key;
+    it can be any valid SubjectPublicKeyInfo (SPKI) Public Key.
+
+    :param ddb_table_name: The name of the DynamoDB table
+    :param ecc_key_arn: The ARN of the KMS ECC key to use
+    :param ecc_public_key_sender_filename: The filename containing the sender's public key
+    :param ecc_public_key_recipient_filename: The filename containing the recipient's public key
+    """
+    # Load UTF-8 encoded public key PEM files as DER encoded bytes.
+    # You may provide your own PEM files to use here. If you do, they MUST
+    # contain keys on curve P256.
+    # If not, the main method in this module will call
+    # kms:GetPublicKey on the KMS ECC key, retrieve its public key, and store it
+    # in a PEM file for example use.
+    public_key_recipient_bytes = load_public_key_bytes(ecc_public_key_recipient_filename)
+    public_key_sender_bytes = load_public_key_bytes(ecc_public_key_sender_filename)
+
+    # Create a KMS ECDH keyring.
+    # This keyring uses the KmsPrivateKeyToStaticPublicKey configuration. This configuration calls for both of
+    # the keys to be on the same curve (P256, P384, P521).
+    # On encrypt, the keyring calls AWS KMS to derive the shared secret from the sender's KMS ECC Key ARN
+    # and the recipient's public key.
+    # For this example, on decrypt, the keyring calls AWS KMS to derive the shared secret from the
+    # sender's KMS ECC Key ARN and the recipient's public key;
+    # however, on decrypt, the recipient can construct a keyring such that the shared secret is calculated with
+    # the recipient's private key and the sender's public key. In both scenarios the shared secret will be the same.
+    # For more information on this configuration see:
+    # https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-kms-ecdh-keyring.html#kms-ecdh-create
+    # The DynamoDb encryption client uses this keyring to encrypt and decrypt items.
+    # This keyring takes in:
+    #   - kms_client
+    #   - kms_key_id: Must be an ARN representing a KMS ECC key meant for KeyAgreement
+    #   - curve_spec: The curve name where the public keys lie
+    #   - sender_public_key: A ByteBuffer of the DER-encoded public
+    #     key for the key passed into kms_key_id
+    #   - recipient_public_key: A ByteBuffer of the DER-encoded public
+    #     key for the recipient
+    mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig())
+
+    keyring_input = CreateAwsKmsEcdhKeyringInput(
+        kms_client=boto3.client("kms"),
+        curve_spec=ECDHCurveSpec.ECC_NIST_P256,
+        key_agreement_scheme=KmsEcdhStaticConfigurationsKmsPrivateKeyToStaticPublicKey(
+            KmsPrivateKeyToStaticPublicKeyInput(
+                sender_kms_identifier=ecc_key_arn,
+                # Must be a DER-encoded X.509 public key
+                sender_public_key=public_key_sender_bytes,
+                # Must be a DER-encoded X.509 public key
+                recipient_public_key=public_key_recipient_bytes,
+            )
+        ),
+    )
+
+    kms_ecdh_keyring = mat_prov.create_aws_kms_ecdh_keyring(input=keyring_input)
+
+    put_get_item_with_keyring(kms_ecdh_keyring, ddb_table_name)
+
+
+def kms_ecdh_discovery_table_example(ddb_table_name: str, ecc_recipient_key_arn: str):
+    """
+    Demonstrate using a KMS ECDH keyring with discovery.
+
+    This example takes in the recipient's KMS ECC key ARN via
+    the ecc_recipient_key_arn parameter.
+
+    This example attempts to decrypt a test item using the provided ecc_recipient_key_arn.
+    It does so by checking whether the message header contains the recipient's public key.
+
+    Running this example requires access to the DDB Table whose name
+    is provided in CLI arguments.
+    This table must be configured with the following
+    primary key configuration:
+      - Partition key is named "partition_key" with type (S)
+      - Sort key is named "sort_key" with type (S)
+    This example also requires access to a KMS ECC key.
+    Our tests provide a KMS ECC Key ARN that anyone can use, but you
+    can also provide your own KMS ECC key.
+    To use your own KMS ECC key, you must have:
+      - kms:GetPublicKey permissions on that key.
+    This example will call kms:GetPublicKey on keyring creation.
+    You must also have kms:DeriveSharedSecret permissions on the KMS ECC key.
+
+    :param ddb_table_name: The name of the DynamoDB table
+    :param ecc_recipient_key_arn: The ARN of the recipient's KMS ECC key
+    """
+    # Create a KMS ECDH keyring.
+    # This keyring uses the KmsPublicKeyDiscovery configuration.
+    # On encrypt, the keyring will fail as it is not allowed to encrypt data under this configuration.
+    # On decrypt, the keyring will check if its corresponding public key is stored in the message header.
+    # It will call AWS KMS to derive the shared secret from the recipient's KMS ECC Key ARN and the sender's public key.
+    # For more information on this configuration see:
+    # https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-kms-ecdh-keyring.html#kms-ecdh-discovery
+    # The DynamoDb encryption client uses this keyring to decrypt items.
+ # This keyring takes in: + # - kms_client + # - recipient_kms_identifier: Must be an ARN representing a KMS ECC key meant for KeyAgreement + # - curve_spec: The curve name where the public keys lie + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + keyring_input = CreateAwsKmsEcdhKeyringInput( + kms_client=boto3.client("kms"), + curve_spec=ECDHCurveSpec.ECC_NIST_P256, + key_agreement_scheme=KmsEcdhStaticConfigurationsKmsPublicKeyDiscovery( + KmsPublicKeyDiscoveryInput(recipient_kms_identifier=ecc_recipient_key_arn) + ), + ) + + kms_ecdh_keyring = mat_prov.create_aws_kms_ecdh_keyring(input=keyring_input) + + get_item_with_keyring(kms_ecdh_keyring, ddb_table_name) + + +def get_item_with_keyring(kms_ecdh_keyring: IKeyring, ddb_table_name: str): + """ + Demonstrate get operation with a KMS ECDH keyring. + + :param kms_ecdh_keyring: The KMS ECDH keyring to use + :param ddb_table_name: The name of the DynamoDB table + """ + # Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. 
+ unsign_attr_prefix = ":" + + # Create the DynamoDb Encryption configuration for the table we will be writing to. + # Note: To use the KMS RSA keyring, your table config must specify an algorithmSuite + # that does not use asymmetric signing. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions, + keyring=kms_ecdh_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + # Specify algorithmSuite without asymmetric signing here + # As of v3.0.0, the only supported algorithmSuite without asymmetric signing is + # ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_SYMSIG_HMAC_SHA384. + algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_SYMSIG_HMAC_SHA384, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_table = EncryptedTable( + table=ddb_table, + encryption_config=tables_config, + ) + + # Get the item back from our table using the above table. + # The table will decrypt the item client-side using the ECDH keyring + # and return the original item. + key_to_get = {"partition_key": "awsKmsEcdhKeyringItem", "sort_key": 0} + + get_response = encrypted_table.get_item(Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"] == "encrypt and sign me!" + + +def put_get_item_with_keyring(aws_kms_ecdh_keyring: IKeyring, ddb_table_name: str): + """ + Demonstrate put and get operations with a KMS ECDH keyring. + + :param aws_kms_ecdh_keyring: The KMS ECDH keyring to use + :param ddb_table_name: The name of the DynamoDB table + """ + # Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. 
+ # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # Create the DynamoDb Encryption configuration for the table we will be writing to. + # Note: To use the KMS RSA keyring, your table config must specify an algorithmSuite + # that does not use asymmetric signing. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions, + keyring=aws_kms_ecdh_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + # Specify algorithmSuite without asymmetric signing here + # As of v3.0.0, the only supported algorithmSuite without asymmetric signing is + # ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_SYMSIG_HMAC_SHA384. + algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_SYMSIG_HMAC_SHA384, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_table = EncryptedTable( + table=ddb_table, + encryption_config=tables_config, + ) + + # Put an item into our table using the above table. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + item = { + "partition_key": "awsKmsEcdhKeyringItem", + "sort_key": 0, + "sensitive_data": "encrypt and sign me!", + } + + put_response = encrypted_table.put_item(Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Get the item back from our table using the table + # The table will decrypt the item client-side using the RSA keyring + # and return the original item. + key_to_get = {"partition_key": "awsKmsEcdhKeyringItem", "sort_key": 0} + + get_response = encrypted_table.get_item(Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"] == "encrypt and sign me!" 
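For orientation, here is a minimal sketch of how these two table examples might be driven together, consistent with the docstrings above stating that the table name comes from CLI arguments. The import path, the argument order, and the use of sys.argv are assumptions for illustration only; they are not part of the example code.

# Hypothetical driver; module path and argument layout are illustrative assumptions.
import sys

from kms_ecdh_keyring_example.with_encrypted_table import (
    kms_ecdh_discovery_table_example,
    kms_ecdh_keyring_table_example,
)

if __name__ == "__main__":
    # Assumed arguments: <ddb_table_name> <ecc_key_arn> <ecc_recipient_key_arn>
    ddb_table_name, ecc_key_arn, ecc_recipient_key_arn = sys.argv[1:4]
    # Write and read back an item with the static (KmsPrivateKeyToStaticPublicKey) configuration.
    kms_ecdh_keyring_table_example(ddb_table_name, ecc_key_arn)
    # Decrypt the same item with the KmsPublicKeyDiscovery configuration.
    kms_ecdh_discovery_table_example(ddb_table_name, ecc_recipient_key_arn)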
diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_rsa_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_rsa_keyring_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_rsa_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_rsa_keyring_example/encryption_config.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_rsa_keyring_example/encryption_config.py new file mode 100644 index 000000000..e960b1897 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_rsa_keyring_example/encryption_config.py @@ -0,0 +1,114 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Configuration module for KMS RSA keyring encryption setup. + +This module provides the common encryption configuration used by both +EncryptedClient and EncryptedTable examples. +""" + +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsRsaKeyringInput, + DBEAlgorithmSuiteId, +) +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def create_encryption_config( + ddb_table_name: str, rsa_key_arn: str, public_key_utf8_encoded: bytes +) -> DynamoDbTablesEncryptionConfig: + """ + Create the encryption configuration for DynamoDB encryption using KMS RSA keyring. + + :param ddb_table_name: The name of the DynamoDB table + :param rsa_key_arn: ARN of the KMS RSA key + :param public_key_utf8_encoded: UTF-8 encoded public key in PEM format + :return: The DynamoDB tables encryption configuration + """ + # 1. Create a KMS RSA keyring. + # This keyring takes in: + # - kms_client + # - kms_key_id: Must be an ARN representing a KMS RSA key + # - public_key: A ByteBuffer of a UTF-8 encoded PEM file representing the public + # key for the key passed into kms_key_id + # - encryption_algorithm: Must be either RSAES_OAEP_SHA_256 or RSAES_OAEP_SHA_1 + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + keyring_input = CreateAwsKmsRsaKeyringInput( + kms_key_id=rsa_key_arn, + kms_client=boto3.client("kms"), + public_key=public_key_utf8_encoded, + encryption_algorithm="RSAES_OAEP_SHA_256", + ) + + kms_rsa_keyring = mat_prov.create_aws_kms_rsa_keyring(input=keyring_input) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. 
+ # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # 4. Create the DynamoDb Encryption configuration for the table we will be writing to. + # Note: To use the KMS RSA keyring, your table config must specify an algorithmSuite + # that does not use asymmetric signing. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions, + keyring=kms_rsa_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + # Specify algorithmSuite without asymmetric signing here + # As of v3.0.0, the only supported algorithmSuite without asymmetric signing is + # ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_SYMSIG_HMAC_SHA384. 
+ algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_SYMSIG_HMAC_SHA384, + ) + + table_configs = {ddb_table_name: table_config} + return DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_rsa_keyring_example/utility.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_rsa_keyring_example/utility.py new file mode 100644 index 000000000..3e9302073 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_rsa_keyring_example/utility.py @@ -0,0 +1,62 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Utility functions for the KMS RSA keyring example.""" + +import os + +import boto3 +from cryptography.hazmat.primitives import serialization + +DEFAULT_EXAMPLE_RSA_PUBLIC_KEY_FILENAME = "KmsRsaKeyringExamplePublicKey.pem" + + +def should_get_new_public_key(rsa_public_key_filename: str = DEFAULT_EXAMPLE_RSA_PUBLIC_KEY_FILENAME) -> bool: + """ + Check if we need to get a new public key. + + :param rsa_public_key_filename: Path to the public key PEM file + :return: True if we need to get a new public key, False otherwise + """ + # Check if a public key file already exists + public_key_file = os.path.exists(rsa_public_key_filename) + + # If a public key file already exists: do not overwrite existing file + if public_key_file: + return False + + # If file is not present, generate a new key pair + return True + + +def write_public_key_pem_for_rsa_key( + rsa_key_arn: str, rsa_public_key_filename: str = DEFAULT_EXAMPLE_RSA_PUBLIC_KEY_FILENAME +): + """ + Get the public key from KMS and write it to a PEM file. + + :param rsa_key_arn: The ARN of the KMS RSA key + :param rsa_public_key_filename: Path to write the public key PEM file + """ + # Safety check: Validate file is not present + if os.path.exists(rsa_public_key_filename): + raise FileExistsError("getRsaPublicKey will not overwrite existing PEM files") + + # This code will call KMS to get the public key for the KMS RSA key. + # You must have kms:GetPublicKey permissions on the key for this to succeed. + # The public key will be written to the file EXAMPLE_RSA_PUBLIC_KEY_FILENAME. + kms_client = boto3.client("kms") + response = kms_client.get_public_key(KeyId=rsa_key_arn) + public_key_bytes = response["PublicKey"] + + # Convert the public key to PEM format + public_key = serialization.load_der_public_key(public_key_bytes) + pem_data = public_key.public_bytes( + encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + + # Write the PEM file + try: + with open(rsa_public_key_filename, "wb") as f: + f.write(pem_data) + except IOError as e: + raise OSError("IOError while writing public key PEM") from e diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_rsa_keyring_example/with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_rsa_keyring_example/with_encrypted_client.py new file mode 100644 index 000000000..ab4b15332 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_rsa_keyring_example/with_encrypted_client.py @@ -0,0 +1,92 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDb Encryption using a KMS RSA Keyring with EncryptedClient. + +The KMS RSA Keyring uses a KMS RSA key pair to encrypt and decrypt records. 
The client +uses the downloaded public key to encrypt items it adds to the table. The keyring +uses the private key to decrypt existing table items it retrieves by calling +KMS' decrypt API. + +Running this example requires access to the DDB Table whose name is provided +in CLI arguments. This table must be configured with the following primary key +configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) + +The example also requires access to a KMS RSA key. Our tests provide a KMS RSA +ARN that anyone can use, but you can also provide your own KMS RSA key. +To use your own KMS RSA key, you must have either: + - Its public key downloaded in a UTF-8 encoded PEM file + - kms:GetPublicKey permissions on that key + +If you do not have the public key downloaded, running this example through its +main method will download the public key for you by calling kms:GetPublicKey. +You must also have kms:Decrypt permissions on the KMS RSA key. +""" + +import boto3 +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient + +from .encryption_config import create_encryption_config +from .utility import DEFAULT_EXAMPLE_RSA_PUBLIC_KEY_FILENAME + + +def kms_rsa_keyring_client_example( + ddb_table_name: str, rsa_key_arn: str, rsa_public_key_filename: str = DEFAULT_EXAMPLE_RSA_PUBLIC_KEY_FILENAME +): + """ + Create a KMS RSA keyring and use it to encrypt/decrypt DynamoDB items with EncryptedClient. + + :param ddb_table_name: The name of the DynamoDB table + :param rsa_key_arn: ARN of the KMS RSA key + :param rsa_public_key_filename: Path to the public key PEM file + """ + # 1. Load UTF-8 encoded public key PEM file. + # You may have an RSA public key file already defined. + # If not, the main method in this class will call + # the KMS RSA key, retrieve its public key, and store it + # in a PEM file for example use. + try: + with open(rsa_public_key_filename, "rb") as f: + public_key_utf8_encoded = f.read() + except IOError as e: + raise OSError("IOError while reading public key from file") from e + + # 2. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See encryption_config.py in this directory for detailed steps on the encryption configuration. + tables_config = create_encryption_config( + ddb_table_name=ddb_table_name, + rsa_key_arn=rsa_key_arn, + public_key_utf8_encoded=public_key_utf8_encoded, + ) + + # 3. Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # 4. Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side using the KMS RSA keyring. + item = { + "partition_key": {"S": "awsKmsRsaKeyringItem"}, + "sort_key": {"N": "0"}, + "sensitive_data": {"S": "encrypt and sign me!"}, + } + + put_response = encrypted_ddb_client.put_item(TableName=ddb_table_name, Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 5. Get the item back from our table using the client. + # The client will decrypt the item client-side using the RSA keyring + # and return the original item. 
+ key_to_get = {"partition_key": {"S": "awsKmsRsaKeyringItem"}, "sort_key": {"N": "0"}} + + get_response = encrypted_ddb_client.get_item(TableName=ddb_table_name, Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"]["S"] == "encrypt and sign me!" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_rsa_keyring_example/with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_rsa_keyring_example/with_encrypted_table.py new file mode 100644 index 000000000..2d71ec9fa --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/kms_rsa_keyring_example/with_encrypted_table.py @@ -0,0 +1,95 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDb Encryption using a KMS RSA Keyring with EncryptedTable. + +The KMS RSA Keyring uses a KMS RSA key pair to encrypt and decrypt records. The client +uses the downloaded public key to encrypt items it adds to the table. The keyring +uses the private key to decrypt existing table items it retrieves by calling +KMS' decrypt API. + +Running this example requires access to the DDB Table whose name is provided +in CLI arguments. This table must be configured with the following primary key +configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) + +The example also requires access to a KMS RSA key. Our tests provide a KMS RSA +ARN that anyone can use, but you can also provide your own KMS RSA key. +To use your own KMS RSA key, you must have either: + - Its public key downloaded in a UTF-8 encoded PEM file + - kms:GetPublicKey permissions on that key + +If you do not have the public key downloaded, running this example through its +main method will download the public key for you by calling kms:GetPublicKey. +You must also have kms:Decrypt permissions on the KMS RSA key. +""" + +import boto3 +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable + +from .encryption_config import create_encryption_config +from .utility import DEFAULT_EXAMPLE_RSA_PUBLIC_KEY_FILENAME + + +def kms_rsa_keyring_table_example( + ddb_table_name: str, rsa_key_arn: str, rsa_public_key_filename: str = DEFAULT_EXAMPLE_RSA_PUBLIC_KEY_FILENAME +): + """ + Create a KMS RSA keyring and use it to encrypt/decrypt DynamoDB items with EncryptedTable. + + :param ddb_table_name: The name of the DynamoDB table + :param rsa_key_arn: ARN of the KMS RSA key + :param rsa_public_key_filename: Path to the public key PEM file + """ + # 1. Load UTF-8 encoded public key PEM file. + # You may have an RSA public key file already defined. + # If not, the main method in this class will call + # the KMS RSA key, retrieve its public key, and store it + # in a PEM file for example use. + try: + with open(rsa_public_key_filename, "rb") as f: + public_key_utf8_encoded = f.read() + except IOError as e: + raise OSError("IOError while reading public key from file") from e + + # 2. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See encryption_config.py in this directory for detailed steps on the encryption configuration. 
+ tables_config = create_encryption_config( + ddb_table_name=ddb_table_name, + rsa_key_arn=rsa_key_arn, + public_key_utf8_encoded=public_key_utf8_encoded, + ) + + # 3. Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_table = EncryptedTable( + table=ddb_table, + encryption_config=tables_config, + ) + + # 4. Put an item into our table using the above table. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side using the KMS RSA keyring. + item = { + "partition_key": "awsKmsRsaKeyringItem", + "sort_key": 0, + "sensitive_data": "encrypt and sign me!", + } + + put_response = encrypted_table.put_item(Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 5. Get the item back from our table using the table. + # The table will decrypt the item client-side using the RSA keyring + # and return the original item. + key_to_get = {"partition_key": "awsKmsRsaKeyringItem", "sort_key": 0} + + get_response = encrypted_table.get_item(Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"] == "encrypt and sign me!" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_discovery_multi_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_discovery_multi_keyring_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_discovery_multi_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_discovery_multi_keyring_example/with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_discovery_multi_keyring_example/with_encrypted_client.py new file mode 100644 index 000000000..0b3a3465f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_discovery_multi_keyring_example/with_encrypted_client.py @@ -0,0 +1,188 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDb Encryption using a MRK discovery multi-keyring with EncryptedClient. + +A discovery keyring is not provided with any wrapping keys; instead, it recognizes +the KMS key that was used to encrypt a data key, and asks KMS to decrypt with that +KMS key. Discovery keyrings cannot be used to encrypt data. + +For more information on discovery keyrings, see: +https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-kms-keyring.html#kms-keyring-discovery + +The example encrypts an item using an MRK multi-keyring and puts the encrypted +item to the configured DynamoDb table. Then, it gets the item from the table and +decrypts it using the discovery keyring. + +Running this example requires access to the DDB Table whose name is provided in +CLI arguments. 
This table must be configured with the following primary key +configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" +from typing import List + +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsMrkDiscoveryMultiKeyringInput, + CreateAwsKmsMrkMultiKeyringInput, + DiscoveryFilter, +) +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def multi_mrk_discovery_keyring_client_example( + ddb_table_name: str, key_arn: str, account_ids: List[str], regions: List[str] +): + """ + Demonstrate using a MRK discovery multi-keyring with EncryptedClient. + + :param ddb_table_name: The name of the DynamoDB table + :param key_arn: The ARN of the KMS key to use for encryption + :param account_ids: List of AWS account IDs for discovery filter + :param regions: List of AWS regions for discovery keyring + """ + # 1. Create a single MRK multi-keyring using the key arn. + # Although this example demonstrates use of the MRK discovery multi-keyring, + # a discovery keyring cannot be used to encrypt. So we will need to construct + # a non-discovery keyring for this example to encrypt. For more information on MRK + # multi-keyrings, see the MultiMrkKeyringExample in this directory. + # Though this is an "MRK multi-keyring", we do not need to provide multiple keys, + # and can use single-region KMS keys. We will provide a single key here; this + # can be either an MRK or a single-region key. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + encrypt_keyring = mat_prov.create_aws_kms_mrk_multi_keyring( + input=CreateAwsKmsMrkMultiKeyringInput(generator=key_arn) + ) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. 
+ # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions_on_encrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # 4. Create the DynamoDb Encryption configuration for the table we will be writing to. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=encrypt_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 5. Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # 6. Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side using the MRK multi-keyring. + item = { + "partition_key": {"S": "awsKmsMrkDiscoveryMultiKeyringItem"}, + "sort_key": {"N": "0"}, + "sensitive_data": {"S": "encrypt and sign me!"}, + } + + put_response = encrypted_ddb_client.put_item(TableName=ddb_table_name, Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 7. Construct a discovery filter. + # A discovery filter limits the set of encrypted data keys + # the keyring can use to decrypt data. + # We will only let the keyring use keys in the selected AWS accounts + # and in the `aws` partition. + # This is the suggested config for most users; for more detailed config, see + # https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-kms-keyring.html#kms-keyring-discovery + discovery_filter = DiscoveryFilter(partition="aws", account_ids=account_ids) + + # 8. Construct a discovery keyring. + # Note that we choose to use the MRK discovery multi-keyring, even though + # our original keyring used a single KMS key. + decrypt_keyring = mat_prov.create_aws_kms_mrk_discovery_multi_keyring( + input=CreateAwsKmsMrkDiscoveryMultiKeyringInput(discovery_filter=discovery_filter, regions=regions) + ) + + # 9. Create new DDB config and client using the decrypt discovery keyring. + # This is the same as the above config, except we pass in the decrypt keyring. 
+ table_config_for_decrypt = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + # Add decrypt keyring here + keyring=decrypt_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs_for_decrypt = {ddb_table_name: table_config_for_decrypt} + tables_config_for_decrypt = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs_for_decrypt) + + encrypted_ddb_client_for_decrypt = EncryptedClient(client=ddb_client, encryption_config=tables_config_for_decrypt) + + # 10. Get the item back from our table using the client. + # The client will retrieve encrypted items from the DDB table, then + # detect the KMS key that was used to encrypt their data keys. + # The client will make a request to KMS to decrypt with the encrypting KMS key. + # If the client has permission to decrypt with the KMS key, + # the client will decrypt the item client-side using the keyring + # and return the original item. + key_to_get = {"partition_key": {"S": "awsKmsMrkDiscoveryMultiKeyringItem"}, "sort_key": {"N": "0"}} + + get_response = encrypted_ddb_client_for_decrypt.get_item(TableName=ddb_table_name, Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"]["S"] == "encrypt and sign me!" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_discovery_multi_keyring_example/with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_discovery_multi_keyring_example/with_encrypted_table.py new file mode 100644 index 000000000..849b5a14b --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_discovery_multi_keyring_example/with_encrypted_table.py @@ -0,0 +1,190 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDb Encryption using a MRK discovery multi-keyring with EncryptedTable. + +A discovery keyring is not provided with any wrapping keys; instead, it recognizes +the KMS key that was used to encrypt a data key, and asks KMS to decrypt with that +KMS key. Discovery keyrings cannot be used to encrypt data. + +For more information on discovery keyrings, see: +https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-kms-keyring.html#kms-keyring-discovery + +The example encrypts an item using an MRK multi-keyring and puts the encrypted +item to the configured DynamoDb table. Then, it gets the item from the table and +decrypts it using the discovery keyring. + +Running this example requires access to the DDB Table whose name is provided in +CLI arguments. 
This table must be configured with the following primary key +configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" +from typing import List + +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsMrkDiscoveryMultiKeyringInput, + CreateAwsKmsMrkMultiKeyringInput, + DiscoveryFilter, +) +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def multi_mrk_discovery_keyring_table_example( + ddb_table_name: str, key_arn: str, account_ids: List[str], regions: List[str] +): + """ + Demonstrate using a MRK discovery multi-keyring with EncryptedTable. + + :param ddb_table_name: The name of the DynamoDB table + :param key_arn: The ARN of the KMS key to use for encryption + :param account_ids: List of AWS account IDs for discovery filter + :param regions: List of AWS regions for discovery keyring + """ + # 1. Create a single MRK multi-keyring using the key arn. + # Although this example demonstrates use of the MRK discovery multi-keyring, + # a discovery keyring cannot be used to encrypt. So we will need to construct + # a non-discovery keyring for this example to encrypt. For more information on MRK + # multi-keyrings, see the MultiMrkKeyringExample in this directory. + # Though this is an "MRK multi-keyring", we do not need to provide multiple keys, + # and can use single-region KMS keys. We will provide a single key here; this + # can be either an MRK or a single-region key. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + encrypt_keyring = mat_prov.create_aws_kms_mrk_multi_keyring( + input=CreateAwsKmsMrkMultiKeyringInput(generator=key_arn) + ) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. 
+ # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions_on_encrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # 4. Create the DynamoDb Encryption configuration for the table we will be writing to. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=encrypt_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 5. Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_table = EncryptedTable( + table=ddb_table, + encryption_config=tables_config, + ) + # 6. Put an item into our table using the above table. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side using the MRK multi-keyring. + item = { + "partition_key": "awsKmsMrkDiscoveryMultiKeyringItem", + "sort_key": 0, + "sensitive_data": "encrypt and sign me!", + } + + put_response = encrypted_table.put_item(Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 7. Construct a discovery filter. + # A discovery filter limits the set of encrypted data keys + # the keyring can use to decrypt data. + # We will only let the keyring use keys in the selected AWS accounts + # and in the `aws` partition. + # This is the suggested config for most users; for more detailed config, see + # https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-kms-keyring.html#kms-keyring-discovery + discovery_filter = DiscoveryFilter(partition="aws", account_ids=account_ids) + + # 8. Construct a discovery keyring. + # Note that we choose to use the MRK discovery multi-keyring, even though + # our original keyring used a single KMS key. + decrypt_keyring = mat_prov.create_aws_kms_mrk_discovery_multi_keyring( + input=CreateAwsKmsMrkDiscoveryMultiKeyringInput(discovery_filter=discovery_filter, regions=regions) + ) + + # 9. Create new DDB config and client using the decrypt discovery keyring. + # This is the same as the above config, except we pass in the decrypt keyring. 
+ table_config_for_decrypt = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + # Add decrypt keyring here + keyring=decrypt_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs_for_decrypt = {ddb_table_name: table_config_for_decrypt} + tables_config_for_decrypt = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs_for_decrypt) + + encrypted_ddb_table_for_decrypt = EncryptedTable(table=ddb_table, encryption_config=tables_config_for_decrypt) + + # 10. Get the item back from our table using the above table. + # The table will retrieve encrypted items from the DDB table, then + # detect the KMS key that was used to encrypt their data keys. + # The table will make a request to KMS to decrypt with the encrypting KMS key. + # If the table has permission to decrypt with the KMS key, + # the table will decrypt the item client-side using the keyring + # and return the original item. + key_to_get = {"partition_key": "awsKmsMrkDiscoveryMultiKeyringItem", "sort_key": 0} + + get_response = encrypted_ddb_table_for_decrypt.get_item(Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"] == "encrypt and sign me!" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_multi_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_multi_keyring_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_multi_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_multi_keyring_example/with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_multi_keyring_example/with_encrypted_client.py new file mode 100644 index 000000000..ae801e0dd --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_multi_keyring_example/with_encrypted_client.py @@ -0,0 +1,245 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDb Encryption using an MRK multi-keyring configuration. + +The MRK multi-keyring accepts multiple AWS KMS MRKs (multi-region keys) or regular +AWS KMS keys (single-region keys) and uses them to encrypt and decrypt data. Data +encrypted using an MRK multi-keyring can be decrypted using any of its component +keys. If a component key is an MRK with a replica in a second region, the replica +key can also be used to decrypt data. + +For more information on MRKs and multi-keyrings, see: +- MRKs: https://docs.aws.amazon.com/kms/latest/developerguide/multi-region-keys-overview.html +- Multi-keyrings: https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-multi-keyring.html + +The example creates a new MRK multi-keyring consisting of one MRK (labeled as the +"generator keyring") and one single-region key (labeled as the only "child keyring"). +The MRK also has a replica in a second region. 
+ +The example encrypts a test item using the MRK multi-keyring and puts the encrypted +item to the provided DynamoDb table. Then, it gets the item from the table and +decrypts it using three different configs: + 1. The MRK multi-keyring, where the MRK key is used to decrypt + 2. Another MRK multi-keyring, where the replica MRK key is used to decrypt + 3. Another MRK multi-keyring, where the single-region key that was present + in the original MRK multi-keyring is used to decrypt + +Running this example requires access to the DDB Table whose name is provided in +CLI arguments. This table must be configured with the following primary key +configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) + +Since this example demonstrates multi-region use cases, it requires a default +region set in your AWS client. You can set a default region through the AWS CLI: + aws configure set region [region-name] +For example: + aws configure set region us-west-2 + +For more information on using AWS CLI to set config, see: +https://awscli.amazonaws.com/v2/documentation/api/latest/reference/configure/set.html +""" +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsMrkMultiKeyringInput, +) +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def multi_mrk_keyring_client_example(ddb_table_name: str, mrk_key_arn: str, key_arn: str, mrk_replica_key_arn: str): + """ + Demonstrate using a MRK multi-keyring with EncryptedClient. + + :param ddb_table_name: The name of the DynamoDB table + :param mrk_key_arn: The ARN of the MRK key to use as generator + :param key_arn: The ARN of the single-region key to use as child + :param mrk_replica_key_arn: The ARN of the MRK replica key + """ + # 1. Create a single MRK multi-keyring using the MRK arn and the single-region key arn. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + # Create the multi-keyring, using the MRK as the generator key, + # and the single-region key as a child key. + # Note that the generator key will generate and encrypt a plaintext data key + # and all child keys will only encrypt that same plaintext data key. + # As such, you must have permission to call KMS:GenerateDataKey on your generator key + # and permission to call KMS:Encrypt on all child keys. + # For more information, see the AWS docs on multi-keyrings above. + aws_kms_mrk_multi_keyring = mat_prov.create_aws_kms_mrk_multi_keyring( + input=CreateAwsKmsMrkMultiKeyringInput(generator=mrk_key_arn, kms_key_ids=[key_arn]) + ) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. 
+ # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions_on_encrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # 4. Create the DynamoDb Encryption configuration for the table we will be writing to. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=aws_kms_mrk_multi_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 5. Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # 6. Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side using the MRK multi-keyring. 
+ # The data key protecting this item will be encrypted + # with all the KMS Keys in this keyring, so that it can be + # decrypted with any one of those KMS Keys. + item = { + "partition_key": {"S": "awsKmsMrkMultiKeyringItem"}, + "sort_key": {"N": "0"}, + "sensitive_data": {"S": "encrypt and sign me!"}, + } + + put_response = encrypted_ddb_client.put_item(TableName=ddb_table_name, Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 7. Get the item back from our table using the client. + # The client will decrypt the item client-side using the MRK + # and return back the original item. + # Since the generator key is the first available key in the keyring, + # that is the KMS Key that will be used to decrypt this item. + key_to_get = {"partition_key": {"S": "awsKmsMrkMultiKeyringItem"}, "sort_key": {"N": "0"}} + + get_response = encrypted_ddb_client.get_item(TableName=ddb_table_name, Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"]["S"] == "encrypt and sign me!" + + # 8. Create a MRK keyring using the replica MRK arn. + # We will use this to demonstrate that the replica MRK + # can decrypt data created with the original MRK, + # even when the replica MRK was not present in the + # encrypting multi-keyring. + only_replica_key_mrk_multi_keyring = mat_prov.create_aws_kms_mrk_multi_keyring( + input=CreateAwsKmsMrkMultiKeyringInput(kms_key_ids=[mrk_replica_key_arn]) + ) + + # 9. Create a new config and client using the MRK keyring. + # This is the same setup as above, except we provide the MRK keyring to the config. + only_replica_key_table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=only_replica_key_mrk_multi_keyring, # Only replica keyring added here + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + only_replica_key_table_configs = {ddb_table_name: only_replica_key_table_config} + only_replica_key_tables_config = DynamoDbTablesEncryptionConfig( + table_encryption_configs=only_replica_key_table_configs + ) + + only_replica_key_encrypted_ddb_client = EncryptedClient( + client=ddb_client, encryption_config=only_replica_key_tables_config + ) + + # 10. Get the item back from our table using the client configured with the replica. + # The client will decrypt the item client-side using the replica MRK + # and return back the original item. + only_replica_key_get_response = only_replica_key_encrypted_ddb_client.get_item( + TableName=ddb_table_name, Key=key_to_get + ) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert only_replica_key_get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + only_replica_key_returned_item = only_replica_key_get_response["Item"] + assert only_replica_key_returned_item["sensitive_data"]["S"] == "encrypt and sign me!" + + # 11. Create an AWS KMS keyring using the single-region key ARN. + # We will use this to demonstrate that the single-region key + # can decrypt data created with the MRK multi-keyring, + # since it is present in the keyring used to encrypt. + only_srk_keyring = mat_prov.create_aws_kms_mrk_multi_keyring( + input=CreateAwsKmsMrkMultiKeyringInput(kms_key_ids=[key_arn]) + ) + + # 12. 
Create a new config and client using the AWS KMS keyring. + # This is the same setup as above, except we provide the AWS KMS keyring to the config. + only_srk_table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=only_srk_keyring, # Only single-region key keyring added here + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + only_srk_table_configs = {ddb_table_name: only_srk_table_config} + only_srk_tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=only_srk_table_configs) + + only_srk_encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=only_srk_tables_config) + + # 13. Get the item back from our table using the client configured with the AWS KMS keyring. + # The client will decrypt the item client-side using the single-region key + # and return back the original item. + only_srk_get_response = only_srk_encrypted_ddb_client.get_item(TableName=ddb_table_name, Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert only_srk_get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + only_srk_returned_item = only_srk_get_response["Item"] + assert only_srk_returned_item["sensitive_data"]["S"] == "encrypt and sign me!" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_multi_keyring_example/with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_multi_keyring_example/with_encrypted_table.py new file mode 100644 index 000000000..d477f51fe --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/mrk_multi_keyring_example/with_encrypted_table.py @@ -0,0 +1,250 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDb Encryption using an MRK multi-keyring configuration. + +The MRK multi-keyring accepts multiple AWS KMS MRKs (multi-region keys) or regular +AWS KMS keys (single-region keys) and uses them to encrypt and decrypt data. Data +encrypted using an MRK multi-keyring can be decrypted using any of its component +keys. If a component key is an MRK with a replica in a second region, the replica +key can also be used to decrypt data. + +For more information on MRKs and multi-keyrings, see: +- MRKs: https://docs.aws.amazon.com/kms/latest/developerguide/multi-region-keys-overview.html +- Multi-keyrings: https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-multi-keyring.html + +The example creates a new MRK multi-keyring consisting of one MRK (labeled as the +"generator keyring") and one single-region key (labeled as the only "child keyring"). +The MRK also has a replica in a second region. + +The example encrypts a test item using the MRK multi-keyring and puts the encrypted +item to the provided DynamoDb table. Then, it gets the item from the table and +decrypts it using three different configs: + 1. The MRK multi-keyring, where the MRK key is used to decrypt + 2. Another MRK multi-keyring, where the replica MRK key is used to decrypt + 3. Another MRK multi-keyring, where the single-region key that was present + in the original MRK multi-keyring is used to decrypt + +Running this example requires access to the DDB Table whose name is provided in +CLI arguments. 
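+For example, a caller might invoke this module's function as follows (all values
+below are placeholders, not values shipped with the example):
+
+    multi_mrk_keyring_table_example(
+        ddb_table_name="my-table",
+        mrk_key_arn="arn:aws:kms:us-west-2:111122223333:key/mrk-EXAMPLE",
+        key_arn="arn:aws:kms:us-west-2:111122223333:key/EXAMPLE-KEY-ID",
+        mrk_replica_key_arn="arn:aws:kms:us-east-1:111122223333:key/mrk-EXAMPLE",
+    )
+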
This table must be configured with the following primary key +configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) + +Since this example demonstrates multi-region use cases, it requires a default +region set in your AWS client. You can set a default region through the AWS CLI: + aws configure set region [region-name] +For example: + aws configure set region us-west-2 + +For more information on using AWS CLI to set config, see: +https://awscli.amazonaws.com/v2/documentation/api/latest/reference/configure/set.html +""" +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsMrkMultiKeyringInput, +) +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def multi_mrk_keyring_table_example(ddb_table_name: str, mrk_key_arn: str, key_arn: str, mrk_replica_key_arn: str): + """ + Demonstrate using a MRK multi-keyring with EncryptedTable. + + :param ddb_table_name: The name of the DynamoDB table + :param mrk_key_arn: The ARN of the MRK key to use as generator + :param key_arn: The ARN of the single-region key to use as child + :param mrk_replica_key_arn: The ARN of the MRK replica key + """ + # 1. Create a single MRK multi-keyring using the MRK arn and the single-region key arn. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + # Create the multi-keyring, using the MRK as the generator key, + # and the single-region key as a child key. + # Note that the generator key will generate and encrypt a plaintext data key + # and all child keys will only encrypt that same plaintext data key. + # As such, you must have permission to call KMS:GenerateDataKey on your generator key + # and permission to call KMS:Encrypt on all child keys. + # For more information, see the AWS docs on multi-keyrings above. + aws_kms_mrk_multi_keyring = mat_prov.create_aws_kms_mrk_multi_keyring( + input=CreateAwsKmsMrkMultiKeyringInput(generator=mrk_key_arn, kms_key_ids=[key_arn]) + ) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items. 
There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions_on_encrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # 4. Create the DynamoDb Encryption configuration for the table we will be writing to. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=aws_kms_mrk_multi_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 5. Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_table = EncryptedTable( + table=ddb_table, + encryption_config=tables_config, + ) + + # 6. Put an item into our table using the encrypted table. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side using the MRK multi-keyring. + # The data key protecting this item will be encrypted + # with all the KMS Keys in this keyring, so that it can be + # decrypted with any one of those KMS Keys. + item = { + "partition_key": "awsKmsMrkMultiKeyringItem", + "sort_key": 0, + "sensitive_data": "encrypt and sign me!", + } + + put_response = encrypted_table.put_item(Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 7. Get the item back from our table using the encrypted table. + # The client will decrypt the item client-side using the MRK + # and return back the original item. + # Since the generator key is the first available key in the keyring, + # that is the KMS Key that will be used to decrypt this item. 
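+    # (For comparison: the EncryptedClient variant of this example expresses the same
+    # primary key in the low-level DynamoDB attribute-value format, e.g.
+    #     {"partition_key": {"S": "awsKmsMrkMultiKeyringItem"}, "sort_key": {"N": "0"}}
+    # while the resource/Table interface used below takes native Python types.)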
+ key_to_get = {"partition_key": "awsKmsMrkMultiKeyringItem", "sort_key": 0} + + get_response = encrypted_table.get_item(Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"] == "encrypt and sign me!" + + # 8. Create a MRK keyring using the replica MRK arn. + # We will use this to demonstrate that the replica MRK + # can decrypt data created with the original MRK, + # even when the replica MRK was not present in the + # encrypting multi-keyring. + only_replica_key_mrk_multi_keyring = mat_prov.create_aws_kms_mrk_multi_keyring( + input=CreateAwsKmsMrkMultiKeyringInput(kms_key_ids=[mrk_replica_key_arn]) + ) + + # 9. Create a new config and client using the MRK keyring. + # This is the same setup as above, except we provide the MRK keyring to the config. + only_replica_key_table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=only_replica_key_mrk_multi_keyring, # Only replica keyring added here + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + only_replica_key_table_configs = {ddb_table_name: only_replica_key_table_config} + only_replica_key_tables_config = DynamoDbTablesEncryptionConfig( + table_encryption_configs=only_replica_key_table_configs + ) + + only_replica_key_encrypted_table = EncryptedTable( + table=ddb_table, + encryption_config=only_replica_key_tables_config, + ) + + # 10. Get the item back from our table using the table configured with the replica. + # The client will decrypt the item client-side using the replica MRK + # and return back the original item. + only_replica_key_get_response = only_replica_key_encrypted_table.get_item(Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert only_replica_key_get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + only_replica_key_returned_item = only_replica_key_get_response["Item"] + assert only_replica_key_returned_item["sensitive_data"] == "encrypt and sign me!" + + # 11. Create an AWS KMS keyring using the single-region key ARN. + # We will use this to demonstrate that the single-region key + # can decrypt data created with the MRK multi-keyring, + # since it is present in the keyring used to encrypt. + only_srk_keyring = mat_prov.create_aws_kms_mrk_multi_keyring( + input=CreateAwsKmsMrkMultiKeyringInput(kms_key_ids=[key_arn]) + ) + + # 12. Create a new config and client using the AWS KMS keyring. + # This is the same setup as above, except we provide the AWS KMS keyring to the config. + only_srk_table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=only_srk_keyring, # Only single-region key keyring added here + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + only_srk_table_configs = {ddb_table_name: only_srk_table_config} + only_srk_tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=only_srk_table_configs) + + only_srk_encrypted_table = EncryptedTable( + table=ddb_table, + encryption_config=only_srk_tables_config, + ) + + # 13. Get the item back from our table using the table configured with the single-region key. 
+ # The client will decrypt the item client-side using the single-region key + # and return back the original item. + only_srk_get_response = only_srk_encrypted_table.get_item(Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert only_srk_get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + only_srk_returned_item = only_srk_get_response["Item"] + assert only_srk_returned_item["sensitive_data"] == "encrypt and sign me!" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/multi_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/multi_keyring_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/multi_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/multi_keyring_example/with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/multi_keyring_example/with_encrypted_client.py new file mode 100644 index 000000000..944dd28c8 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/multi_keyring_example/with_encrypted_client.py @@ -0,0 +1,211 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDb Encryption using a multi-keyring configuration with EncryptedClient. + +A multi-keyring accepts multiple keyrings and uses them to encrypt and decrypt data. +Data encrypted with a multi-keyring can be decrypted with any of its component keyrings. + +For more information on multi-keyrings, see: +https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-multi-keyring.html + +The example creates a multi-keyring consisting of an AWS KMS keyring (labeled the +"generator keyring") and a raw AES keyring (labeled as the only "child keyring"). +It encrypts a test item using the multi-keyring and puts the encrypted item to the +provided DynamoDb table. Then, it gets the item from the table and decrypts it +using only the raw AES keyring. + +The example takes an `aes_key_bytes` parameter representing a 256-bit AES key. +If run through the class's main method, it will create a new key. In practice, +users should not randomly generate a key, but instead retrieve an existing key +from a secure key management system (e.g. an HSM). + +Running this example requires access to the DDB Table whose name is provided in +CLI arguments. 
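+For example, a caller might invoke this module's function as follows, where
+`my_aes_key_bytes` stands in for a 32-byte key retrieved from your key management
+system and the other values are placeholders:
+
+    multi_keyring_client_example(
+        ddb_table_name="my-table",
+        key_arn="arn:aws:kms:us-west-2:111122223333:key/EXAMPLE-KEY-ID",
+        aes_key_bytes=my_aes_key_bytes,
+    )
+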
This table must be configured with the following primary key +configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + AesWrappingAlg, + CreateAwsKmsMrkMultiKeyringInput, + CreateMultiKeyringInput, + CreateRawAesKeyringInput, +) +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def multi_keyring_client_example(ddb_table_name: str, key_arn: str, aes_key_bytes: bytes): + """ + Demonstrate using a multi-keyring. + + :param ddb_table_name: The name of the DynamoDB table + :param key_arn: The ARN of the KMS key to use + :param aes_key_bytes: The AES key bytes to use + """ + # 1. Create the raw AES keyring. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + raw_aes_keyring_input = CreateRawAesKeyringInput( + key_name="my-aes-key-name", + key_namespace="my-key-namespace", + wrapping_key=aes_key_bytes, + wrapping_alg=AesWrappingAlg.ALG_AES256_GCM_IV12_TAG16, + ) + + raw_aes_keyring = mat_prov.create_raw_aes_keyring(input=raw_aes_keyring_input) + + # 2. Create the AWS KMS keyring. + # We create a MRK multi keyring, as this interface also supports + # single-region KMS keys (standard KMS keys), + # and creates the KMS client for us automatically. + aws_kms_mrk_multi_keyring = mat_prov.create_aws_kms_mrk_multi_keyring( + input=CreateAwsKmsMrkMultiKeyringInput(generator=key_arn) + ) + + # 3. Create the multi-keyring. + # We will label the AWS KMS keyring as the generator and the raw AES keyring as the + # only child keyring. + # You must provide a generator keyring to encrypt data. + # You may provide additional child keyrings. Each child keyring will be able to + # decrypt data encrypted with the multi-keyring on its own. It does not need + # knowledge of any other child keyrings or the generator keyring to decrypt. + multi_keyring = mat_prov.create_multi_keyring( + input=CreateMultiKeyringInput(generator=aws_kms_mrk_multi_keyring, child_keyrings=[raw_aes_keyring]) + ) + + # 4. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # 5. Configure which attributes we expect to be included in the signature + # when reading items. 
There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # 6. Create the DynamoDb Encryption configuration for the table we will be writing to. + # Note that this example creates one config/client combination for PUT, and another + # for GET. The PUT config uses the multi-keyring, while the GET config uses the + # raw AES keyring. This is solely done to demonstrate that a keyring included as + # a child of a multi-keyring can be used to decrypt data on its own. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions, + keyring=multi_keyring, # Multi-keyring is added here + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 7. Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # 8. Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side using the multi-keyring. + # The item will be encrypted with all wrapping keys in the keyring, + # so that it can be decrypted with any one of the keys. + item = { + "partition_key": {"S": "multiKeyringItem"}, + "sort_key": {"N": "0"}, + "sensitive_data": {"S": "encrypt and sign me!"}, + } + + put_response = encrypted_ddb_client.put_item(TableName=ddb_table_name, Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 9. Get the item back from our table using the above client. 
+    # The client will decrypt the item client-side using the AWS KMS
+    # keyring, and return back the original item.
+    # Since the generator key is the first available key in the keyring,
+    # that is the key that will be used to decrypt this item.
+    key_to_get = {"partition_key": {"S": "multiKeyringItem"}, "sort_key": {"N": "0"}}
+
+    get_response = encrypted_ddb_client.get_item(TableName=ddb_table_name, Key=key_to_get)
+
+    # Demonstrate that GetItem succeeded and returned the decrypted item
+    assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200
+    returned_item = get_response["Item"]
+    assert returned_item["sensitive_data"]["S"] == "encrypt and sign me!"
+
+    # 10. Create a new config and client with only the raw AES keyring to GET the item
+    # This is the same setup as above, except the config uses the `raw_aes_keyring`.
+    only_aes_keyring_table_config = DynamoDbTableEncryptionConfig(
+        logical_table_name=ddb_table_name,
+        partition_key_name="partition_key",
+        sort_key_name="sort_key",
+        attribute_actions_on_encrypt=attribute_actions,
+        keyring=raw_aes_keyring,  # Raw AES keyring is added here
+        allowed_unsigned_attribute_prefix=unsign_attr_prefix,
+    )
+
+    only_aes_keyring_table_configs = {ddb_table_name: only_aes_keyring_table_config}
+    only_aes_keyring_tables_config = DynamoDbTablesEncryptionConfig(
+        table_encryption_configs=only_aes_keyring_table_configs
+    )
+
+    only_aes_keyring_encrypted_ddb_client = EncryptedClient(
+        client=ddb_client, encryption_config=only_aes_keyring_tables_config
+    )
+
+    # 11. Get the item back from our table using the client
+    # configured with only the raw AES keyring.
+    # The client will decrypt the item client-side using the raw
+    # AES keyring, and return back the original item.
+    only_aes_keyring_get_response = only_aes_keyring_encrypted_ddb_client.get_item(
+        TableName=ddb_table_name, Key=key_to_get
+    )
+
+    # Demonstrate that GetItem succeeded and returned the decrypted item
+    assert only_aes_keyring_get_response["ResponseMetadata"]["HTTPStatusCode"] == 200
+    only_aes_keyring_returned_item = only_aes_keyring_get_response["Item"]
+    assert only_aes_keyring_returned_item["sensitive_data"]["S"] == "encrypt and sign me!"
diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/multi_keyring_example/with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/multi_keyring_example/with_encrypted_table.py
new file mode 100644
index 000000000..c7c3b15b4
--- /dev/null
+++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/multi_keyring_example/with_encrypted_table.py
@@ -0,0 +1,209 @@
+# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+"""
+Example demonstrating DynamoDb Encryption using a multi-keyring configuration with EncryptedTable.
+
+A multi-keyring accepts multiple keyrings and uses them to encrypt and decrypt data.
+Data encrypted with a multi-keyring can be decrypted with any of its component keyrings.
+
+For more information on multi-keyrings, see:
+https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-multi-keyring.html
+
+The example creates a multi-keyring consisting of an AWS KMS keyring (labeled the
+"generator keyring") and a raw AES keyring (labeled as the only "child keyring").
+It encrypts a test item using the multi-keyring and puts the encrypted item to the
+provided DynamoDb table. Then, it gets the item from the table and decrypts it
+using only the raw AES keyring.
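+
+A 256-bit key suitable for local experimentation can be produced with, for instance:
+
+    import secrets
+    aes_key_bytes = secrets.token_bytes(32)  # 32 bytes == 256 bits
+
+As noted below, this is for testing only; production keys should come from a secure
+key management system.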
+ +The example takes an `aes_key_bytes` parameter representing a 256-bit AES key. +If run through the class's main method, it will create a new key. In practice, +users should not randomly generate a key, but instead retrieve an existing key +from a secure key management system (e.g. an HSM). + +Running this example requires access to the DDB Table whose name is provided in +CLI arguments. This table must be configured with the following primary key +configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + AesWrappingAlg, + CreateAwsKmsMrkMultiKeyringInput, + CreateMultiKeyringInput, + CreateRawAesKeyringInput, +) +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def multi_keyring_table_example(ddb_table_name: str, key_arn: str, aes_key_bytes: bytes): + """ + Demonstrate using a multi-keyring. + + :param ddb_table_name: The name of the DynamoDB table + :param key_arn: The ARN of the KMS key to use + :param aes_key_bytes: The AES key bytes to use + """ + # 1. Create the raw AES keyring. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + raw_aes_keyring_input = CreateRawAesKeyringInput( + key_name="my-aes-key-name", + key_namespace="my-key-namespace", + wrapping_key=aes_key_bytes, + wrapping_alg=AesWrappingAlg.ALG_AES256_GCM_IV12_TAG16, + ) + + raw_aes_keyring = mat_prov.create_raw_aes_keyring(input=raw_aes_keyring_input) + + # 2. Create the AWS KMS keyring. + # We create a MRK multi keyring, as this interface also supports + # single-region KMS keys (standard KMS keys), + # and creates the KMS client for us automatically. + aws_kms_mrk_multi_keyring = mat_prov.create_aws_kms_mrk_multi_keyring( + input=CreateAwsKmsMrkMultiKeyringInput(generator=key_arn) + ) + + # 3. Create the multi-keyring. + # We will label the AWS KMS keyring as the generator and the raw AES keyring as the + # only child keyring. + # You must provide a generator keyring to encrypt data. + # You may provide additional child keyrings. Each child keyring will be able to + # decrypt data encrypted with the multi-keyring on its own. It does not need + # knowledge of any other child keyrings or the generator keyring to decrypt. + multi_keyring = mat_prov.create_multi_keyring( + input=CreateMultiKeyringInput(generator=aws_kms_mrk_multi_keyring, child_keyrings=[raw_aes_keyring]) + ) + + # 4. Configure which attributes are encrypted and/or signed when writing new items. 
+ # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # 5. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # 6. Create the DynamoDb Encryption configuration for the table we will be writing to. + # Note that this example creates one config/client combination for PUT, and another + # for GET. The PUT config uses the multi-keyring, while the GET config uses the + # raw AES keyring. This is solely done to demonstrate that a keyring included as + # a child of a multi-keyring can be used to decrypt data on its own. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions, + keyring=multi_keyring, # Multi-keyring is added here + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 7. 
Create the EncryptedTable
+    ddb_table = boto3.resource("dynamodb").Table(ddb_table_name)
+    encrypted_table = EncryptedTable(
+        table=ddb_table,
+        encryption_config=tables_config,
+    )
+    # 8. Put an item into our table using the above table.
+    # Before the item gets sent to DynamoDb, it will be encrypted
+    # client-side using the multi-keyring.
+    # The item will be encrypted with all wrapping keys in the keyring,
+    # so that it can be decrypted with any one of the keys.
+    item = {
+        "partition_key": "multiKeyringItem",
+        "sort_key": 0,
+        "sensitive_data": "encrypt and sign me!",
+    }
+
+    put_response = encrypted_table.put_item(Item=item)
+
+    # Demonstrate that PutItem succeeded
+    assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200
+
+    # 9. Get the item back from our table using the above table.
+    # The table will decrypt the item client-side using the AWS KMS
+    # keyring, and return back the original item.
+    # Since the generator key is the first available key in the keyring,
+    # that is the key that will be used to decrypt this item.
+    key_to_get = {"partition_key": "multiKeyringItem", "sort_key": 0}
+
+    get_response = encrypted_table.get_item(Key=key_to_get)
+
+    # Demonstrate that GetItem succeeded and returned the decrypted item
+    assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200
+    returned_item = get_response["Item"]
+    assert returned_item["sensitive_data"] == "encrypt and sign me!"
+
+    # 10. Create a new config and table with only the raw AES keyring to GET the item
+    # This is the same setup as above, except the config uses the `raw_aes_keyring`.
+    only_aes_keyring_table_config = DynamoDbTableEncryptionConfig(
+        logical_table_name=ddb_table_name,
+        partition_key_name="partition_key",
+        sort_key_name="sort_key",
+        attribute_actions_on_encrypt=attribute_actions,
+        keyring=raw_aes_keyring,  # Raw AES keyring is added here
+        allowed_unsigned_attribute_prefix=unsign_attr_prefix,
+    )
+
+    only_aes_keyring_table_configs = {ddb_table_name: only_aes_keyring_table_config}
+    only_aes_keyring_tables_config = DynamoDbTablesEncryptionConfig(
+        table_encryption_configs=only_aes_keyring_table_configs
+    )
+
+    only_aes_keyring_encrypted_table = EncryptedTable(table=ddb_table, encryption_config=only_aes_keyring_tables_config)
+
+    # 11. Get the item back from our table using the table
+    # configured with only the raw AES keyring.
+    # The table will decrypt the item client-side using the raw
+    # AES keyring, and return back the original item.
+    only_aes_keyring_get_response = only_aes_keyring_encrypted_table.get_item(Key=key_to_get)
+
+    # Demonstrate that GetItem succeeded and returned the decrypted item
+    assert only_aes_keyring_get_response["ResponseMetadata"]["HTTPStatusCode"] == 200
+    only_aes_keyring_returned_item = only_aes_keyring_get_response["Item"]
+    assert only_aes_keyring_returned_item["sensitive_data"] == "encrypt and sign me!"
diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_aes_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_aes_keyring_example/__init__.py
new file mode 100644
index 000000000..fa977e22f
--- /dev/null
+++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_aes_keyring_example/__init__.py
@@ -0,0 +1,3 @@
+# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_aes_keyring_example/encryption_config.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_aes_keyring_example/encryption_config.py new file mode 100644 index 000000000..f9b0bef68 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_aes_keyring_example/encryption_config.py @@ -0,0 +1,99 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Configuration module for raw AES keyring encryption setup. + +This module provides the common encryption configuration used by both +EncryptedClient and EncryptedTable examples. +""" + +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + AesWrappingAlg, + CreateRawAesKeyringInput, +) +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def create_encryption_config(ddb_table_name: str, aes_key_bytes: bytes) -> DynamoDbTablesEncryptionConfig: + """ + Create the encryption configuration for DynamoDB encryption using raw AES keyring. + + :param ddb_table_name: The name of the DynamoDB table + :param aes_key_bytes: The AES key bytes to use + :return: The DynamoDB tables encryption configuration + """ + # 1. Create the keyring. + # The DynamoDb encryption client uses this to encrypt and decrypt items. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + keyring_input = CreateRawAesKeyringInput( + key_name="my-aes-key-name", + key_namespace="my-key-namespace", + wrapping_key=aes_key_bytes, + wrapping_alg=AesWrappingAlg.ALG_AES256_GCM_IV12_TAG16, + ) + + raw_aes_keyring = mat_prov.create_raw_aes_keyring(input=keyring_input) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. 
+ # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # 4. Create the DynamoDb Encryption configuration for the table we will be writing to. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions, + keyring=raw_aes_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + return DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_aes_keyring_example/with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_aes_keyring_example/with_encrypted_client.py new file mode 100644 index 000000000..b7fdffcba --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_aes_keyring_example/with_encrypted_client.py @@ -0,0 +1,73 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDb Encryption using a Raw AES Keyring with EncryptedClient. + +The raw AES Keyring takes in an AES key and uses that key to protect the data +keys that encrypt and decrypt DynamoDb table items. + +This example takes an `aes_key_bytes` parameter representing a 256-bit AES key. +If run through the script's main method, it will create a new key. In practice, +users should not randomly generate a key, but instead retrieve an existing key +from a secure key management system (e.g. an HSM). + +This example encrypts a test item using the provided AES key and puts the encrypted +item to the provided DynamoDb table. Then, it gets the item from the table and +decrypts it. + +Running this example requires access to the DDB Table whose name is provided in +CLI arguments. This table must be configured with the following primary key +configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" + +import boto3 +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient + +from .encryption_config import create_encryption_config + + +def raw_aes_keyring_client_example(ddb_table_name: str, aes_key_bytes: bytes): + """ + Demonstrate using a raw AES keyring with EncryptedClient. 
+ + :param ddb_table_name: The name of the DynamoDB table + :param aes_key_bytes: The AES key bytes to use + """ + # 1. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See encryption_config.py in this directory for detailed steps on the encryption configuration. + tables_config = create_encryption_config( + ddb_table_name=ddb_table_name, + aes_key_bytes=aes_key_bytes, + ) + + # 2. Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # 3. Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + item = { + "partition_key": {"S": "rawAesKeyringItem"}, + "sort_key": {"N": "0"}, + "sensitive_data": {"S": "encrypt and sign me!"}, + } + + put_response = encrypted_ddb_client.put_item(TableName=ddb_table_name, Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 4. Get the item back from our table using the same client. + # The client will decrypt the item client-side, and return + # back the original item. + key_to_get = {"partition_key": {"S": "rawAesKeyringItem"}, "sort_key": {"N": "0"}} + + get_response = encrypted_ddb_client.get_item(TableName=ddb_table_name, Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"]["S"] == "encrypt and sign me!" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_aes_keyring_example/with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_aes_keyring_example/with_encrypted_table.py new file mode 100644 index 000000000..dbac65e54 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_aes_keyring_example/with_encrypted_table.py @@ -0,0 +1,76 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDb Encryption using a Raw AES Keyring with EncryptedTable. + +The raw AES Keyring takes in an AES key and uses that key to protect the data +keys that encrypt and decrypt DynamoDb table items. + +This example takes an `aes_key_bytes` parameter representing a 256-bit AES key. +If run through the script's main method, it will create a new key. In practice, +users should not randomly generate a key, but instead retrieve an existing key +from a secure key management system (e.g. an HSM). + +This example encrypts a test item using the provided AES key and puts the encrypted +item to the provided DynamoDb table. Then, it gets the item from the table and +decrypts it. + +Running this example requires access to the DDB Table whose name is provided in +CLI arguments. This table must be configured with the following primary key +configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" + +import boto3 +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable + +from .encryption_config import create_encryption_config + + +def raw_aes_keyring_table_example(ddb_table_name: str, aes_key_bytes: bytes): + """ + Demonstrate using a raw AES keyring with EncryptedTable. 
+ + :param ddb_table_name: The name of the DynamoDB table + :param aes_key_bytes: The AES key bytes to use + """ + # 1. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See encryption_config.py in this directory for detailed steps on the encryption configuration. + tables_config = create_encryption_config( + ddb_table_name=ddb_table_name, + aes_key_bytes=aes_key_bytes, + ) + + # 2. Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_table = EncryptedTable( + table=ddb_table, + encryption_config=tables_config, + ) + + # 3. Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + item = { + "partition_key": "rawAesKeyringItem", + "sort_key": 0, + "sensitive_data": "encrypt and sign me!", + } + + put_response = encrypted_table.put_item(Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 4. Get the item back from our table using the same client. + # The client will decrypt the item client-side, and return + # back the original item. + key_to_get = {"partition_key": "rawAesKeyringItem", "sort_key": 0} + + get_response = encrypted_table.get_item(Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"] == "encrypt and sign me!" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_ecdh_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_ecdh_keyring_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_ecdh_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_ecdh_keyring_example/utility.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_ecdh_keyring_example/utility.py new file mode 100644 index 000000000..cbcc34c1e --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_ecdh_keyring_example/utility.py @@ -0,0 +1,140 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Utility functions for the raw ECDH keyring example. + +This module provides functions for handling ECC keys, including: +- Loading key bytes from PEM files +- Checking if new key pairs should be generated +- Generating new ECC key pairs +""" + +import pathlib + +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import ec + +EXAMPLE_ECC_PRIVATE_KEY_FILENAME_SENDER = "RawEcdhKeyringExamplePrivateKeySender.pem" +EXAMPLE_ECC_PRIVATE_KEY_FILENAME_RECIPIENT = "RawEcdhKeyringExamplePrivateKeyRecipient.pem" +EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT = "RawEcdhKeyringExamplePublicKeyRecipient.pem" + + +def load_private_key(private_key_filename: str) -> bytes: + """ + Load private key bytes from a PEM file. 
+ + :param private_key_filename: The filename containing the private key + :return: The UTF-8 PEM-encoded private key bytes + """ + try: + with open(private_key_filename, "rb") as f: + private_key_utf8_encoded = f.read() + return private_key_utf8_encoded + except IOError as e: + raise OSError("IOError while reading the private key from file") from e + + +def load_public_key_bytes(public_key_filename: str) -> bytes: + """ + Load public key bytes from a PEM file and convert to DER format. + + :param public_key_filename: The filename containing the public key + :return: The DER-encoded public key bytes + """ + try: + with open(public_key_filename, "rb") as f: + public_key_utf8_encoded = f.read() + public_key = serialization.load_pem_public_key(public_key_utf8_encoded) + return public_key.public_bytes( + encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + except IOError as e: + raise OSError("IOError while reading the public key from file") from e + + +def should_generate_new_ecc_key_pairs() -> bool: + """ + Check if new ECC key pairs should be generated. + + :return: True if new key pairs should be generated, False otherwise + """ + private_key_file_sender = pathlib.Path(EXAMPLE_ECC_PRIVATE_KEY_FILENAME_SENDER) + private_key_file_recipient = pathlib.Path(EXAMPLE_ECC_PRIVATE_KEY_FILENAME_RECIPIENT) + public_key_file_recipient = pathlib.Path(EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT) + + # If keys already exist: do not overwrite existing keys + return ( + not private_key_file_sender.exists() + and not public_key_file_recipient.exists() + and not private_key_file_recipient.exists() + ) + + +def generate_ecc_key_pairs(): + """ + Generate new ECC key pairs. + + This code will generate new ECC key pairs for example use. + The keys will be written to the files: + - private_sender: EXAMPLE_ECC_PRIVATE_KEY_FILENAME_SENDER + - private_recipient: EXAMPLE_ECC_PRIVATE_KEY_FILENAME_RECIPIENT + - public_recipient: EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT + This example uses cryptography's EllipticCurve to generate the key pairs. + In practice, you should not generate this in your code, and should instead + retrieve this key from a secure key management system (e.g. HSM). + These examples only demonstrate using the P256 curve while the keyring accepts + P256, P384, or P521. + These keys are created here for example purposes only. + """ + private_key_file_sender = pathlib.Path(EXAMPLE_ECC_PRIVATE_KEY_FILENAME_SENDER) + private_key_file_recipient = pathlib.Path(EXAMPLE_ECC_PRIVATE_KEY_FILENAME_RECIPIENT) + public_key_file_recipient = pathlib.Path(EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT) + + if private_key_file_sender.exists() or public_key_file_recipient.exists() or private_key_file_recipient.exists(): + raise FileExistsError("generateEccKeyPairs will not overwrite existing PEM files") + + # Generate sender key pair + sender_private_key = ec.generate_private_key(ec.SECP256R1()) + + # Generate recipient key pair + recipient_private_key = ec.generate_private_key(ec.SECP256R1()) + recipient_public_key = recipient_private_key.public_key() + + # Write private keys + write_private_key(sender_private_key, EXAMPLE_ECC_PRIVATE_KEY_FILENAME_SENDER) + write_private_key(recipient_private_key, EXAMPLE_ECC_PRIVATE_KEY_FILENAME_RECIPIENT) + + # Write public key + write_public_key(recipient_public_key, EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT) + + +def write_private_key(private_key: ec.EllipticCurvePrivateKey, filename: str): + """ + Write a private key to a PEM file. 
+ + :param private_key: The private key to write + :param filename: The filename to write to + """ + pem_data = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ) + + with open(filename, "wb") as f: + f.write(pem_data) + + +def write_public_key(public_key: ec.EllipticCurvePublicKey, filename: str): + """ + Write a public key to a PEM file. + + :param public_key: The public key to write + :param filename: The filename to write to + """ + pem_data = public_key.public_bytes( + encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + + with open(filename, "wb") as f: + f.write(pem_data) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_ecdh_keyring_example/with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_ecdh_keyring_example/with_encrypted_client.py new file mode 100644 index 000000000..3752fe4ba --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_ecdh_keyring_example/with_encrypted_client.py @@ -0,0 +1,476 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +These examples set up DynamoDb Encryption for the AWS SDK client using the raw ECDH Keyring with EncrypedClient. + +This keyring, depending on its KeyAgreement scheme, +takes in the sender's ECC private key, and the recipient's ECC Public Key to derive a shared secret. +The keyring uses the shared secret to derive a data key to protect the +data keys that encrypt and decrypt DynamoDb table items. + +Running these examples require access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" + +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateRawEcdhKeyringInput, + EphemeralPrivateKeyToStaticPublicKeyInput, + PublicKeyDiscoveryInput, + RawEcdhStaticConfigurationsEphemeralPrivateKeyToStaticPublicKey, + RawEcdhStaticConfigurationsPublicKeyDiscovery, + RawEcdhStaticConfigurationsRawPrivateKeyToStaticPublicKey, + RawPrivateKeyToStaticPublicKeyInput, +) +from aws_cryptographic_material_providers.mpl.references import IKeyring +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) +from cryptography.hazmat.primitives import serialization + +from .utility import ( + EXAMPLE_ECC_PRIVATE_KEY_FILENAME_RECIPIENT, + EXAMPLE_ECC_PRIVATE_KEY_FILENAME_SENDER, + EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT, +) + + +def raw_ecdh_keyring_get_item_put_item_with_encrypted_client(ddb_table_name: str, curve_spec: str): + """ + Demonstrate using a raw ECDH keyring with static keys. 
+ + This example takes in the sender's private key as a + UTF8 PEM-encoded (PKCS #8 PrivateKeyInfo structures) + located at the file location defined in EXAMPLE_ECC_PRIVATE_KEY_FILENAME_SENDER, + the recipient's public key as a UTF8 PEM-encoded X.509 public key, also known as SubjectPublicKeyInfo (SPKI), + located at the file location defined in EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT, + and the Curve Specification where the keys lie. + + This example encrypts a test item using the provided ECC keys and puts the + encrypted item to the provided DynamoDb table. Then, it gets the + item from the table and decrypts it. + + This examples creates a RawECDH keyring with the RawPrivateKeyToStaticPublicKey key agreement scheme. + For more information on this configuration see: + https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-raw-ecdh-keyring.html#raw-ecdh-RawPrivateKeyToStaticPublicKey + + On encrypt, the shared secret is derived from the sender's private key and the recipient's public key. + On decrypt, the shared secret is derived from the sender's private key and the recipient's public key; + however, on decrypt the recipient can construct a keyring such that the shared secret is calculated with + the recipient's private key and the sender's public key. In both scenarios the shared secret will be the same. + + :param ddb_table_name: The name of the DynamoDB table + :param curve_spec: The curve specification to use + """ + # Load key pair from UTF-8 encoded PEM files. + # You may provide your own PEM files to use here. If you provide this, it MUST + # be a key on curve P256. + # If you do not, the main method in this class will generate PEM + # files for example use. Do not use these files for any other purpose. + try: + with open(EXAMPLE_ECC_PRIVATE_KEY_FILENAME_SENDER, "rb") as f: + private_key_utf8_encoded = f.read() + except IOError as e: + raise OSError("IOError while reading the private key from file") from e + + try: + with open(EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT, "rb") as f: + public_key_utf8_encoded = f.read() + public_key = serialization.load_pem_public_key(public_key_utf8_encoded) + public_key_bytes = public_key.public_bytes( + encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + except IOError as e: + raise OSError("IOError while reading the public key from file") from e + + # Create the keyring. + # This keyring uses static sender and recipient keys. This configuration calls for both of + # the keys to be on the same curve (P256, P384, P521). + # On encrypt, the shared secret is derived from the sender's private key and the recipient's public key. + # For this example, on decrypt, the shared secret is derived from the sender's private key + # and the recipient's public key; + # however, on decrypt the recipient can construct a keyring such that the shared secret is calculated with + # the recipient's private key and the sender's public key. In both scenarios the shared secret will be the same. + # The DynamoDb encryption client uses this to encrypt and decrypt items. 
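The comment above notes that, because an ECDH shared secret is symmetric, the recipient can build an equivalent keyring from its own private key and the sender's public key. A minimal decrypt-side sketch, reusing this module's imports; `recipient_private_key_pem` (PKCS #8 PEM bytes) and `sender_public_key_der` (DER-encoded SubjectPublicKeyInfo bytes) are hypothetical variables assumed to be loaded the same way as the key material above.

```python
# Sketch only: decrypt-side construction that derives the same shared secret.
# recipient_private_key_pem and sender_public_key_der are hypothetical variables.
mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig())

recipient_side_keyring = mat_prov.create_raw_ecdh_keyring(
    input=CreateRawEcdhKeyringInput(
        curve_spec=curve_spec,
        key_agreement_scheme=RawEcdhStaticConfigurationsRawPrivateKeyToStaticPublicKey(
            RawPrivateKeyToStaticPublicKeyInput(
                # The recipient's own private key fills the "sender" slot...
                sender_static_private_key=recipient_private_key_pem,
                # ...and the original sender's public key fills the "recipient" slot,
                # so ECDH derives the identical shared secret.
                recipient_public_key=sender_public_key_der,
            )
        ),
    )
)
```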
+ mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + keyring_input = CreateRawEcdhKeyringInput( + curve_spec=curve_spec, + key_agreement_scheme=RawEcdhStaticConfigurationsRawPrivateKeyToStaticPublicKey( + RawPrivateKeyToStaticPublicKeyInput( + # Must be a UTF8 PEM-encoded private key + sender_static_private_key=private_key_utf8_encoded, + # Must be a DER-encoded X.509 public key + recipient_public_key=public_key_bytes, + ) + ), + ) + + raw_ecdh_keyring = mat_prov.create_raw_ecdh_keyring(input=keyring_input) + + put_get_example_with_keyring_with_encrypted_client(raw_ecdh_keyring, ddb_table_name) + + +def ephemeral_raw_ecdh_keyring_put_item_with_encrypted_client(ddb_table_name: str, curve_spec: str): + """ + Demonstrate using a raw ECDH keyring with ephemeral keys. + + This example takes in the recipient's public key located at EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT + as a UTF8 PEM-encoded X.509 public key, and the Curve Specification where the key lies. + + This examples creates a RawECDH keyring with the EphemeralPrivateKeyToStaticPublicKey key agreement scheme. + This configuration will always create a new key pair as the sender key pair for the key agreement operation. + The ephemeral configuration can only encrypt data and CANNOT decrypt messages. + For more information on this configuration see: + https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-raw-ecdh-keyring.html#raw-ecdh-EphemeralPrivateKeyToStaticPublicKey + + :param ddb_table_name: The name of the DynamoDB table + :param curve_spec: The curve specification to use + """ + # Load public key from UTF-8 encoded PEM files into a DER encoded public key. + # You may provide your own PEM files to use here. If you provide this, it MUST + # be a key on curve P256. + # If you do not, the main method in this class will generate PEM + # files for example use. Do not use these files for any other purpose. + try: + with open(EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT, "rb") as f: + public_key_utf8_encoded = f.read() + public_key = serialization.load_pem_public_key(public_key_utf8_encoded) + public_key_bytes = public_key.public_bytes( + encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + except IOError as e: + raise OSError("IOError while reading the public key from file") from e + + # Create the keyring. + # This keyring uses an ephemeral configuration. This configuration will always create a new + # key pair as the sender key pair for the key agreement operation. The ephemeral configuration can only + # encrypt data and CANNOT decrypt messages. + # The DynamoDb encryption client uses this to encrypt items. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + keyring_input = CreateRawEcdhKeyringInput( + curve_spec=curve_spec, + key_agreement_scheme=RawEcdhStaticConfigurationsEphemeralPrivateKeyToStaticPublicKey( + EphemeralPrivateKeyToStaticPublicKeyInput(recipient_public_key=public_key_bytes) + ), + ) + + raw_ecdh_keyring = mat_prov.create_raw_ecdh_keyring(input=keyring_input) + + # A raw ecdh keyring with Ephemeral configuration cannot decrypt data since the key pair + # used as the sender is ephemeral. This means that at decrypt time it does not have + # the private key that corresponds to the public key that is stored on the message. 
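Since the ephemeral configuration above can only encrypt, items it writes are typically read back through the discovery configuration defined in the next function. A hedged usage sketch: the table name is a placeholder, and "ECC_NIST_P256" is assumed to be the curve-spec value matching the P-256 PEM files these examples generate.

```python
# Hedged usage sketch: pair the encrypt-only flow with the decrypt-only flow below.
# "my-ddb-table" is a placeholder table name.
ephemeral_raw_ecdh_keyring_put_item_with_encrypted_client("my-ddb-table", "ECC_NIST_P256")
discovery_raw_ecdh_keyring_get_item_with_encrypted_client("my-ddb-table", "ECC_NIST_P256")
```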
+ put_example_with_keyring_with_encrypted_client(raw_ecdh_keyring, ddb_table_name) + + +def discovery_raw_ecdh_keyring_get_item_with_encrypted_client(ddb_table_name: str, curve_spec: str): + """ + Demonstrate using a raw ECDH keyring with discovery. + + This example takes in the recipient's private key located at EXAMPLE_ECC_PRIVATE_KEY_FILENAME_RECIPIENT + as a UTF8 PEM-encoded (PKCS #8 PrivateKeyInfo structures) private key, + and the Curve Specification where the key lies. + + This examples creates a RawECDH keyring with the PublicKeyDiscovery key agreement scheme. + This scheme is only available on decrypt. + For more information on this configuration see: + https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-raw-ecdh-keyring.html#raw-ecdh-PublicKeyDiscovery + + :param ddb_table_name: The name of the DynamoDB table + :param curve_spec: The curve specification to use + """ + # Load key pair from UTF-8 encoded PEM files. + # You may provide your own PEM files to use here. If you provide this, it MUST + # be a key on curve P256. + # If you do not, the main method in this class will generate PEM + # files for example use. Do not use these files for any other purpose. + try: + with open(EXAMPLE_ECC_PRIVATE_KEY_FILENAME_RECIPIENT, "rb") as f: + private_key_utf8_encoded = f.read() + except IOError as e: + raise OSError("IOError while reading the private key from file") from e + + # Create the keyring. + # This keyring uses a discovery configuration. This configuration will check on decrypt + # if it is meant to decrypt the message by checking if the configured public key is stored on the message. + # The discovery configuration can only decrypt messages and CANNOT encrypt messages. + # The DynamoDb encryption client uses this to decrypt items. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + keyring_input = CreateRawEcdhKeyringInput( + curve_spec=curve_spec, + key_agreement_scheme=RawEcdhStaticConfigurationsPublicKeyDiscovery( + PublicKeyDiscoveryInput(recipient_static_private_key=private_key_utf8_encoded) + ), + ) + + raw_ecdh_keyring = mat_prov.create_raw_ecdh_keyring(input=keyring_input) + + # A raw ecdh keyring with discovery configuration cannot encrypt data since the keyring + # looks for its configured public key on the message. + get_example_with_keyring_with_encrypted_client(raw_ecdh_keyring, ddb_table_name) + + +def put_get_example_with_keyring_with_encrypted_client(raw_ecdh_keyring: IKeyring, ddb_table_name: str): + """ + Demonstrate put and get operations with a raw ECDH keyring. + + :param raw_ecdh_keyring: The raw ECDH keyring to use + :param ddb_table_name: The name of the DynamoDB table + """ + # Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # Configure which attributes we expect to be included in the signature + # when reading items. 
There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # Create the DynamoDb Encryption configuration for the table we will be writing to. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions, + keyring=raw_ecdh_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + item = { + "partition_key": {"S": "rawEcdhKeyringItem"}, + "sort_key": {"N": "0"}, + "sensitive_data": {"S": "encrypt and sign me!"}, + } + + put_response = encrypted_ddb_client.put_item(TableName=ddb_table_name, Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Get the item back from our table using the client. + # The client will decrypt the item client-side using the raw ECDH keyring + # and return the original item. + key_to_get = {"partition_key": {"S": "rawEcdhKeyringItem"}, "sort_key": {"N": "0"}} + + get_response = encrypted_ddb_client.get_item(TableName=ddb_table_name, Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"]["S"] == "encrypt and sign me!"
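One way to see what the flow above actually stores is to read the same item back with a plain, non-encrypting boto3 client. A hedged sketch, assuming the same `ddb_table_name`: the ENCRYPT_AND_SIGN attribute should come back as ciphertext (a DynamoDB Binary value) rather than the original string, alongside the extra material the library adds to each item for decryption and verification.

```python
# Hedged sketch: inspect the stored item with a plain (non-encrypting) boto3 client.
plain_client = boto3.client("dynamodb")
stored_item = plain_client.get_item(
    TableName=ddb_table_name,
    Key={"partition_key": {"S": "rawEcdhKeyringItem"}, "sort_key": {"N": "0"}},
)["Item"]

# The encrypted attribute is stored as bytes, not as the original plaintext string.
assert "S" not in stored_item["sensitive_data"]
```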
+ + +def put_example_with_keyring_with_encrypted_client(raw_ecdh_keyring: IKeyring, ddb_table_name: str): + """ + Demonstrate put operation with a raw ECDH keyring. + + :param raw_ecdh_keyring: The raw ECDH keyring to use + :param ddb_table_name: The name of the DynamoDB table + """ + # Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # Create the DynamoDb Encryption configuration for the table we will be writing to. 
+ table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions, + keyring=raw_ecdh_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + item = { + "partition_key": {"S": "rawEcdhKeyringItem"}, + "sort_key": {"N": "0"}, + "sensitive_data": {"S": "encrypt and sign me!"}, + } + + put_response = encrypted_ddb_client.put_item(TableName=ddb_table_name, Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + +def get_example_with_keyring_with_encrypted_client(raw_ecdh_keyring: IKeyring, ddb_table_name: str): + """ + Demonstrate get operation with a raw ECDH keyring. + + :param raw_ecdh_keyring: The raw ECDH keyring to use + :param ddb_table_name: The name of the DynamoDB table + """ + # Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. 
Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # Create the DynamoDb Encryption configuration for the table we will be writing to. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions, + keyring=raw_ecdh_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # Get the item back from our table using the client. + # The client will decrypt the item client-side using the raw ECDH keyring + # and return the original item. + key_to_get = {"partition_key": {"S": "rawEcdhKeyringItem"}, "sort_key": {"N": "0"}} + + get_response = encrypted_ddb_client.get_item(TableName=ddb_table_name, Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"]["S"] == "encrypt and sign me!" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_ecdh_keyring_example/with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_ecdh_keyring_example/with_encrypted_table.py new file mode 100644 index 000000000..758458bcf --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_ecdh_keyring_example/with_encrypted_table.py @@ -0,0 +1,483 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +These examples set up DynamoDb Encryption for the AWS SDK Table resource using the raw ECDH Keyring with EncryptedTable. + +This keyring, depending on its KeyAgreement scheme, +takes in the sender's ECC private key, and the recipient's ECC Public Key to derive a shared secret. +The keyring uses the shared secret to derive a data key to protect the +data keys that encrypt and decrypt DynamoDb table items. + +Running these examples requires access to the DDB Table whose name +is provided in CLI arguments.
+This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" + +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateRawEcdhKeyringInput, + EphemeralPrivateKeyToStaticPublicKeyInput, + PublicKeyDiscoveryInput, + RawEcdhStaticConfigurationsEphemeralPrivateKeyToStaticPublicKey, + RawEcdhStaticConfigurationsPublicKeyDiscovery, + RawEcdhStaticConfigurationsRawPrivateKeyToStaticPublicKey, + RawPrivateKeyToStaticPublicKeyInput, +) +from aws_cryptographic_material_providers.mpl.references import IKeyring +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) +from cryptography.hazmat.primitives import serialization + +from .utility import ( + EXAMPLE_ECC_PRIVATE_KEY_FILENAME_RECIPIENT, + EXAMPLE_ECC_PRIVATE_KEY_FILENAME_SENDER, + EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT, +) + + +def raw_ecdh_keyring_get_item_put_item_with_encrypted_table(ddb_table_name: str, curve_spec: str): + """ + Demonstrate using a raw ECDH keyring with static keys. + + This example takes in the sender's private key as a + UTF8 PEM-encoded (PKCS #8 PrivateKeyInfo structures) + located at the file location defined in EXAMPLE_ECC_PRIVATE_KEY_FILENAME_SENDER, + the recipient's public key as a UTF8 PEM-encoded X.509 public key, also known as SubjectPublicKeyInfo (SPKI), + located at the file location defined in EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT, + and the Curve Specification where the keys lie. + + This example encrypts a test item using the provided ECC keys and puts the + encrypted item to the provided DynamoDb table. Then, it gets the + item from the table and decrypts it. + + This examples creates a RawECDH keyring with the RawPrivateKeyToStaticPublicKey key agreement scheme. + For more information on this configuration see: + https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-raw-ecdh-keyring.html#raw-ecdh-RawPrivateKeyToStaticPublicKey + + On encrypt, the shared secret is derived from the sender's private key and the recipient's public key. + On decrypt, the shared secret is derived from the sender's private key and the recipient's public key; + however, on decrypt the recipient can construct a keyring such that the shared secret is calculated with + the recipient's private key and the sender's public key. In both scenarios the shared secret will be the same. + + :param ddb_table_name: The name of the DynamoDB table + :param curve_spec: The curve specification to use + """ + # Load key pair from UTF-8 encoded PEM files. + # You may provide your own PEM files to use here. If you provide this, it MUST + # be a key on curve P256. + # If you do not, the main method in this class will generate PEM + # files for example use. Do not use these files for any other purpose. 
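The comment above requires any user-supplied PEM file to hold a P-256 key. A hedged sanity-check sketch using the `cryptography` package this module already imports, assuming the private key PEM bytes have been read from disk as in the code that follows.

```python
# Hedged sanity check: make sure a user-supplied private key PEM is on curve P-256.
from cryptography.hazmat.primitives.asymmetric import ec

candidate_key = serialization.load_pem_private_key(private_key_utf8_encoded, password=None)
assert isinstance(candidate_key.curve, ec.SECP256R1), "example PEM files must hold P-256 keys"
```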
+ try: + with open(EXAMPLE_ECC_PRIVATE_KEY_FILENAME_SENDER, "rb") as f: + private_key_utf8_encoded = f.read() + except IOError as e: + raise OSError("IOError while reading the private key from file") from e + + try: + with open(EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT, "rb") as f: + public_key_utf8_encoded = f.read() + public_key = serialization.load_pem_public_key(public_key_utf8_encoded) + public_key_bytes = public_key.public_bytes( + encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + except IOError as e: + raise OSError("IOError while reading the public key from file") from e + + # Create the keyring. + # This keyring uses static sender and recipient keys. This configuration calls for both of + # the keys to be on the same curve (P256, P384, P521). + # On encrypt, the shared secret is derived from the sender's private key and the recipient's public key. + # For this example, on decrypt, the shared secret is derived from the sender's private key + # and the recipient's public key; + # however, on decrypt the recipient can construct a keyring such that the shared secret is calculated with + # the recipient's private key and the sender's public key. In both scenarios the shared secret will be the same. + # The DynamoDb encryption client uses this to encrypt and decrypt items. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + keyring_input = CreateRawEcdhKeyringInput( + curve_spec=curve_spec, + key_agreement_scheme=RawEcdhStaticConfigurationsRawPrivateKeyToStaticPublicKey( + RawPrivateKeyToStaticPublicKeyInput( + # Must be a UTF8 PEM-encoded private key + sender_static_private_key=private_key_utf8_encoded, + # Must be a DER-encoded X.509 public key + recipient_public_key=public_key_bytes, + ) + ), + ) + + raw_ecdh_keyring = mat_prov.create_raw_ecdh_keyring(input=keyring_input) + + put_get_example_with_keyring_with_encrypted_table(raw_ecdh_keyring, ddb_table_name) + + +def ephemeral_raw_ecdh_keyring_put_item_with_encrypted_table(ddb_table_name: str, curve_spec: str): + """ + Demonstrate using a raw ECDH keyring with ephemeral keys. + + This example takes in the recipient's public key located at EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT + as a UTF8 PEM-encoded X.509 public key, and the Curve Specification where the key lies. + + This examples creates a RawECDH keyring with the EphemeralPrivateKeyToStaticPublicKey key agreement scheme. + This configuration will always create a new key pair as the sender key pair for the key agreement operation. + The ephemeral configuration can only encrypt data and CANNOT decrypt messages. + For more information on this configuration see: + https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-raw-ecdh-keyring.html#raw-ecdh-EphemeralPrivateKeyToStaticPublicKey + + :param ddb_table_name: The name of the DynamoDB table + :param curve_spec: The curve specification to use + """ + # Load public key from UTF-8 encoded PEM files into a DER encoded public key. + # You may provide your own PEM files to use here. If you provide this, it MUST + # be a key on curve P256. + # If you do not, the main method in this class will generate PEM + # files for example use. Do not use these files for any other purpose. 
+ try: + with open(EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT, "rb") as f: + public_key_utf8_encoded = f.read() + public_key = serialization.load_pem_public_key(public_key_utf8_encoded) + public_key_bytes = public_key.public_bytes( + encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + except IOError as e: + raise OSError("IOError while reading the public key from file") from e + + # Create the keyring. + # This keyring uses an ephemeral configuration. This configuration will always create a new + # key pair as the sender key pair for the key agreement operation. The ephemeral configuration can only + # encrypt data and CANNOT decrypt messages. + # The DynamoDb encryption client uses this to encrypt items. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + keyring_input = CreateRawEcdhKeyringInput( + curve_spec=curve_spec, + key_agreement_scheme=RawEcdhStaticConfigurationsEphemeralPrivateKeyToStaticPublicKey( + EphemeralPrivateKeyToStaticPublicKeyInput(recipient_public_key=public_key_bytes) + ), + ) + + raw_ecdh_keyring = mat_prov.create_raw_ecdh_keyring(input=keyring_input) + + # A raw ecdh keyring with Ephemeral configuration cannot decrypt data since the key pair + # used as the sender is ephemeral. This means that at decrypt time it does not have + # the private key that corresponds to the public key that is stored on the message. + put_example_with_keyring_with_encrypted_table(raw_ecdh_keyring, ddb_table_name) + + +def discovery_raw_ecdh_keyring_get_item_with_encrypted_table(ddb_table_name: str, curve_spec: str): + """ + Demonstrate using a raw ECDH keyring with discovery. + + This example takes in the recipient's private key located at EXAMPLE_ECC_PRIVATE_KEY_FILENAME_RECIPIENT + as a UTF8 PEM-encoded (PKCS #8 PrivateKeyInfo structures) private key, + and the Curve Specification where the key lies. + + This examples creates a RawECDH keyring with the PublicKeyDiscovery key agreement scheme. + This scheme is only available on decrypt. + For more information on this configuration see: + https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/use-raw-ecdh-keyring.html#raw-ecdh-PublicKeyDiscovery + + :param ddb_table_name: The name of the DynamoDB table + :param curve_spec: The curve specification to use + """ + # Load key pair from UTF-8 encoded PEM files. + # You may provide your own PEM files to use here. If you provide this, it MUST + # be a key on curve P256. + # If you do not, the main method in this class will generate PEM + # files for example use. Do not use these files for any other purpose. + try: + with open(EXAMPLE_ECC_PRIVATE_KEY_FILENAME_RECIPIENT, "rb") as f: + private_key_utf8_encoded = f.read() + except IOError as e: + raise OSError("IOError while reading the private key from file") from e + + # Create the keyring. + # This keyring uses a discovery configuration. This configuration will check on decrypt + # if it is meant to decrypt the message by checking if the configured public key is stored on the message. + # The discovery configuration can only decrypt messages and CANNOT encrypt messages. + # The DynamoDb encryption client uses this to decrypt items. 
+ mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + keyring_input = CreateRawEcdhKeyringInput( + curve_spec=curve_spec, + key_agreement_scheme=RawEcdhStaticConfigurationsPublicKeyDiscovery( + PublicKeyDiscoveryInput(recipient_static_private_key=private_key_utf8_encoded) + ), + ) + + raw_ecdh_keyring = mat_prov.create_raw_ecdh_keyring(input=keyring_input) + + # A raw ecdh keyring with discovery configuration cannot encrypt data since the keyring + # looks for its configured public key on the message. + get_example_with_keyring_with_encrypted_table(raw_ecdh_keyring, ddb_table_name) + + +def put_get_example_with_keyring_with_encrypted_table(raw_ecdh_keyring: IKeyring, ddb_table_name: str): + """ + Demonstrate put and get operations with a raw ECDH keyring. + + :param raw_ecdh_keyring: The raw ECDH keyring to use + :param ddb_table_name: The name of the DynamoDB table + """ + # Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. 
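The prefix discussion above is easiest to see with a concrete (hypothetical) attribute: once ":" is configured as `allowed_unsigned_attribute_prefix`, a future unauthenticated attribute only needs a new `attribute_actions` entry.

```python
# Hedged sketch: ":metadata" is a hypothetical attribute name. Because it starts with
# the ":" prefix configured as allowed_unsigned_attribute_prefix below, writers can
# start using it immediately and readers need no configuration change.
attribute_actions[":metadata"] = CryptoAction.DO_NOTHING
```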
+ unsign_attr_prefix = ":" + + # Create the DynamoDb Encryption configuration for the table we will be writing to. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions, + keyring=raw_ecdh_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_table = EncryptedTable( + table=ddb_table, + encryption_config=tables_config, + ) + + # Put an item into our table using the encrypted table + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + item = { + "partition_key": "rawEcdhKeyringItem", + "sort_key": 0, + "sensitive_data": "encrypt and sign me!", + } + + put_response = encrypted_table.put_item(TableName=ddb_table_name, Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Get the item back from our table using the encrypted table + # The table will decrypt the item client-side and return the original item. + key_to_get = {"partition_key": "rawEcdhKeyringItem", "sort_key": 0} + + get_response = encrypted_table.get_item(TableName=ddb_table_name, Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"] == "encrypt and sign me!" + + +def put_example_with_keyring_with_encrypted_table(raw_ecdh_keyring: IKeyring, ddb_table_name: str): + """ + Demonstrate put operation with a raw ECDH keyring. + + :param raw_ecdh_keyring: The raw ECDH keyring to use + :param ddb_table_name: The name of the DynamoDB table + """ + # Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. 
+ # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # Create the DynamoDb Encryption configuration for the table we will be writing to. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions, + keyring=raw_ecdh_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_table = EncryptedTable( + table=ddb_table, + encryption_config=tables_config, + ) + + # Put an item into our table using the encrypted table + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + item = { + "partition_key": "rawEcdhKeyringItem", + "sort_key": 0, + "sensitive_data": "encrypt and sign me!", + } + + put_response = encrypted_table.put_item(TableName=ddb_table_name, Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + +def get_example_with_keyring_with_encrypted_table(raw_ecdh_keyring: IKeyring, ddb_table_name: str): + """ + Demonstrate get operation with a raw ECDH keyring. + + :param raw_ecdh_keyring: The raw ECDH keyring to use + :param ddb_table_name: The name of the DynamoDB table + """ + # Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # Configure which attributes we expect to be included in the signature + # when reading items. 
There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # Create the DynamoDb Encryption configuration for the table we will be writing to. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions, + keyring=raw_ecdh_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_table = EncryptedTable( + table=ddb_table, + encryption_config=tables_config, + ) + + # Get the item back from our table using the encrypted table + # The table will decrypt the item client-side and return the original item. + key_to_get = {"partition_key": "rawEcdhKeyringItem", "sort_key": 0} + + get_response = encrypted_table.get_item(TableName=ddb_table_name, Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"] == "encrypt and sign me!" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_rsa_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_rsa_keyring_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_rsa_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_rsa_keyring_example/encryption_config.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_rsa_keyring_example/encryption_config.py new file mode 100644 index 000000000..62327c50e --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_rsa_keyring_example/encryption_config.py @@ -0,0 +1,103 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Configuration module for raw RSA keyring encryption setup. + +This module provides the common encryption configuration used by both +EncryptedClient and EncryptedTable examples. +""" + +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateRawRsaKeyringInput, + PaddingScheme, +) +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def create_encryption_config( + ddb_table_name: str, public_key_utf8_encoded: bytes, private_key_utf8_encoded: bytes +) -> DynamoDbTablesEncryptionConfig: + """ + Create the encryption configuration for DynamoDB encryption using raw RSA keyring. + + :param ddb_table_name: The name of the DynamoDB table + :param public_key_utf8_encoded: The UTF-8 encoded PEM format public key + :param private_key_utf8_encoded: The UTF-8 encoded PEM format private key + :return: The DynamoDB tables encryption configuration + """ + # 1. Create the keyring. + # The DynamoDb encryption client uses this to encrypt and decrypt items. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + keyring_input = CreateRawRsaKeyringInput( + key_name="my-rsa-key-name", + key_namespace="my-key-namespace", + padding_scheme=PaddingScheme.OAEP_SHA256_MGF1, + public_key=public_key_utf8_encoded, + private_key=private_key_utf8_encoded, + ) + + raw_rsa_keyring = mat_prov.create_raw_rsa_keyring(input=keyring_input) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. 
+ # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # 4. Create the DynamoDb Encryption configuration for the table we will be writing to. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions, + keyring=raw_rsa_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + return DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_rsa_keyring_example/utility.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_rsa_keyring_example/utility.py new file mode 100644 index 000000000..e3914678c --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_rsa_keyring_example/utility.py @@ -0,0 +1,76 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Utility functions for the raw RSA keyring example.""" + +import os + +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import rsa + +EXAMPLE_RSA_PRIVATE_KEY_FILENAME = "RawRsaKeyringExamplePrivateKey.pem" +EXAMPLE_RSA_PUBLIC_KEY_FILENAME = "RawRsaKeyringExamplePublicKey.pem" + + +def should_generate_new_rsa_key_pair() -> bool: + """ + Check if we need to generate a new RSA key pair. 
+ + :return: True if we need to generate a new key pair, False otherwise + """ + # Check if a key pair already exists + private_key_file = os.path.exists(EXAMPLE_RSA_PRIVATE_KEY_FILENAME) + public_key_file = os.path.exists(EXAMPLE_RSA_PUBLIC_KEY_FILENAME) + + # If a key pair already exists: do not overwrite existing key pair + if private_key_file and public_key_file: + return False + + # If only one file is present: throw exception + if private_key_file and not public_key_file: + raise ValueError(f"Missing public key file at {EXAMPLE_RSA_PUBLIC_KEY_FILENAME}") + if not private_key_file and public_key_file: + raise ValueError(f"Missing private key file at {EXAMPLE_RSA_PRIVATE_KEY_FILENAME}") + + # If neither file is present, generate a new key pair + return True + + +def generate_rsa_key_pair(): + """Generate a new RSA key pair and save to PEM files.""" + # Safety check: Validate neither file is present + if os.path.exists(EXAMPLE_RSA_PRIVATE_KEY_FILENAME) or os.path.exists(EXAMPLE_RSA_PUBLIC_KEY_FILENAME): + raise FileExistsError("generateRsaKeyPair will not overwrite existing PEM files") + + # This code will generate a new RSA key pair for example use. + # The public and private key will be written to the files: + # - public: EXAMPLE_RSA_PUBLIC_KEY_FILENAME + # - private: EXAMPLE_RSA_PRIVATE_KEY_FILENAME + # In practice, you should not generate this in your code, and should instead + # retrieve this key from a secure key management system (e.g. HSM) + # This key is created here for example purposes only. + private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048) + + # Write private key PEM file + private_key_pem = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ) + + try: + with open(EXAMPLE_RSA_PRIVATE_KEY_FILENAME, "wb") as f: + f.write(private_key_pem) + except IOError as e: + raise OSError("IOError while writing private key PEM") from e + + # Write public key PEM file + public_key = private_key.public_key() + public_key_pem = public_key.public_bytes( + encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + + try: + with open(EXAMPLE_RSA_PUBLIC_KEY_FILENAME, "wb") as f: + f.write(public_key_pem) + except IOError as e: + raise OSError("IOError while writing public key PEM") from e diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_rsa_keyring_example/with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_rsa_keyring_example/with_encrypted_client.py new file mode 100644 index 000000000..92343108c --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_rsa_keyring_example/with_encrypted_client.py @@ -0,0 +1,108 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDb Encryption using a Raw RSA Keyring with EncryptedClient. + +The raw RSA Keyring uses an RSA key pair to encrypt and decrypt records. +The keyring accepts PEM encodings of the key pair as UTF-8 interpreted bytes. +The client uses the public key to encrypt items it adds to the table and +uses the private key to decrypt existing table items it retrieves. 
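Editor's aside (not part of this file): a minimal sketch of how the utility module above is expected to be used before this example runs. The import path is illustrative; the functions and PEM filenames come from utility.py in this directory.

    from raw_rsa_keyring_example import utility

    # Generate a throwaway example key pair only if neither PEM file exists yet.
    if utility.should_generate_new_rsa_key_pair():
        utility.generate_rsa_key_pair()

    # Load the PEM bytes that the keyring configuration expects.
    with open(utility.EXAMPLE_RSA_PUBLIC_KEY_FILENAME, "rb") as f:
        public_key_utf8_encoded = f.read()
    with open(utility.EXAMPLE_RSA_PRIVATE_KEY_FILENAME, "rb") as f:
        private_key_utf8_encoded = f.read()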
+ +The example loads a key pair from PEM files with paths defined in: + - EXAMPLE_RSA_PRIVATE_KEY_FILENAME + - EXAMPLE_RSA_PUBLIC_KEY_FILENAME + +If you do not provide these files, running this example through the main method +will generate these files for you in the directory where the example is run. +In practice, users of this library should not generate new key pairs like this, +and should instead retrieve an existing key from a secure key management system +(e.g. an HSM). + +You may also provide your own key pair by placing PEM files in the directory +where the example is run or modifying the paths in the code below. These files +must be valid PEM encodings of the key pair as UTF-8 encoded bytes. If you do +provide your own key pair, or if a key pair already exists, this class' main +method will not generate a new key pair. + +The example loads a key pair from disk, encrypts a test item, and puts the +encrypted item to the provided DynamoDb table. Then, it gets the item from +the table and decrypts it. + +Running this example requires access to the DDB Table whose name is provided +in CLI arguments. This table must be configured with the following primary +key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" + +import boto3 +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient + +from .encryption_config import create_encryption_config +from .utility import ( + EXAMPLE_RSA_PRIVATE_KEY_FILENAME, + EXAMPLE_RSA_PUBLIC_KEY_FILENAME, +) + + +def raw_rsa_keyring_client_example(ddb_table_name: str, private_key_path: str, public_key_path: str): + """ + Demonstrate using a raw RSA keyring with EncryptedClient. + + :param ddb_table_name: The name of the DynamoDB table + :param private_key_path: Path to the private key PEM file + :param public_key_path: Path to the public key PEM file + """ + # 1. Load key pair from UTF-8 encoded PEM files. + # You may provide your own PEM files to use here. + # If you do not, the main method in this class will generate PEM + # files for example use. Do not use these files for any other purpose. + try: + with open(EXAMPLE_RSA_PUBLIC_KEY_FILENAME, "rb") as f: + public_key_utf8_encoded = f.read() + except IOError as e: + raise OSError("IOError while reading public key from file") from e + + try: + with open(EXAMPLE_RSA_PRIVATE_KEY_FILENAME, "rb") as f: + private_key_utf8_encoded = f.read() + except IOError as e: + raise OSError("IOError while reading private key from file") from e + + # 2. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See encryption_config.py in this directory for detailed steps on the encryption configuration. + tables_config = create_encryption_config( + ddb_table_name=ddb_table_name, + public_key_utf8_encoded=public_key_utf8_encoded, + private_key_utf8_encoded=private_key_utf8_encoded, + ) + + # 3. Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # 4. Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. 
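    # (Editor's note, added for clarity) The low-level boto3 client expects DynamoDB's
    # typed AttributeValue encoding, e.g. {"S": "..."} and {"N": "0"}; the companion
    # EncryptedTable example in with_encrypted_table.py passes plain Python values instead.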
+ item = { + "partition_key": {"S": "rawRsaKeyringItem"}, + "sort_key": {"N": "0"}, + "sensitive_data": {"S": "encrypt and sign me!"}, + } + + put_response = encrypted_ddb_client.put_item(TableName=ddb_table_name, Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 5. Get the item back from our table using the same client. + # The client will decrypt the item client-side, and return + # back the original item. + key_to_get = {"partition_key": {"S": "rawRsaKeyringItem"}, "sort_key": {"N": "0"}} + + get_response = encrypted_ddb_client.get_item(TableName=ddb_table_name, Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"]["S"] == "encrypt and sign me!" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_rsa_keyring_example/with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_rsa_keyring_example/with_encrypted_table.py new file mode 100644 index 000000000..bfee7ee87 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/raw_rsa_keyring_example/with_encrypted_table.py @@ -0,0 +1,111 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDb Encryption using a Raw RSA Keyring with EncryptedTable. + +The raw RSA Keyring uses an RSA key pair to encrypt and decrypt records. +The keyring accepts PEM encodings of the key pair as UTF-8 interpreted bytes. +The client uses the public key to encrypt items it adds to the table and +uses the private key to decrypt existing table items it retrieves. + +The example loads a key pair from PEM files with paths defined in: + - EXAMPLE_RSA_PRIVATE_KEY_FILENAME + - EXAMPLE_RSA_PUBLIC_KEY_FILENAME + +If you do not provide these files, running this example through the main method +will generate these files for you in the directory where the example is run. +In practice, users of this library should not generate new key pairs like this, +and should instead retrieve an existing key from a secure key management system +(e.g. an HSM). + +You may also provide your own key pair by placing PEM files in the directory +where the example is run or modifying the paths in the code below. These files +must be valid PEM encodings of the key pair as UTF-8 encoded bytes. If you do +provide your own key pair, or if a key pair already exists, this class' main +method will not generate a new key pair. + +The example loads a key pair from disk, encrypts a test item, and puts the +encrypted item to the provided DynamoDb table. Then, it gets the item from +the table and decrypts it. + +Running this example requires access to the DDB Table whose name is provided +in CLI arguments. This table must be configured with the following primary +key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" + +import boto3 +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable + +from .encryption_config import create_encryption_config +from .utility import ( + EXAMPLE_RSA_PRIVATE_KEY_FILENAME, + EXAMPLE_RSA_PUBLIC_KEY_FILENAME, +) + + +def raw_rsa_keyring_table_example(ddb_table_name: str, private_key_path: str, public_key_path: str): + """ + Demonstrate using a raw RSA keyring with EncryptedTable. 
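    Editor's aside (hypothetical invocation; the table name is a placeholder and the
    PEM paths reuse the filenames defined in utility.py):

        raw_rsa_keyring_table_example(
            ddb_table_name="my-encrypted-table",
            private_key_path="RawRsaKeyringExamplePrivateKey.pem",
            public_key_path="RawRsaKeyringExamplePublicKey.pem",
        )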
+ + :param ddb_table_name: The name of the DynamoDB table + :param private_key_path: Path to the private key PEM file + :param public_key_path: Path to the public key PEM file + """ + # 1. Load key pair from UTF-8 encoded PEM files. + # You may provide your own PEM files to use here. + # If you do not, the main method in this class will generate PEM + # files for example use. Do not use these files for any other purpose. + try: + with open(EXAMPLE_RSA_PUBLIC_KEY_FILENAME, "rb") as f: + public_key_utf8_encoded = f.read() + except IOError as e: + raise OSError("IOError while reading public key from file") from e + + try: + with open(EXAMPLE_RSA_PRIVATE_KEY_FILENAME, "rb") as f: + private_key_utf8_encoded = f.read() + except IOError as e: + raise OSError("IOError while reading private key from file") from e + + # 2. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See encryption_config.py in this directory for detailed steps on the encryption configuration. + tables_config = create_encryption_config( + ddb_table_name=ddb_table_name, + public_key_utf8_encoded=public_key_utf8_encoded, + private_key_utf8_encoded=private_key_utf8_encoded, + ) + + # 3. Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_table = EncryptedTable( + table=ddb_table, + encryption_config=tables_config, + ) + + # 4. Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + item = { + "partition_key": "rawRsaKeyringItem", + "sort_key": 0, + "sensitive_data": "encrypt and sign me!", + } + + put_response = encrypted_table.put_item(Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 5. Get the item back from our table using the same client. + # The client will decrypt the item client-side, and return + # back the original item. + key_to_get = {"partition_key": "rawRsaKeyringItem", "sort_key": 0} + + get_response = encrypted_table.get_item(Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"] == "encrypt and sign me!" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/shared_cache_across_hierarchical_keyrings_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/shared_cache_across_hierarchical_keyrings_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/shared_cache_across_hierarchical_keyrings_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/shared_cache_across_hierarchical_keyrings_example/with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/shared_cache_across_hierarchical_keyrings_example/with_encrypted_client.py new file mode 100644 index 000000000..a7a2338e2 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/shared_cache_across_hierarchical_keyrings_example/with_encrypted_client.py @@ -0,0 +1,352 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrates how to use a shared cache across multiple Hierarchical Keyrings in single-threaded environments. + +IMPORTANT: This example and the shared cache functionality should ONLY be used in single-threaded environments. +The AWS Cryptographic Material Providers Library (MPL) for Python does not support multithreading for +components that interact with KMS. For more information about multithreading limitations, see: +https://github.com/aws/aws-cryptographic-material-providers-library/blob/main/AwsCryptographicMaterialProviders/runtimes/python/README.rst + +With this functionality, users only need to maintain one common shared cache across multiple +Hierarchical Keyrings with different Key Stores instances/KMS Clients/KMS Keys in a single-threaded environment. + +There are three important parameters that users need to carefully set while providing the shared cache: + +1. Partition ID - Partition ID is an optional parameter provided to the Hierarchical Keyring input, +which distinguishes Cryptographic Material Providers (i.e: Keyrings) writing to a cache. +- If the Partition ID is set and is the same for two Hierarchical Keyrings (or another Material Provider), + they CAN share the same cache entries in the cache. +- If the Partition ID is set and is different for two Hierarchical Keyrings (or another Material Provider), + they CANNOT share the same cache entries in the cache. +- If the Partition ID is not set by the user, it is initialized as a random 16-byte UUID which makes + it unique for every Hierarchical Keyring, and two Hierarchical Keyrings (or another Material Provider) + CANNOT share the same cache entries in the cache. + +2. Logical Key Store Name - This parameter is set by the user when configuring the Key Store for +the Hierarchical Keyring. This is a logical name for the branch key store. +Suppose you have a physical Key Store (K). You create two instances of K (K1 and K2). Now, you create +two Hierarchical Keyrings (HK1 and HK2) with these Key Store instances (K1 and K2 respectively). +- If you want to share cache entries across these two keyrings, you should set the Logical Key Store Names + for both the Key Store instances (K1 and K2) to be the same. +- If you set the Logical Key Store Names for K1 and K2 to be different, HK1 (which uses Key Store instance K1) + and HK2 (which uses Key Store instance K2) will NOT be able to share cache entries. + +3. Branch Key ID - Choose an effective Branch Key ID Schema + +This is demonstrated in the example below. +Notice that both K1 and K2 are instances of the same physical Key Store (K). +You MUST NEVER have two different physical Key Stores with the same Logical Key Store Name. + +Important Note: If you have two or more Hierarchy Keyrings with: +- Same Partition ID +- Same Logical Key Store Name of the Key Store for the Hierarchical Keyring +- Same Branch Key ID +then they WILL share the cache entries in the Shared Cache. +Please make sure that you set all of Partition ID, Logical Key Store Name and Branch Key ID +to be the same for two Hierarchical Keyrings if and only if you want them to share cache entries. + +This example sets up DynamoDb Encryption for the AWS SDK client using the Hierarchical +Keyring, which establishes a key hierarchy where "branch" keys are persisted in DynamoDb. +These branch keys are used to protect your data keys, and these branch keys are themselves +protected by a root KMS Key. 
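Editor's aside - the cache-sharing setup described below, condensed into a short sketch; every name here matches the code later in this file, nothing new is introduced:

    mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig())
    cmc = mat_prov.create_cryptographic_materials_cache(
        input=CreateCryptographicMaterialsCacheInput(cache=CacheTypeDefault(DefaultCache(entry_capacity=100)))
    )
    shared_cache = CacheTypeShared(cmc)
    # Pass cache=shared_cache and the SAME partition_id to both
    # CreateAwsKmsHierarchicalKeyringInput instances (HK1 and HK2) so they can share entries.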
+ +This example first creates a shared cache that you can use across multiple Hierarchical Keyrings. +The example then configures a Hierarchical Keyring (HK1 and HK2) with the shared cache, +a Branch Key ID and two instances (K1 and K2) of the same physical Key Store (K) respectively, +i.e. HK1 with K1 and HK2 with K2. The example demonstrates that if you set the same Partition ID +for HK1 and HK2, the two keyrings can share cache entries. +If you set different Partition ID of the Hierarchical Keyrings, or different +Logical Key Store Names of the Key Store instances, then the keyrings will NOT +be able to share cache entries. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) + +This example also requires using a KMS Key whose ARN +is provided in CLI arguments. You need the following access +on this key: + - GenerateDataKeyWithoutPlaintext + - Decrypt +""" +from typing import Dict + +import boto3 +from aws_cryptographic_material_providers.keystore import KeyStore +from aws_cryptographic_material_providers.keystore.config import KeyStoreConfig +from aws_cryptographic_material_providers.keystore.models import KMSConfigurationKmsKeyArn +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CacheTypeDefault, + CacheTypeShared, + CreateAwsKmsHierarchicalKeyringInput, + CreateCryptographicMaterialsCacheInput, + DefaultCache, +) +from aws_cryptographic_material_providers.mpl.references import IKeyring +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def get_ddb_client( + ddb_table_name: str, hierarchical_keyring: IKeyring, attribute_actions_on_encrypt: Dict[str, CryptoAction] +) -> boto3.client: + """ + Get a DynamoDB client configured with encryption using the given keyring. + + :param ddb_table_name: The name of the DynamoDB table + :param hierarchical_keyring: The hierarchical keyring to use + :param attribute_actions_on_encrypt: The attribute actions for encryption + :return: The configured DynamoDB client + """ + # Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowedUnsignedAttributesPrefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attributeActionsOnEncrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. 
+ # + # - Configure `allowedUnsignedAttributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # Create the DynamoDb Encryption configuration for the table we will be writing to. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=hierarchical_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + return encrypted_ddb_client + + +def put_get_items(ddb_table_name: str, ddb_client: boto3.client): + """ + Put and get items using the given DynamoDB client. + + :param ddb_table_name: The name of the DynamoDB table + :param ddb_client: The DynamoDB client to use + """ + # Put an item into our table using the given ddb client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + # This example creates a Hierarchical Keyring for a single BranchKeyId. You can, however, use a + # BranchKeyIdSupplier as per your use-case. See the HierarchicalKeyringsExample.java for more + # information. + item = {"partition_key": {"S": "id"}, "sort_key": {"N": "0"}, "sensitive_data": {"S": "encrypt and sign me!"}} + + put_response = ddb_client.put_item(TableName=ddb_table_name, Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Get the item back from our table using the same client. + # The client will decrypt the item client-side, and return + # back the original item. + # This example creates a Hierarchical Keyring for a single BranchKeyId. You can, however, use a + # BranchKeyIdSupplier as per your use-case. See the HierarchicalKeyringsExample.java for more + # information. + key_to_get = {"partition_key": {"S": "id"}, "sort_key": {"N": "0"}} + + get_response = ddb_client.get_item(TableName=ddb_table_name, Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"]["S"] == "encrypt and sign me!" 
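Editor's aside - to make the two signature-scope options discussed in get_ddb_client concrete, a short sketch. The attribute names ":metadata" and "legacy_metadata" are hypothetical, and the sketch assumes `allowed_unsigned_attributes` accepts a list of attribute names; it is an illustration, not part of this example.

    # Option 1 (recommended): rely on the ":" prefix; a new unauthenticated attribute only
    # needs a prefixed name and a DO_NOTHING action.
    attribute_actions_on_encrypt[":metadata"] = CryptoAction.DO_NOTHING
    # allowed_unsigned_attribute_prefix=":" in the table config then excludes it from the signature.

    # Option 2: list unauthenticated attributes explicitly instead of using a prefix.
    table_config = DynamoDbTableEncryptionConfig(
        logical_table_name=ddb_table_name,
        partition_key_name="partition_key",
        sort_key_name="sort_key",
        attribute_actions_on_encrypt=attribute_actions_on_encrypt,
        keyring=hierarchical_keyring,
        allowed_unsigned_attributes=["legacy_metadata"],  # hypothetical attribute name
    )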
+ + +def shared_cache_across_hierarchical_keyrings_example( + ddb_table_name: str, + branch_key_id: str, + key_store_table_name: str, + logical_key_store_name: str, + partition_id: str, + kms_key_id: str, +): + """ + Create multiple hierarchical keyrings sharing a cache and use them to encrypt/decrypt DynamoDB items. + + :param ddb_table_name: The name of the DynamoDB table + :param branch_key_id: The branch key ID to use + :param key_store_table_name: The name of the KeyStore DynamoDB table + :param logical_key_store_name: The logical name for the KeyStore + :param partition_id: The partition ID for cache sharing + :param kms_key_id: ARN of the KMS key + """ + # 1. Create the CryptographicMaterialsCache (CMC) to share across multiple Hierarchical Keyrings + # using the Material Providers Library in a single-threaded environment. + # IMPORTANT: This shared cache must only be used in single-threaded environments as the + # MPL for Python does not support multithreading for KMS operations. + # This CMC takes in: + # - CacheType + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + cache = CacheTypeDefault(DefaultCache(entry_capacity=100)) + + cryptographic_materials_cache_input = CreateCryptographicMaterialsCacheInput(cache=cache) + + shared_cryptographic_materials_cache = mat_prov.create_cryptographic_materials_cache( + input=cryptographic_materials_cache_input + ) + + # 2. Create a CacheType object for the sharedCryptographicMaterialsCache + # Note that the `cache` parameter in the Hierarchical Keyring Input takes a `CacheType` as input + shared_cache = CacheTypeShared( + # This is the `Shared` CacheType that passes an already initialized shared cache + shared_cryptographic_materials_cache + ) + + # Initial KeyStore Setup: This example requires that you have already + # created your KeyStore, and have populated it with a new branch key. + + # 3. Configure your KeyStore resource keystore1. + # This SHOULD be the same configuration that you used + # to initially create and populate your KeyStore. + # Note that key_store_table_name is the physical Key Store, + # and keystore1 is instances of this physical Key Store. + keystore1 = KeyStore( + config=KeyStoreConfig( + ddb_client=boto3.client("dynamodb"), + ddb_table_name=key_store_table_name, + logical_key_store_name=logical_key_store_name, + kms_client=boto3.client("kms"), + kms_configuration=KMSConfigurationKmsKeyArn(kms_key_id), + ) + ) + + # 4. Create the Hierarchical Keyring HK1 with Key Store instance K1, partitionId, + # the shared Cache and the BranchKeyId. + # Note that we are now providing an already initialized shared cache instead of just mentioning + # the cache type and the Hierarchical Keyring initializing a cache at initialization. + + # This example creates a Hierarchical Keyring for a single BranchKeyId. You can, however, use a + # BranchKeyIdSupplier as per your use-case. See the HierarchicalKeyringsExample.java for more + # information. + + # Please make sure that you read the guidance on how to set Partition ID, Logical Key Store Name and + # Branch Key ID at the top of this example before creating Hierarchical Keyrings with a Shared Cache. 
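    # (Editor's note, added for clarity) The partition_id passed here must be identical to the
    # one passed to keyring_input2 below; together with the shared cache, the same branch_key_id,
    # and the same logical_key_store_name, that is what lets HK2 get hits on entries HK1 wrote.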
+ # partitionId for this example is a random UUID + keyring_input1 = CreateAwsKmsHierarchicalKeyringInput( + key_store=keystore1, + branch_key_id=branch_key_id, + ttl_seconds=600, # This dictates how often we call back to KMS to authorize use of the branch keys + cache=shared_cache, + partition_id=partition_id, + ) + + hierarchical_keyring1 = mat_prov.create_aws_kms_hierarchical_keyring(input=keyring_input1) + + # 5. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # 6. Get the DDB Client for Hierarchical Keyring 1. + ddb_client1 = get_ddb_client(ddb_table_name, hierarchical_keyring1, attribute_actions_on_encrypt) + + # 7. Encrypt Decrypt roundtrip with ddb_client1 + put_get_items(ddb_table_name, ddb_client1) + + # Through the above encrypt and decrypt roundtrip, the cache will be populated and + # the cache entries can be used by another Hierarchical Keyring with the + # - Same Partition ID + # - Same Logical Key Store Name of the Key Store for the Hierarchical Keyring + # - Same Branch Key ID + + # 8. Configure your KeyStore resource keystore2. + # This SHOULD be the same configuration that you used + # to initially create and populate your physical KeyStore. + # Note that key_store_table_name is the physical Key Store, + # and keystore2 is instances of this physical Key Store. + + # Note that for this example, keystore2 is identical to keystore1. + # You can optionally change configurations like KMS Client or KMS Key ID based + # on your use-case. + # Make sure you have the required permissions to use different configurations. + + # - If you want to share cache entries across two keyrings HK1 and HK2, + # you should set the Logical Key Store Names for both + # Key Store instances (K1 and K2) to be the same. + # - If you set the Logical Key Store Names for K1 and K2 to be different, + # HK1 (which uses Key Store instance K1) and HK2 (which uses Key Store + # instance K2) will NOT be able to share cache entries. + keystore2 = KeyStore( + config=KeyStoreConfig( + ddb_client=boto3.client("dynamodb"), + ddb_table_name=key_store_table_name, + logical_key_store_name=logical_key_store_name, + kms_client=boto3.client("kms"), + kms_configuration=KMSConfigurationKmsKeyArn(kms_key_id), + ) + ) + + # 9. Create the Hierarchical Keyring HK2 with Key Store instance K2, the shared Cache + # and the same partitionId and BranchKeyId used in HK1 because we want to share cache entries + # (and experience cache HITS). + + # Please make sure that you read the guidance on how to set Partition ID, Logical Key Store Name and + # Branch Key ID at the top of this example before creating Hierarchical Keyrings with a Shared Cache. 
+ # partitionId for this example is a random UUID + keyring_input2 = CreateAwsKmsHierarchicalKeyringInput( + key_store=keystore2, + branch_key_id=branch_key_id, + ttl_seconds=600, # This dictates how often we call back to KMS to authorize use of the branch keys + cache=shared_cache, + partition_id=partition_id, + ) + + hierarchical_keyring2 = mat_prov.create_aws_kms_hierarchical_keyring(input=keyring_input2) + + # 10. Get the DDB Client for Hierarchical Keyring 2. + ddb_client2 = get_ddb_client(ddb_table_name, hierarchical_keyring2, attribute_actions_on_encrypt) + + # 11. Encrypt Decrypt roundtrip with ddb_client2 + put_get_items(ddb_table_name, ddb_client2) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/keyring/shared_cache_across_hierarchical_keyrings_example/with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/shared_cache_across_hierarchical_keyrings_example/with_encrypted_table.py new file mode 100644 index 000000000..89173035a --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/keyring/shared_cache_across_hierarchical_keyrings_example/with_encrypted_table.py @@ -0,0 +1,351 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrates how to use a shared cache across multiple Hierarchical Keyrings in single-threaded environments. + +IMPORTANT: This example and the shared cache functionality should ONLY be used in single-threaded environments. +The AWS Cryptographic Material Providers Library (MPL) for Python does not support multithreading for +components that interact with KMS. For more information about multithreading limitations, see: +https://github.com/aws/aws-cryptographic-material-providers-library/blob/main/AwsCryptographicMaterialProviders/runtimes/python/README.rst + +With this functionality, users only need to maintain one common shared cache across multiple +Hierarchical Keyrings with different Key Stores instances/KMS Clients/KMS Keys in a single-threaded environment. + +There are three important parameters that users need to carefully set while providing the shared cache: + +1. Partition ID - Partition ID is an optional parameter provided to the Hierarchical Keyring input, +which distinguishes Cryptographic Material Providers (i.e: Keyrings) writing to a cache. +- If the Partition ID is set and is the same for two Hierarchical Keyrings (or another Material Provider), + they CAN share the same cache entries in the cache. +- If the Partition ID is set and is different for two Hierarchical Keyrings (or another Material Provider), + they CANNOT share the same cache entries in the cache. +- If the Partition ID is not set by the user, it is initialized as a random 16-byte UUID which makes + it unique for every Hierarchical Keyring, and two Hierarchical Keyrings (or another Material Provider) + CANNOT share the same cache entries in the cache. + +2. Logical Key Store Name - This parameter is set by the user when configuring the Key Store for +the Hierarchical Keyring. This is a logical name for the branch key store. +Suppose you have a physical Key Store (K). You create two instances of K (K1 and K2). Now, you create +two Hierarchical Keyrings (HK1 and HK2) with these Key Store instances (K1 and K2 respectively). +- If you want to share cache entries across these two keyrings, you should set the Logical Key Store Names + for both the Key Store instances (K1 and K2) to be the same. 
+- If you set the Logical Key Store Names for K1 and K2 to be different, HK1 (which uses Key Store instance K1) + and HK2 (which uses Key Store instance K2) will NOT be able to share cache entries. + +3. Branch Key ID - Choose an effective Branch Key ID Schema + +This is demonstrated in the example below. +Notice that both K1 and K2 are instances of the same physical Key Store (K). +You MUST NEVER have two different physical Key Stores with the same Logical Key Store Name. + +Important Note: If you have two or more Hierarchy Keyrings with: +- Same Partition ID +- Same Logical Key Store Name of the Key Store for the Hierarchical Keyring +- Same Branch Key ID +then they WILL share the cache entries in the Shared Cache. +Please make sure that you set all of Partition ID, Logical Key Store Name and Branch Key ID +to be the same for two Hierarchical Keyrings if and only if you want them to share cache entries. + +This example sets up DynamoDb Encryption for the AWS SDK Table resource using the Hierarchical +Keyring, which establishes a key hierarchy where "branch" keys are persisted in DynamoDb. +These branch keys are used to protect your data keys, and these branch keys are themselves +protected by a root KMS Key. + +This example first creates a shared cache that you can use across multiple Hierarchical Keyrings. +The example then configures a Hierarchical Keyring (HK1 and HK2) with the shared cache, +a Branch Key ID and two instances (K1 and K2) of the same physical Key Store (K) respectively, +i.e. HK1 with K1 and HK2 with K2. The example demonstrates that if you set the same Partition ID +for HK1 and HK2, the two keyrings can share cache entries. +If you set different Partition ID of the Hierarchical Keyrings, or different +Logical Key Store Names of the Key Store instances, then the keyrings will NOT +be able to share cache entries. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) + +This example also requires using a KMS Key whose ARN +is provided in CLI arguments. 
You need the following access +on this key: + - GenerateDataKeyWithoutPlaintext + - Decrypt +""" +from typing import Dict + +import boto3 +from aws_cryptographic_material_providers.keystore import KeyStore +from aws_cryptographic_material_providers.keystore.config import KeyStoreConfig +from aws_cryptographic_material_providers.keystore.models import KMSConfigurationKmsKeyArn +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CacheTypeDefault, + CacheTypeShared, + CreateAwsKmsHierarchicalKeyringInput, + CreateCryptographicMaterialsCacheInput, + DefaultCache, +) +from aws_cryptographic_material_providers.mpl.references import IKeyring +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def get_ddb_table( + ddb_table_name: str, hierarchical_keyring: IKeyring, attribute_actions_on_encrypt: Dict[str, CryptoAction] +) -> boto3.client: + """ + Get a DynamoDB client configured with encryption using the given keyring. + + :param ddb_table_name: The name of the DynamoDB table + :param hierarchical_keyring: The hierarchical keyring to use + :param attribute_actions_on_encrypt: The attribute actions for encryption + :return: The configured DynamoDB client + """ + # Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowedUnsignedAttributesPrefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attributeActionsOnEncrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowedUnsignedAttributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we currently authenticate all attributes. To make it easier to + # add unauthenticated attributes in the future, we define a prefix ":" for such attributes. + unsign_attr_prefix = ":" + + # Create the DynamoDb Encryption configuration for the table we will be writing to. 
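    # (Editor's note, added for clarity) logical_table_name is cryptographically bound to the
    # items written under this config; keep it stable even if the physical DynamoDB table is
    # later renamed or restored under a different name.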
+ table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=hierarchical_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + ) + + table_configs = {ddb_table_name: table_config} + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_ddb_table = EncryptedTable(table=ddb_table, encryption_config=tables_config) + + return encrypted_ddb_table + + +def put_get_items(ddb_table: boto3.resource): + """ + Put and get items using the given DynamoDB client. + + :param ddb_table: The DynamoDB client to use + """ + # Put an item into our table using the given ddb client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + # This example creates a Hierarchical Keyring for a single BranchKeyId. You can, however, use a + # BranchKeyIdSupplier as per your use-case. See the HierarchicalKeyringsExample.java for more + # information. + item = {"partition_key": "id", "sort_key": 0, "sensitive_data": "encrypt and sign me!"} + + put_response = ddb_table.put_item(Item=item) + + # Demonstrate that PutItem succeeded + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Get the item back from our table using the same client. + # The client will decrypt the item client-side, and return + # back the original item. + # This example creates a Hierarchical Keyring for a single BranchKeyId. You can, however, use a + # BranchKeyIdSupplier as per your use-case. See the HierarchicalKeyringsExample.java for more + # information. + key_to_get = {"partition_key": "id", "sort_key": 0} + + get_response = ddb_table.get_item(Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + returned_item = get_response["Item"] + assert returned_item["sensitive_data"] == "encrypt and sign me!" + + +def shared_cache_across_hierarchical_keyrings_example( + ddb_table_name: str, + branch_key_id: str, + key_store_table_name: str, + logical_key_store_name: str, + partition_id: str, + kms_key_id: str, +): + """ + Create multiple hierarchical keyrings sharing a cache and use them to encrypt/decrypt DynamoDB items. + + :param ddb_table_name: The name of the DynamoDB table + :param branch_key_id: The branch key ID to use + :param key_store_table_name: The name of the KeyStore DynamoDB table + :param logical_key_store_name: The logical name for the KeyStore + :param partition_id: The partition ID for cache sharing + :param kms_key_id: ARN of the KMS key + """ + # 1. Create the CryptographicMaterialsCache (CMC) to share across multiple Hierarchical Keyrings + # using the Material Providers Library in a single-threaded environment. + # IMPORTANT: This shared cache must only be used in single-threaded environments as the + # MPL for Python does not support multithreading for KMS operations. 
+ # This CMC takes in: + # - CacheType + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + cache = CacheTypeDefault(DefaultCache(entry_capacity=100)) + + cryptographic_materials_cache_input = CreateCryptographicMaterialsCacheInput(cache=cache) + + shared_cryptographic_materials_cache = mat_prov.create_cryptographic_materials_cache( + input=cryptographic_materials_cache_input + ) + + # 2. Create a CacheType object for the sharedCryptographicMaterialsCache + # Note that the `cache` parameter in the Hierarchical Keyring Input takes a `CacheType` as input + shared_cache = CacheTypeShared( + # This is the `Shared` CacheType that passes an already initialized shared cache + shared_cryptographic_materials_cache + ) + + # Initial KeyStore Setup: This example requires that you have already + # created your KeyStore, and have populated it with a new branch key. + + # 3. Configure your KeyStore resource keystore1. + # This SHOULD be the same configuration that you used + # to initially create and populate your KeyStore. + # Note that key_store_table_name is the physical Key Store, + # and keystore1 is instances of this physical Key Store. + keystore1 = KeyStore( + config=KeyStoreConfig( + ddb_client=boto3.client("dynamodb"), + ddb_table_name=key_store_table_name, + logical_key_store_name=logical_key_store_name, + kms_client=boto3.client("kms"), + kms_configuration=KMSConfigurationKmsKeyArn(kms_key_id), + ) + ) + + # 4. Create the Hierarchical Keyring HK1 with Key Store instance K1, partitionId, + # the shared Cache and the BranchKeyId. + # Note that we are now providing an already initialized shared cache instead of just mentioning + # the cache type and the Hierarchical Keyring initializing a cache at initialization. + + # This example creates a Hierarchical Keyring for a single BranchKeyId. You can, however, use a + # BranchKeyIdSupplier as per your use-case. See the HierarchicalKeyringsExample.java for more + # information. + + # Please make sure that you read the guidance on how to set Partition ID, Logical Key Store Name and + # Branch Key ID at the top of this example before creating Hierarchical Keyrings with a Shared Cache. + # partitionId for this example is a random UUID + keyring_input1 = CreateAwsKmsHierarchicalKeyringInput( + key_store=keystore1, + branch_key_id=branch_key_id, + ttl_seconds=600, # This dictates how often we call back to KMS to authorize use of the branch keys + cache=shared_cache, + partition_id=partition_id, + ) + + hierarchical_keyring1 = mat_prov.create_aws_kms_hierarchical_keyring(input=keyring_input1) + + # 5. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "sensitive_data": CryptoAction.ENCRYPT_AND_SIGN, + } + + # 6. Get the DDB Client for Hierarchical Keyring 1. + ddb_table1 = get_ddb_table(ddb_table_name, hierarchical_keyring1, attribute_actions_on_encrypt) + + # 7. 
Encrypt Decrypt roundtrip with ddb_table1 + put_get_items(ddb_table1) + + # Through the above encrypt and decrypt roundtrip, the cache will be populated and + # the cache entries can be used by another Hierarchical Keyring with the + # - Same Partition ID + # - Same Logical Key Store Name of the Key Store for the Hierarchical Keyring + # - Same Branch Key ID + + # 8. Configure your KeyStore resource keystore2. + # This SHOULD be the same configuration that you used + # to initially create and populate your physical KeyStore. + # Note that key_store_table_name is the physical Key Store, + # and keystore2 is instances of this physical Key Store. + + # Note that for this example, keystore2 is identical to keystore1. + # You can optionally change configurations like KMS Client or KMS Key ID based + # on your use-case. + # Make sure you have the required permissions to use different configurations. + + # - If you want to share cache entries across two keyrings HK1 and HK2, + # you should set the Logical Key Store Names for both + # Key Store instances (K1 and K2) to be the same. + # - If you set the Logical Key Store Names for K1 and K2 to be different, + # HK1 (which uses Key Store instance K1) and HK2 (which uses Key Store + # instance K2) will NOT be able to share cache entries. + keystore2 = KeyStore( + config=KeyStoreConfig( + ddb_client=boto3.client("dynamodb"), + ddb_table_name=key_store_table_name, + logical_key_store_name=logical_key_store_name, + kms_client=boto3.client("kms"), + kms_configuration=KMSConfigurationKmsKeyArn(kms_key_id), + ) + ) + + # 9. Create the Hierarchical Keyring HK2 with Key Store instance K2, the shared Cache + # and the same partitionId and BranchKeyId used in HK1 because we want to share cache entries + # (and experience cache HITS). + + # Please make sure that you read the guidance on how to set Partition ID, Logical Key Store Name and + # Branch Key ID at the top of this example before creating Hierarchical Keyrings with a Shared Cache. + # partitionId for this example is a random UUID + keyring_input2 = CreateAwsKmsHierarchicalKeyringInput( + key_store=keystore2, + branch_key_id=branch_key_id, + ttl_seconds=600, # This dictates how often we call back to KMS to authorize use of the branch keys + cache=shared_cache, + partition_id=partition_id, + ) + + hierarchical_keyring2 = mat_prov.create_aws_kms_hierarchical_keyring(input=keyring_input2) + + # 10. Get the DDB Client for Hierarchical Keyring 2. + ddb_table2 = get_ddb_table(ddb_table_name, hierarchical_keyring2, attribute_actions_on_encrypt) + + # 11. Encrypt Decrypt roundtrip with ddb_table2 + put_get_items(ddb_table2) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/scan_error_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/scan_error_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/scan_error_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/scan_error_example/encryption_config.py b/Examples/runtimes/python/DynamoDBEncryption/src/scan_error_example/encryption_config.py new file mode 100644 index 000000000..60d936273 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/scan_error_example/encryption_config.py @@ -0,0 +1,124 @@ +# Copyright Amazon.com Inc. 
or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Configuration module for Scan Error Example. + +This module provides the common encryption configuration used by both +EncryptedClient and EncryptedTable examples. +""" + +import sys + +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.errors import CollectionOfErrors +from aws_cryptographic_material_providers.mpl.models import CreateAwsKmsMrkMultiKeyringInput, DBEAlgorithmSuiteId +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + + +def print_exception(e: Exception, indent: str = ""): + """ + Print exception and any nested CollectionOfErrors. + + :param e: Exception to print + :param indent: Indentation string for nested errors + """ + print(indent + str(e), file=sys.stderr) + if isinstance(e.__cause__, CollectionOfErrors): + print(indent + str(e.__cause__), file=sys.stderr) + for err in e.__cause__.list(): + print_exception(err, indent + " ") + elif isinstance(e, CollectionOfErrors): + for err in e.list(): + print_exception(err, indent + " ") + + +def create_encryption_config(kms_key_id: str, ddb_table_name: str) -> DynamoDbTablesEncryptionConfig: + """ + Create the encryption configuration for DynamoDB encryption using an AWS KMS MRK multi-keyring. + + :param kms_key_id: The ARN of the KMS key to use + :param ddb_table_name: The name of the DynamoDB table + :return: The DynamoDB tables encryption configuration + """ + # 1. Create a Keyring. This Keyring will be responsible for protecting the data keys that protect your data. + # For this example, we will create an AWS KMS Keyring with the AWS KMS Key we want to use. + # We will use the `create_aws_kms_mrk_multi_keyring` method to create this keyring, + # as it will correctly handle both single region and Multi-Region KMS Keys. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + kms_keyring = mat_prov.create_aws_kms_mrk_multi_keyring( + input=CreateAwsKmsMrkMultiKeyringInput(generator=kms_key_id) + ) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute is not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions = { + "partition_key": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "sort_key": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items.
There are two options for configuring this: + # + # - (Recommended) Configure `allowed_unsigned_attribute_prefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attribute_actions` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowed_unsigned_attributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we have designed our DynamoDb table such that any attribute name with + # the ":" prefix should be considered unauthenticated. + unsign_attr_prefix = ":" + + # 4. Create the DynamoDb Encryption configuration for the table we will be writing to. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions, + keyring=kms_keyring, + allowed_unsigned_attribute_prefix=unsign_attr_prefix, + # Specifying an algorithm suite is not required, + # but is done here to demonstrate how to do so. + # We suggest using the + # `ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384` suite, + # which includes AES-GCM with key derivation, signing, and key commitment. + # This is also the default algorithm suite if one is not specified in this config. + # For more information on supported algorithm suites, see: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/supported-algorithms.html + algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384, + ) + + table_configs = {ddb_table_name: table_config} + return DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/scan_error_example/with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/src/scan_error_example/with_encrypted_client.py new file mode 100644 index 000000000..6702a2e13 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/scan_error_example/with_encrypted_client.py @@ -0,0 +1,51 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +""" +Example demonstrating error handling for failed decryption during DynamoDB Scan operations with EncryptedClient. 
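Editor's aside (hypothetical invocation with placeholder values; scan_error_with_client is defined below):

    scan_error_with_client(
        kms_key_id="arn:aws:kms:us-west-2:111122223333:key/EXAMPLE-KEY-ID",
        ddb_table_name="my-encrypted-table",
    )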
+ +Uses the Scan operation to show how to retrieve error messages from the +returned CollectionOfErrors when some of the Scan results do not decrypt successfully. + +Running this example requires access to the DDB Table whose name is provided in +CLI arguments. This table must be configured with the following primary key +configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (N) +""" +import boto3 +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient + +from .encryption_config import create_encryption_config, print_exception + + +def scan_error_with_client(kms_key_id: str, ddb_table_name: str): + """ + Demonstrate handling scan errors with EncryptedClient. + + :param kms_key_id: The ARN of the KMS key to use + :param ddb_table_name: The name of the DynamoDB table + """ + # 1. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See encryption_config.py in this directory for detailed steps on the encryption configuration. + tables_config = create_encryption_config( + kms_key_id=kms_key_id, + ddb_table_name=ddb_table_name, + ) + + # 2. Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # 3. Perform a Scan for which some records will not decrypt + expression_attribute_values = {":prefix": {"S": "Broken"}} + + try: + encrypted_ddb_client.scan( + TableName=ddb_table_name, + FilterExpression="begins_with(partition_key, :prefix)", + ExpressionAttributeValues=expression_attribute_values, + ) + assert False, "scan should have failed" + except Exception as e: + print_exception(e) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/scan_error_example/with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/src/scan_error_example/with_encrypted_table.py new file mode 100644 index 000000000..f1ebfb515 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/scan_error_example/with_encrypted_table.py @@ -0,0 +1,53 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +""" +Example demonstrating error handling for failed decryption during DynamoDB Scan operations with EncryptedTable. + +Uses the Scan operation to show how to retrieve error messages from the +returned CollectionOfErrors when some of the Scan results do not decrypt successfully. + +Running this example requires access to the DDB Table whose name is provided in +CLI arguments. This table must be configured with the following primary key +configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (N) +""" +import boto3 +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable + +from .encryption_config import create_encryption_config, print_exception + + +def scan_error_with_table(kms_key_id: str, ddb_table_name: str): + """ + Demonstrate handling scan errors with EncryptedTable. + + :param kms_key_id: The ARN of the KMS key to use + :param ddb_table_name: The name of the DynamoDB table + """ + # 1. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See encryption_config.py in this directory for detailed steps on the encryption configuration. + tables_config = create_encryption_config( + kms_key_id=kms_key_id, + ddb_table_name=ddb_table_name, + ) + + # 2.
Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_ddb_table = EncryptedTable( + table=ddb_table, + encryption_config=tables_config, + ) + + # 3. Perform a Scan for which some records will not decrypt + expression_attribute_values = {":prefix": "Broken"} + + try: + encrypted_ddb_table.scan( + FilterExpression="begins_with(partition_key, :prefix)", + ExpressionAttributeValues=expression_attribute_values, + ) + assert False, "scan should have failed" + except Exception as e: + print_exception(e) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/__init__.py new file mode 100644 index 000000000..1b8c008ca --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Empty stub to allow imports.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/basic_searchable_encryption_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/basic_searchable_encryption_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/basic_searchable_encryption_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/basic_searchable_encryption_example/beacon_config.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/basic_searchable_encryption_example/beacon_config.py new file mode 100644 index 000000000..9f331686a --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/basic_searchable_encryption_example/beacon_config.py @@ -0,0 +1,206 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Sets up the beacon config for basic searchable encryption.""" +from typing import List + +import boto3 +from aws_cryptographic_material_providers.keystore.client import KeyStore +from aws_cryptographic_material_providers.keystore.config import KeyStoreConfig +from aws_cryptographic_material_providers.keystore.models import KMSConfigurationKmsKeyArn +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import CreateAwsKmsHierarchicalKeyringInput +from aws_dbesdk_dynamodb.structures.dynamodb import ( + BeaconKeySourceSingle, + BeaconVersion, + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, + SearchConfig, + SingleKeyStore, + StandardBeacon, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import CryptoAction + +GSI_NAME = "last4-unit-index" + + +def setup_beacon_config( + ddb_table_name: str, + branch_key_id: str, + branch_key_wrapping_kms_key_arn: str, + branch_key_ddb_table_name: str, +): + """Set up the beacon config for basic searchable encryption.""" + # 1. Configure Beacons. + # The beacon name must be the name of a table attribute that will be encrypted. 
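A note on the two scan-error examples above: the EncryptedClient wraps the low-level boto3 client and therefore takes DynamoDB-JSON attribute values, while the EncryptedTable wraps the boto3 Table resource and takes native Python types. A minimal side-by-side sketch, using the values from those examples:

# Same filter, two value formats (illustrative only):
client_style_values = {":prefix": {"S": "Broken"}}  # EncryptedClient (boto3 client API)
table_style_values = {":prefix": "Broken"}  # EncryptedTable (boto3 resource API)
filter_expression = "begins_with(partition_key, :prefix)"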
+ # The `length` parameter dictates how many bits are in the beacon attribute value. + # The following link provides guidance on choosing a beacon length: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/choosing-beacon-length.html + standard_beacon_list: List[StandardBeacon] = [] + + # The configured DDB table has a GSI on the `aws_dbe_b_inspector_id_last4` AttributeName. + # This field holds the last 4 digits of an inspector ID. + # For our example, this field may range from 0 to 9,999 (10,000 possible values). + # For our example, we assume a full inspector ID is an integer + # ranging from 0 to 99,999,999. We do not assume that the full inspector ID's + # values are uniformly distributed across its range of possible values. + # In many use cases, the prefix of an identifier encodes some information + # about that identifier (e.g. zipcode and SSN prefixes encode geographic + # information), while the suffix does not and is more uniformly distributed. + # We will assume that the inspector ID field matches a similar use case. + # So for this example, we only store and use the last + # 4 digits of the inspector ID, which we assume is uniformly distributed. + # Since the full ID's range is divisible by the range of the last 4 digits, + # then the last 4 digits of the inspector ID are uniformly distributed + # over the range from 0 to 9,999. + # See our documentation for why you should avoid creating beacons over non-uniform distributions + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/searchable-encryption.html#are-beacons-right-for-me + # A single inspector ID suffix may be assigned to multiple `work_id`s. + # + # This link provides guidance for choosing a beacon length: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/choosing-beacon-length.html + # We follow the guidance in the link above to determine reasonable bounds + # for the length of a beacon on the last 4 digits of an inspector ID: + # - min: log(sqrt(10,000))/log(2) ~= 6.6, round up to 7 + # - max: log((10,000/2))/log(2) ~= 12.3, round down to 12 + # You will somehow need to round results to a nearby integer. + # We choose to round to the nearest integer; you might consider a different rounding approach. + # Rounding up will return fewer expected "false positives" in queries, + # leading to fewer decrypt calls and better performance, + # but it is easier to identify which beacon values encode distinct plaintexts. + # Rounding down will return more expected "false positives" in queries, + # leading to more decrypt calls and worse performance, + # but it is harder to identify which beacon values encode distinct plaintexts. + # We can choose a beacon length between 7 and 12: + # - Closer to 7, we expect more "false positives" to be returned, + # making it harder to identify which beacon values encode distinct plaintexts, + # but leading to more decrypt calls and worse performance + # - Closer to 12, we expect fewer "false positives" returned in queries, + # leading to fewer decrypt calls and better performance, + # but it is easier to identify which beacon values encode distinct plaintexts. + # As an example, we will choose 10. + # + # Values stored in aws_dbe_b_inspector_id_last4 will be 10 bits long (0x000 - 0x3ff) + # There will be 2^10 = 1,024 possible HMAC values. + # With a sufficiently large number of well-distributed inspector IDs, + # for a particular beacon we expect (10,000/1,024) ~= 9.8 4-digit inspector ID suffixes + # sharing that beacon value. 
+ last4_beacon = StandardBeacon(name="inspector_id_last4", length=10) + standard_beacon_list.append(last4_beacon) + + # The configured DDB table has a GSI on the `aws_dbe_b_unit` AttributeName. + # This field holds a unit serial number. + # For this example, this is a 12-digit integer from 0 to 999,999,999,999 (10^12 possible values). + # We will assume values for this attribute are uniformly distributed across this range. + # A single unit serial number may be assigned to multiple `work_id`s. + # + # This link provides guidance for choosing a beacon length: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/choosing-beacon-length.html + # We follow the guidance in the link above to determine reasonable bounds + # for the length of a beacon on a unit serial number: + # - min: log(sqrt(999,999,999,999))/log(2) ~= 19.9, round up to 20 + # - max: log((999,999,999,999/2))/log(2) ~= 38.9, round up to 39 + # We can choose a beacon length between 20 and 39: + # - Closer to 20, we expect more "false positives" to be returned, + # making it harder to identify which beacon values encode distinct plaintexts, + # but leading to more decrypt calls and worse performance + # - Closer to 39, we expect fewer "false positives" returned in queries, + # leading to fewer decrypt calls and better performance, + # but it is easier to identify which beacon values encode distinct plaintexts. + # As an example, we will choose 30. + # + # Values stored in aws_dbe_b_unit will be 30 bits long (0x00000000 - 0x3fffffff) + # There will be 2^30 = 1,073,741,824 ~= 1.1B possible HMAC values. + # With a sufficiently large number of well-distributed inspector IDs, + # for a particular beacon we expect (10^12/2^30) ~= 931.3 unit serial numbers + # sharing that beacon value. + unit_beacon = StandardBeacon(name="unit", length=30) + standard_beacon_list.append(unit_beacon) + + # 2. Configure Keystore. + # The keystore is a separate DDB table where the client stores encryption and decryption materials. + # In order to configure beacons on the DDB client, you must configure a keystore. + # + # This example expects that you have already set up a KeyStore with a single branch key. + # See the "Create KeyStore Table Example" and "Create KeyStore Key Example" for how to do this. + # After you create a branch key, you should persist its ID for use in this example. + keystore = KeyStore( + config=KeyStoreConfig( + ddb_client=boto3.client("dynamodb"), + ddb_table_name=branch_key_ddb_table_name, + logical_key_store_name=branch_key_ddb_table_name, + kms_client=boto3.client("kms"), + kms_configuration=KMSConfigurationKmsKeyArn(value=branch_key_wrapping_kms_key_arn), + ) + ) + + # 3. Create BeaconVersion. + # The BeaconVersion inside the list holds the list of beacons on the table. + # The BeaconVersion also stores information about the keystore. + # BeaconVersion must be provided: + # - keyStore: The keystore configured in step 2. + # - keySource: A configuration for the key source. + # For simple use cases, we can configure a 'singleKeySource' which + # statically configures a single beaconKey. That is the approach this example takes. 
+ # For use cases where you want to use different beacon keys depending on the data + # (for example if your table holds data for multiple tenants, and you want to use + # a different beacon key per tenant), look into configuring a MultiKeyStore: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/searchable-encryption-multitenant.html + beacon_versions = [ + BeaconVersion( + standard_beacons=standard_beacon_list, + version=1, # MUST be 1 + key_store=keystore, + key_source=BeaconKeySourceSingle( + SingleKeyStore( + # `key_id` references a beacon key. + # For every branch key we create in the keystore, + # we also create a beacon key. + # This beacon key is not the same as the branch key, + # but is created with the same ID as the branch key. + key_id=branch_key_id, + cache_ttl=6000, + ) + ), + ) + ] + + # 4. Create a Hierarchical Keyring + # This is a KMS keyring that utilizes the keystore table. + # This config defines how items are encrypted and decrypted. + # NOTE: You should configure this to use the same keystore as your search config. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + keyring_input = CreateAwsKmsHierarchicalKeyringInput( + branch_key_id=branch_key_id, key_store=keystore, ttl_seconds=6000 + ) + + kms_keyring = mat_prov.create_aws_kms_hierarchical_keyring(input=keyring_input) + + # 5. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + # Any attributes that will be used in beacons must be configured as ENCRYPT_AND_SIGN. + attribute_actions = { + "work_id": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "inspection_date": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "inspector_id_last4": CryptoAction.ENCRYPT_AND_SIGN, # Beaconized attributes must be encrypted + "unit": CryptoAction.ENCRYPT_AND_SIGN, # Beaconized attributes must be encrypted + } + + # 6. Create the DynamoDb Encryption configuration for the table we will be writing to. + # The beaconVersions are added to the search configuration. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="work_id", + sort_key_name="inspection_date", + attribute_actions_on_encrypt=attribute_actions, + keyring=kms_keyring, + search=SearchConfig(write_version=1, versions=beacon_versions), # MUST be 1 + ) + + table_configs = {ddb_table_name: table_config} + return DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/basic_searchable_encryption_example/with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/basic_searchable_encryption_example/with_encrypted_client.py new file mode 100644 index 000000000..0dcd7f66c --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/basic_searchable_encryption_example/with_encrypted_client.py @@ -0,0 +1,130 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDB encryption using beacons with EncryptedClient. + +This example demonstrates how to set up a beacon on an encrypted attribute, +put an item with the beacon, and query against that beacon. +This example follows a use case of a database that stores unit inspection information. + +Running this example requires access to a DDB table with the +following key configuration: + - Partition key is named "work_id" with type (S) + - Sort key is named "inspection_date" with type (S) +This table must have a Global Secondary Index (GSI) configured named "last4-unit-index": + - Partition key is named "aws_dbe_b_inspector_id_last4" with type (S) + - Sort key is named "aws_dbe_b_unit" with type (S) + +In this example for storing unit inspection information, this schema is utilized for the data: + - "work_id" stores a unique identifier for a unit inspection work order (v4 UUID) + - "inspection_date" stores an ISO 8601 date for the inspection (YYYY-MM-DD) + - "inspector_id_last4" stores the last 4 digits of the ID of the inspector performing the work + - "unit" stores a 12-digit serial number for the unit being inspected + +The example requires the following ordered input command line parameters: + 1. DDB table name for table to put/query data from + 2. Branch key ID for a branch key that was previously created in your key store. See the + CreateKeyStoreKeyExample. + 3. Branch key wrapping KMS key ARN for the KMS key used to create the branch key with ID + provided in arg 2 + 4. Branch key DDB table name for the DDB table representing the branch key store +""" +import time + +import boto3 +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient + +from DynamoDBEncryption.src.searchable_encryption.basic_searchable_encryption_example.beacon_config import ( + GSI_NAME, + setup_beacon_config, +) + + +def basic_searchable_encryption_client_example( + ddb_table_name: str, branch_key_id: str, branch_key_wrapping_kms_key_arn: str, branch_key_ddb_table_name: str +): + """ + Demonstrate using beacons with DynamoDB encryption with EncryptedClient. + + :param ddb_table_name: The name of the DynamoDB table + :param branch_key_id: Branch key ID for a branch key previously created in key store + :param branch_key_wrapping_kms_key_arn: ARN of KMS key used to create the branch key + :param branch_key_ddb_table_name: Name of DDB table representing the branch key store + """ + # 1. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See beacon_config.py in this directory for detailed steps on the encryption configuration. + tables_config = setup_beacon_config( + ddb_table_name, branch_key_id, branch_key_wrapping_kms_key_arn, branch_key_ddb_table_name + ) + + # 2. Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # 3. Put an item into our table using the above client. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + # Since our configuration includes beacons for `inspector_id_last4` and `unit`, + # the client will add two additional attributes to the item. These attributes will have names + # `aws_dbe_b_inspector_id_last4` and `aws_dbe_b_unit`. Their values will be HMACs + # truncated to as many bits as the beacon's `length` parameter; e.g. 
+ # aws_dbe_b_inspector_id_last4 = truncate(HMAC("4321"), 10) + # aws_dbe_b_unit = truncate(HMAC("123456789012"), 30) + item = { + "work_id": {"S": "1313ba89-5661-41eb-ba6c-cb1b4cb67b2d"}, + "inspection_date": {"S": "2023-06-13"}, + "inspector_id_last4": {"S": "4321"}, + "unit": {"S": "123456789012"}, + } + + put_response = encrypted_ddb_client.put_item(TableName=ddb_table_name, Item=item) + + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 4. Query for the item we just put. + # Note that we are constructing the query as if we were querying on plaintext values. + # However, the DDB encryption client will detect that this attribute name has a beacon configured. + # The client will add the beaconized attribute name and attribute value to the query, + # and transform the query to use the beaconized name and value. + # Internally, the client will query for and receive all items with a matching HMAC value in the beacon field. + # This may include a number of "false positives" with different ciphertext, but the same truncated HMAC. + # e.g. if truncate(HMAC("123456789012"), 30) + # == truncate(HMAC("098765432109"), 30), + # the query will return both items. + # The client will decrypt all returned items to determine which ones have the expected attribute values, + # and only surface items with the correct plaintext to the user. + # This procedure is internal to the client and is abstracted away from the user; + # e.g. the user will only see "123456789012" and never + # "098765432109", though the actual query returned both. + expression_attribute_names = {"#last4": "inspector_id_last4", "#unit": "unit"} + + expression_attribute_values = {":last4": {"S": "4321"}, ":unit": {"S": "123456789012"}} + + # GSIs do not update instantly + # so if the results come back empty + # we retry after a short sleep + for _ in range(10): + query_response = encrypted_ddb_client.query( + TableName=ddb_table_name, + IndexName=GSI_NAME, + KeyConditionExpression="#last4 = :last4 and #unit = :unit", + ExpressionAttributeNames=expression_attribute_names, + ExpressionAttributeValues=expression_attribute_values, + ) + + # Validate query was returned successfully + assert query_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + items = query_response.get("Items", []) + # if no results, sleep and try again + if not items: + time.sleep(0.02) + continue + + # Validate only 1 item was returned: the item we just put + assert len(items) == 1 + returned_item = items[0] + # Validate the item has the expected attributes + assert returned_item["inspector_id_last4"]["S"] == "4321" + assert returned_item["unit"]["S"] == "123456789012" + break diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/basic_searchable_encryption_example/with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/basic_searchable_encryption_example/with_encrypted_table.py new file mode 100644 index 000000000..3c3ddbc29 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/basic_searchable_encryption_example/with_encrypted_table.py @@ -0,0 +1,129 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDB encryption using beacons with EncryptedTable. + +This example demonstrates how to set up a beacon on an encrypted attribute, +put an item with the beacon, and query against that beacon. 
+This example follows a use case of a database that stores unit inspection information. + +Running this example requires access to a DDB table with the +following key configuration: + - Partition key is named "work_id" with type (S) + - Sort key is named "inspection_date" with type (S) +This table must have a Global Secondary Index (GSI) configured named "last4-unit-index": + - Partition key is named "aws_dbe_b_inspector_id_last4" with type (S) + - Sort key is named "aws_dbe_b_unit" with type (S) + +In this example for storing unit inspection information, this schema is utilized for the data: + - "work_id" stores a unique identifier for a unit inspection work order (v4 UUID) + - "inspection_date" stores an ISO 8601 date for the inspection (YYYY-MM-DD) + - "inspector_id_last4" stores the last 4 digits of the ID of the inspector performing the work + - "unit" stores a 12-digit serial number for the unit being inspected + +The example requires the following ordered input command line parameters: + 1. DDB table name for table to put/query data from + 2. Branch key ID for a branch key that was previously created in your key store. See the + CreateKeyStoreKeyExample. + 3. Branch key wrapping KMS key ARN for the KMS key used to create the branch key with ID + provided in arg 2 + 4. Branch key DDB table name for the DDB table representing the branch key store +""" +import time + +import boto3 +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable + +from .beacon_config import ( + GSI_NAME, + setup_beacon_config, +) + + +def basic_searchable_encryption_table_example( + ddb_table_name: str, branch_key_id: str, branch_key_wrapping_kms_key_arn: str, branch_key_ddb_table_name: str +): + """ + Demonstrate using beacons with DynamoDB encryption with EncryptedTable. + + :param ddb_table_name: The name of the DynamoDB table + :param branch_key_id: Branch key ID for a branch key previously created in key store + :param branch_key_wrapping_kms_key_arn: ARN of KMS key used to create the branch key + :param branch_key_ddb_table_name: Name of DDB table representing the branch key store + """ + # 1. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See beacon_config.py in this directory for detailed steps on the encryption configuration. + tables_config = setup_beacon_config( + ddb_table_name, branch_key_id, branch_key_wrapping_kms_key_arn, branch_key_ddb_table_name + ) + + # 2. Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_ddb_table = EncryptedTable(table=ddb_table, encryption_config=tables_config) + + # 3. Put an item into our table using the above encrypted table. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + # Since our configuration includes beacons for `inspector_id_last4` and `unit`, + # the client will add two additional attributes to the item. These attributes will have names + # `aws_dbe_b_inspector_id_last4` and `aws_dbe_b_unit`. Their values will be HMACs + # truncated to as many bits as the beacon's `length` parameter; e.g. + # aws_dbe_b_inspector_id_last4 = truncate(HMAC("4321"), 10) + # aws_dbe_b_unit = truncate(HMAC("123456789012"), 30) + item = { + "work_id": "1313ba89-5661-41eb-ba6c-cb1b4cb67b2d", + "inspection_date": "2023-06-13", + "inspector_id_last4": "4321", + "unit": "123456789012", + } + + put_response = encrypted_ddb_table.put_item(Item=item) + + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 4. 
Query for the item we just put. + # Note that we are constructing the query as if we were querying on plaintext values. + # However, the DDB encryption client will detect that this attribute name has a beacon configured. + # The client will add the beaconized attribute name and attribute value to the query, + # and transform the query to use the beaconized name and value. + # Internally, the client will query for and receive all items with a matching HMAC value in the beacon field. + # This may include a number of "false positives" with different ciphertext, but the same truncated HMAC. + # e.g. if truncate(HMAC("123456789012"), 30) + # == truncate(HMAC("098765432109"), 30), + # the query will return both items. + # The client will decrypt all returned items to determine which ones have the expected attribute values, + # and only surface items with the correct plaintext to the user. + # This procedure is internal to the client and is abstracted away from the user; + # e.g. the user will only see "123456789012" and never + # "098765432109", though the actual query returned both. + expression_attribute_names = {"#last4": "inspector_id_last4", "#unit": "unit"} + + expression_attribute_values = {":last4": "4321", ":unit": "123456789012"} + + # GSIs do not update instantly + # so if the results come back empty + # we retry after a short sleep + for _ in range(10): + query_response = encrypted_ddb_table.query( + IndexName=GSI_NAME, + KeyConditionExpression="#last4 = :last4 and #unit = :unit", + ExpressionAttributeNames=expression_attribute_names, + ExpressionAttributeValues=expression_attribute_values, + ) + + # Validate query was returned successfully + assert query_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + items = query_response.get("Items", []) + # if no results, sleep and try again + if not items: + time.sleep(0.02) + continue + + # Validate only 1 item was returned: the item we just put + assert len(items) == 1 + returned_item = items[0] + # Validate the item has the expected attributes + assert returned_item["inspector_id_last4"] == "4321" + assert returned_item["unit"] == "123456789012" + break diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/beacon_styles_searchable_encryption_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/beacon_styles_searchable_encryption_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/beacon_styles_searchable_encryption_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/beacon_styles_searchable_encryption_example/beacon_config.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/beacon_styles_searchable_encryption_example/beacon_config.py new file mode 100644 index 000000000..b0136d7c9 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/beacon_styles_searchable_encryption_example/beacon_config.py @@ -0,0 +1,146 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Sets up the beacon config for demonstrating different beacon styles.""" +from typing import List + +import boto3 +from aws_cryptographic_material_providers.keystore.client import KeyStore +from aws_cryptographic_material_providers.keystore.config import KeyStoreConfig +from aws_cryptographic_material_providers.keystore.models import KMSConfigurationKmsKeyArn +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import CreateAwsKmsHierarchicalKeyringInput +from aws_dbesdk_dynamodb.structures.dynamodb import ( + AsSet, + BeaconKeySourceSingle, + BeaconStyleAsSet, + BeaconStylePartOnly, + BeaconStyleShared, + BeaconStyleSharedSet, + BeaconVersion, + CompoundBeacon, + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, + EncryptedPart, + PartOnly, + SearchConfig, + Shared, + SharedSet, + SignedPart, + SingleKeyStore, + StandardBeacon, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import CryptoAction + + +def setup_beacon_config( + ddb_table_name: str, + branch_key_id: str, + branch_key_wrapping_kms_key_arn: str, + branch_key_ddb_table_name: str, +): + """Set up the beacon config demonstrating different beacon styles.""" + # 1. Create Beacons. + standard_beacon_list: List[StandardBeacon] = [] + + # The fruit beacon allows searching on the encrypted fruit attribute + # We have selected 30 as an example beacon length, but you should go to + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/choosing-beacon-length.html + # when creating your beacons. + fruit_beacon = StandardBeacon(name="fruit", length=30) + standard_beacon_list.append(fruit_beacon) + + # The basket beacon allows searching on the encrypted basket attribute + # basket is used as a Set, and therefore needs a beacon style to reflect that. + # Further, we need to be able to compare the items in basket to the fruit attribute + # so we `share` this beacon with `fruit`. + # Since we need both of these things, we use the SharedSet style. + basket_beacon = StandardBeacon(name="basket", length=30, style=BeaconStyleSharedSet(SharedSet(other="fruit"))) + standard_beacon_list.append(basket_beacon) + + # The dessert beacon allows searching on the encrypted dessert attribute + # We need to be able to compare the dessert attribute to the fruit attribute + # so we `share` this beacon with `fruit`. + dessert_beacon = StandardBeacon(name="dessert", length=30, style=BeaconStyleShared(Shared(other="fruit"))) + standard_beacon_list.append(dessert_beacon) + + # The veggie_beacon allows searching on the encrypted veggies attribute + # veggies is used as a Set, and therefore needs a beacon style to reflect that. 
+ veggie_beacon = StandardBeacon(name="veggies", length=30, style=BeaconStyleAsSet(AsSet())) + standard_beacon_list.append(veggie_beacon) + + # The work_type_beacon allows searching on the encrypted work_type attribute + # We only use it as part of the compound work_unit beacon, + # so we disable its use as a standalone beacon + work_type_beacon = StandardBeacon(name="work_type", length=30, style=BeaconStylePartOnly(PartOnly())) + standard_beacon_list.append(work_type_beacon) + + # Here we build a compound beacon from work_id and work_type + # If we had tried to make a StandardBeacon from work_type, we would have seen an error + # because work_type is "PartOnly" + encrypted_part_list = [EncryptedPart(name="work_type", prefix="T-")] + + signed_part_list = [SignedPart(name="work_id", prefix="I-")] + + compound_beacon_list = [ + CompoundBeacon(name="work_unit", split=".", encrypted=encrypted_part_list, signed=signed_part_list) + ] + + # 2. Configure the Keystore + # These are the same constructions as in the Basic example, which describes these in more detail. + keystore = KeyStore( + config=KeyStoreConfig( + ddb_client=boto3.client("dynamodb"), + ddb_table_name=branch_key_ddb_table_name, + logical_key_store_name=branch_key_ddb_table_name, + kms_client=boto3.client("kms"), + kms_configuration=KMSConfigurationKmsKeyArn(value=branch_key_wrapping_kms_key_arn), + ) + ) + + # 3. Create BeaconVersion. + # This is similar to the Basic example + beacon_versions = [ + BeaconVersion( + standard_beacons=standard_beacon_list, + compound_beacons=compound_beacon_list, + version=1, # MUST be 1 + key_store=keystore, + key_source=BeaconKeySourceSingle(SingleKeyStore(key_id=branch_key_id, cache_ttl=6000)), + ) + ] + + # 4. Create a Hierarchical Keyring + # This is the same configuration as in the Basic example. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + keyring_input = CreateAwsKmsHierarchicalKeyringInput( + branch_key_id=branch_key_id, key_store=keystore, ttl_seconds=6000 + ) + + kms_keyring = mat_prov.create_aws_kms_hierarchical_keyring(input=keyring_input) + + # 5. Configure which attributes are encrypted and/or signed when writing new items. + attribute_actions = { + "work_id": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "inspection_date": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "dessert": CryptoAction.ENCRYPT_AND_SIGN, # Beaconized attributes must be encrypted + "fruit": CryptoAction.ENCRYPT_AND_SIGN, # Beaconized attributes must be encrypted + "basket": CryptoAction.ENCRYPT_AND_SIGN, # Beaconized attributes must be encrypted + "veggies": CryptoAction.ENCRYPT_AND_SIGN, # Beaconized attributes must be encrypted + "work_type": CryptoAction.ENCRYPT_AND_SIGN, # Beaconized attributes must be encrypted + } + + # 6. Create the DynamoDb Encryption configuration for the table we will be writing to. + # The beaconVersions are added to the search configuration. 
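Before the table configuration below, an illustrative aside (not part of the generated example) on how the beacons defined above are used in queries. The compound `work_unit` beacon is addressed by its plaintext form, built from the signed and encrypted parts joined by the configured "." split character; the client transforms this into the stored beacon form internally. The beacon styles map to the filter shapes exercised by the scans later in this example:

# Plaintext query value for the compound "work_unit" beacon:
# ("I-" + work_id) + "." + ("T-" + work_type), as used by the later scan
# that filters on work_unit = "I-1.T-small".
work_id = "1"
work_type = "small"
work_unit_query_value = f"I-{work_id}.T-{work_type}"  # -> "I-1.T-small"

# Rough summary of the styles configured above and the filters they support
# (each is exercised by a scan later in this example):
style_examples = {
    "Shared (dessert shares fruit's beacon key)": "dessert = fruit",
    "SharedSet (basket is set-valued and comparable to fruit)": "contains(basket, :value), contains(basket, fruit)",
    "AsSet (veggies is set-valued)": "contains(veggies, :value)",
    "PartOnly (work_type is only usable inside work_unit)": "work_unit = :value",
}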
+ table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="work_id", + sort_key_name="inspection_date", + attribute_actions_on_encrypt=attribute_actions, + keyring=kms_keyring, + search=SearchConfig(write_version=1, versions=beacon_versions), # MUST be 1 + ) + + table_configs = {ddb_table_name: table_config} + return DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/beacon_styles_searchable_encryption_example/with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/beacon_styles_searchable_encryption_example/with_encrypted_client.py new file mode 100644 index 000000000..17cdfa99b --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/beacon_styles_searchable_encryption_example/with_encrypted_client.py @@ -0,0 +1,175 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDB encryption using beacon styles with EncryptedClient. + +This example demonstrates how to use Beacons Styles on Standard Beacons on encrypted attributes, + put an item with the beacon, and query against that beacon. +This example follows a use case of a database that stores food information. + This is an extension of the "BasicSearchableEncryptionExample" in this directory + and uses the same table schema. + +Running this example requires access to a DDB table with the +following key configuration: + - Partition key is named "work_id" with type (S) + - Sort key is named "inspection_time" with type (S) + +In this example for storing food information, this schema is utilized for the data: + - "work_id" stores a unique identifier for a unit inspection work order (v4 UUID) + - "inspection_date" stores an ISO 8601 date for the inspection (YYYY-MM-DD) + - "fruit" stores one type of fruit + - "basket" stores a set of types of fruit + - "dessert" stores one type of dessert + - "veggies" stores a set of types of vegetable + - "work_type" stores a unit inspection category + +The example requires the following ordered input command line parameters: + 1. DDB table name for table to put/query data from + 2. Branch key ID for a branch key that was previously created in your key store. See the + CreateKeyStoreKeyExample. + 3. Branch key wrapping KMS key ARN for the KMS key used to create the branch key with ID + provided in arg 2 + 4. Branch key DDB table name for the DDB table representing the branch key store +""" +import boto3 +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient + +from .beacon_config import setup_beacon_config + + +def beacon_styles_client_example( + ddb_table_name: str, + branch_key_id: str, + branch_key_wrapping_kms_key_arn: str, + branch_key_ddb_table_name: str, +): + """ + Demonstrate using different beacon styles with EncryptedClient. + + :param ddb_table_name: The name of the DynamoDB table + :param branch_key_id: Branch key ID for a branch key previously created in key store + :param branch_key_wrapping_kms_key_arn: ARN of KMS key used to create the branch key + :param branch_key_ddb_table_name: Name of DDB table representing the branch key store + """ + # 1. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See beacon_config.py in this directory for detailed steps on the encryption configuration. 
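The module docstring above lists four ordered command line parameters. A hypothetical runner (not part of this PR) that wires them into the entry point could look like this:

# Hypothetical: pass the four ordered CLI arguments through to the example.
import sys

if __name__ == "__main__":
    beacon_styles_client_example(
        ddb_table_name=sys.argv[1],
        branch_key_id=sys.argv[2],
        branch_key_wrapping_kms_key_arn=sys.argv[3],
        branch_key_ddb_table_name=sys.argv[4],
    )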
+ tables_config = setup_beacon_config( + ddb_table_name, branch_key_id, branch_key_wrapping_kms_key_arn, branch_key_ddb_table_name + ) + + # 2. Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # 3. Create item one, specifically with "dessert != fruit", and "fruit in basket". + item1 = { + "work_id": {"S": "1"}, + "inspection_date": {"S": "2023-06-13"}, + "dessert": {"S": "cake"}, + "fruit": {"S": "banana"}, + "basket": {"SS": ["apple", "banana", "pear"]}, + "veggies": {"SS": ["beans", "carrots", "celery"]}, + "work_type": {"S": "small"}, + } + + # 4. Create item two, specifically with "dessert == fruit", and "fruit not in basket". + item2 = { + "work_id": {"S": "2"}, + "inspection_date": {"S": "2023-06-13"}, + "fruit": {"S": "orange"}, + "dessert": {"S": "orange"}, + "basket": {"SS": ["blackberry", "blueberry", "strawberry"]}, + "veggies": {"SS": ["beans", "carrots", "peas"]}, + "work_type": {"S": "large"}, + } + + # 5. Add the two items + put_response = encrypted_ddb_client.put_item(TableName=ddb_table_name, Item=item1) + # Validate object put successfully + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + put_response = encrypted_ddb_client.put_item(TableName=ddb_table_name, Item=item2) + # Validate object put successfully + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 6. Test the first type of Set operation: + # Select records where the basket attribute holds a particular value + expression_attribute_values = {":value": {"S": "banana"}} + + scan_response = encrypted_ddb_client.scan( + TableName=ddb_table_name, + FilterExpression="contains(basket, :value)", + ExpressionAttributeValues=expression_attribute_values, + ) + # Validate query was returned successfully + assert scan_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Validate only 1 item was returned: item1 + assert len(scan_response["Items"]) == 1 + assert scan_response["Items"][0] == item1 + + # 7. Test the second type of Set operation: + # Select records where the basket attribute holds the fruit attribute + scan_response = encrypted_ddb_client.scan(TableName=ddb_table_name, FilterExpression="contains(basket, fruit)") + # Validate query was returned successfully + assert scan_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Validate only 1 item was returned: item1 + assert len(scan_response["Items"]) == 1 + assert scan_response["Items"][0] == item1 + + # 8. Test the third type of Set operation: + # Select records where the fruit attribute exists in a particular set + expression_attribute_values = {":value": {"SS": ["boysenberry", "grape", "orange"]}} + + scan_response = encrypted_ddb_client.scan( + TableName=ddb_table_name, + FilterExpression="contains(:value, fruit)", + ExpressionAttributeValues=expression_attribute_values, + ) + # Validate query was returned successfully + assert scan_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Validate only 1 item was returned: item2 + assert len(scan_response["Items"]) == 1 + assert scan_response["Items"][0] == item2 + + # 9. Test a Shared search. 
Select records where the dessert attribute matches the fruit attribute + scan_response = encrypted_ddb_client.scan(TableName=ddb_table_name, FilterExpression="dessert = fruit") + # Validate query was returned successfully + assert scan_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Validate only 1 item was returned: item2 + assert len(scan_response["Items"]) == 1 + assert scan_response["Items"][0] == item2 + + # 10. Test the AsSet attribute 'veggies': + # Select records where the veggies attribute holds a particular value + expression_attribute_values = {":value": {"S": "peas"}} + + scan_response = encrypted_ddb_client.scan( + TableName=ddb_table_name, + FilterExpression="contains(veggies, :value)", + ExpressionAttributeValues=expression_attribute_values, + ) + # Validate query was returned successfully + assert scan_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Validate only 1 item was returned: item2 + assert len(scan_response["Items"]) == 1 + assert scan_response["Items"][0] == item2 + + # 11. Test the compound beacon 'work_unit': + expression_attribute_values = {":value": {"S": "I-1.T-small"}} + + scan_response = encrypted_ddb_client.scan( + TableName=ddb_table_name, + FilterExpression="work_unit = :value", + ExpressionAttributeValues=expression_attribute_values, + ) + # Validate query was returned successfully + assert scan_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Validate only 1 item was returned: item1 + assert len(scan_response["Items"]) == 1 + assert scan_response["Items"][0] == item1 diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/beacon_styles_searchable_encryption_example/with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/beacon_styles_searchable_encryption_example/with_encrypted_table.py new file mode 100644 index 000000000..63f04f0b8 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/beacon_styles_searchable_encryption_example/with_encrypted_table.py @@ -0,0 +1,174 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDB encryption using beacon styles with EncryptedTable. + +This example demonstrates how to use Beacons Styles on Standard Beacons on encrypted attributes, + put an item with the beacon, and query against that beacon. +This example follows a use case of a database that stores food information. + This is an extension of the "BasicSearchableEncryptionExample" in this directory + and uses the same table schema. + +Running this example requires access to a DDB table with the +following key configuration: + - Partition key is named "work_id" with type (S) + - Sort key is named "inspection_time" with type (S) + +In this example for storing food information, this schema is utilized for the data: + - "work_id" stores a unique identifier for a unit inspection work order (v4 UUID) + - "inspection_date" stores an ISO 8601 date for the inspection (YYYY-MM-DD) + - "fruit" stores one type of fruit + - "basket" stores a set of types of fruit + - "dessert" stores one type of dessert + - "veggies" stores a set of types of vegetable + - "work_type" stores a unit inspection category + +The example requires the following ordered input command line parameters: + 1. DDB table name for table to put/query data from + 2. Branch key ID for a branch key that was previously created in your key store. See the + CreateKeyStoreKeyExample. 
+ 3. Branch key wrapping KMS key ARN for the KMS key used to create the branch key with ID + provided in arg 2 + 4. Branch key DDB table name for the DDB table representing the branch key store +""" +import boto3 +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable + +from .beacon_config import setup_beacon_config + + +def beacon_styles_table_example( + ddb_table_name: str, + branch_key_id: str, + branch_key_wrapping_kms_key_arn: str, + branch_key_ddb_table_name: str, +): + """ + Demonstrate using different beacon styles with EncryptedTable. + + :param ddb_table_name: The name of the DynamoDB table + :param branch_key_id: Branch key ID for a branch key previously created in key store + :param branch_key_wrapping_kms_key_arn: ARN of KMS key used to create the branch key + :param branch_key_ddb_table_name: Name of DDB table representing the branch key store + """ + # 1. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See beacon_config.py in this directory for detailed steps on the encryption configuration. + tables_config = setup_beacon_config( + ddb_table_name, branch_key_id, branch_key_wrapping_kms_key_arn, branch_key_ddb_table_name + ) + + # 2. Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_ddb_table = EncryptedTable(table=ddb_table, encryption_config=tables_config) + + # 3. Create item one, specifically with "dessert != fruit", and "fruit in basket". + item1 = { + "work_id": "1", + "inspection_date": "2023-06-13", + "dessert": "cake", + "fruit": "banana", + "basket": {"apple", "banana", "pear"}, + "veggies": {"beans", "carrots", "celery"}, + "work_type": "small", + } + + # 4. Create item two, specifically with "dessert == fruit", and "fruit not in basket". + item2 = { + "work_id": "2", + "inspection_date": "2023-06-13", + "fruit": "orange", + "dessert": "orange", + "basket": {"blackberry", "blueberry", "strawberry"}, + "veggies": {"beans", "carrots", "peas"}, + "work_type": "large", + } + + # 5. Add the two items + put_response = encrypted_ddb_table.put_item(Item=item1) + # Validate object put successfully + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + put_response = encrypted_ddb_table.put_item(Item=item2) + # Validate object put successfully + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 6. Test the first type of Set operation: + # Select records where the basket attribute holds a particular value + expression_attribute_values = {":value": "banana"} + + scan_response = encrypted_ddb_table.scan( + TableName=ddb_table_name, + FilterExpression="contains(basket, :value)", + ExpressionAttributeValues=expression_attribute_values, + ) + # Validate query was returned successfully + assert scan_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Validate only 1 item was returned: item1 + assert len(scan_response["Items"]) == 1 + assert scan_response["Items"][0] == item1 + + # 7. Test the second type of Set operation: + # Select records where the basket attribute holds the fruit attribute + scan_response = encrypted_ddb_table.scan(FilterExpression="contains(basket, fruit)") + # Validate query was returned successfully + assert scan_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Validate only 1 item was returned: item1 + assert len(scan_response["Items"]) == 1 + assert scan_response["Items"][0] == item1 + + # 8. 
Test the third type of Set operation: + # Select records where the fruit attribute exists in a particular set + expression_attribute_values = {":value": {"boysenberry", "grape", "orange"}} + + scan_response = encrypted_ddb_table.scan( + TableName=ddb_table_name, + FilterExpression="contains(:value, fruit)", + ExpressionAttributeValues=expression_attribute_values, + ) + # Validate query was returned successfully + assert scan_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Validate only 1 item was returned: item2 + assert len(scan_response["Items"]) == 1 + assert scan_response["Items"][0] == item2 + + # 9. Test a Shared search. Select records where the dessert attribute matches the fruit attribute + scan_response = encrypted_ddb_table.scan(FilterExpression="dessert = fruit") + # Validate query was returned successfully + assert scan_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Validate only 1 item was returned: item2 + assert len(scan_response["Items"]) == 1 + assert scan_response["Items"][0] == item2 + + # 10. Test the AsSet attribute 'veggies': + # Select records where the veggies attribute holds a particular value + expression_attribute_values = {":value": "peas"} + + scan_response = encrypted_ddb_table.scan( + FilterExpression="contains(veggies, :value)", + ExpressionAttributeValues=expression_attribute_values, + ) + # Validate query was returned successfully + assert scan_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Validate only 1 item was returned: item2 + assert len(scan_response["Items"]) == 1 + assert scan_response["Items"][0] == item2 + + # 11. Test the compound beacon 'work_unit': + expression_attribute_values = {":value": "I-1.T-small"} + + scan_response = encrypted_ddb_table.scan( + TableName=ddb_table_name, + FilterExpression="work_unit = :value", + ExpressionAttributeValues=expression_attribute_values, + ) + # Validate query was returned successfully + assert scan_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Validate only 1 item was returned: item1 + assert len(scan_response["Items"]) == 1 + assert scan_response["Items"][0] == item1 diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/__init__.py new file mode 100644 index 000000000..1b8c008ca --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Empty stub to allow imports.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/beacon_config.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/beacon_config.py new file mode 100644 index 000000000..6b59fbdeb --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/beacon_config.py @@ -0,0 +1,277 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Sets up the beacon config.""" +import boto3 +from aws_cryptographic_material_providers.keystore.client import KeyStore +from aws_cryptographic_material_providers.keystore.config import KeyStoreConfig +from aws_cryptographic_material_providers.keystore.models import ( + KMSConfigurationKmsKeyArn, +) +from aws_cryptographic_material_providers.mpl.client import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsHierarchicalKeyringInput, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models import ( + BeaconKeySourceSingle, + BeaconVersion, + CompoundBeacon, + Constructor, + ConstructorPart, + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, + EncryptedPart, + SearchConfig, + SignedPart, + SingleKeyStore, + StandardBeacon, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_structuredencryption.models import ( + CryptoAction, +) + + +def setup_beacon_config( + ddb_table_name: str, + branch_key_id: str, + branch_key_wrapping_kms_key_arn: str, + branch_key_ddb_table_name: str, +): + """Set up the beacon config.""" + keystore: KeyStore = KeyStore( + KeyStoreConfig( + ddb_table_name=branch_key_ddb_table_name, + kms_configuration=KMSConfigurationKmsKeyArn(branch_key_wrapping_kms_key_arn), + logical_key_store_name=branch_key_ddb_table_name, + kms_client=boto3.client("kms"), + ddb_client=boto3.client("dynamodb"), + ) + ) + + # Create standard beacons + standard_beacon_list = [ + StandardBeacon(name="EmployeeID", length=4), + StandardBeacon(name="TicketNumber", length=4), + StandardBeacon(name="ProjectName", length=4), + StandardBeacon(name="EmployeeEmail", length=4), + StandardBeacon(name="CreatorEmail", length=4), + StandardBeacon(name="ProjectStatus", length=4), + StandardBeacon(name="OrganizerEmail", length=4), + StandardBeacon(name="ManagerEmail", length=4), + StandardBeacon(name="AssigneeEmail", length=4), + StandardBeacon(name="City", loc="Location.City", length=4), + StandardBeacon(name="Severity", length=4), + StandardBeacon(name="Building", loc="Location.Building", length=4), + StandardBeacon(name="Floor", loc="Location.Floor", length=4), + StandardBeacon(name="Room", loc="Location.Room", length=4), + StandardBeacon(name="Desk", loc="Location.Desk", length=4), + ] + + # Define encrypted parts + encrypted_part_list = [ + EncryptedPart(name="EmployeeID", prefix="E-"), + EncryptedPart(name="TicketNumber", prefix="T-"), + EncryptedPart(name="ProjectName", prefix="P-"), + EncryptedPart(name="EmployeeEmail", prefix="EE-"), + EncryptedPart(name="CreatorEmail", prefix="CE-"), + EncryptedPart(name="ProjectStatus", prefix="PSts-"), + EncryptedPart(name="OrganizerEmail", prefix="OE-"), + EncryptedPart(name="ManagerEmail", prefix="ME-"), + EncryptedPart(name="AssigneeEmail", prefix="AE-"), + EncryptedPart(name="City", prefix="C-"), + EncryptedPart(name="Severity", prefix="S-"), + EncryptedPart(name="Building", prefix="B-"), + EncryptedPart(name="Floor", prefix="F-"), + EncryptedPart(name="Room", prefix="R-"), + EncryptedPart(name="Desk", prefix="D-"), + ] + + # Define signed parts + signed_part_list = [ + SignedPart(name="TicketModTime", prefix="M-"), + SignedPart(name="MeetingStart", prefix="MS-"), + SignedPart(name="TimeCardStart", prefix="TC-"), + SignedPart(name="ProjectStart", prefix="PS-"), + ] + + employee_id_constructor_part = 
ConstructorPart(name="EmployeeID", required=True) + ticket_number_constructor_part = ConstructorPart(name="TicketNumber", required=True) + project_name_constructor_part = ConstructorPart(name="ProjectName", required=True) + ticket_mod_time_constructor_part = ConstructorPart(name="TicketModTime", required=True) + meeting_start_constructor_part = ConstructorPart(name="MeetingStart", required=True) + time_card_start_constructor_part = ConstructorPart(name="TimeCardStart", required=True) + employee_email_constructor_part = ConstructorPart(name="EmployeeEmail", required=True) + creator_email_constructor_part = ConstructorPart(name="CreatorEmail", required=True) + project_status_constructor_part = ConstructorPart(name="ProjectStatus", required=True) + organizer_email_constructor_part = ConstructorPart(name="OrganizerEmail", required=True) + project_start_constructor_part = ConstructorPart(name="ProjectStart", required=True) + manager_email_constructor_part = ConstructorPart(name="ManagerEmail", required=True) + assignee_email_constructor_part = ConstructorPart(name="AssigneeEmail", required=True) + city_constructor_part = ConstructorPart(name="City", required=True) + severity_constructor_part = ConstructorPart(name="Severity", required=True) + building_constructor_part = ConstructorPart(name="Building", required=True) + floor_constructor_part = ConstructorPart(name="Floor", required=True) + room_constructor_part = ConstructorPart(name="Room", required=True) + desk_constructor_part = ConstructorPart(name="Desk", required=True) + + # 6 + employee_id_constructor = Constructor(parts=[employee_id_constructor_part]) + ticket_number_constructor = Constructor(parts=[ticket_number_constructor_part]) + project_name_constructor = Constructor(parts=[project_name_constructor_part]) + ticket_mod_time_constructor = Constructor(parts=[ticket_mod_time_constructor_part]) + building_constructor = Constructor(parts=[building_constructor_part]) + + meeting_start_floor_room_constructor = Constructor( + parts=[meeting_start_constructor_part, floor_constructor_part, room_constructor_part] + ) + + time_card_start_employee_email_constructor = Constructor( + parts=[time_card_start_constructor_part, employee_email_constructor_part] + ) + + time_card_start_constructor = Constructor(parts=[time_card_start_constructor_part]) + + creator_email_constructor = Constructor(parts=[creator_email_constructor_part]) + + project_status_constructor = Constructor(parts=[project_status_constructor_part]) + + employee_email_constructor = Constructor(parts=[employee_email_constructor_part]) + + organizer_email_constructor = Constructor(parts=[organizer_email_constructor_part]) + + project_start_constructor = Constructor(parts=[project_start_constructor_part]) + + manager_email_constructor = Constructor(parts=[manager_email_constructor_part]) + + assignee_email_constructor = Constructor(parts=[assignee_email_constructor_part]) + + city_constructor = Constructor(parts=[city_constructor_part]) + + severity_constructor = Constructor(parts=[severity_constructor_part]) + + building_floor_desk_constructor = Constructor( + parts=[building_constructor_part, floor_constructor_part, desk_constructor_part] + ) + + # 7 + pk0_constructor_list = [ + employee_id_constructor, + building_constructor, + ticket_number_constructor, + project_name_constructor, + ] + + sk0_constructor_list = [ + ticket_mod_time_constructor, + meeting_start_floor_room_constructor, + time_card_start_employee_email_constructor, + project_name_constructor, + employee_id_constructor, 
+ ] + + pk1_constructor_list = [ + creator_email_constructor, + employee_email_constructor, + project_status_constructor, + organizer_email_constructor, + ] + + sk1_constructor_list = [ + meeting_start_floor_room_constructor, + time_card_start_constructor, + ticket_mod_time_constructor, + project_start_constructor, + employee_id_constructor, + ] + + pk2_constructor_list = [manager_email_constructor, assignee_email_constructor] + + pk3_constructor_list = [city_constructor, severity_constructor] + + sk3_constructor_list = [building_floor_desk_constructor, ticket_mod_time_constructor] + + # 8 + compound_beacon_list = [ + CompoundBeacon(name="PK", split="~", constructors=pk0_constructor_list), + CompoundBeacon(name="SK", split="~", constructors=sk0_constructor_list), + CompoundBeacon(name="PK1", split="~", constructors=pk1_constructor_list), + CompoundBeacon(name="SK1", split="~", constructors=sk1_constructor_list), + CompoundBeacon(name="PK2", split="~", constructors=pk2_constructor_list), + CompoundBeacon(name="PK3", split="~", constructors=pk3_constructor_list), + CompoundBeacon(name="SK3", split="~", constructors=sk3_constructor_list), + ] + + # 9 + beacon_versions = [ + BeaconVersion( + standard_beacons=standard_beacon_list, + compound_beacons=compound_beacon_list, + encrypted_parts=encrypted_part_list, + signed_parts=signed_part_list, + version=1, # MUST be 1 + key_store=keystore, + key_source=BeaconKeySourceSingle(SingleKeyStore(key_id=branch_key_id, cache_ttl=6000)), + ) + ] + + # 10. Create a Hierarchical Keyring + mat_prov = AwsCryptographicMaterialProviders(MaterialProvidersConfig()) + + keyring_input = CreateAwsKmsHierarchicalKeyringInput( + branch_key_id=branch_key_id, key_store=keystore, ttl_seconds=6000 + ) + + kms_keyring = mat_prov.create_aws_kms_hierarchical_keyring(keyring_input) + + # 11. 
Define crypto actions + attribute_actions_on_encrypt = { + # Our partition key must be configured as SIGN_ONLY + "partition_key": CryptoAction.SIGN_ONLY, + # Attributes used in beacons must be configured as ENCRYPT_AND_SIGN + "EmployeeID": CryptoAction.ENCRYPT_AND_SIGN, + "TicketNumber": CryptoAction.ENCRYPT_AND_SIGN, + "ProjectName": CryptoAction.ENCRYPT_AND_SIGN, + "EmployeeName": CryptoAction.ENCRYPT_AND_SIGN, + "EmployeeEmail": CryptoAction.ENCRYPT_AND_SIGN, + "CreatorEmail": CryptoAction.ENCRYPT_AND_SIGN, + "ProjectStatus": CryptoAction.ENCRYPT_AND_SIGN, + "OrganizerEmail": CryptoAction.ENCRYPT_AND_SIGN, + "ManagerEmail": CryptoAction.ENCRYPT_AND_SIGN, + "AssigneeEmail": CryptoAction.ENCRYPT_AND_SIGN, + "City": CryptoAction.ENCRYPT_AND_SIGN, + "Severity": CryptoAction.ENCRYPT_AND_SIGN, + "Location": CryptoAction.ENCRYPT_AND_SIGN, + # These are not beaconized attributes, but are sensitive data that must be encrypted + "Attendees": CryptoAction.ENCRYPT_AND_SIGN, + "Subject": CryptoAction.ENCRYPT_AND_SIGN, + # Signed parts and unencrypted attributes can be configured as SIGN_ONLY or DO_NOTHING + # For this example, we will set these to SIGN_ONLY to ensure authenticity + "TicketModTime": CryptoAction.SIGN_ONLY, + "MeetingStart": CryptoAction.SIGN_ONLY, + "TimeCardStart": CryptoAction.SIGN_ONLY, + "EmployeeTitle": CryptoAction.SIGN_ONLY, + "Description": CryptoAction.SIGN_ONLY, + "ProjectTarget": CryptoAction.SIGN_ONLY, + "Hours": CryptoAction.SIGN_ONLY, + "Role": CryptoAction.SIGN_ONLY, + "Message": CryptoAction.SIGN_ONLY, + "ProjectStart": CryptoAction.SIGN_ONLY, + "Duration": CryptoAction.SIGN_ONLY, + } + + # Define table encryption configuration + table_configs = {} + + config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=kms_keyring, + search=SearchConfig(write_version=1, versions=beacon_versions), # MUST be 1 + ) + + # Store the configuration in a dictionary + table_configs[ddb_table_name] = config + + # Return encryption configuration + return DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/client/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/client/__init__.py new file mode 100644 index 000000000..1b8c008ca --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/client/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Empty stub to allow imports.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/client/example.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/client/example.py new file mode 100644 index 000000000..53af9093e --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/client/example.py @@ -0,0 +1,28 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Run the complex searchable encryption example with the EncryptedClient.""" +import boto3 +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient + +from ..beacon_config import setup_beacon_config +from .put_requests import put_all_items_to_table +from .query_requests import run_queries + + +def run_example( + ddb_table_name: str, + branch_key_id: str, + branch_key_wrapping_kms_key_arn: str, + branch_key_ddb_table_name: str, +): + """Run the example.""" + encryption_config = setup_beacon_config( + ddb_table_name, branch_key_id, branch_key_wrapping_kms_key_arn, branch_key_ddb_table_name + ) + + client = boto3.client("dynamodb") + + encrypted_client = EncryptedClient(client=client, encryption_config=encryption_config) + + put_all_items_to_table(ddb_table_name, encrypted_client) + run_queries(encrypted_client, ddb_table_name) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/client/put_requests.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/client/put_requests.py new file mode 100644 index 000000000..44e4e3402 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/client/put_requests.py @@ -0,0 +1,291 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Put all items into the table.""" + + +def put_all_meeting_items_to_table(ddb_table_name, ddb): + """Put all meeting items into the table.""" + meetings = [ + { + "partition_key": {"S": "meeting1"}, + "EmployeeID": {"S": "emp_001"}, + "EmployeeEmail": {"S": "able@gmail.com"}, + "MeetingStart": {"S": "2022-07-04T13:00"}, + "Location": {"M": {"Floor": {"S": "12"}, "Room": {"S": "403"}}}, + "Duration": {"S": "30"}, + "Attendees": {"L": [{"S": "able@gmail.com"}, {"S": "zorro@gmail.com"}]}, + "Subject": {"S": "Scan Beacons"}, + }, + { + "partition_key": {"S": "meeting2"}, + "EmployeeID": {"S": "emp_002"}, + "EmployeeEmail": {"S": "barney@gmail.com"}, + "MeetingStart": {"S": "2022-07-04T13:00"}, + "Location": {"M": {"Floor": {"S": "12"}, "Room": {"S": "403"}}}, + "Duration": {"S": "30"}, + "Attendees": {"L": [{"S": "barney@gmail.com"}, {"S": "zorro@gmail.com"}]}, + "Subject": {"S": "Scan Beacons"}, + }, + { + "partition_key": {"S": "meeting3"}, + "EmployeeID": {"S": "emp_003"}, + "EmployeeEmail": {"S": "charlie@gmail.com"}, + "MeetingStart": {"S": "2022-07-04T13:00"}, + "Location": {"M": {"Floor": {"S": "12"}, "Room": {"S": "403"}}}, + "Duration": {"S": "30"}, + "Attendees": {"L": [{"S": "charlie@gmail.com"}, {"S": "zorro@gmail.com"}]}, + "Subject": {"S": "Scan Beacons"}, + }, + { + "partition_key": {"S": "meeting4"}, + "EmployeeID": {"S": "emp_004"}, + "EmployeeEmail": {"S": "david@gmail.com"}, + "MeetingStart": {"S": "2022-07-04T13:00"}, + "Location": {"M": {"Floor": {"S": "12"}, "Room": {"S": "403"}}}, + "Duration": {"S": "30"}, + "Attendees": {"L": [{"S": "david@gmail.com"}, {"S": "zorro@gmail.com"}]}, + "Subject": {"S": "Scan Beacons"}, + }, + { + "partition_key": {"S": "meeting5"}, + "EmployeeID": {"S": "emp_002"}, + "EmployeeEmail": {"S": "barney@gmail.com"}, + "MeetingStart": {"S": "2022-07-04T14:00"}, + "Location": {"M": {"Floor": {"S": "12"}, "Room": {"S": "407"}}}, + "Duration": {"S": "30"}, + "Attendees": {"L": [{"S": "barney@gmail.com"}, {"S": "zorro@gmail.com"}]}, + "Subject": {"S": "DB ESDK"}, + }, + { + "partition_key": {"S": "meeting6"}, + "EmployeeID": {"S": "emp_003"}, + 
"EmployeeEmail": {"S": "charlie@gmail.com"}, + "MeetingStart": {"S": "2022-07-04T14:00"}, + "Location": {"M": {"Floor": {"S": "12"}, "Room": {"S": "407"}}}, + "Duration": {"S": "30"}, + "Attendees": {"L": [{"S": "charlie@gmail.com"}, {"S": "zorro@gmail.com"}]}, + "Subject": {"S": "DB ESDK"}, + }, + ] + + for meeting in meetings: + ddb.put_item(TableName=ddb_table_name, Item=meeting) + + +def put_all_employee_items_to_table(ddb_table_name, ddb): + """Put all employee items into the table.""" + employees = [ + { + "partition_key": {"S": "employee1"}, + "EmployeeID": {"S": "emp_001"}, + "EmployeeEmail": {"S": "able@gmail.com"}, + "ManagerEmail": {"S": "zorro@gmail.com"}, + "EmployeeName": {"S": "Able Jones"}, + "EmployeeTitle": {"S": "SDE9"}, + "Location": { + "M": {"Building": {"S": "44"}, "Floor": {"S": "12"}, "Desk": {"S": "3"}, "City": {"S": "Seattle"}} + }, + }, + { + "partition_key": {"S": "employee2"}, + "EmployeeID": {"S": "emp_002"}, + "EmployeeEmail": {"S": "barney@gmail.com"}, + "ManagerEmail": {"S": "zorro@gmail.com"}, + "EmployeeName": {"S": "Barney Jones"}, + "EmployeeTitle": {"S": "SDE8"}, + "Location": { + "M": {"Building": {"S": "44"}, "Floor": {"S": "12"}, "Desk": {"S": "4"}, "City": {"S": "Seattle"}} + }, + }, + { + "partition_key": {"S": "employee3"}, + "EmployeeID": {"S": "emp_003"}, + "EmployeeEmail": {"S": "charlie@gmail.com"}, + "ManagerEmail": {"S": "zorro@gmail.com"}, + "EmployeeName": {"S": "Charlie Jones"}, + "EmployeeTitle": {"S": "SDE7"}, + "Location": { + "M": {"Building": {"S": "44"}, "Floor": {"S": "4"}, "Desk": {"S": "5"}, "City": {"S": "Seattle"}} + }, + }, + { + "partition_key": {"S": "employee4"}, + "EmployeeID": {"S": "emp_004"}, + "EmployeeEmail": {"S": "david@gmail.com"}, + "ManagerEmail": {"S": "zorro@gmail.com"}, + "EmployeeName": {"S": "David Jones"}, + "EmployeeTitle": {"S": "SDE6"}, + "Location": {"M": {"Building": {"S": "22"}, "Floor": {"S": "1"}, "Desk": {"S": "3"}, "City": {"S": "NYC"}}}, + }, + ] + + for employee in employees: + ddb.put_item(TableName=ddb_table_name, Item=employee) + + +def put_all_project_items_to_table(ddb_table_name, ddb): + """Put all project items into the table.""" + projects = [ + { + "partition_key": {"S": "project1"}, + "ProjectName": {"S": "project_001"}, + "ProjectStatus": {"S": "Pending"}, + "ProjectStart": {"S": "2022-11-01"}, + "Description": {"S": "Turbo Crypto"}, + "ProjectTarget": {"S": "2024-01-01"}, + }, + { + "partition_key": {"S": "project2"}, + "ProjectName": {"S": "project_002"}, + "ProjectStatus": {"S": "Active"}, + "ProjectStart": {"S": "2022-07-04"}, + "Description": {"S": "Scan Beacons"}, + "ProjectTarget": {"S": "2024-01-01"}, + }, + { + "partition_key": {"S": "project3"}, + "ProjectName": {"S": "project_003"}, + "ProjectStatus": {"S": "Active"}, + "ProjectStart": {"S": "2022-08-05"}, + "Description": {"S": "DB ESDK"}, + "ProjectTarget": {"S": "2023-02-27"}, + }, + { + "partition_key": {"S": "project4"}, + "ProjectName": {"S": "project_004"}, + "ProjectStatus": {"S": "Done"}, + "ProjectStart": {"S": "2020-03-03"}, + "Description": {"S": "S3EC"}, + "ProjectTarget": {"S": "2021-09-05"}, + }, + ] + + for project in projects: + ddb.put_item(TableName=ddb_table_name, Item=project) + + +def put_all_reservation_items_to_table(ddb_table_name, ddb): + """Put all reservation items into the table.""" + reservations = [ + { + "partition_key": {"S": "reservation1"}, + "Location": {"M": {"Building": {"S": "SEA33"}, "Floor": {"S": "12"}, "Room": {"S": "403"}}}, + "MeetingStart": {"S": "2022-07-04T13:00"}, + 
"OrganizerEmail": {"S": "able@gmail.com"}, + "Duration": {"S": "30"}, + "Attendees": {"L": [{"S": "able@gmail.com"}, {"S": "barney@gmail.com"}]}, + "Subject": {"S": "Scan beacons"}, + }, + { + "partition_key": {"S": "reservation2"}, + "Location": {"M": {"Building": {"S": "SEA33"}, "Floor": {"S": "12"}, "Room": {"S": "407"}}}, + "MeetingStart": {"S": "2022-07-04T14:00"}, + "OrganizerEmail": {"S": "barney@gmail.com"}, + "Duration": {"S": "30"}, + "Attendees": {"L": [{"S": "able@gmail.com"}, {"S": "barney@gmail.com"}]}, + "Subject": {"S": "DB ESDK"}, + }, + ] + + for reservation in reservations: + ddb.put_item(TableName=ddb_table_name, Item=reservation) + + +def put_all_ticket_items_to_table(ddb_table_name, ddb): + """Put all ticket items into the table.""" + tickets = [ + { + "partition_key": {"S": "ticket1"}, + "TicketNumber": {"S": "ticket_001"}, + "TicketModTime": {"S": "2022-10-07T14:32:25"}, + "CreatorEmail": {"S": "zorro@gmail.com"}, + "AssigneeEmail": {"S": "able@gmail.com"}, + "Severity": {"S": "3"}, + "Subject": {"S": "Bad bug"}, + "Message": {"S": "This bug looks pretty bad"}, + }, + { + "partition_key": {"S": "ticket2"}, + "TicketNumber": {"S": "ticket_001"}, + "TicketModTime": {"S": "2022-10-07T14:32:25"}, + "CreatorEmail": {"S": "able@gmail.com"}, + "AssigneeEmail": {"S": "charlie@gmail.com"}, + "Severity": {"S": "3"}, + "Subject": {"S": "Bad bug"}, + "Message": {"S": "Charlie should handle this"}, + }, + { + "partition_key": {"S": "ticket3"}, + "TicketNumber": {"S": "ticket_002"}, + "TicketModTime": {"S": "2022-10-06T14:32:25"}, + "CreatorEmail": {"S": "zorro@gmail.com"}, + "AssigneeEmail": {"S": "charlie@gmail.com"}, + "Severity": {"S": "3"}, + "Subject": {"S": "Easy Bug"}, + "Message": {"S": "This seems simple enough"}, + }, + { + "partition_key": {"S": "ticket4"}, + "TicketNumber": {"S": "ticket_002"}, + "TicketModTime": {"S": "2022-10-08T14:32:25"}, + "CreatorEmail": {"S": "charlie@gmail.com"}, + "AssigneeEmail": {"S": "able@gmail.com"}, + "Severity": {"S": "3"}, + "Subject": {"S": "Easy Bug"}, + "Message": {"S": "that's in able's code"}, + }, + ] + + for ticket in tickets: + ddb.put_item(TableName=ddb_table_name, Item=ticket) + + +def put_all_timecard_items_to_table(ddb_table_name, ddb): + """Put all timecard items into the table.""" + timecards = [ + { + "partition_key": {"S": "timecard1"}, + "ProjectName": {"S": "project_002"}, + "TimeCardStart": {"S": "2022-09-12"}, + "EmployeeEmail": {"S": "able@gmail.com"}, + "Hours": {"S": "40"}, + "Role": {"S": "SDE3"}, + }, + { + "partition_key": {"S": "timecard2"}, + "ProjectName": {"S": "project_002"}, + "TimeCardStart": {"S": "2022-09-12"}, + "EmployeeEmail": {"S": "barney@gmail.com"}, + "Hours": {"S": "20"}, + "Role": {"S": "PM"}, + }, + { + "partition_key": {"S": "timecard3"}, + "ProjectName": {"S": "project_003"}, + "TimeCardStart": {"S": "2022-09-12"}, + "EmployeeEmail": {"S": "charlie@gmail.com"}, + "Hours": {"S": "40"}, + "Role": {"S": "SDE3"}, + }, + { + "partition_key": {"S": "timecard4"}, + "ProjectName": {"S": "project_003"}, + "TimeCardStart": {"S": "2022-09-12"}, + "EmployeeEmail": {"S": "barney@gmail.com"}, + "Hours": {"S": "20"}, + "Role": {"S": "PM"}, + }, + ] + + for timecard in timecards: + ddb.put_item(TableName=ddb_table_name, Item=timecard) + + +def put_all_items_to_table(ddb_table_name, ddb): + """Put all items into the table.""" + put_all_meeting_items_to_table(ddb_table_name, ddb) + put_all_employee_items_to_table(ddb_table_name, ddb) + put_all_project_items_to_table(ddb_table_name, ddb) + 
put_all_reservation_items_to_table(ddb_table_name, ddb) + put_all_ticket_items_to_table(ddb_table_name, ddb) + put_all_timecard_items_to_table(ddb_table_name, ddb) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/client/query_requests.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/client/query_requests.py new file mode 100644 index 000000000..54743896e --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/client/query_requests.py @@ -0,0 +1,813 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Run all queries on the EncryptedClient.""" +import time + + +def run_queries(ddb_client, table_name): + """Run all queries on the table.""" + run_query_1(ddb_client, table_name) + run_query_2(ddb_client, table_name) + run_query_3(ddb_client, table_name) + run_query_4(ddb_client, table_name) + run_query_5(ddb_client, table_name) + run_query_6(ddb_client, table_name) + run_query_7(ddb_client, table_name) + run_query_8(ddb_client, table_name) + run_query_9(ddb_client, table_name) + run_query_10(ddb_client, table_name) + run_query_11(ddb_client, table_name) + run_query_12(ddb_client, table_name) + run_query_13(ddb_client, table_name) + run_query_14(ddb_client, table_name) + run_query_15(ddb_client, table_name) + run_query_16(ddb_client, table_name) + run_query_17(ddb_client, table_name) + run_query_18(ddb_client, table_name) + run_query_19(ddb_client, table_name) + run_query_20(ddb_client, table_name) + run_query_21(ddb_client, table_name) + run_query_22(ddb_client, table_name) + run_query_23(ddb_client, table_name) + + +def run_query_1(ddb_client, table_name): + """ + Query 1: Get meetings by date and email. + + Key condition: PK1 = email AND SK1 BETWEEN date1 AND date2. + Filter condition: Duration > 0. + """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-1", + KeyConditionExpression="#pk1 = :email AND #sk1 BETWEEN :date1 AND :date2", + FilterExpression="#dur > :zero", + ExpressionAttributeNames={"#pk1": "PK1", "#sk1": "SK1", "#dur": "Duration"}, + ExpressionAttributeValues={ + ":email": {"S": "EE-able@gmail.com"}, + ":date1": {"S": "MS-2022-07-02"}, + ":date2": {"S": "MS-2022-07-08"}, + ":zero": {"S": "0"}, + }, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert exactly 1 item is returned + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "meeting1": + found_known_value_item = True + assert item["Subject"]["S"] == "Scan Beacons" + assert item["Location"]["M"]["Floor"]["S"] == "12" + assert {"S": "zorro@gmail.com"} in item["Attendees"]["L"] + + assert found_known_value_item + + +def run_query_2(ddb_client, table_name): + """ + Query 2: Get meetings by date and employeeID. + + Key condition: PK=employeeID SK between(date1, date2). + Filter condition: duration > 0. 
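+    The query supplies the configured beacon prefixes with each value: "E-" for the EmployeeID part and "MS-" for the MeetingStart part.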
+ """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-0", + KeyConditionExpression="#pk = :empid AND #sk BETWEEN :date1 AND :date2", + FilterExpression="#dur > :zero", + ExpressionAttributeNames={"#pk": "PK", "#sk": "SK", "#dur": "Duration"}, + ExpressionAttributeValues={ + ":empid": {"S": "E-emp_001"}, + ":date1": {"S": "MS-2022-07-02"}, + ":date2": {"S": "MS-2022-07-08"}, + ":zero": {"S": "0"}, + }, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert exactly 1 item is returned + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "meeting1": + found_known_value_item = True + assert item["Subject"]["S"] == "Scan Beacons" + assert item["Location"]["M"]["Floor"]["S"] == "12" + assert {"S": "zorro@gmail.com"} in item["Attendees"]["L"] + + assert found_known_value_item + + +def run_query_3(ddb_client, table_name): + """ + Query 3: Get meetings by date and building/floor/room. + + Key condition: PK=employeeID SK between(date1, date2). + Filter condition: SK contains building.floor.room (see NOTE). + NOTE: This query is modified from Demo.md. + Demo.md calls for a filter condition "SK contains building.floor.room" + However, one cannot use primary keys (partition nor sort) in a filter expression. + Instead, this query filters on the individual beacon attributes: building, floor, and room. + """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-0", + KeyConditionExpression="#pk = :building AND #sk BETWEEN :date1 AND :date2", + FilterExpression="#b = :b AND #f = :f AND #r = :r", + ExpressionAttributeNames={"#pk": "PK", "#sk": "SK", "#b": "Building", "#f": "Floor", "#r": "Room"}, + ExpressionAttributeValues={ + ":building": {"S": "B-SEA33"}, + ":date1": {"S": "MS-2022-07-02"}, + ":date2": {"S": "MS-2022-07-08"}, + ":b": {"S": "SEA33"}, + ":f": {"S": "12"}, + ":r": {"S": "403"}, + }, + ) + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert exactly 1 item is returned + assert len(response["Items"]) == 1 + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "reservation1": + found_known_value_item = True + assert item["Subject"]["S"] == "Scan beacons" + assert item["Location"]["M"]["Building"]["S"] == "SEA33" + assert {"S": "barney@gmail.com"} in item["Attendees"]["L"] + + assert found_known_value_item + + +def run_query_4(ddb_client, table_name): + """ + Query 4: Get employee data by email. + + Key condition: PK1=email SK1=employee ID. 
+ """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-1", + KeyConditionExpression="#pk1 = :email AND #sk1 = :empid", + ExpressionAttributeNames={"#pk1": "PK1", "#sk1": "SK1"}, + ExpressionAttributeValues={":email": {"S": "EE-able@gmail.com"}, ":empid": {"S": "E-emp_001"}}, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert exactly 1 item is returned + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "employee1": + found_known_value_item = True + assert item["EmployeeID"]["S"] == "emp_001" + assert item["Location"]["M"]["Desk"]["S"] == "3" + + assert found_known_value_item + + +def run_query_5(ddb_client, table_name): + """ + Query 5: Get meetings by email. + + Key condition: PK1=email SK1 > 30 days ago. + """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-1", + KeyConditionExpression="#pk1 = :email AND #sk1 BETWEEN :date1 AND :date2", + ExpressionAttributeNames={"#pk1": "PK1", "#sk1": "SK1"}, + ExpressionAttributeValues={ + ":email": {"S": "EE-able@gmail.com"}, + ":date1": {"S": "MS-"}, + ":date2": {"S": "MS-2023-03-20"}, + }, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert exactly 1 item is returned + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "meeting1": + found_known_value_item = True + assert item["Subject"]["S"] == "Scan Beacons" + assert item["Location"]["M"]["Floor"]["S"] == "12" + assert {"S": "zorro@gmail.com"} in item["Attendees"]["L"] + + assert found_known_value_item + + +def run_query_6(ddb_client, table_name): + """ + Query 6: Get tickets by email. + + Key condition: PK1=email SK1 > 30 days ago. + """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-1", + KeyConditionExpression="#pk1 = :email AND #sk1 < :date", + ExpressionAttributeNames={"#pk1": "PK1", "#sk1": "SK1"}, + ExpressionAttributeValues={":email": {"S": "CE-zorro@gmail.com"}, ":date": {"S": "MS-2023-03-20"}}, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 2 items returned: + # Expected to be `ticket1` and `ticket3` + assert len(response["Items"]) == 2 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "ticket1": + found_known_value_item = True + assert item["TicketNumber"]["S"] == "ticket_001" + + assert found_known_value_item + + +def run_query_7(ddb_client, table_name): + """ + Query 7: Get reservations by email. + + Key condition: PK1=organizeremail SK1 > 30 days ago. 
+ """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-1", + KeyConditionExpression="#pk1 = :email AND #sk1 < :date", + ExpressionAttributeNames={"#pk1": "PK1", "#sk1": "SK1"}, + ExpressionAttributeValues={":email": {"S": "OE-able@gmail.com"}, ":date": {"S": "MS-2023-03-20"}}, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `reservation1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "reservation1": + found_known_value_item = True + assert item["Subject"]["S"] == "Scan beacons" + assert item["Location"]["M"]["Floor"]["S"] == "12" + assert {"S": "barney@gmail.com"} in item["Attendees"]["L"] + + assert found_known_value_item + + +def run_query_8(ddb_client, table_name): + """ + Query 8: Get time cards by email. + + Key condition: PK1=employeeemail SK1 > 30 days ago. + """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-1", + KeyConditionExpression="#pk1 = :email AND #sk1 BETWEEN :date1 AND :date2", + ExpressionAttributeNames={"#pk1": "PK1", "#sk1": "SK1"}, + ExpressionAttributeValues={ + ":email": {"S": "EE-able@gmail.com"}, + ":date1": {"S": "TC-"}, + ":date2": {"S": "TC-2023-03-20"}, + }, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `timecard1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "timecard1": + found_known_value_item = True + assert item["ProjectName"]["S"] == "project_002" + + assert found_known_value_item + + +def run_query_9(ddb_client, table_name): + """ + Query 9: Get employee info by employee ID. + + Key condition: PK1=employeeID SK starts with "E-". + """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-0", + KeyConditionExpression="#pk = :empid AND begins_with(#sk, :prefix)", + ExpressionAttributeNames={"#pk": "PK", "#sk": "SK"}, + ExpressionAttributeValues={":empid": {"S": "E-emp_001"}, ":prefix": {"S": "E-"}}, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `employee1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "employee1": + found_known_value_item = True + assert item["EmployeeID"]["S"] == "emp_001" + + assert found_known_value_item + + +def run_query_10(ddb_client, table_name): + """ + Query 10: Get employee info by email. + + Key condition: PK1=email. + Filter condition: SK starts with "E-". 
+ """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-1", + KeyConditionExpression="#pk1 = :email AND begins_with(#sk1, :prefix)", + ExpressionAttributeNames={"#pk1": "PK1", "#sk1": "SK1"}, + ExpressionAttributeValues={":email": {"S": "EE-able@gmail.com"}, ":prefix": {"S": "E-"}}, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `employee1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "employee1": + found_known_value_item = True + assert item["EmployeeID"]["S"] == "emp_001" + + assert found_known_value_item + + +def run_query_11(ddb_client, table_name): + """ + Query 11: Get ticket history by ticket number. + + Key condition: PK=TicketNumber. + """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-0", + KeyConditionExpression="#pk = :ticketnum", + ExpressionAttributeNames={"#pk": "PK"}, + ExpressionAttributeValues={":ticketnum": {"S": "T-ticket_001"}}, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 2 items returned: + # Expected to be `ticket1` and `ticket2` + assert len(response["Items"]) == 2 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "ticket1": + found_known_value_item = True + assert item["TicketNumber"]["S"] == "ticket_001" + + assert found_known_value_item + + +def run_query_12(ddb_client, table_name): + """ + Query 12: Get Ticket History by employee email. + + Key condition: PK1=CreatorEmail. + Filter condition: PK=TicketNumber. + """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-1", + KeyConditionExpression="#pk1 = :email", + FilterExpression="#pk = :ticketnum", + ExpressionAttributeNames={"#pk1": "PK1", "#pk": "PK"}, + ExpressionAttributeValues={":email": {"S": "CE-zorro@gmail.com"}, ":ticketnum": {"S": "T-ticket_001"}}, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `ticket1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "ticket1": + found_known_value_item = True + assert item["TicketNumber"]["S"] == "ticket_001" + + assert found_known_value_item + + +def run_query_13(ddb_client, table_name): + """ + Query 13: Get ticket history by assignee email. + + Key condition: PK=AssigneeEmail. + Filter condition: PK=ticketNumber. 
+ """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-2", + KeyConditionExpression="#pk2 = :assignee", + FilterExpression="#pk = :ticketnum", + ExpressionAttributeNames={"#pk2": "PK2", "#pk": "PK"}, + ExpressionAttributeValues={":assignee": {"S": "AE-able@gmail.com"}, ":ticketnum": {"S": "T-ticket_001"}}, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `ticket1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "ticket1": + found_known_value_item = True + assert item["Subject"]["S"] == "Bad bug" + + assert found_known_value_item + + +def run_query_14(ddb_client, table_name): + """ + Query 14: Get employees by city.building.floor.desk. + + Key condition: PK3=city SK3 begins_with(building.floor.desk). + """ + # GSIs do not update instantly + # so if the results come back empty + # we retry after a short sleep + for i in range(10): + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-3", + KeyConditionExpression="#pk3 = :city AND begins_with(#sk3, :location)", + ExpressionAttributeNames={"#pk3": "PK3", "#sk3": "SK3"}, + ExpressionAttributeValues={":city": {"S": "C-Seattle"}, ":location": {"S": "B-44~F-12~D-3"}}, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `employee1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "employee1": + found_known_value_item = True + assert item["EmployeeID"]["S"] == "emp_001" + assert item["Location"]["M"]["Desk"]["S"] == "3" + + if found_known_value_item: + break + + time.sleep(0.2) + + # Assert the value was found inside the loop + assert found_known_value_item + + +def run_query_15(ddb_client, table_name): + """ + Query 15: Get employees by manager email. + + Key condition: PK2 = ManagerEmail. + """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-2", + KeyConditionExpression="#pk2 = :manager", + ExpressionAttributeNames={"#pk2": "PK2"}, + ExpressionAttributeValues={":manager": {"S": "ME-zorro@gmail.com"}}, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 4 items returned: + # Expected to be `employee1`, `employee2`, `employee3`, and `employee4` + assert len(response["Items"]) == 4 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "employee1": + found_known_value_item = True + assert item["EmployeeID"]["S"] == "emp_001" + assert item["Location"]["M"]["Desk"]["S"] == "3" + + assert found_known_value_item + + +def run_query_16(ddb_client, table_name): + """ + Query 16: Get assigned tickets by assignee email. + + Key condition: PK2 = AssigneeEmail. 
+ """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-2", + KeyConditionExpression="#pk2 = :assignee", + ExpressionAttributeNames={"#pk2": "PK2"}, + ExpressionAttributeValues={":assignee": {"S": "AE-able@gmail.com"}}, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 2 items returned: + # Expected to be `ticket1` and `ticket4` + assert len(response["Items"]) == 2 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "ticket1": + found_known_value_item = True + assert item["TicketNumber"]["S"] == "ticket_001" + + assert found_known_value_item + + +def run_query_17(ddb_client, table_name): + """ + Query 17: Get tickets updated within the last 24 hours. + + Key condition: PK3 = Severity, SK3 > 24 hours ago. + (For the sake of this example, we will assume + the date is 2022-10-08T09:30:00, such that "24 hours ago" + is 2022-10-07T09:30:00, and that our sample ticket record + with TicketModTime=2022-10-07T14:32:25 will be returned.) + """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-3", + KeyConditionExpression="#pk3 = :severity AND #sk3 > :date", + ExpressionAttributeNames={"#pk3": "PK3", "#sk3": "SK3"}, + ExpressionAttributeValues={":severity": {"S": "S-3"}, ":date": {"S": "M-2022-10-07T09:30:00"}}, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 3 items returned: + # Expected to be `ticket1`, `ticket2`, and `ticket4` + assert len(response["Items"]) == 3 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "ticket1": + found_known_value_item = True + assert item["TicketNumber"]["S"] == "ticket_001" + + assert found_known_value_item + + +def run_query_18(ddb_client, table_name): + """ + Query 18: Get projects by status, start and target date. + + Key condition: PK1 = Status, SK1 > StartDate. + Filter condition: TargetDelivery < TargetDate. + """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-1", + KeyConditionExpression="#pk1 = :status AND #sk1 > :startdate", + FilterExpression="#target < :targetdate", + ExpressionAttributeNames={"#pk1": "PK1", "#sk1": "SK1", "#target": "ProjectTarget"}, + ExpressionAttributeValues={ + ":status": {"S": "PSts-Pending"}, + ":startdate": {"S": "PS-2022-01-01"}, + ":targetdate": {"S": "2025-01-01"}, + }, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `project1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "project1": + found_known_value_item = True + assert item["ProjectName"]["S"] == "project_001" + + assert found_known_value_item + + +def run_query_19(ddb_client, table_name): + """ + Query 19: Get projects by name. + + Key condition: PK = ProjectName, SK = ProjectName. 
+ """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-0", + KeyConditionExpression="#pk = :projname AND #sk = :projname", + ExpressionAttributeNames={"#pk": "PK", "#sk": "SK"}, + ExpressionAttributeValues={":projname": {"S": "P-project_001"}}, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `project1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "project1": + found_known_value_item = True + assert item["ProjectName"]["S"] == "project_001" + + assert found_known_value_item + + +def run_query_20(ddb_client, table_name): + """ + Query 20: Get Project History by date range (against timecard record). + + Key condition: PK = ProjectName, SK between(date1, date2). + """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-0", + KeyConditionExpression="#pk = :projname AND #sk BETWEEN :date1 AND :date2", + ExpressionAttributeNames={"#pk": "PK", "#sk": "SK"}, + ExpressionAttributeValues={ + ":projname": {"S": "P-project_002"}, + ":date1": {"S": "TC-2022-01-01"}, + ":date2": {"S": "TC-2023-01-01"}, + }, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 2 items returned: + # Expected to be `timecard1` and `timecard2` + assert len(response["Items"]) == 2 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "timecard1": + found_known_value_item = True + assert item["ProjectName"]["S"] == "project_002" + + assert found_known_value_item + + +def run_query_21(ddb_client, table_name): + """ + Query 21: Get Project History by role. + + Key condition: PK = ProjectName. + Filter condition: role=rolename. + """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-0", + KeyConditionExpression="#pk = :projname", + FilterExpression="#role = :rolename", + ExpressionAttributeNames={"#pk": "PK", "#role": "Role"}, + ExpressionAttributeValues={":projname": {"S": "P-project_002"}, ":rolename": {"S": "SDE3"}}, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `timecard1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "timecard1": + found_known_value_item = True + assert item["ProjectName"]["S"] == "project_002" + + assert found_known_value_item + + +def run_query_22(ddb_client, table_name): + """ + Query 22: Get reservations by building ID. 
+ + Key condition: PK = Building ID + """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-0", + KeyConditionExpression="#pk = :building", + ExpressionAttributeNames={"#pk": "PK"}, + ExpressionAttributeValues={":building": {"S": "B-SEA33"}}, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 2 items returned: + # Expected to be `reservation1` and `reservation2` + assert len(response["Items"]) == 2 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "reservation1": + found_known_value_item = True + assert item["Subject"]["S"] == "Scan beacons" + + assert found_known_value_item + + +def run_query_23(ddb_client, table_name): + """ + Query 23: Get reservations by building ID and time range. + + Key condition: PK = Building ID, SK between(date1, date2) + Filter condition: Duration > 0 + """ + response = ddb_client.query( + TableName=table_name, + IndexName="GSI-0", + KeyConditionExpression="#pk = :building AND #sk BETWEEN :date1 AND :date2", + FilterExpression="#dur > :zero", + ExpressionAttributeNames={"#pk": "PK", "#sk": "SK", "#dur": "Duration"}, + ExpressionAttributeValues={ + ":building": {"S": "B-SEA33"}, + ":date1": {"S": "MS-2022-07-01"}, + ":date2": {"S": "MS-2022-07-08"}, + ":zero": {"S": "0"}, + }, + ) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 2 items returned: + # Expected to be `reservation1` and `reservation2` + assert len(response["Items"]) == 2 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"]["S"] == "reservation1": + found_known_value_item = True + assert item["Subject"]["S"] == "Scan beacons" + + assert found_known_value_item diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/table/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/table/__init__.py new file mode 100644 index 000000000..1b8c008ca --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/table/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Empty stub to allow imports.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/table/example.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/table/example.py new file mode 100644 index 000000000..c46ceb016 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/table/example.py @@ -0,0 +1,27 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Run the complex searchable encryption example with the EncryptedTable.""" +import boto3 +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable + +from ..beacon_config import setup_beacon_config +from .put_requests import put_all_items_to_table +from .query_requests import run_queries + + +def run_example( + ddb_table_name: str, + branch_key_id: str, + branch_key_wrapping_kms_key_arn: str, + branch_key_ddb_table_name: str, +): + """Run the example.""" + encryption_config = setup_beacon_config( + ddb_table_name, branch_key_id, branch_key_wrapping_kms_key_arn, branch_key_ddb_table_name + ) + + table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_table = EncryptedTable(table=table, encryption_config=encryption_config) + + put_all_items_to_table(encrypted_table) + run_queries(encrypted_table) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/table/put_requests.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/table/put_requests.py new file mode 100644 index 000000000..5e7c3257f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/table/put_requests.py @@ -0,0 +1,285 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Put all items into the table.""" + + +def put_all_meeting_items_to_table(table): + """Put all meeting items into the table.""" + meetings = [ + { + "partition_key": "meeting1", + "EmployeeID": "emp_001", + "EmployeeEmail": "able@gmail.com", + "MeetingStart": "2022-07-04T13:00", + "Location": {"Floor": "12", "Room": "403"}, + "Duration": "30", + "Attendees": ["able@gmail.com", "zorro@gmail.com"], + "Subject": "Scan Beacons", + }, + { + "partition_key": "meeting2", + "EmployeeID": "emp_002", + "EmployeeEmail": "barney@gmail.com", + "MeetingStart": "2022-07-04T13:00", + "Location": {"Floor": "12", "Room": "403"}, + "Duration": "30", + "Attendees": ["barney@gmail.com", "zorro@gmail.com"], + "Subject": "Scan Beacons", + }, + { + "partition_key": "meeting3", + "EmployeeID": "emp_003", + "EmployeeEmail": "charlie@gmail.com", + "MeetingStart": "2022-07-04T13:00", + "Location": {"Floor": "12", "Room": "403"}, + "Duration": "30", + "Attendees": ["charlie@gmail.com", "zorro@gmail.com"], + "Subject": "Scan Beacons", + }, + { + "partition_key": "meeting4", + "EmployeeID": "emp_004", + "EmployeeEmail": "david@gmail.com", + "MeetingStart": "2022-07-04T13:00", + "Location": {"Floor": "12", "Room": "403"}, + "Duration": "30", + "Attendees": ["david@gmail.com", "zorro@gmail.com"], + "Subject": "Scan Beacons", + }, + { + "partition_key": "meeting5", + "EmployeeID": "emp_002", + "EmployeeEmail": "barney@gmail.com", + "MeetingStart": "2022-07-04T14:00", + "Location": {"Floor": "12", "Room": "407"}, + "Duration": "30", + "Attendees": ["barney@gmail.com", "zorro@gmail.com"], + "Subject": "DB ESDK", + }, + { + "partition_key": "meeting6", + "EmployeeID": "emp_003", + "EmployeeEmail": "charlie@gmail.com", + "MeetingStart": "2022-07-04T14:00", + "Location": {"Floor": "12", "Room": "407"}, + "Duration": "30", + "Attendees": ["charlie@gmail.com", "zorro@gmail.com"], + "Subject": "DB ESDK", + }, + ] + + for meeting in meetings: + table.put_item(Item=meeting) + + +def put_all_employee_items_to_table(table): + """Put all employee items into the table.""" + employees = [ + { + "partition_key": "employee1", + "EmployeeID": "emp_001", + 
"EmployeeEmail": "able@gmail.com", + "ManagerEmail": "zorro@gmail.com", + "EmployeeName": "Able Jones", + "EmployeeTitle": "SDE9", + "Location": {"Building": "44", "Floor": "12", "Desk": "3", "City": "Seattle"}, + }, + { + "partition_key": "employee2", + "EmployeeID": "emp_002", + "EmployeeEmail": "barney@gmail.com", + "ManagerEmail": "zorro@gmail.com", + "EmployeeName": "Barney Jones", + "EmployeeTitle": "SDE8", + "Location": {"Building": "44", "Floor": "12", "Desk": "4", "City": "Seattle"}, + }, + { + "partition_key": "employee3", + "EmployeeID": "emp_003", + "EmployeeEmail": "charlie@gmail.com", + "ManagerEmail": "zorro@gmail.com", + "EmployeeName": "Charlie Jones", + "EmployeeTitle": "SDE7", + "Location": {"Building": "44", "Floor": "4", "Desk": "5", "City": "Seattle"}, + }, + { + "partition_key": "employee4", + "EmployeeID": "emp_004", + "EmployeeEmail": "david@gmail.com", + "ManagerEmail": "zorro@gmail.com", + "EmployeeName": "David Jones", + "EmployeeTitle": "SDE6", + "Location": {"Building": "22", "Floor": "1", "Desk": "3", "City": "NYC"}, + }, + ] + + for employee in employees: + table.put_item(Item=employee) + + +def put_all_project_items_to_table(table): + """Put all project items into the table.""" + projects = [ + { + "partition_key": "project1", + "ProjectName": "project_001", + "ProjectStatus": "Pending", + "ProjectStart": "2022-11-01", + "Description": "Turbo Crypto", + "ProjectTarget": "2024-01-01", + }, + { + "partition_key": "project2", + "ProjectName": "project_002", + "ProjectStatus": "Active", + "ProjectStart": "2022-07-04", + "Description": "Scan Beacons", + "ProjectTarget": "2024-01-01", + }, + { + "partition_key": "project3", + "ProjectName": "project_003", + "ProjectStatus": "Active", + "ProjectStart": "2022-08-05", + "Description": "DB ESDK", + "ProjectTarget": "2023-02-27", + }, + { + "partition_key": "project4", + "ProjectName": "project_004", + "ProjectStatus": "Done", + "ProjectStart": "2020-03-03", + "Description": "S3EC", + "ProjectTarget": "2021-09-05", + }, + ] + + for project in projects: + table.put_item(Item=project) + + +def put_all_reservation_items_to_table(table): + """Put all reservation items into the table.""" + reservations = [ + { + "partition_key": "reservation1", + "Location": {"Building": "SEA33", "Floor": "12", "Room": "403"}, + "MeetingStart": "2022-07-04T13:00", + "OrganizerEmail": "able@gmail.com", + "Duration": "30", + "Attendees": ["able@gmail.com", "barney@gmail.com"], + "Subject": "Scan beacons", + }, + { + "partition_key": "reservation2", + "Location": {"Building": "SEA33", "Floor": "12", "Room": "407"}, + "MeetingStart": "2022-07-04T14:00", + "OrganizerEmail": "barney@gmail.com", + "Duration": "30", + "Attendees": ["able@gmail.com", "barney@gmail.com"], + "Subject": "DB ESDK", + }, + ] + + for reservation in reservations: + table.put_item(Item=reservation) + + +def put_all_ticket_items_to_table(table): + """Put all ticket items into the table.""" + tickets = [ + { + "partition_key": "ticket1", + "TicketNumber": "ticket_001", + "TicketModTime": "2022-10-07T14:32:25", + "CreatorEmail": "zorro@gmail.com", + "AssigneeEmail": "able@gmail.com", + "Severity": "3", + "Subject": "Bad bug", + "Message": "This bug looks pretty bad", + }, + { + "partition_key": "ticket2", + "TicketNumber": "ticket_001", + "TicketModTime": "2022-10-07T14:32:25", + "CreatorEmail": "able@gmail.com", + "AssigneeEmail": "charlie@gmail.com", + "Severity": "3", + "Subject": "Bad bug", + "Message": "Charlie should handle this", + }, + { + "partition_key": "ticket3", 
+ "TicketNumber": "ticket_002", + "TicketModTime": "2022-10-06T14:32:25", + "CreatorEmail": "zorro@gmail.com", + "AssigneeEmail": "charlie@gmail.com", + "Severity": "3", + "Subject": "Easy Bug", + "Message": "This seems simple enough", + }, + { + "partition_key": "ticket4", + "TicketNumber": "ticket_002", + "TicketModTime": "2022-10-08T14:32:25", + "CreatorEmail": "charlie@gmail.com", + "AssigneeEmail": "able@gmail.com", + "Severity": "3", + "Subject": "Easy Bug", + "Message": "that's in able's code", + }, + ] + + for ticket in tickets: + table.put_item(Item=ticket) + + +def put_all_timecard_items_to_table(table): + """Put all timecard items into the table.""" + timecards = [ + { + "partition_key": "timecard1", + "ProjectName": "project_002", + "TimeCardStart": "2022-09-12", + "EmployeeEmail": "able@gmail.com", + "Hours": "40", + "Role": "SDE3", + }, + { + "partition_key": "timecard2", + "ProjectName": "project_002", + "TimeCardStart": "2022-09-12", + "EmployeeEmail": "barney@gmail.com", + "Hours": "20", + "Role": "PM", + }, + { + "partition_key": "timecard3", + "ProjectName": "project_003", + "TimeCardStart": "2022-09-12", + "EmployeeEmail": "charlie@gmail.com", + "Hours": "40", + "Role": "SDE3", + }, + { + "partition_key": "timecard4", + "ProjectName": "project_003", + "TimeCardStart": "2022-09-12", + "EmployeeEmail": "barney@gmail.com", + "Hours": "20", + "Role": "PM", + }, + ] + + for timecard in timecards: + table.put_item(Item=timecard) + + +def put_all_items_to_table(table): + """Put all items into the table.""" + put_all_meeting_items_to_table(table) + put_all_employee_items_to_table(table) + put_all_project_items_to_table(table) + put_all_reservation_items_to_table(table) + put_all_ticket_items_to_table(table) + put_all_timecard_items_to_table(table) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/table/query_requests.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/table/query_requests.py new file mode 100644 index 000000000..bc767c36b --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/complex_example/table/query_requests.py @@ -0,0 +1,686 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Run all queries on the EncryptedTables.""" +from boto3.dynamodb.conditions import Attr, Key + + +def run_queries(table): + """Run all queries on the table.""" + run_query_1(table) + run_query_2(table) + run_query_3(table) + run_query_4(table) + run_query_5(table) + run_query_6(table) + run_query_7(table) + run_query_8(table) + run_query_9(table) + run_query_10(table) + run_query_11(table) + run_query_12(table) + run_query_13(table) + run_query_14(table) + run_query_15(table) + run_query_16(table) + run_query_17(table) + run_query_18(table) + run_query_19(table) + run_query_20(table) + run_query_21(table) + run_query_22(table) + run_query_23(table) + + +def run_query_1(table): + """ + Query 1: Get meetings by date and email. + + Key condition: PK1 = email AND SK1 BETWEEN date1 AND date2. + Filter condition: Duration > 0. 
+ """ + key_condition = Key("PK1").eq("EE-able@gmail.com") & Key("SK1").between("MS-2022-07-02", "MS-2022-07-08") + filter_condition = Attr("Duration").gt("0") + + response = table.query(IndexName="GSI-1", KeyConditionExpression=key_condition, FilterExpression=filter_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert exactly 1 item is returned + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "meeting1": + found_known_value_item = True + assert item["Subject"] == "Scan Beacons" + assert item["Location"]["Floor"] == "12" + assert "zorro@gmail.com" in item["Attendees"] + + assert found_known_value_item + + +def run_query_2(table): + """ + Query 2: Get meetings by date and employeeID. + + Key condition: PK=employeeID SK between(date1, date2). + Filter condition: duration > 0. + """ + key_condition = Key("PK").eq("E-emp_001") & Key("SK").between("MS-2022-07-02", "MS-2022-07-08") + filter_condition = Attr("Duration").gt("0") + + response = table.query(IndexName="GSI-0", KeyConditionExpression=key_condition, FilterExpression=filter_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert exactly 1 item is returned + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "meeting1": + found_known_value_item = True + assert item["Subject"] == "Scan Beacons" + assert item["Location"]["Floor"] == "12" + assert "zorro@gmail.com" in item["Attendees"] + + assert found_known_value_item + + +def run_query_3(table): + """ + Query 3: Get meetings by date and building/floor/room. + + Key condition: PK=employeeID SK between(date1, date2) + Filter condition: SK contains building.floor.room (see NOTE) + NOTE: This query is modified from Demo.md. + Demo.md calls for a filter condition "SK contains building.floor.room" + However, one cannot use primary keys (partition nor sort) in a filter expression. + Instead, this query filters on the individual beacon attributes: building, floor, and room. + """ + key_condition = Key("PK").eq("B-SEA33") & Key("SK").between("MS-2022-07-02", "MS-2022-07-08") + filter_condition = Attr("Building").eq("SEA33") & Attr("Floor").eq("12") & Attr("Room").eq("403") + + response = table.query(IndexName="GSI-0", KeyConditionExpression=key_condition, FilterExpression=filter_condition) + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert exactly 1 item is returned + assert len(response["Items"]) == 1 + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "reservation1": + found_known_value_item = True + assert item["Subject"] == "Scan beacons" + assert item["Location"]["Building"] == "SEA33" + assert "barney@gmail.com" in item["Attendees"] + + assert found_known_value_item + + +def run_query_4(table): + """ + Query 4: Get employee data by email. + + Key condition: PK1=email SK1=employee ID. 
+ """ + key_condition = Key("PK1").eq("EE-able@gmail.com") & Key("SK1").eq("E-emp_001") + + response = table.query(IndexName="GSI-1", KeyConditionExpression=key_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert exactly 1 item is returned + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "employee1": + found_known_value_item = True + assert item["EmployeeID"] == "emp_001" + assert item["Location"]["Desk"] == "3" + + assert found_known_value_item + + +def run_query_5(table): + """ + Query 5: Get meetings by email. + + Key condition: PK1=email SK1 > 30 days ago. + """ + key_condition = Key("PK1").eq("EE-able@gmail.com") & Key("SK1").between("MS-", "MS-2023-03-20") + + response = table.query(IndexName="GSI-1", KeyConditionExpression=key_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert exactly 1 item is returned + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "meeting1": + found_known_value_item = True + assert item["Subject"] == "Scan Beacons" + assert item["Location"]["Floor"] == "12" + assert "zorro@gmail.com" in item["Attendees"] + + assert found_known_value_item + + +def run_query_6(table): + """ + Query 6: Get tickets by email. + + Key condition: PK1=email SK1 > 30 days ago. + """ + key_condition = Key("PK1").eq("CE-zorro@gmail.com") & Key("SK1").lt("MS-2023-03-20") + + response = table.query(IndexName="GSI-1", KeyConditionExpression=key_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 2 items returned: + # Expected to be `ticket1` and `ticket3` + assert len(response["Items"]) == 2 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "ticket1": + found_known_value_item = True + assert item["TicketNumber"] == "ticket_001" + + assert found_known_value_item + + +def run_query_7(table): + """ + Query 7: Get reservations by email. + + Key condition: PK1=organizeremail SK1 > 30 days ago. + """ + key_condition = Key("PK1").eq("OE-able@gmail.com") & Key("SK1").lt("MS-2023-03-20") + + response = table.query(IndexName="GSI-1", KeyConditionExpression=key_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `reservation1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "reservation1": + found_known_value_item = True + assert item["Subject"] == "Scan beacons" + assert item["Location"]["Floor"] == "12" + assert "barney@gmail.com" in item["Attendees"] + + assert found_known_value_item + + +def run_query_8(table): + """ + Query 8: Get time cards by email. + + Key condition: PK1=employeeemail SK1 > 30 days ago. 
+ """ + key_condition = Key("PK1").eq("EE-able@gmail.com") & Key("SK1").between("TC-", "TC-2023-03-20") + + response = table.query(IndexName="GSI-1", KeyConditionExpression=key_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `timecard1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "timecard1": + found_known_value_item = True + assert item["ProjectName"] == "project_002" + + assert found_known_value_item + + +def run_query_9(table): + """ + Query 9: Get employee info by employee ID. + + Key condition: PK1=employeeID SK starts with "E-" + """ + key_condition = Key("PK").eq("E-emp_001") & Key("SK").begins_with("E-") + + response = table.query(IndexName="GSI-0", KeyConditionExpression=key_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `employee1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "employee1": + found_known_value_item = True + assert item["EmployeeID"] == "emp_001" + + assert found_known_value_item + + +def run_query_10(table): + """ + Query 10: Get employee info by email. + + Key condition: PK1=email + Filter condition: SK starts with "E-" + """ + key_condition = Key("PK1").eq("EE-able@gmail.com") & Key("SK1").begins_with("E-") + + response = table.query(IndexName="GSI-1", KeyConditionExpression=key_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `employee1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "employee1": + found_known_value_item = True + assert item["EmployeeID"] == "emp_001" + + assert found_known_value_item + + +def run_query_11(table): + """ + Query 11: Get ticket history by ticket number. + + Key condition: PK=TicketNumber + """ + key_condition = Key("PK").eq("T-ticket_001") + + response = table.query(IndexName="GSI-0", KeyConditionExpression=key_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 2 items returned: + # Expected to be `ticket1` and `ticket2` + assert len(response["Items"]) == 2 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "ticket1": + found_known_value_item = True + assert item["TicketNumber"] == "ticket_001" + + assert found_known_value_item + + +def run_query_12(table): + """ + Query 12: Get Ticket History by employee email. 
+ + Key condition: PK1=CreatorEmail + Filter condition: PK=TicketNumber + """ + key_condition = Key("PK1").eq("CE-zorro@gmail.com") + filter_condition = Attr("PK").eq("T-ticket_001") + + response = table.query(IndexName="GSI-1", KeyConditionExpression=key_condition, FilterExpression=filter_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `ticket1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "ticket1": + found_known_value_item = True + assert item["TicketNumber"] == "ticket_001" + + assert found_known_value_item + + +def run_query_13(table): + """ + Query 13: Get ticket history by assignee email. + + Key condition: PK=AssigneeEmail + Filter condition: PK=ticketNumber + """ + key_condition = Key("PK2").eq("AE-able@gmail.com") + filter_condition = Attr("PK").eq("T-ticket_001") + + response = table.query(IndexName="GSI-2", KeyConditionExpression=key_condition, FilterExpression=filter_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `ticket1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "ticket1": + found_known_value_item = True + assert item["Subject"] == "Bad bug" + + assert found_known_value_item + + +def run_query_14(table): + """ + Query 14: Get employees by city.building.floor.desk. + + Key condition: PK3=city SK3 begins_with(building.floor.desk) + """ + key_condition = Key("PK3").eq("C-Seattle") & Key("SK3").begins_with("B-44~F-12~D-3") + + # Execute query with retries since GSIs don't update instantly + for i in range(10): + response = table.query(IndexName="GSI-3", KeyConditionExpression=key_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # If no results, retry after short sleep + if len(response["Items"]) == 0: + import time + + time.sleep(0.02) + continue + + # Assert 1 item was returned: `employee1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "employee1": + found_known_value_item = True + assert item["EmployeeID"] == "emp_001" + assert item["Location"]["Desk"] == "3" + + assert found_known_value_item + break + + # Assert the value was found inside the loop + assert found_known_value_item + + +def run_query_15(table): + """ + Query 15: Get employees by manager email. + + Key condition: PK2 = ManagerEmail + """ + key_condition = Key("PK2").eq("ME-zorro@gmail.com") + + response = table.query(IndexName="GSI-2", KeyConditionExpression=key_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 4 items returned: + # Expected to be `employee1`, `employee2`, `employee3`, and `employee4` + assert len(response["Items"]) == 4 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "employee1": + found_known_value_item = True + assert item["EmployeeID"] == "emp_001" + assert item["Location"]["Desk"] == "3" + + assert found_known_value_item + + +def run_query_16(table): + """ + Query 16: Get assigned tickets by assignee email. 
+ + Key condition: PK2 = AssigneeEmail + """ + key_condition = Key("PK2").eq("AE-able@gmail.com") + + response = table.query(IndexName="GSI-2", KeyConditionExpression=key_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 2 items returned: + # Expected to be `ticket1` and `ticket4` + assert len(response["Items"]) == 2 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "ticket1": + found_known_value_item = True + assert item["TicketNumber"] == "ticket_001" + + assert found_known_value_item + + +def run_query_17(table): + """ + Query 17: Get tickets updated within the last 24 hours. + + Key condition: PK3 = Severity, SK3 > 24 hours ago + (For the sake of this example, we will assume + the date is 2022-10-08T09:30:00, such that "24 hours ago" + is 2022-10-07T09:30:00, and that our sample ticket record + with TicketModTime=2022-10-07T14:32:25 will be returned.) + """ + key_condition = Key("PK3").eq("S-3") & Key("SK3").gt("M-2022-10-07T09:30:00") + + response = table.query(IndexName="GSI-3", KeyConditionExpression=key_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 3 items returned: + # Expected to be `ticket1`, `ticket2`, and `ticket4` + assert len(response["Items"]) == 3 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "ticket1": + found_known_value_item = True + assert item["TicketNumber"] == "ticket_001" + + assert found_known_value_item + + +def run_query_18(table): + """ + Query 18: Get projects by status, start and target date. + + Key condition: PK1 = Status, SK1 > StartDate + Filter condition: TargetDelivery < TargetDate + """ + key_condition = Key("PK1").eq("PSts-Pending") & Key("SK1").gt("PS-2022-01-01") + filter_condition = Attr("ProjectTarget").lt("2025-01-01") + + response = table.query(IndexName="GSI-1", KeyConditionExpression=key_condition, FilterExpression=filter_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `project1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "project1": + found_known_value_item = True + assert item["ProjectName"] == "project_001" + + assert found_known_value_item + + +def run_query_19(table): + """ + Query 19: Get projects by name. + + Key condition: PK = ProjectName, SK = ProjectName + """ + key_condition = Key("PK").eq("P-project_001") & Key("SK").eq("P-project_001") + + response = table.query(IndexName="GSI-0", KeyConditionExpression=key_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `project1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "project1": + found_known_value_item = True + assert item["ProjectName"] == "project_001" + + assert found_known_value_item + + +def run_query_20(table): + """ + Query 20: Get Project History by date range (against timecard record). 
+ + Key condition: PK = ProjectName, SK between(date1, date2) + """ + key_condition = Key("PK").eq("P-project_002") & Key("SK").between("TC-2022-01-01", "TC-2023-01-01") + + response = table.query(IndexName="GSI-0", KeyConditionExpression=key_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 2 items returned: + # Expected to be `timecard1` and `timecard2` + assert len(response["Items"]) == 2 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "timecard1": + found_known_value_item = True + assert item["ProjectName"] == "project_002" + + assert found_known_value_item + + +def run_query_21(table): + """ + Query 21: Get Project History by role. + + Key condition: PK = ProjectName + Filter condition: role=rolename + """ + key_condition = Key("PK").eq("P-project_002") + filter_condition = Attr("Role").eq("SDE3") + + response = table.query(IndexName="GSI-0", KeyConditionExpression=key_condition, FilterExpression=filter_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 1 item was returned: `timecard1` + assert len(response["Items"]) == 1 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "timecard1": + found_known_value_item = True + assert item["ProjectName"] == "project_002" + + assert found_known_value_item + + +def run_query_22(table): + """ + Query 22: Get reservations by building ID. + + Key condition: PK = Building ID + """ + key_condition = Key("PK").eq("B-SEA33") + + response = table.query(IndexName="GSI-0", KeyConditionExpression=key_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 2 items returned: + # Expected to be `reservation1` and `reservation2` + assert len(response["Items"]) == 2 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "reservation1": + found_known_value_item = True + assert item["Subject"] == "Scan beacons" + + assert found_known_value_item + + +def run_query_23(table): + """ + Query 23: Get reservations by building ID and time range. 
+ + Key condition: PK = Building ID, SK between(date1, date2) + Filter condition: Duration > 0 + """ + key_condition = Key("PK").eq("B-SEA33") & Key("SK").between("MS-2022-07-01", "MS-2022-07-08") + filter_condition = Attr("Duration").gt("0") + + response = table.query(IndexName="GSI-0", KeyConditionExpression=key_condition, FilterExpression=filter_condition) + + # Validate query response + assert response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Assert 2 items returned: + # Expected to be `reservation1` and `reservation2` + assert len(response["Items"]) == 2 + + # Check known values in the response + found_known_value_item = False + for item in response["Items"]: + if item["partition_key"] == "reservation1": + found_known_value_item = True + assert item["Subject"] == "Scan beacons" + + assert found_known_value_item diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/compound_beacon_searchable_encryption_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/compound_beacon_searchable_encryption_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/compound_beacon_searchable_encryption_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/compound_beacon_searchable_encryption_example/beacon_config.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/compound_beacon_searchable_encryption_example/beacon_config.py new file mode 100644 index 000000000..1fa1fafc0 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/compound_beacon_searchable_encryption_example/beacon_config.py @@ -0,0 +1,159 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Sets up the beacon config for compound beacon searchable encryption.""" +import boto3 +from aws_cryptographic_material_providers.keystore.client import KeyStore +from aws_cryptographic_material_providers.keystore.config import KeyStoreConfig +from aws_cryptographic_material_providers.keystore.models import KMSConfigurationKmsKeyArn +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import CreateAwsKmsHierarchicalKeyringInput +from aws_dbesdk_dynamodb.structures.dynamodb import ( + BeaconKeySourceSingle, + BeaconStylePartOnly, + BeaconVersion, + CompoundBeacon, + Constructor, + ConstructorPart, + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, + EncryptedPart, + PartOnly, + SearchConfig, + SingleKeyStore, + StandardBeacon, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import CryptoAction + +GSI_NAME = "last4UnitCompound-index" + + +def setup_beacon_config( + ddb_table_name: str, + branch_key_id: str, + branch_key_wrapping_kms_key_arn: str, + branch_key_ddb_table_name: str, +): + """Set up the beacon config for compound beacon searchable encryption.""" + # 1. Create Beacons. + # These are the same beacons as in the "BasicSearchableEncryptionExample" in this directory. 
+ # See that file to see details on beacon construction and parameters. + # While we will not directly query against these beacons, + # you must create standard beacons on encrypted fields + # that we wish to use in compound beacons. + # We mark them both as PartOnly to enforce the fact that + # we will not directly query against these beacons. + standard_beacon_list = [ + StandardBeacon(name="inspector_id_last4", length=10, style=BeaconStylePartOnly(PartOnly())), + StandardBeacon(name="unit", length=30, style=BeaconStylePartOnly(PartOnly())), + ] + + # 2. Define encrypted parts. + # Encrypted parts define the beacons that can be used to construct a compound beacon, + # and how the compound beacon prefixes those beacon values. + # A encrypted part must receive: + # - name: Name of a standard beacon + # - prefix: Any string. This is plaintext that prefixes the beaconized value in the compound beacon. + # Prefixes must be unique across the configuration, and must not be a prefix of another prefix; + # i.e. for all configured prefixes, the first N characters of a prefix must not equal another prefix. + # In practice, it is suggested to have a short value distinguishable from other parts served on the prefix. + encrypted_part_list = [ + # For this example, we will choose "L-" as the prefix for "Last 4 digits of inspector ID". + # With this prefix and the standard beacon's bit length definition (10), the beaconized + # version of the inspector ID's last 4 digits will appear as + # `L-000` to `L-3ff` inside a compound beacon. + EncryptedPart(name="inspector_id_last4", prefix="L-"), + # For this example, we will choose "U-" as the prefix for "unit". + # With this prefix and the standard beacon's bit length definition (30), a unit beacon will appear + # as `U-00000000` to `U-3fffffff` inside a compound beacon. + EncryptedPart(name="unit", prefix="U-"), + ] + + constructor_parts = [ + ConstructorPart(name="inspector_id_last4", required=True), + ConstructorPart(name="unit", required=True), + ] + + constructors = [Constructor(parts=constructor_parts)] + + # 3. Define compound beacon. + # A compound beacon allows one to serve multiple beacons or attributes from a single index. + # A compound beacon must receive: + # - name: The name of the beacon. Compound beacon values will be written to `aws_ddb_e_[name]`. + # - split: A character separating parts in a compound beacon + # A compound beacon may also receive: + # - encrypted: A list of encrypted parts. This is effectively a list of beacons. We provide the list + # that we created above. + # - constructors: A list of constructors. This is an ordered list of possible ways to create a beacon. + # We have not defined any constructors here; see the complex example for how to do this. + # The client will provide a default constructor, which will write a compound beacon as: + # all signed parts in the order they are added to the signed list; + # all encrypted parts in order they are added to the encrypted list; all parts required. + # In this example, we expect compound beacons to be written as + # `L-XXX.U-YYYYYYYY`, since our encrypted list looks like + # [last4EncryptedPart, unitEncryptedPart]. + # - signed: A list of signed parts, i.e. plaintext attributes. This would be provided if we + # wanted to use plaintext values as part of constructing our compound beacon. We do not + # provide this here; see the Complex example for an example. + compound_beacon_list = [CompoundBeacon(name="last4UnitCompound", constructors=constructors, split=".")] + + # 4. 
Configure the Keystore + # These are the same constructions as in the Basic example, which describes these in more detail. + keystore = KeyStore( + config=KeyStoreConfig( + ddb_client=boto3.client("dynamodb"), + ddb_table_name=branch_key_ddb_table_name, + logical_key_store_name=branch_key_ddb_table_name, + kms_client=boto3.client("kms"), + kms_configuration=KMSConfigurationKmsKeyArn(value=branch_key_wrapping_kms_key_arn), + ) + ) + + # 5. Create BeaconVersion. + # This is similar to the Basic example, except we have also provided a compoundBeaconList. + # We must also continue to provide all of the standard beacons that compose a compound beacon list. + beacon_versions = [ + BeaconVersion( + encrypted_parts=encrypted_part_list, + standard_beacons=standard_beacon_list, + compound_beacons=compound_beacon_list, + version=1, # MUST be 1 + key_store=keystore, + key_source=BeaconKeySourceSingle(SingleKeyStore(key_id=branch_key_id, cache_ttl=6000)), + ) + ] + + # 6. Create a Hierarchical Keyring + # This is the same configuration as in the Basic example. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + keyring_input = CreateAwsKmsHierarchicalKeyringInput( + branch_key_id=branch_key_id, key_store=keystore, ttl_seconds=6000 + ) + + kms_keyring = mat_prov.create_aws_kms_hierarchical_keyring(input=keyring_input) + + # 7. Configure which attributes are encrypted and/or signed when writing new items. + attribute_actions = { + "work_id": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "inspection_date": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "inspector_id_last4": CryptoAction.ENCRYPT_AND_SIGN, # Beaconized attributes must be encrypted + "unit": CryptoAction.ENCRYPT_AND_SIGN, # Beaconized attributes must be encrypted + } + + # We do not need to define a crypto action on last4UnitCompound. + # We only need to define crypto actions on attributes that we pass to PutItem. + + # 8. Create the DynamoDb Encryption configuration for the table we will be writing to. + # The beaconVersions are added to the search configuration. + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="work_id", + sort_key_name="inspection_date", + attribute_actions_on_encrypt=attribute_actions, + keyring=kms_keyring, + search=SearchConfig(write_version=1, versions=beacon_versions), # MUST be 1 + ) + + table_configs = {ddb_table_name: table_config} + return DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/compound_beacon_searchable_encryption_example/with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/compound_beacon_searchable_encryption_example/with_encrypted_client.py new file mode 100644 index 000000000..f611810e5 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/compound_beacon_searchable_encryption_example/with_encrypted_client.py @@ -0,0 +1,164 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDB encryption using compound beacons with EncryptedClient. + +This example demonstrates how to set up a compound beacon on encrypted attributes, + put an item with the beacon, and query against that beacon. +This example follows a use case of a database that stores unit inspection information. 
+ This is an extension of the "BasicSearchableEncryptionExample" in this directory. + This example uses the same situation (storing unit inspection information) + and the same table schema. +However, this example uses a different Global Secondary Index (GSI) + that is based on a compound beacon configuration composed of + the `last4` and `unit` attributes. + +Running this example requires access to a DDB table with the +following key configuration: + - Partition key is named "work_id" with type (S) + - Sort key is named "inspection_time" with type (S) +This table must have a Global Secondary Index (GSI) configured named "last4UnitCompound-index": + - Partition key is named "aws_dbe_b_last4UnitCompound" with type (S) + +In this example for storing unit inspection information, this schema is utilized for the data: + - "work_id" stores a unique identifier for a unit inspection work order (v4 UUID) + - "inspection_date" stores an ISO 8601 date for the inspection (YYYY-MM-DD) + - "inspector_id_last4" stores the last 4 digits of the ID of the inspector performing the work + - "unit" stores a 12-digit serial number for the unit being inspected + +The example requires the following ordered input command line parameters: + 1. DDB table name for table to put/query data from + 2. Branch key ID for a branch key that was previously created in your key store. See the + CreateKeyStoreKeyExample. + 3. Branch key wrapping KMS key ARN for the KMS key used to create the branch key with ID + provided in arg 2 + 4. Branch key DDB table name for the DDB table representing the branch key store +""" +import time +from typing import Dict + +import boto3 +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.client import ( + DynamoDbEncryptionTransforms, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models import ( + ResolveAttributesInput, +) + +from DynamoDBEncryption.src.searchable_encryption.compound_beacon_searchable_encryption_example.beacon_config import ( + GSI_NAME, + setup_beacon_config, +) + + +def compound_beacon_client_example( + ddb_table_name: str, + branch_key_id: str, + branch_key_wrapping_kms_key_arn: str, + branch_key_ddb_table_name: str, +): + """ + Demonstrate using compound beacon searchable encryption with EncryptedClient. + + :param ddb_table_name: The name of the DynamoDB table + :param branch_key_id: Branch key ID for a branch key previously created in key store + :param branch_key_wrapping_kms_key_arn: ARN of KMS key used to create the branch key + :param branch_key_ddb_table_name: Name of DDB table representing the branch key store + """ + # 1. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See beacon_config.py in this directory for detailed steps on the encryption configuration. + tables_config = setup_beacon_config( + ddb_table_name, branch_key_id, branch_key_wrapping_kms_key_arn, branch_key_ddb_table_name + ) + + # 2. Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # 3. Create an item with both attributes used in the compound beacon. + item = { + "work_id": {"S": "9ce39272-8068-4efd-a211-cd162ad65d4c"}, + "inspection_date": {"S": "2023-06-13"}, + "inspector_id_last4": {"S": "5678"}, + "unit": {"S": "011899988199"}, + } + + # 4. 
If developing or debugging, verify config by checking compound beacon values directly + trans = DynamoDbEncryptionTransforms(config=tables_config) + + resolve_input = ResolveAttributesInput(table_name=ddb_table_name, item=item, version=1) + + resolve_output = trans.resolve_attributes(input=resolve_input) + + # VirtualFields is empty because we have no Virtual Fields configured + assert not resolve_output.virtual_fields + + # Verify that CompoundBeacons has the expected value + cbs = {"last4UnitCompound": "L-5678.U-011899988199"} + assert resolve_output.compound_beacons == cbs + # Note : the compound beacon actually stored in the table is not "L-5678.U-011899988199" + # but rather something like "L-abc.U-123", as both parts are EncryptedParts + # and therefore the text is replaced by the associated beacon + + put_and_query_item_with_compound_beacon(encrypted_client, ddb_table_name, item) + + +def put_and_query_item_with_compound_beacon(ddb_client: EncryptedClient, ddb_table_name: str, item: Dict): + """ + Put and query an item using a compound beacon. + + :param ddb_client: The encrypted DynamoDB client + :param ddb_table_name: The name of the DynamoDB table + :param item: The item to put and query + """ + # Write the item to the table + put_response = ddb_client.put_item(TableName=ddb_table_name, Item=item) + # Validate object put successfully + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Query for the item we just put. + expression_attribute_names = {"#compound": "last4UnitCompound"} + + # This query expression takes a few factors into consideration: + # - The configured prefix for the last 4 digits of an inspector ID is "L-"; + # the prefix for the unit is "U-" + # - The configured split character, separating component parts, is "." + # - The default constructor adds encrypted parts in the order they are in the encrypted list, which + # configures `last4` to come before `unit`` + # NOTE: We did not need to create a compound beacon for this query. This query could have also been + # done by querying on the partition and sort key, as was done in the Basic example. + # This is intended to be a simple example to demonstrate how one might set up a compound beacon. + # For examples where compound beacons are required, see the Complex example. + # The most basic extension to this example that would require a compound beacon would add a third + # part to the compound beacon, then query against three parts. 
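+ # Purely as an illustrative sketch (the "D-" part here is hypothetical and is NOT
+ # configured in this example): if a third encrypted part with prefix "D-" for the
+ # inspection date were added to the compound beacon, the default constructor would
+ # produce plaintext compound values such as
+ #   "L-5678.U-011899988199.D-2023-06-13"
+ # and the query value below would then need to supply all three prefixed parts.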
+ expression_attribute_values = {":value": {"S": "L-5678.U-011899988199"}} + + # GSIs do not update instantly + # so if the results come back empty + # we retry after a short sleep + for _ in range(10): + query_response = ddb_client.query( + TableName=ddb_table_name, + IndexName=GSI_NAME, + KeyConditionExpression="#compound = :value", + ExpressionAttributeNames=expression_attribute_names, + ExpressionAttributeValues=expression_attribute_values, + ) + + # Validate query was returned successfully + assert query_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + items = query_response.get("Items", []) + # if no results, sleep and try again + if not items: + time.sleep(0.02) + continue + + # Validate only 1 item was returned: the item we just put + assert len(items) == 1 + returned_item = items[0] + # Validate the item has the expected attributes + assert returned_item["inspector_id_last4"]["S"] == "5678" + assert returned_item["unit"]["S"] == "011899988199" + break diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/compound_beacon_searchable_encryption_example/with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/compound_beacon_searchable_encryption_example/with_encrypted_table.py new file mode 100644 index 000000000..9e132dd20 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/compound_beacon_searchable_encryption_example/with_encrypted_table.py @@ -0,0 +1,156 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDB encryption using compound beacons with EncryptedTable. + +This example demonstrates how to set up a compound beacon on encrypted attributes, + put an item with the beacon, and query against that beacon. +This example follows a use case of a database that stores unit inspection information. + This is an extension of the "BasicSearchableEncryptionExample" in this directory. + This example uses the same situation (storing unit inspection information) + and the same table schema. +However, this example uses a different Global Secondary Index (GSI) + that is based on a compound beacon configuration composed of + the `last4` and `unit` attributes. + +Running this example requires access to a DDB table with the +following key configuration: + - Partition key is named "work_id" with type (S) + - Sort key is named "inspection_time" with type (S) +This table must have a Global Secondary Index (GSI) configured named "last4UnitCompound-index": + - Partition key is named "aws_dbe_b_last4UnitCompound" with type (S) + +In this example for storing unit inspection information, this schema is utilized for the data: + - "work_id" stores a unique identifier for a unit inspection work order (v4 UUID) + - "inspection_date" stores an ISO 8601 date for the inspection (YYYY-MM-DD) + - "inspector_id_last4" stores the last 4 digits of the ID of the inspector performing the work + - "unit" stores a 12-digit serial number for the unit being inspected + +The example requires the following ordered input command line parameters: + 1. DDB table name for table to put/query data from + 2. Branch key ID for a branch key that was previously created in your key store. See the + CreateKeyStoreKeyExample. + 3. Branch key wrapping KMS key ARN for the KMS key used to create the branch key with ID + provided in arg 2 + 4. 
Branch key DDB table name for the DDB table representing the branch key store +""" +import time +from typing import Dict + +import boto3 +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable + +from DynamoDBEncryption.src.searchable_encryption.compound_beacon_searchable_encryption_example.beacon_config import ( + GSI_NAME, + setup_beacon_config, +) + + +def compound_beacon_table_example( + ddb_table_name: str, + branch_key_id: str, + branch_key_wrapping_kms_key_arn: str, + branch_key_ddb_table_name: str, +): + """ + Demonstrate using compound beacon searchable encryption with EncryptedTable. + + :param ddb_table_name: The name of the DynamoDB table + :param branch_key_id: Branch key ID for a branch key previously created in key store + :param branch_key_wrapping_kms_key_arn: ARN of KMS key used to create the branch key + :param branch_key_ddb_table_name: Name of DDB table representing the branch key store + """ + # 1. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See beacon_config.py in this directory for detailed steps on the encryption configuration. + tables_config = setup_beacon_config( + ddb_table_name, branch_key_id, branch_key_wrapping_kms_key_arn, branch_key_ddb_table_name + ) + + # 2. Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_table = EncryptedTable(table=ddb_table, encryption_config=tables_config) + + # 3. Create an item with both attributes used in the compound beacon. + item = { + "work_id": "9ce39272-8068-4efd-a211-cd162ad65d4c", + "inspection_date": "2023-06-13", + "inspector_id_last4": "5678", + "unit": "011899988199", + } + + # # 4. If developing or debugging, verify config by checking compound beacon values directly + # trans = DynamoDbEncryptionTransforms(config=tables_config) + # + # resolve_input = ResolveAttributesInput(table_name=ddb_table_name, item=item, version=1) + # + # resolve_output = trans.resolve_attributes(input=resolve_input) + # + # # VirtualFields is empty because we have no Virtual Fields configured + # assert not resolve_output.virtual_fields + # + # # Verify that CompoundBeacons has the expected value + # cbs = {"last4UnitCompound": "L-5678.U-011899988199"} + # assert resolve_output.compound_beacons == cbs + # # Note : the compound beacon actually stored in the table is not "L-5678.U-011899988199" + # # but rather something like "L-abc.U-123", as both parts are EncryptedParts + # # and therefore the text is replaced by the associated beacon + + put_and_query_item_with_compound_beacon(encrypted_table, item) + + +def put_and_query_item_with_compound_beacon(ddb_table: EncryptedTable, item: Dict): + """ + Put and query an item using a compound beacon. + + :param ddb_table: The encrypted DynamoDB client + :param item: The item to put and query + """ + # Write the item to the table + put_response = ddb_table.put_item(Item=item) + # Validate object put successfully + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # Query for the item we just put. + expression_attribute_names = {"#compound": "last4UnitCompound"} + + # This query expression takes a few factors into consideration: + # - The configured prefix for the last 4 digits of an inspector ID is "L-"; + # the prefix for the unit is "U-" + # - The configured split character, separating component parts, is "." 
+ # - The default constructor adds encrypted parts in the order they are in the encrypted list, which + # configures `last4` to come before `unit`` + # NOTE: We did not need to create a compound beacon for this query. This query could have also been + # done by querying on the partition and sort key, as was done in the Basic example. + # This is intended to be a simple example to demonstrate how one might set up a compound beacon. + # For examples where compound beacons are required, see the Complex example. + # The most basic extension to this example that would require a compound beacon would add a third + # part to the compound beacon, then query against three parts. + expression_attribute_values = {":value": "L-5678.U-011899988199"} + + # GSIs do not update instantly + # so if the results come back empty + # we retry after a short sleep + for _ in range(10): + query_response = ddb_table.query( + IndexName=GSI_NAME, + KeyConditionExpression="#compound = :value", + ExpressionAttributeNames=expression_attribute_names, + ExpressionAttributeValues=expression_attribute_values, + ) + + # Validate query was returned successfully + assert query_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + items = query_response.get("Items", []) + # if no results, sleep and try again + if not items: + time.sleep(0.02) + continue + + # Validate only 1 item was returned: the item we just put + assert len(items) == 1 + returned_item = items[0] + # Validate the item has the expected attributes + assert returned_item["inspector_id_last4"] == "5678" + assert returned_item["unit"] == "011899988199" + break diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/virtual_beacon_searchable_encryption_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/virtual_beacon_searchable_encryption_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/virtual_beacon_searchable_encryption_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/virtual_beacon_searchable_encryption_example/beacon_config.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/virtual_beacon_searchable_encryption_example/beacon_config.py new file mode 100644 index 000000000..3713992e0 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/virtual_beacon_searchable_encryption_example/beacon_config.py @@ -0,0 +1,200 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Sets up the beacon config for virtual beacon searchable encryption.""" +import boto3 +from aws_cryptographic_material_providers.keystore.client import KeyStore +from aws_cryptographic_material_providers.keystore.config import KeyStoreConfig +from aws_cryptographic_material_providers.keystore.models import KMSConfigurationKmsKeyArn +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import CreateAwsKmsHierarchicalKeyringInput +from aws_dbesdk_dynamodb.structures.dynamodb import ( + BeaconKeySourceSingle, + BeaconVersion, + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, + GetPrefix, + SearchConfig, + SingleKeyStore, + StandardBeacon, + VirtualField, + VirtualPart, + VirtualTransformPrefix, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import CryptoAction + + +def setup_beacon_config( + ddb_table_name: str, + branch_key_id: str, + branch_key_wrapping_kms_key_arn: str, + branch_key_ddb_table_name: str, +): + """Set up the beacon config for virtual beacon searchable encryption.""" + # 1. Construct a length-1 prefix virtual transform. + # `hasTestResult` is a binary attribute, containing either `true` or `false`. + # As an example to demonstrate virtual transforms, we will truncate the value + # of `hasTestResult` in the virtual field to the length-1 prefix of the binary value, i.e.: + # - "true" -> "t" + # - "false -> "f" + # This is not necessary. This is done as a demonstration of virtual transforms. + # Virtual transform operations treat all attributes as strings + # (i.e. the boolean value `true` is interpreted as a string "true"), + # so its length-1 prefix is just "t". + length1_prefix_virtual_transform_list = [VirtualTransformPrefix(GetPrefix(length=1))] + + # 2. Construct the VirtualParts required for the VirtualField + has_test_result_part = VirtualPart( + loc="hasTestResult", + # Here, we apply the length-1 prefix virtual transform + trans=length1_prefix_virtual_transform_list, + ) + + state_part = VirtualPart( + loc="state", + # Note that we do not apply any transform to the `state` attribute, + # and the virtual field will read in the attribute as-is. + ) + + # 3. Construct the VirtualField from the VirtualParts + # Note that the order that virtual parts are added to the virtualPartList + # dictates the order in which they are concatenated to build the virtual field. + # You must add virtual parts in the same order on write as you do on read. + virtual_part_list = [state_part, has_test_result_part] + + state_and_has_test_result_field = VirtualField(name="stateAndHasTestResult", parts=virtual_part_list) + + virtual_field_list = [state_and_has_test_result_field] + + # 4. Configure our beacon. + # The virtual field is assumed to hold a US 2-letter state abbreviation + # (56 possible values = 50 states + 6 territories) concatenated with a binary attribute + # (2 possible values: true/false hasTestResult field), we expect a population size of + # 56 * 2 = 112 possible values. + # We will also assume that these values are reasonably well-distributed across + # customer IDs. In practice, this will not be true. We would expect + # more populous states to appear more frequently in the database. 
+ # A more complex analysis would show that a stricter upper bound + # is necessary to account for this by hiding information from the + # underlying distribution. + # + # This link provides guidance for choosing a beacon length: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/choosing-beacon-length.html + # We follow the guidance in the link above to determine reasonable bounds for beacon length: + # - min: log(sqrt(112))/log(2) ~= 3.4, round down to 3 + # - max: log((112/2))/log(2) ~= 5.8, round up to 6 + # You will somehow need to round results to a nearby integer. + # We choose to round to the nearest integer; you might consider a different rounding approach. + # Rounding up will return fewer expected "false positives" in queries, + # leading to fewer decrypt calls and better performance, + # but it is easier to identify which beacon values encode distinct plaintexts. + # Rounding down will return more expected "false positives" in queries, + # leading to more decrypt calls and worse performance, + # but it is harder to identify which beacon values encode distinct plaintexts. + # We can choose a beacon length between 3 and 6: + # - Closer to 3, we expect more "false positives" to be returned, + # making it harder to identify which beacon values encode distinct plaintexts, + # but leading to more decrypt calls and worse performance + # - Closer to 6, we expect fewer "false positives" returned in queries, + # leading to fewer decrypt calls and better performance, + # but it is easier to identify which beacon values encode distinct plaintexts. + # As an example, we will choose 5. + # Values stored in aws_dbe_b_stateAndHasTestResult will be 5 bits long (0x00 - 0x1f) + # There will be 2^5 = 32 possible HMAC values. + # With a well-distributed dataset (112 values), for a particular beacon we expect + # (112/32) = 3.5 combinations of abbreviation + true/false attribute + # sharing that beacon value. + standard_beacon_list = [ + StandardBeacon( + # This name is the same as our virtual field's name above + name="stateAndHasTestResult", + length=5, + ) + ] + + # 5. Configure Keystore. + # This example expects that you have already set up a KeyStore with a single branch key. + # See the "CreateKeyStoreTableExample" and "CreateKeyStoreKeyExample" files for how to do this. + # After you create a branch key, you should persist its ID for use in this example. + keystore = KeyStore( + config=KeyStoreConfig( + ddb_client=boto3.client("dynamodb"), + ddb_table_name=branch_key_ddb_table_name, + logical_key_store_name=branch_key_ddb_table_name, + kms_client=boto3.client("kms"), + kms_configuration=KMSConfigurationKmsKeyArn(value=branch_key_wrapping_kms_key_arn), + ) + ) + + # 6. Create BeaconVersion. + # The BeaconVersion inside the list holds the list of beacons on the table. + # The BeaconVersion also stores information about the keystore. + # BeaconVersion must be provided: + # - keyStore: The keystore configured in the previous step. + # - keySource: A configuration for the key source. + # For simple use cases, we can configure a 'singleKeySource' which + # statically configures a single beaconKey. That is the approach this example takes. 
+ # For use cases where you want to use different beacon keys depending on the data + # (for example if your table holds data for multiple tenants, and you want to use + # a different beacon key per tenant), look into configuring a MultiKeyStore: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/searchable-encryption-multitenant.html + # We also provide our standard beacon list and virtual fields here. + beacon_versions = [ + BeaconVersion( + virtual_fields=virtual_field_list, + standard_beacons=standard_beacon_list, + version=1, # MUST be 1 + key_store=keystore, + key_source=BeaconKeySourceSingle( + SingleKeyStore( + # `key_id` references a beacon key. + # For every branch key we create in the keystore, + # we also create a beacon key. + # This beacon key is not the same as the branch key, + # but is created with the same ID as the branch key. + key_id=branch_key_id, + cache_ttl=6000, + ) + ), + ) + ] + + # 7. Create a Hierarchical Keyring + # This is a KMS keyring that utilizes the keystore table. + # This config defines how items are encrypted and decrypted. + # NOTE: You should configure this to use the same keystore as your search config. + mat_prov = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + + keyring_input = CreateAwsKmsHierarchicalKeyringInput( + branch_key_id=branch_key_id, key_store=keystore, ttl_seconds=6000 + ) + + kms_keyring = mat_prov.create_aws_kms_hierarchical_keyring(input=keyring_input) + + # 8. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + # Any attributes that will be used in beacons must be configured as ENCRYPT_AND_SIGN. + attribute_actions = { + "customer_id": CryptoAction.SIGN_ONLY, # Our partition attribute must be SIGN_ONLY + "create_time": CryptoAction.SIGN_ONLY, # Our sort attribute must be SIGN_ONLY + "state": CryptoAction.ENCRYPT_AND_SIGN, # Beaconized attributes must be encrypted + "hasTestResult": CryptoAction.ENCRYPT_AND_SIGN, # Beaconized attributes must be encrypted + } + + # 9. Create the DynamoDb Encryption configuration for the table we will be writing to. + # The beaconVersions are added to the search configuration. 
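+ # (Quick cross-check of the length-5 beacon carried into this configuration:
+ # with the assumed population of 56 * 2 = 112 values, the guidance bounds above are
+ # log2(sqrt(112)) ~= 3.4 and log2(112 / 2) ~= 5.8, i.e. lengths 3 through 6,
+ # so the configured length of 5 falls inside the recommended range.)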
+ table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="customer_id", + sort_key_name="create_time", + attribute_actions_on_encrypt=attribute_actions, + keyring=kms_keyring, + search=SearchConfig(write_version=1, versions=beacon_versions), # MUST be 1 + ) + + table_configs = {ddb_table_name: table_config} + return DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/virtual_beacon_searchable_encryption_example/with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/virtual_beacon_searchable_encryption_example/with_encrypted_client.py new file mode 100644 index 000000000..43ecd59ce --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/virtual_beacon_searchable_encryption_example/with_encrypted_client.py @@ -0,0 +1,245 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDB encryption using virtual beacons with EncryptedClient. + +This example demonstrates how to set up a virtual field from two DDB +attributes, create a standard beacon with that field, put an item with +that beacon, and query against that beacon. + +A virtual field is a field consisting of a transformation of one or more attributes in a DDB item. +Virtual fields are useful in querying against encrypted fields that only have a handful of +possible values. They allow you to take fields with few possible values, concatenate +them to other fields, then query against the combined field. This enables using these types of +fields in queries while making it infeasible to identify which beacon values encode +the few possible distinct plaintexts. This is explained in more detail below. +Virtual fields are not stored in the DDB table. However, they are used to construct +a beacon, the value of which is stored. + +For more information on virtual fields, see + https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/beacons.html#virtual-field + +For our example, we will construct a virtual field +from two DDB attributes `state` and `hasTestResult` as `state`+prefix(`hasTestResult`, 1). +We will then create a beacon out of this virtual field and use it to search. + +This example follows a use case of a database that stores customer test result metadata. +Records are indexed by `customer_id` and store a `state` attribute, representing the +US state or territory where the customer lives, and a `hasTestResult` boolean attribute, +representing whether the customer has a "test result" available. (Maybe this represents +some medical test result, and this table stores "result available" metadata.) We assume +that values in these fields are uniformly distributed across all possible values for +these fields (56 for `state`, 2 for `hasTestResult`), and are uniformly distributed across +customer IDs. + +The motivation behind this example is to demonstrate how and why one would use a virtual beacon. +In this example, our table stores records with an encrypted boolean `hasTestResult` attribute. +We would like to be able to query for customers in a given state with a `true` hasTestResult +attribute. + +To be able to execute this query securely and efficiently, we want the following +properties on our table: + 1. Hide the distribution of `hasTestResult` attribute values (i.e. 
it should be infeasible + to determine the percentage of `true`s to `false`s across the dataset from beaconized + values) + 2. Query against a combination of whether `hasTestResult` is true/false and the `state` field +We cannot achieve these properties with a standard beacon on a true/false attribute. Following +the guidance to choose a beacon length: + https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/choosing-beacon-length.html +For a boolean value (in our case, whether `hasTestResult` is true or false), the acceptable +bounds for beacon length are either 0 or 1. This corresponds to either not storing a beacon +(length 0), or effectively storing another boolean attribute (length 1). With +length 0, this beacon is useless for searching (violating property 2); with length 1, this +beacon may not hide the attribute (violating property 1). +In addition, choosing a longer beacon length does not help us. +Each attribute value is mapped to a distinct beacon. +Since booleans only have 2 possible attribute values, we will still only have 2 possible +beacon values, though those values may be longer. A longer beacon provides no advantages over +beacon of length 1 in this situation. + +A compound beacon also does not help. +To (over)simplify, a compound beacon is a concatenation of standard beacons, +i.e. beacon(`state`)+beacon(`hasTestResult`). +The `hasTestResult` beacon is still visible, so we would still have the problems above. + +To achieve these properties, we instead construct a virtual field and use that in our beacon, +i.e. beacon(`state`+`hasTestResult`). Assuming these fields are well-distributed across +customer IDs and possible values, this gives us both desired properties; we can query against +both attributes while hiding information from the underlying data. This is demonstrated in more +detail below. + +Running this example requires access to a DDB table with the +following primary key configuration: + - Partition key is named "customer_id" with type (S) + - Sort key is named "create_time" with type (S) +This table must have a Global Secondary Index (GSI) configured named "stateAndHasTestResult-index": + - Partition key is named "aws_dbe_b_stateAndHasTestResult" with type (S) + +In this example for storing customer location data, this schema is utilized for the data: + - "customer_id" stores a unique customer identifier + - "create_time" stores a Unix timestamp + - "state" stores an encrypted 2-letter US state or territory abbreviation + (https://www.faa.gov/air_traffic/publications/atpubs/cnt_html/appendix_a.html) + - "hasTestResult" is not part of the schema, but is an attribute utilized in this example. + It stores a boolean attribute (false/true) indicating whether this customer has a test result + available. + +The example requires the following ordered input command line parameters: + 1. DDB table name for table to put/query data from + 2. Branch key ID for a branch key that was previously created in your key store. See the + CreateKeyStoreKeyExample. + 3. Branch key wrapping KMS key ARN for the KMS key used to create the branch key with ID + provided in arg 2 + 4. 
Branch key DDB table name for the DDB table representing the branch key store +""" +import time + +import boto3 +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.client import ( + DynamoDbEncryptionTransforms, +) +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.models import ( + ResolveAttributesInput, +) + +from DynamoDBEncryption.src.searchable_encryption.virtual_beacon_searchable_encryption_example.beacon_config import ( + setup_beacon_config, +) + +GSI_NAME = "stateAndHasTestResult-index" + + +def virtual_beacon_client_example( + ddb_table_name: str, + branch_key_id: str, + branch_key_wrapping_kms_key_arn: str, + branch_key_ddb_table_name: str, +): + """ + Demonstrate using virtual beacon searchable encryption with EncryptedClient. + + :param ddb_table_name: The name of the DynamoDB table + :param branch_key_id: Branch key ID for a branch key previously created in key store + :param branch_key_wrapping_kms_key_arn: ARN of KMS key used to create the branch key + :param branch_key_ddb_table_name: Name of DDB table representing the branch key store + """ + # 1. Create the DynamoDb Encryption configuration for the table we will be writing to. + # See beacon_config.py in this directory for detailed steps on the encryption configuration. + tables_config = setup_beacon_config( + ddb_table_name, branch_key_id, branch_key_wrapping_kms_key_arn, branch_key_ddb_table_name + ) + + # 2. Create the EncryptedClient + ddb_client = boto3.client("dynamodb") + encrypted_ddb_client = EncryptedClient(client=ddb_client, encryption_config=tables_config) + + # 3. Create test items - one with hasTestResult=true and one with hasTestResult=false + # Create item with hasTestResult=true + item_with_has_test_result = { + "customer_id": {"S": "ABC-123"}, + "create_time": {"N": "1681495205"}, + "state": {"S": "CA"}, + "hasTestResult": {"BOOL": True}, + } + + # Create item with hasTestResult=false + item_with_no_has_test_result = { + "customer_id": {"S": "DEF-456"}, + "create_time": {"N": "1681495205"}, + "state": {"S": "CA"}, + "hasTestResult": {"BOOL": False}, + } + + # 4. If developing or debugging, verify config by checking virtual field values directly + trans = DynamoDbEncryptionTransforms(config=tables_config) + + resolve_input = ResolveAttributesInput(table_name=ddb_table_name, item=item_with_has_test_result, version=1) + + resolve_output = trans.resolve_attributes(input=resolve_input) + + # CompoundBeacons is empty because we have no Compound Beacons configured + assert not resolve_output.compound_beacons + + # Verify that VirtualFields has the expected value + vf = {"stateAndHasTestResult": "CAt"} + assert resolve_output.virtual_fields == vf + + # 5. Put two items into our table using the above client. + # The two items will differ only in their `customer_id` attribute (primary key) + # and their `hasTestResult` attribute. + # We will query against these items to demonstrate how to use our setup above + # to query against our `stateAndHasTestResult` beacon. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + # Since our configuration includes a beacon on a virtual field named + # `stateAndHasTestResult`, the client will add an attribute + # to the item with name `aws_dbe_b_stateAndHasTestResult`. 
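+    # (All beacon values are stored under attribute names with the `aws_dbe_b_` prefix;
+    # this is the same attribute that the "stateAndHasTestResult-index" GSI described in
+    # the module docstring is keyed on, which is what makes the query below possible.)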
+ # Its value will be an HMAC truncated to as many bits as the + # beacon's `length` parameter; i.e. 5. + put_response = encrypted_ddb_client.put_item(TableName=ddb_table_name, Item=item_with_has_test_result) + # Assert PutItem was successful + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + put_response = encrypted_ddb_client.put_item(TableName=ddb_table_name, Item=item_with_no_has_test_result) + # Assert PutItem was successful + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 6. Query by stateAndHasTestResult attribute. + # Note that we are constructing the query as if we were querying on plaintext values. + # However, the DDB encryption client will detect that this attribute name has a beacon configured. + # The client will add the beaconized attribute name and attribute value to the query, + # and transform the query to use the beaconized name and value. + # Internally, the client will query for and receive all items with a matching HMAC value in the beacon field. + # This may include a number of "false positives" with different ciphertext, but the same truncated HMAC. + # e.g. if truncate(HMAC("CAt"), 5) == truncate(HMAC("DCf"), 5), the query will return both items. + # The client will decrypt all returned items to determine which ones have the expected attribute values, + # and only surface items with the correct plaintext to the user. + # This procedure is internal to the client and is abstracted away from the user; + # e.g. the user will only see "CAt" and never "DCf", though the actual query returned both. + expression_attribute_names = {"#stateAndHasTestResult": "stateAndHasTestResult"} + + # We are querying for the item with `state`="CA" and `hasTestResult`=`true`. + # Since we added virtual parts as `state` then `hasTestResult`, + # we must write our query expression in the same order. + # We constructed our virtual field as `state`+`hasTestResult`, + # so we add the two parts in that order. + # Since we also created a virtual transform that truncated `hasTestResult` + # to its length-1 prefix, i.e. "true" -> "t", + # we write that field as its length-1 prefix in the query. + expression_attribute_values = {":stateAndHasTestResult": {"S": "CAt"}} + + # We are querying for the item with state="CA" and hasTestResult=true. + # We constructed our virtual field as state+hasTestResult, + # with hasTestResult truncated to its first character. 
+ # For "true", this is "t", so we search for "CAt" + expression_attribute_values = {":stateAndHasTestResult": {"S": "CAt"}} + + # GSIs do not update instantly + # so if the results come back empty + # we retry after a short sleep + for _ in range(10): + query_response = encrypted_ddb_client.query( + TableName=ddb_table_name, + IndexName=GSI_NAME, + KeyConditionExpression="#stateAndHasTestResult = :stateAndHasTestResult", + ExpressionAttributeNames=expression_attribute_names, + ExpressionAttributeValues=expression_attribute_values, + ) + + # Validate query was returned successfully + assert query_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + items = query_response.get("Items", []) + # if no results, sleep and try again + if not items: + time.sleep(0.02) + continue + + # Validate only 1 item was returned: the item with the expected attributes + assert len(items) == 1 + returned_item = items[0] + # Validate the item has the expected attributes + assert returned_item["state"]["S"] == "CA" + assert returned_item["hasTestResult"]["BOOL"] is True + break diff --git a/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/virtual_beacon_searchable_encryption_example/with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/virtual_beacon_searchable_encryption_example/with_encrypted_table.py new file mode 100644 index 000000000..68cee7d96 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/src/searchable_encryption/virtual_beacon_searchable_encryption_example/with_encrypted_table.py @@ -0,0 +1,232 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Example demonstrating DynamoDB encryption using virtual beacons with EncryptedTable. + +This example demonstrates how to set up a virtual field from two DDB +attributes, create a standard beacon with that field, put an item with +that beacon, and query against that beacon. + +A virtual field is a field consisting of a transformation of one or more attributes in a DDB item. +Virtual fields are useful in querying against encrypted fields that only have a handful of +possible values. They allow you to take fields with few possible values, concatenate +them to other fields, then query against the combined field. This enables using these types of +fields in queries while making it infeasible to identify which beacon values encode +the few possible distinct plaintexts. This is explained in more detail below. +Virtual fields are not stored in the DDB table. However, they are used to construct +a beacon, the value of which is stored. + +For more information on virtual fields, see + https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/beacons.html#virtual-field + +For our example, we will construct a virtual field +from two DDB attributes `state` and `hasTestResult` as `state`+prefix(`hasTestResult`, 1). +We will then create a beacon out of this virtual field and use it to search. + +This example follows a use case of a database that stores customer test result metadata. +Records are indexed by `customer_id` and store a `state` attribute, representing the +US state or territory where the customer lives, and a `hasTestResult` boolean attribute, +representing whether the customer has a "test result" available. (Maybe this represents +some medical test result, and this table stores "result available" metadata.) 
We assume +that values in these fields are uniformly distributed across all possible values for +these fields (56 for `state`, 2 for `hasTestResult`), and are uniformly distributed across +customer IDs. + +The motivation behind this example is to demonstrate how and why one would use a virtual beacon. +In this example, our table stores records with an encrypted boolean `hasTestResult` attribute. +We would like to be able to query for customers in a given state with a `true` hasTestResult +attribute. + +To be able to execute this query securely and efficiently, we want the following +properties on our table: + 1. Hide the distribution of `hasTestResult` attribute values (i.e. it should be infeasible + to determine the percentage of `true`s to `false`s across the dataset from beaconized + values) + 2. Query against a combination of whether `hasTestResult` is true/false and the `state` field +We cannot achieve these properties with a standard beacon on a true/false attribute. Following +the guidance to choose a beacon length: + https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/choosing-beacon-length.html +For a boolean value (in our case, whether `hasTestResult` is true or false), the acceptable +bounds for beacon length are either 0 or 1. This corresponds to either not storing a beacon +(length 0), or effectively storing another boolean attribute (length 1). With +length 0, this beacon is useless for searching (violating property 2); with length 1, this +beacon may not hide the attribute (violating property 1). +In addition, choosing a longer beacon length does not help us. +Each attribute value is mapped to a distinct beacon. +Since booleans only have 2 possible attribute values, we will still only have 2 possible +beacon values, though those values may be longer. A longer beacon provides no advantages over +beacon of length 1 in this situation. + +A compound beacon also does not help. +To (over)simplify, a compound beacon is a concatenation of standard beacons, +i.e. beacon(`state`)+beacon(`hasTestResult`). +The `hasTestResult` beacon is still visible, so we would still have the problems above. + +To achieve these properties, we instead construct a virtual field and use that in our beacon, +i.e. beacon(`state`+`hasTestResult`). Assuming these fields are well-distributed across +customer IDs and possible values, this gives us both desired properties; we can query against +both attributes while hiding information from the underlying data. This is demonstrated in more +detail below. + +Running this example requires access to a DDB table with the +following primary key configuration: + - Partition key is named "customer_id" with type (S) + - Sort key is named "create_time" with type (S) +This table must have a Global Secondary Index (GSI) configured named "stateAndHasTestResult-index": + - Partition key is named "aws_dbe_b_stateAndHasTestResult" with type (S) + +In this example for storing customer location data, this schema is utilized for the data: + - "customer_id" stores a unique customer identifier + - "create_time" stores a Unix timestamp + - "state" stores an encrypted 2-letter US state or territory abbreviation + (https://www.faa.gov/air_traffic/publications/atpubs/cnt_html/appendix_a.html) + - "hasTestResult" is not part of the schema, but is an attribute utilized in this example. + It stores a boolean attribute (false/true) indicating whether this customer has a test result + available. 
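+
+As a rough, optional sketch of test setup (not part of this example's code path), a table with
+this key schema and GSI could be created with boto3 along the following lines. The table name
+and billing mode are placeholders, and the sort key type shown here is an assumption chosen to
+match the numeric `create_time` values written by this example's items:
+
+    import boto3
+
+    boto3.client("dynamodb").create_table(
+        TableName="MyVirtualBeaconTestTable",
+        AttributeDefinitions=[
+            {"AttributeName": "customer_id", "AttributeType": "S"},
+            {"AttributeName": "create_time", "AttributeType": "N"},
+            {"AttributeName": "aws_dbe_b_stateAndHasTestResult", "AttributeType": "S"},
+        ],
+        KeySchema=[
+            {"AttributeName": "customer_id", "KeyType": "HASH"},
+            {"AttributeName": "create_time", "KeyType": "RANGE"},
+        ],
+        GlobalSecondaryIndexes=[
+            {
+                "IndexName": "stateAndHasTestResult-index",
+                "KeySchema": [{"AttributeName": "aws_dbe_b_stateAndHasTestResult", "KeyType": "HASH"}],
+                "Projection": {"ProjectionType": "ALL"},
+            }
+        ],
+        BillingMode="PAY_PER_REQUEST",
+    )
+
+If you create the table this way, wait for it to become ACTIVE (for example with the
+boto3 "table_exists" waiter) before running the example.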
+ +The example requires the following ordered input command line parameters: + 1. DDB table name for table to put/query data from + 2. Branch key ID for a branch key that was previously created in your key store. See the + CreateKeyStoreKeyExample. + 3. Branch key wrapping KMS key ARN for the KMS key used to create the branch key with ID + provided in arg 2 + 4. Branch key DDB table name for the DDB table representing the branch key store +""" +import time + +import boto3 +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable + +from DynamoDBEncryption.src.searchable_encryption.virtual_beacon_searchable_encryption_example.beacon_config import ( + setup_beacon_config, +) + +GSI_NAME = "stateAndHasTestResult-index" + + +def virtual_beacon_table_example( + ddb_table_name: str, + branch_key_id: str, + branch_key_wrapping_kms_key_arn: str, + branch_key_ddb_table_name: str, +): + """ + Demonstrate using virtual beacon searchable encryption with EncryptedTable. + + :param ddb_table_name: The name of the DynamoDB table + :param branch_key_id: Branch key ID for a branch key previously created in key store + :param branch_key_wrapping_kms_key_arn: ARN of KMS key used to create the branch key + :param branch_key_ddb_table_name: Name of DDB table representing the branch key store + """ + # 1. Create the DynamoDb Encryption configuration using the setup_beacon_config function + # See beacon_config.py in this directory for detailed steps on the encryption configuration. + tables_config = setup_beacon_config( + ddb_table_name, branch_key_id, branch_key_wrapping_kms_key_arn, branch_key_ddb_table_name + ) + + # 2. Create the EncryptedTable + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + encrypted_table = EncryptedTable(table=ddb_table, encryption_config=tables_config) + + # 3. Create test items - one with hasTestResult=true and one with hasTestResult=false + # Create item with hasTestResult=true + item_with_has_test_result = { + "customer_id": "ABC-123", + "create_time": 1681495205, + "state": "CA", + "hasTestResult": True, + } + + # Create item with hasTestResult=false + item_with_no_has_test_result = { + "customer_id": "DEF-456", + "create_time": 1681495205, + "state": "CA", + "hasTestResult": False, + } + + # # 4. If developing or debugging, verify config by checking virtual field values directly + # trans = DynamoDbEncryptionTransforms(config=tables_config) + # + # resolve_input = ResolveAttributesInput(table_name=ddb_table_name, item=item_with_has_test_result, version=1) + # + # resolve_output = trans.resolve_attributes(input=resolve_input) + # + # # CompoundBeacons is empty because we have no Compound Beacons configured + # assert not resolve_output.compound_beacons + # + # # Verify that VirtualFields has the expected value + # vf = {"stateAndHasTestResult": "CAt"} + # assert resolve_output.virtual_fields == vf + + # 5. Put two items into our table using the above encrypted table. + # The two items will differ only in their `customer_id` attribute (primary key) + # and their `hasTestResult` attribute. + # We will query against these items to demonstrate how to use our setup above + # to query against our `stateAndHasTestResult` beacon. + # Before the item gets sent to DynamoDb, it will be encrypted + # client-side, according to our configuration. + # Since our configuration includes a beacon on a virtual field named + # `stateAndHasTestResult`, the client will add an attribute + # to the item with name `aws_dbe_b_stateAndHasTestResult`. 
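+    # (This beaconized attribute is the partition key of the "stateAndHasTestResult-index" GSI
+    # described in the module docstring; with the beacon length of 5 used in this example it
+    # can take only 2**5 = 32 distinct values, so unrelated plaintexts can share a beacon value.)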
+ # Its value will be an HMAC truncated to as many bits as the + # beacon's `length` parameter; i.e. 5. + put_response = encrypted_table.put_item(Item=item_with_has_test_result) + # Assert PutItem was successful + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + put_response = encrypted_table.put_item(Item=item_with_no_has_test_result) + # Assert PutItem was successful + assert put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 6. Query by stateAndHasTestResult attribute. + # Note that we are constructing the query as if we were querying on plaintext values. + # However, the DDB encryption client will detect that this attribute name has a beacon configured. + # The client will add the beaconized attribute name and attribute value to the query, + # and transform the query to use the beaconized name and value. + # Internally, the client will query for and receive all items with a matching HMAC value in the beacon field. + # This may include a number of "false positives" with different ciphertext, but the same truncated HMAC. + # e.g. if truncate(HMAC("CAt"), 5) == truncate(HMAC("DCf"), 5), the query will return both items. + # The client will decrypt all returned items to determine which ones have the expected attribute values, + # and only surface items with the correct plaintext to the user. + # This procedure is internal to the client and is abstracted away from the user; + # e.g. the user will only see "CAt" and never "DCf", though the actual query returned both. + expression_attribute_names = {"#stateAndHasTestResult": "stateAndHasTestResult"} + + # We are querying for the item with `state`="CA" and `hasTestResult`=`true`. + # Since we added virtual parts as `state` then `hasTestResult`, + # we must write our query expression in the same order. + # We constructed our virtual field as `state`+`hasTestResult`, + # so we add the two parts in that order. + # Since we also created a virtual transform that truncated `hasTestResult` + # to its length-1 prefix, i.e. "true" -> "t", + # we write that field as its length-1 prefix in the query. + expression_attribute_values = {":stateAndHasTestResult": "CAt"} + + # GSIs do not update instantly + # so if the results come back empty + # we retry after a short sleep + for _ in range(10): + query_response = encrypted_table.query( + IndexName=GSI_NAME, + KeyConditionExpression="#stateAndHasTestResult = :stateAndHasTestResult", + ExpressionAttributeNames=expression_attribute_names, + ExpressionAttributeValues=expression_attribute_values, + ) + + # Validate query was returned successfully + assert query_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + items = query_response.get("Items", []) + # if no results, sleep and try again + if not items: + time.sleep(0.02) + continue + + # Validate only 1 item was returned: the item with the expected attributes + assert len(items) == 1 + returned_item = items[0] + # Validate the item has the expected attributes + assert returned_item["state"] == "CA" + assert returned_item["hasTestResult"] is True + break diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/basic_put_get_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/basic_put_get_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/basic_put_get_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/basic_put_get_example/test_with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/basic_put_get_example/test_with_encrypted_client.py new file mode 100644 index 000000000..d18ffe823 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/basic_put_get_example/test_with_encrypted_client.py @@ -0,0 +1,15 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test suite for the EncryptedClient example.""" +import pytest + +from ...src.basic_put_get_example.with_encrypted_client import encrypted_client_put_get_example + +pytestmark = [pytest.mark.examples] + + +def test_encrypted_client_put_get_example(): + """Test function for encrypt and decrypt using the EncryptedClient example.""" + test_kms_key_id = "arn:aws:kms:us-west-2:658956600833:key/b3537ef1-d8dc-4780-9f5a-55776cbb2f7f" + test_dynamodb_table_name = "DynamoDbEncryptionInterceptorTestTable" + encrypted_client_put_get_example(test_kms_key_id, test_dynamodb_table_name) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/basic_put_get_example/test_with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/basic_put_get_example/test_with_encrypted_table.py new file mode 100644 index 000000000..5b30d7f45 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/basic_put_get_example/test_with_encrypted_table.py @@ -0,0 +1,15 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test suite for the EncryptedTable example.""" +import pytest + +from ...src.basic_put_get_example.with_encrypted_table import encrypted_table_put_get_example + +pytestmark = [pytest.mark.examples] + + +def test_encrypted_table_put_get_example(): + """Test function for encrypt and decrypt using the EncryptedTable example.""" + test_kms_key_id = "arn:aws:kms:us-west-2:658956600833:key/b3537ef1-d8dc-4780-9f5a-55776cbb2f7f" + test_dynamodb_table_name = "DynamoDbEncryptionInterceptorTestTable" + encrypted_table_put_get_example(test_kms_key_id, test_dynamodb_table_name) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/cleanup.py b/Examples/runtimes/python/DynamoDBEncryption/test/cleanup.py new file mode 100644 index 000000000..4ae8a3014 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/cleanup.py @@ -0,0 +1,82 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test cleanup utilities for DynamoDB Encryption SDK. + +This module provides utilities for cleaning up resources after running tests. + +WARNING: Please be careful. This is only a test utility and should NOT be used in production code. +It is specifically designed for cleaning up test resources after test execution. 
+- Running this code on production resources or any data you want to keep could result + in cryptographic shredding (permanent loss of access to encrypted data). +- Only use this on test resources that you are willing to permanently delete. +- Never run this against any production DynamoDB tables. Ensure you have backups + of any important data before running cleanup operations. +""" +import boto3 + +BRANCH_KEY_IDENTIFIER_FIELD = "branch-key-id" +TYPE_FIELD = "type" + + +def delete_branch_key( + identifier: str, + table_name: str, + ddb_client: boto3.client, +) -> bool: + """ + Delete all branch key items with the given identifier. + + Args: + identifier: Branch key identifier to delete + table_name: DynamoDB table name + ddb_client: DynamoDB client to use + + Returns: + True if all items were deleted, False if more than 100 items exist + + Raises: + ValueError: If an item is not a branch key + + """ + if ddb_client is None: + ddb_client = boto3.client("dynamodb") + + # Query for items with matching identifier + query_response = ddb_client.query( + TableName=table_name, + KeyConditionExpression="#pk = :pk", + ExpressionAttributeNames={"#pk": BRANCH_KEY_IDENTIFIER_FIELD}, + ExpressionAttributeValues={":pk": {"S": identifier}}, + ) + + items = query_response.get("Items", []) + if not items: + return True + + # Create delete requests for each item + delete_items = [] + for item in items: + if TYPE_FIELD not in item: + raise ValueError("Item is not a branch key") + + delete_item = { + "Delete": { + "Key": {BRANCH_KEY_IDENTIFIER_FIELD: {"S": identifier}, TYPE_FIELD: item[TYPE_FIELD]}, + "TableName": table_name, + } + } + delete_items.append(delete_item) + + if not delete_items: + return True + + # DynamoDB transactions are limited to 100 items + if len(delete_items) > 100: + delete_items = delete_items[:100] + + # Execute the delete transaction + ddb_client.transact_write_items(TransactItems=delete_items) + + # Return False if we had to truncate the deletion + return len(items) <= 100 diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/client_supplier/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/client_supplier/__init__.py new file mode 100644 index 000000000..6d76d639b --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/client_supplier/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test suite for the client_supplier examples.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/client_supplier/test_client_supplier_example.py b/Examples/runtimes/python/DynamoDBEncryption/test/client_supplier/test_client_supplier_example.py new file mode 100644 index 000000000..fffd4afe6 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/client_supplier/test_client_supplier_example.py @@ -0,0 +1,23 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test suite for the client supplier example.""" +import pytest + +from ...src.client_supplier.client_supplier_example import client_supplier_example +from .. 
import test_utils + +pytestmark = [pytest.mark.examples] + + +def test_client_supplier_example(): + """Test function for client supplier example.""" + accounts = [test_utils.TEST_AWS_ACCOUNT_ID] + regions = ["eu-west-1"] # Using eu-west-1 + + # Call the client_supplier_example with the test parameters + client_supplier_example( + ddb_table_name=test_utils.TEST_DDB_TABLE_NAME, + key_arn=test_utils.TEST_MRK_REPLICA_KEY_ID_US_EAST_1, + account_ids=accounts, + regions=regions, + ) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/encrypted_paginator/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/encrypted_paginator/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/encrypted_paginator/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/encrypted_paginator/test_encrypted_paginator_example.py b/Examples/runtimes/python/DynamoDBEncryption/test/encrypted_paginator/test_encrypted_paginator_example.py new file mode 100644 index 000000000..a3be81388 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/encrypted_paginator/test_encrypted_paginator_example.py @@ -0,0 +1,15 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test suite for the EncryptedPaginator example.""" +import pytest + +from ...src.encrypted_paginator.encrypted_paginator_example import encrypted_paginator_example + +pytestmark = [pytest.mark.examples] + + +def test_encrypted_paginator_example(): + """Test function for encrypt and decrypt using the EncryptedPaginator example.""" + test_kms_key_id = "arn:aws:kms:us-west-2:658956600833:key/b3537ef1-d8dc-4780-9f5a-55776cbb2f7f" + test_dynamodb_table_name = "DynamoDbEncryptionInterceptorTestTable" + encrypted_paginator_example(test_kms_key_id, test_dynamodb_table_name) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/encrypted_resource/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/encrypted_resource/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/encrypted_resource/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/encrypted_resource/test_batch_read_write_example.py b/Examples/runtimes/python/DynamoDBEncryption/test/encrypted_resource/test_batch_read_write_example.py new file mode 100644 index 000000000..259f3925d --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/encrypted_resource/test_batch_read_write_example.py @@ -0,0 +1,15 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test suite for the EncryptedResource batch read/write example.""" +import pytest + +from ...src.encrypted_resource.batch_read_write_example import encrypted_resource_batch_read_write_example + +pytestmark = [pytest.mark.examples] + + +def test_encrypted_resource_batch_read_write_example(): + """Test function for encrypt and decrypt using the EncryptedClient example.""" + test_kms_key_id = "arn:aws:kms:us-west-2:658956600833:key/b3537ef1-d8dc-4780-9f5a-55776cbb2f7f" + test_dynamodb_table_name = "DynamoDbEncryptionInterceptorTestTable" + encrypted_resource_batch_read_write_example(test_kms_key_id, test_dynamodb_table_name) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/encrypted_resource/test_encrypted_tables_collection_manager_example.py b/Examples/runtimes/python/DynamoDBEncryption/test/encrypted_resource/test_encrypted_tables_collection_manager_example.py new file mode 100644 index 000000000..31e266fbe --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/encrypted_resource/test_encrypted_tables_collection_manager_example.py @@ -0,0 +1,17 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test suite for the EncryptedResource batch read/write example.""" +import pytest + +from ...src.encrypted_resource.encrypted_tables_collection_manager_example import ( + encrypted_tables_collection_manager_example, +) + +pytestmark = [pytest.mark.examples] + + +def test_encrypted_resource_batch_read_write_example(): + """Test function for encrypt and decrypt using the EncryptedClient example.""" + test_kms_key_id = "arn:aws:kms:us-west-2:658956600833:key/b3537ef1-d8dc-4780-9f5a-55776cbb2f7f" + test_dynamodb_table_name = "DynamoDbEncryptionInterceptorTestTable" + encrypted_tables_collection_manager_example(test_kms_key_id, [test_dynamodb_table_name]) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/item_encryptor/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/item_encryptor/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/item_encryptor/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/item_encryptor/test_item_encryptor.py b/Examples/runtimes/python/DynamoDBEncryption/test/item_encryptor/test_item_encryptor.py new file mode 100644 index 000000000..91db7197f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/item_encryptor/test_item_encryptor.py @@ -0,0 +1,15 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test suite for the ItemEncryptor example.""" +import pytest + +from ...src.item_encryptor.encrypt_decrypt_example import encrypt_decrypt_example + +pytestmark = [pytest.mark.examples] + + +def test_encrypt_decrypt_example(): + """Test function for encrypt and decrypt using the ItemEncryptor example.""" + test_kms_key_id = "arn:aws:kms:us-west-2:658956600833:key/b3537ef1-d8dc-4780-9f5a-55776cbb2f7f" + test_dynamodb_table_name = "DynamoDbEncryptionInterceptorTestTable" + encrypt_decrypt_example(test_kms_key_id, test_dynamodb_table_name) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/hierarchical_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/hierarchical_keyring_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/hierarchical_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/hierarchical_keyring_example/test_with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/hierarchical_keyring_example/test_with_encrypted_client.py new file mode 100644 index 000000000..8f9ac7524 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/hierarchical_keyring_example/test_with_encrypted_client.py @@ -0,0 +1,46 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test hierarchical keyring with abstraction EncryptedClient example.""" +import time + +import pytest + +from ....src.create_keystore_key_example import keystore_create_key +from ....src.keyring.hierarchical_keyring_example.with_encrypted_client import ( + hierarchical_keyring_client_example, +) +from ...cleanup import delete_branch_key +from ...test_utils import ( + TEST_DDB_TABLE_NAME, + TEST_KEYSTORE_KMS_KEY_ID, + TEST_KEYSTORE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, +) + +pytestmark = [pytest.mark.examples] + + +def test_hierarchical_keyring_client_example(): + """Test hierarchical keyring abstracted client example.""" + # Create new branch keys for test + key_id1 = keystore_create_key(TEST_KEYSTORE_NAME, TEST_LOGICAL_KEYSTORE_NAME, TEST_KEYSTORE_KMS_KEY_ID) + key_id2 = keystore_create_key(TEST_KEYSTORE_NAME, TEST_LOGICAL_KEYSTORE_NAME, TEST_KEYSTORE_KMS_KEY_ID) + + try: + # Key creation is eventually consistent, so wait 5 seconds to decrease the likelihood + # our test fails due to eventual consistency issues. 
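+        # (The 5-second wait is a heuristic; if the newly created branch keys are still not
+        # visible, a longer wait or retrying the example call below would also work.)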
+ time.sleep(5) + + # Run the client example + hierarchical_keyring_client_example( + TEST_DDB_TABLE_NAME, + key_id1, + key_id2, + TEST_KEYSTORE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, + TEST_KEYSTORE_KMS_KEY_ID, + ) + finally: + # Cleanup Branch Keys + delete_branch_key(key_id1, TEST_KEYSTORE_NAME, None) + delete_branch_key(key_id2, TEST_KEYSTORE_NAME, None) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/hierarchical_keyring_example/test_with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/hierarchical_keyring_example/test_with_encrypted_table.py new file mode 100644 index 000000000..bc0c51d78 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/hierarchical_keyring_example/test_with_encrypted_table.py @@ -0,0 +1,46 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test hierarchical keyring with abstraction EncryptedTable example.""" +import time + +import pytest + +from ....src.create_keystore_key_example import keystore_create_key +from ....src.keyring.hierarchical_keyring_example.with_encrypted_table import ( + hierarchical_keyring_table_example, +) +from ...cleanup import delete_branch_key +from ...test_utils import ( + TEST_DDB_TABLE_NAME, + TEST_KEYSTORE_KMS_KEY_ID, + TEST_KEYSTORE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, +) + +pytestmark = [pytest.mark.examples] + + +def test_hierarchical_keyring_table_example(): + """Test hierarchical keyring abstracted table example.""" + # Create new branch keys for test + key_id1 = keystore_create_key(TEST_KEYSTORE_NAME, TEST_LOGICAL_KEYSTORE_NAME, TEST_KEYSTORE_KMS_KEY_ID) + key_id2 = keystore_create_key(TEST_KEYSTORE_NAME, TEST_LOGICAL_KEYSTORE_NAME, TEST_KEYSTORE_KMS_KEY_ID) + + try: + # Key creation is eventually consistent, so wait 5 seconds to decrease the likelihood + # our test fails due to eventual consistency issues. + time.sleep(5) + + # Run the table example + hierarchical_keyring_table_example( + TEST_DDB_TABLE_NAME, + key_id1, + key_id2, + TEST_KEYSTORE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, + TEST_KEYSTORE_KMS_KEY_ID, + ) + finally: + # Cleanup Branch Keys + delete_branch_key(key_id1, TEST_KEYSTORE_NAME, None) + delete_branch_key(key_id2, TEST_KEYSTORE_NAME, None) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_ecdh_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_ecdh_keyring_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_ecdh_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_ecdh_keyring_example/test_with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_ecdh_keyring_example/test_with_encrypted_client.py new file mode 100644 index 000000000..9e3658d71 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_ecdh_keyring_example/test_with_encrypted_client.py @@ -0,0 +1,50 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test KMS ECDH keyring with encrypted client example.""" +import pytest + +from ....src.keyring.kms_ecdh_keyring_example.utility import ( + EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME, + EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME, + should_get_new_public_keys, + write_public_key_pem_for_ecc_key, +) +from ....src.keyring.kms_ecdh_keyring_example.with_encrypted_client import ( + kms_ecdh_discovery_client_example, + kms_ecdh_keyring_client_example, +) +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_ECDH_KEY_ID_P256_RECIPIENT, TEST_KMS_ECDH_KEY_ID_P256_SENDER + +pytestmark = [pytest.mark.examples] + + +def test_kms_ecdh_keyring_client_example_static(): + """Test kms_ecdh_keyring_client_example with static configuration.""" + # You may provide your own ECC public keys at + # - EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME + # - EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME. + # If you provide these, the keys MUST be on curve P256 + # This must be the public key for the ECC key represented at eccKeyArn + # If this file is not present, this will write a UTF-8 encoded PEM file for you. + if should_get_new_public_keys(): + write_public_key_pem_for_ecc_key(TEST_KMS_ECDH_KEY_ID_P256_SENDER, EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME) + write_public_key_pem_for_ecc_key(TEST_KMS_ECDH_KEY_ID_P256_RECIPIENT, EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME) + + kms_ecdh_keyring_client_example(TEST_DDB_TABLE_NAME, TEST_KMS_ECDH_KEY_ID_P256_SENDER) + + +def test_kms_ecdh_keyring_client_example_discovery(): + """Test kms_ecdh_keyring_client_example with discovery configuration.""" + # In this example you do not need to provide the recipient ECC Public Key. + # On initialization, the keyring will call KMS:getPublicKey on the configured + # recipientKmsIdentifier set on the keyring. This example uses the previous example + # to write an item meant for the recipient. + # First, we need to run the static example to write the item + if should_get_new_public_keys(): + write_public_key_pem_for_ecc_key(TEST_KMS_ECDH_KEY_ID_P256_SENDER, EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME) + write_public_key_pem_for_ecc_key(TEST_KMS_ECDH_KEY_ID_P256_RECIPIENT, EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME) + + kms_ecdh_keyring_client_example(TEST_DDB_TABLE_NAME, TEST_KMS_ECDH_KEY_ID_P256_SENDER) + + # Now test the discovery example + kms_ecdh_discovery_client_example(TEST_DDB_TABLE_NAME, TEST_KMS_ECDH_KEY_ID_P256_RECIPIENT) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_ecdh_keyring_example/test_with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_ecdh_keyring_example/test_with_encrypted_table.py new file mode 100644 index 000000000..0be8f4c1e --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_ecdh_keyring_example/test_with_encrypted_table.py @@ -0,0 +1,50 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test KMS ECDH keyring with encrypted table example.""" +import pytest + +from ....src.keyring.kms_ecdh_keyring_example.utility import ( + EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME, + EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME, + should_get_new_public_keys, + write_public_key_pem_for_ecc_key, +) +from ....src.keyring.kms_ecdh_keyring_example.with_encrypted_table import ( + kms_ecdh_discovery_table_example, + kms_ecdh_keyring_table_example, +) +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_ECDH_KEY_ID_P256_RECIPIENT, TEST_KMS_ECDH_KEY_ID_P256_SENDER + +pytestmark = [pytest.mark.examples] + + +def test_kms_ecdh_keyring_table_example_static(): + """Test kms_ecdh_keyring_table_example with static configuration.""" + # You may provide your own ECC public keys at + # - EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME + # - EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME. + # If you provide these, the keys MUST be on curve P256 + # This must be the public key for the ECC key represented at eccKeyArn + # If this file is not present, this will write a UTF-8 encoded PEM file for you. + if should_get_new_public_keys(): + write_public_key_pem_for_ecc_key(TEST_KMS_ECDH_KEY_ID_P256_SENDER, EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME) + write_public_key_pem_for_ecc_key(TEST_KMS_ECDH_KEY_ID_P256_RECIPIENT, EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME) + + kms_ecdh_keyring_table_example(TEST_DDB_TABLE_NAME, TEST_KMS_ECDH_KEY_ID_P256_SENDER) + + +def test_kms_ecdh_keyring_table_example_discovery(): + """Test kms_ecdh_keyring_table_example with discovery configuration.""" + # In this example you do not need to provide the recipient ECC Public Key. + # On initialization, the keyring will call KMS:getPublicKey on the configured + # recipientKmsIdentifier set on the keyring. This example uses the previous example + # to write an item meant for the recipient. + # First, we need to run the static example to write the item + if should_get_new_public_keys(): + write_public_key_pem_for_ecc_key(TEST_KMS_ECDH_KEY_ID_P256_SENDER, EXAMPLE_ECC_PUBLIC_KEY_SENDER_FILENAME) + write_public_key_pem_for_ecc_key(TEST_KMS_ECDH_KEY_ID_P256_RECIPIENT, EXAMPLE_ECC_PUBLIC_KEY_RECIPIENT_FILENAME) + + kms_ecdh_keyring_table_example(TEST_DDB_TABLE_NAME, TEST_KMS_ECDH_KEY_ID_P256_SENDER) + + # Now test the discovery example + kms_ecdh_discovery_table_example(TEST_DDB_TABLE_NAME, TEST_KMS_ECDH_KEY_ID_P256_RECIPIENT) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_rsa_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_rsa_keyring_example/__init__.py new file mode 100644 index 000000000..f459ac92f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_rsa_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""KMS RSA keyring examples test package.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_rsa_keyring_example/test_with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_rsa_keyring_example/test_with_encrypted_client.py new file mode 100644 index 000000000..be06d3f0a --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_rsa_keyring_example/test_with_encrypted_client.py @@ -0,0 +1,27 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test KMS RSA keyring with encrypted client example.""" +import pytest + +from ....src.keyring.kms_rsa_keyring_example.utility import ( + should_get_new_public_key, + write_public_key_pem_for_rsa_key, +) +from ....src.keyring.kms_rsa_keyring_example.with_encrypted_client import kms_rsa_keyring_client_example +from ...test_utils import ( + TEST_DDB_TABLE_NAME, + TEST_KMS_RSA_KEY_ID, +) + +pytestmark = [pytest.mark.examples] + + +def test_kms_rsa_keyring_client_example(): + """Test the KMS RSA keyring client example.""" + # You may provide your own RSA public key at EXAMPLE_RSA_PUBLIC_KEY_FILENAME. + # This must be the public key for the RSA key represented at rsa_key_arn. + # If this file is not present, this will write a UTF-8 encoded PEM file for you. + if should_get_new_public_key(): + write_public_key_pem_for_rsa_key(TEST_KMS_RSA_KEY_ID) + + kms_rsa_keyring_client_example(TEST_DDB_TABLE_NAME, TEST_KMS_RSA_KEY_ID) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_rsa_keyring_example/test_with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_rsa_keyring_example/test_with_encrypted_table.py new file mode 100644 index 000000000..cf3da8627 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/kms_rsa_keyring_example/test_with_encrypted_table.py @@ -0,0 +1,27 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test KMS RSA keyring with encrypted table example.""" +import pytest + +from ....src.keyring.kms_rsa_keyring_example.utility import ( + should_get_new_public_key, + write_public_key_pem_for_rsa_key, +) +from ....src.keyring.kms_rsa_keyring_example.with_encrypted_table import kms_rsa_keyring_table_example +from ...test_utils import ( + TEST_DDB_TABLE_NAME, + TEST_KMS_RSA_KEY_ID, +) + +pytestmark = [pytest.mark.examples] + + +def test_kms_rsa_keyring_table_example(): + """Test the KMS RSA keyring table example.""" + # You may provide your own RSA public key at EXAMPLE_RSA_PUBLIC_KEY_FILENAME. + # This must be the public key for the RSA key represented at rsa_key_arn. + # If this file is not present, this will write a UTF-8 encoded PEM file for you. + if should_get_new_public_key(): + write_public_key_pem_for_rsa_key(TEST_KMS_RSA_KEY_ID) + + kms_rsa_keyring_table_example(TEST_DDB_TABLE_NAME, TEST_KMS_RSA_KEY_ID) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_discovery_multi_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_discovery_multi_keyring_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_discovery_multi_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_discovery_multi_keyring_example/test_with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_discovery_multi_keyring_example/test_with_encrypted_client.py new file mode 100644 index 000000000..971f4f1b8 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_discovery_multi_keyring_example/test_with_encrypted_client.py @@ -0,0 +1,24 @@ +# Copyright Amazon.com Inc. or its affiliates. 
All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test MRK discovery multi-keyring with encrypted client examples.""" +import pytest + +from ....src.keyring.mrk_discovery_multi_keyring_example.with_encrypted_client import ( + multi_mrk_discovery_keyring_client_example, +) +from ...test_utils import ( + TEST_AWS_ACCOUNT_ID, + TEST_AWS_REGION, + TEST_DDB_TABLE_NAME, + TEST_MRK_KEY_ID, +) + +pytestmark = [pytest.mark.examples] + + +def test_multi_mrk_discovery_keyring_client_example(): + """Test multi_mrk_discovery_keyring_client_example.""" + accounts = [TEST_AWS_ACCOUNT_ID] + regions = [TEST_AWS_REGION] + + multi_mrk_discovery_keyring_client_example(TEST_DDB_TABLE_NAME, TEST_MRK_KEY_ID, accounts, regions) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_discovery_multi_keyring_example/test_with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_discovery_multi_keyring_example/test_with_encrypted_table.py new file mode 100644 index 000000000..c16489073 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_discovery_multi_keyring_example/test_with_encrypted_table.py @@ -0,0 +1,24 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test MRK discovery multi-keyring with encrypted table examples.""" +import pytest + +from ....src.keyring.mrk_discovery_multi_keyring_example.with_encrypted_table import ( + multi_mrk_discovery_keyring_table_example, +) +from ...test_utils import ( + TEST_AWS_ACCOUNT_ID, + TEST_AWS_REGION, + TEST_DDB_TABLE_NAME, + TEST_MRK_KEY_ID, +) + +pytestmark = [pytest.mark.examples] + + +def test_multi_mrk_discovery_keyring_table_example(): + """Test multi_mrk_discovery_keyring_table_example.""" + accounts = [TEST_AWS_ACCOUNT_ID] + regions = [TEST_AWS_REGION] + + multi_mrk_discovery_keyring_table_example(TEST_DDB_TABLE_NAME, TEST_MRK_KEY_ID, accounts, regions) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_multi_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_multi_keyring_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_multi_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_multi_keyring_example/test_with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_multi_keyring_example/test_with_encrypted_client.py new file mode 100644 index 000000000..2459a9c96 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_multi_keyring_example/test_with_encrypted_client.py @@ -0,0 +1,23 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test MRK multi-keyring with encrypted client examples.""" +import pytest + +from ....src.keyring.mrk_multi_keyring_example.with_encrypted_client import ( + multi_mrk_keyring_client_example, +) +from ...test_utils import ( + TEST_DDB_TABLE_NAME, + TEST_KMS_KEY_ID, + TEST_MRK_KEY_ID, + TEST_MRK_REPLICA_KEY_ID_US_EAST_1, +) + +pytestmark = [pytest.mark.examples] + + +def test_multi_mrk_keyring_client_example(): + """Test multi_mrk_keyring_client_example.""" + multi_mrk_keyring_client_example( + TEST_DDB_TABLE_NAME, TEST_MRK_KEY_ID, TEST_KMS_KEY_ID, TEST_MRK_REPLICA_KEY_ID_US_EAST_1 + ) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_multi_keyring_example/test_with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_multi_keyring_example/test_with_encrypted_table.py new file mode 100644 index 000000000..25a556943 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/mrk_multi_keyring_example/test_with_encrypted_table.py @@ -0,0 +1,23 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test MRK multi-keyring with encrypted table examples.""" +import pytest + +from ....src.keyring.mrk_multi_keyring_example.with_encrypted_table import ( + multi_mrk_keyring_table_example, +) +from ...test_utils import ( + TEST_DDB_TABLE_NAME, + TEST_KMS_KEY_ID, + TEST_MRK_KEY_ID, + TEST_MRK_REPLICA_KEY_ID_US_EAST_1, +) + +pytestmark = [pytest.mark.examples] + + +def test_multi_mrk_keyring_table_example(): + """Test multi_mrk_keyring_table_example.""" + multi_mrk_keyring_table_example( + TEST_DDB_TABLE_NAME, TEST_MRK_KEY_ID, TEST_KMS_KEY_ID, TEST_MRK_REPLICA_KEY_ID_US_EAST_1 + ) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/multi_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/multi_keyring_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/multi_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/multi_keyring_example/test_with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/multi_keyring_example/test_with_encrypted_client.py new file mode 100644 index 000000000..2bf6a5f2a --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/multi_keyring_example/test_with_encrypted_client.py @@ -0,0 +1,19 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test multi-keyring example with encrypted client.""" +import secrets + +import pytest + +from ....src.keyring.multi_keyring_example.with_encrypted_client import multi_keyring_client_example +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_multi_keyring_client_example(): + """Test multi_keyring_client_example.""" + # Generate a new AES key + aes_key_bytes = secrets.token_bytes(32) # 32 bytes = 256 bits + + multi_keyring_client_example(TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID, aes_key_bytes) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/multi_keyring_example/test_with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/multi_keyring_example/test_with_encrypted_table.py new file mode 100644 index 000000000..cec803c27 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/multi_keyring_example/test_with_encrypted_table.py @@ -0,0 +1,19 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test multi-keyring example with encrypted table.""" +import secrets + +import pytest + +from ....src.keyring.multi_keyring_example.with_encrypted_table import multi_keyring_table_example +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_multi_keyring_table_example(): + """Test multi_keyring_table_example.""" + # Generate a new AES key + aes_key_bytes = secrets.token_bytes(32) # 32 bytes = 256 bits + + multi_keyring_table_example(TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID, aes_key_bytes) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_aes_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_aes_keyring_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_aes_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_aes_keyring_example/test_with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_aes_keyring_example/test_with_encrypted_client.py new file mode 100644 index 000000000..8d81b2288 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_aes_keyring_example/test_with_encrypted_client.py @@ -0,0 +1,19 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test raw AES keyring with encrypted client example.""" +import secrets + +import pytest + +from ....src.keyring.raw_aes_keyring_example.with_encrypted_client import raw_aes_keyring_client_example +from ...test_utils import TEST_DDB_TABLE_NAME + +pytestmark = [pytest.mark.examples] + + +def test_raw_aes_keyring_client_example(): + """Test raw_aes_keyring_client_example.""" + # Generate a new AES key + aes_key_bytes = secrets.token_bytes(32) # 32 bytes = 256 bits + + raw_aes_keyring_client_example(TEST_DDB_TABLE_NAME, aes_key_bytes) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_aes_keyring_example/test_with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_aes_keyring_example/test_with_encrypted_table.py new file mode 100644 index 000000000..ef5a7f8ca --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_aes_keyring_example/test_with_encrypted_table.py @@ -0,0 +1,19 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test raw AES keyring with encrypted table example.""" +import secrets + +import pytest + +from ....src.keyring.raw_aes_keyring_example.with_encrypted_table import raw_aes_keyring_table_example +from ...test_utils import TEST_DDB_TABLE_NAME + +pytestmark = [pytest.mark.examples] + + +def test_raw_aes_keyring_table_example(): + """Test raw_aes_keyring_table_example.""" + # Generate a new AES key + aes_key_bytes = secrets.token_bytes(32) # 32 bytes = 256 bits + + raw_aes_keyring_table_example(TEST_DDB_TABLE_NAME, aes_key_bytes) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_ecdh_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_ecdh_keyring_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_ecdh_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_ecdh_keyring_example/test_with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_ecdh_keyring_example/test_with_encrypted_client.py new file mode 100644 index 000000000..cc55d9ddb --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_ecdh_keyring_example/test_with_encrypted_client.py @@ -0,0 +1,76 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test raw ECDH keyring with encrypted client example.""" +import pytest +from aws_cryptography_primitives.smithygenerated.aws_cryptography_primitives.models import ECDHCurveSpec + +from ....src.keyring.raw_ecdh_keyring_example.utility import ( + generate_ecc_key_pairs, + should_generate_new_ecc_key_pairs, +) +from ....src.keyring.raw_ecdh_keyring_example.with_encrypted_client import ( + discovery_raw_ecdh_keyring_get_item_with_encrypted_client, + ephemeral_raw_ecdh_keyring_put_item_with_encrypted_client, + raw_ecdh_keyring_get_item_put_item_with_encrypted_client, +) +from ...test_utils import TEST_DDB_TABLE_NAME + +pytestmark = [pytest.mark.examples] + + +def test_raw_ecdh_keyring_get_item_put_item(): + """Test raw_ecdh_keyring_get_item_put_item with static configuration.""" + # You may provide your own ECC Key pairs in the files located at + # - EXAMPLE_ECC_PRIVATE_KEY_FILENAME_SENDER + # - EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT + # If you provide this, the keys MUST be on curve P256 + # If these files are not present, this will generate a pair for you. + # For this example we will use the curve P256. + if should_generate_new_ecc_key_pairs(): + generate_ecc_key_pairs() + + # Part of using these keyrings is knowing which curve the keys used in the key agreement + # lie on. The keyring will fail if the keys do not lie on the configured curve. + raw_ecdh_keyring_get_item_put_item_with_encrypted_client(TEST_DDB_TABLE_NAME, ECDHCurveSpec.ECC_NIST_P256) + + +def test_ephemeral_raw_ecdh_keyring_put_item(): + """Test ephemeral_raw_ecdh_keyring_put_item with ephemeral configuration.""" + # You may provide your own ECC Public Key in the files located at + # - EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT + # If you provide this, the keys MUST be on curve P256 + # If these files are not present, this will generate a pair for you. + # For this example we will use the curve P256. + if should_generate_new_ecc_key_pairs(): + generate_ecc_key_pairs() + + # Part of using these keyrings is knowing which curve the keys used in the key agreement + # lie on. The keyring will fail if the keys do not lie on the configured curve. + ephemeral_raw_ecdh_keyring_put_item_with_encrypted_client(TEST_DDB_TABLE_NAME, ECDHCurveSpec.ECC_NIST_P256) + + +def test_discovery_raw_ecdh_keyring_get_item(): + """Test discovery_raw_ecdh_keyring_get_item with discovery configuration.""" + # You may provide your own ECC Public Key in the files located at + # - EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT + # - EXAMPLE_ECC_PRIVATE_KEY_FILENAME_RECIPIENT + # If you provide this, the keys MUST be on curve P256 + # If these files are not present, this will generate a pair for you. + # For this example we will use the curve P256. + if should_generate_new_ecc_key_pairs(): + generate_ecc_key_pairs() + + # The discovery configuration is not allowed to encrypt + # To understand this example best, first, write a record with the ephemeral configuration + # This means that the recipient public key configured on both keyrings is the same. + # The other party has the recipient public key and is writing messages meant only for + # the owner of the recipient public key to decrypt. + + # In this call we are writing a record that is written with an ephemeral sender key pair. 
+ # The recipient will be able to decrypt the message + ephemeral_raw_ecdh_keyring_put_item_with_encrypted_client(TEST_DDB_TABLE_NAME, ECDHCurveSpec.ECC_NIST_P256) + + # In this call we are reading a record that was written with the recipient's public key. + # It will use the recipient's private key and the sender's public key stored in the message to + # calculate the appropriate shared secret to successfully decrypt the message. + discovery_raw_ecdh_keyring_get_item_with_encrypted_client(TEST_DDB_TABLE_NAME, ECDHCurveSpec.ECC_NIST_P256) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_ecdh_keyring_example/test_with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_ecdh_keyring_example/test_with_encrypted_table.py new file mode 100644 index 000000000..5a9be8623 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_ecdh_keyring_example/test_with_encrypted_table.py @@ -0,0 +1,76 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test raw ECDH keyring with encrypted table example.""" +import pytest +from aws_cryptography_primitives.smithygenerated.aws_cryptography_primitives.models import ECDHCurveSpec + +from ....src.keyring.raw_ecdh_keyring_example.utility import ( + generate_ecc_key_pairs, + should_generate_new_ecc_key_pairs, +) +from ....src.keyring.raw_ecdh_keyring_example.with_encrypted_table import ( + discovery_raw_ecdh_keyring_get_item_with_encrypted_table, + ephemeral_raw_ecdh_keyring_put_item_with_encrypted_table, + raw_ecdh_keyring_get_item_put_item_with_encrypted_table, +) +from ...test_utils import TEST_DDB_TABLE_NAME + +pytestmark = [pytest.mark.examples] + + +def test_raw_ecdh_keyring_get_item_put_item_with_table(): + """Test raw_ecdh_keyring_get_item_put_item with static configuration using EncryptedTable.""" + # You may provide your own ECC Key pairs in the files located at + # - EXAMPLE_ECC_PRIVATE_KEY_FILENAME_SENDER + # - EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT + # If you provide this, the keys MUST be on curve P256 + # If these files are not present, this will generate a pair for you. + # For this example we will use the curve P256. + if should_generate_new_ecc_key_pairs(): + generate_ecc_key_pairs() + + # Part of using these keyrings is knowing which curve the keys used in the key agreement + # lie on. The keyring will fail if the keys do not lie on the configured curve. + raw_ecdh_keyring_get_item_put_item_with_encrypted_table(TEST_DDB_TABLE_NAME, ECDHCurveSpec.ECC_NIST_P256) + + +def test_ephemeral_raw_ecdh_keyring_put_item_with_table(): + """Test ephemeral_raw_ecdh_keyring_put_item with ephemeral configuration using EncryptedTable.""" + # You may provide your own ECC Public Key in the files located at + # - EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT + # If you provide this, the keys MUST be on curve P256 + # If these files are not present, this will generate a pair for you. + # For this example we will use the curve P256. + if should_generate_new_ecc_key_pairs(): + generate_ecc_key_pairs() + + # Part of using these keyrings is knowing which curve the keys used in the key agreement + # lie on. The keyring will fail if the keys do not lie on the configured curve. 
+ ephemeral_raw_ecdh_keyring_put_item_with_encrypted_table(TEST_DDB_TABLE_NAME, ECDHCurveSpec.ECC_NIST_P256) + + +def test_discovery_raw_ecdh_keyring_get_item_with_table(): + """Test discovery_raw_ecdh_keyring_get_item with discovery configuration using EncryptedTable.""" + # You may provide your own ECC Public Key in the files located at + # - EXAMPLE_ECC_PUBLIC_KEY_FILENAME_RECIPIENT + # - EXAMPLE_ECC_PRIVATE_KEY_FILENAME_RECIPIENT + # If you provide this, the keys MUST be on curve P256 + # If these files are not present, this will generate a pair for you. + # For this example we will use the curve P256. + if should_generate_new_ecc_key_pairs(): + generate_ecc_key_pairs() + + # The discovery configuration is not allowed to encrypt + # To understand this example best, first, write a record with the ephemeral configuration + # This means that the recipient public key configured on both keyrings is the same. + # The other party has the recipient public key and is writing messages meant only for + # the owner of the recipient public key to decrypt. + + # In this call we are writing a record that is written with an ephemeral sender key pair. + # The recipient will be able to decrypt the message + ephemeral_raw_ecdh_keyring_put_item_with_encrypted_table(TEST_DDB_TABLE_NAME, ECDHCurveSpec.ECC_NIST_P256) + + # In this call we are reading a record that was written with the recipient's public key. + # It will use the recipient's private key and the sender's public key stored in the message to + # calculate the appropriate shared secret to successfully decrypt the message. + discovery_raw_ecdh_keyring_get_item_with_encrypted_table(TEST_DDB_TABLE_NAME, ECDHCurveSpec.ECC_NIST_P256) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_rsa_keyring_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_rsa_keyring_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_rsa_keyring_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_rsa_keyring_example/test_with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_rsa_keyring_example/test_with_encrypted_client.py new file mode 100644 index 000000000..34152129e --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_rsa_keyring_example/test_with_encrypted_client.py @@ -0,0 +1,29 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test raw RSA keyring with encrypted client example.""" +import pytest + +from ....src.keyring.raw_rsa_keyring_example.utility import ( + EXAMPLE_RSA_PRIVATE_KEY_FILENAME, + EXAMPLE_RSA_PUBLIC_KEY_FILENAME, + generate_rsa_key_pair, + should_generate_new_rsa_key_pair, +) +from ....src.keyring.raw_rsa_keyring_example.with_encrypted_client import raw_rsa_keyring_client_example +from ...test_utils import TEST_DDB_TABLE_NAME + +pytestmark = [pytest.mark.examples] + + +def test_raw_rsa_keyring_client_example(): + """Test raw_rsa_keyring_client_example.""" + # You may provide your own RSA key pair in the files located at + # - EXAMPLE_RSA_PRIVATE_KEY_FILENAME + # - EXAMPLE_RSA_PUBLIC_KEY_FILENAME + # If these files are not present, this will generate a pair for you + if should_generate_new_rsa_key_pair(): + generate_rsa_key_pair() + + raw_rsa_keyring_client_example( + TEST_DDB_TABLE_NAME, EXAMPLE_RSA_PRIVATE_KEY_FILENAME, EXAMPLE_RSA_PUBLIC_KEY_FILENAME + ) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_rsa_keyring_example/test_with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_rsa_keyring_example/test_with_encrypted_table.py new file mode 100644 index 000000000..0358fbb6d --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/raw_rsa_keyring_example/test_with_encrypted_table.py @@ -0,0 +1,29 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test raw RSA keyring with encrypted table example.""" +import pytest + +from ....src.keyring.raw_rsa_keyring_example.utility import ( + EXAMPLE_RSA_PRIVATE_KEY_FILENAME, + EXAMPLE_RSA_PUBLIC_KEY_FILENAME, + generate_rsa_key_pair, + should_generate_new_rsa_key_pair, +) +from ....src.keyring.raw_rsa_keyring_example.with_encrypted_table import raw_rsa_keyring_table_example +from ...test_utils import TEST_DDB_TABLE_NAME + +pytestmark = [pytest.mark.examples] + + +def test_raw_rsa_keyring_table_example(): + """Test raw_rsa_keyring_table_example.""" + # You may provide your own RSA key pair in the files located at + # - EXAMPLE_RSA_PRIVATE_KEY_FILENAME + # - EXAMPLE_RSA_PUBLIC_KEY_FILENAME + # If these files are not present, this will generate a pair for you + if should_generate_new_rsa_key_pair(): + generate_rsa_key_pair() + + raw_rsa_keyring_table_example( + TEST_DDB_TABLE_NAME, EXAMPLE_RSA_PRIVATE_KEY_FILENAME, EXAMPLE_RSA_PUBLIC_KEY_FILENAME + ) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/shared_cache_across_hierarchical_keyrings_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/shared_cache_across_hierarchical_keyrings_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/shared_cache_across_hierarchical_keyrings_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/shared_cache_across_hierarchical_keyrings_example/test_with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/shared_cache_across_hierarchical_keyrings_example/test_with_encrypted_client.py new file mode 100644 index 000000000..2b63dbb52 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/shared_cache_across_hierarchical_keyrings_example/test_with_encrypted_client.py @@ -0,0 +1,43 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test for the shared cache across hierarchical keyrings example.""" +import time + +import pytest + +from ....src.create_keystore_key_example import keystore_create_key +from ....src.keyring.shared_cache_across_hierarchical_keyrings_example.with_encrypted_client import ( + shared_cache_across_hierarchical_keyrings_example, +) +from ...cleanup import delete_branch_key +from ...test_utils import ( + TEST_DDB_TABLE_NAME, + TEST_KEYSTORE_KMS_KEY_ID, + TEST_KEYSTORE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, + TEST_PARTITION_ID, +) + +pytestmark = [pytest.mark.examples] + + +def test_shared_cache_across_hierarchical_keyrings_example(): + """Test the shared cache across hierarchical keyrings example.""" + # Create new branch key for test + key_id = keystore_create_key(TEST_KEYSTORE_NAME, TEST_LOGICAL_KEYSTORE_NAME, TEST_KEYSTORE_KMS_KEY_ID) + + # Key creation is eventually consistent, so wait 5 seconds to decrease the likelihood + # our test fails due to eventual consistency issues. + time.sleep(5) + + shared_cache_across_hierarchical_keyrings_example( + TEST_DDB_TABLE_NAME, + key_id, + TEST_KEYSTORE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, + TEST_PARTITION_ID, + TEST_KEYSTORE_KMS_KEY_ID, + ) + + # Cleanup Branch Key + delete_branch_key(key_id, TEST_KEYSTORE_NAME, None) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/keyring/shared_cache_across_hierarchical_keyrings_example/test_with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/shared_cache_across_hierarchical_keyrings_example/test_with_encrypted_table.py new file mode 100644 index 000000000..862deed82 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/keyring/shared_cache_across_hierarchical_keyrings_example/test_with_encrypted_table.py @@ -0,0 +1,43 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test for the shared cache across hierarchical keyrings example.""" +import time + +import pytest + +from ....src.create_keystore_key_example import keystore_create_key +from ....src.keyring.shared_cache_across_hierarchical_keyrings_example.with_encrypted_table import ( + shared_cache_across_hierarchical_keyrings_example, +) +from ...cleanup import delete_branch_key +from ...test_utils import ( + TEST_DDB_TABLE_NAME, + TEST_KEYSTORE_KMS_KEY_ID, + TEST_KEYSTORE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, + TEST_PARTITION_ID, +) + +pytestmark = [pytest.mark.examples] + + +def test_shared_cache_across_hierarchical_keyrings_example(): + """Test the shared cache across hierarchical keyrings example.""" + # Create new branch key for test + key_id = keystore_create_key(TEST_KEYSTORE_NAME, TEST_LOGICAL_KEYSTORE_NAME, TEST_KEYSTORE_KMS_KEY_ID) + + # Key creation is eventually consistent, so wait 5 seconds to decrease the likelihood + # our test fails due to eventual consistency issues. + time.sleep(5) + + shared_cache_across_hierarchical_keyrings_example( + TEST_DDB_TABLE_NAME, + key_id, + TEST_KEYSTORE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, + TEST_PARTITION_ID, + TEST_KEYSTORE_KMS_KEY_ID, + ) + + # Cleanup Branch Key + delete_branch_key(key_id, TEST_KEYSTORE_NAME, None) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/scan_error_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/scan_error_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/scan_error_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/scan_error_example/test_with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/scan_error_example/test_with_encrypted_client.py new file mode 100644 index 000000000..e0caf9709 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/scan_error_example/test_with_encrypted_client.py @@ -0,0 +1,14 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test scan error example.""" +import pytest + +from ...src.scan_error_example.with_encrypted_client import scan_error_with_client +from ..test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_scan_error(): + """Test scan_error.""" + scan_error_with_client(TEST_KMS_KEY_ID, TEST_DDB_TABLE_NAME) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/scan_error_example/test_with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/scan_error_example/test_with_encrypted_table.py new file mode 100644 index 000000000..ff11c20a1 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/scan_error_example/test_with_encrypted_table.py @@ -0,0 +1,14 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test scan error example.""" +import pytest + +from ...src.scan_error_example.with_encrypted_table import scan_error_with_table +from ..test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_scan_error(): + """Test scan_error.""" + scan_error_with_table(TEST_KMS_KEY_ID, TEST_DDB_TABLE_NAME) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/basic_searchable_encryption_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/basic_searchable_encryption_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/basic_searchable_encryption_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/basic_searchable_encryption_example/test_with_with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/basic_searchable_encryption_example/test_with_with_encrypted_client.py new file mode 100644 index 000000000..75dcc56a5 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/basic_searchable_encryption_example/test_with_with_encrypted_client.py @@ -0,0 +1,42 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test basic searchable encryption example.""" +import time + +import pytest + +from ....src.create_keystore_key_example import keystore_create_key +from ....src.searchable_encryption.basic_searchable_encryption_example.with_encrypted_client import ( + basic_searchable_encryption_client_example, +) +from ...cleanup import delete_branch_key +from ..searchable_encryption_test_utils import ( + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, + UNIT_INSPECTION_TEST_DDB_TABLE_NAME, +) + +pytestmark = [pytest.mark.examples] + + +def test_basic_searchable_encryption_client_example(): + """Test basic searchable encryption example.""" + # Create new branch key for test + key_id = keystore_create_key( + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, TEST_LOGICAL_KEYSTORE_NAME, TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN + ) + + # Key creation is eventually consistent, so wait 5 seconds to decrease the likelihood + # our test fails due to eventual consistency issues. 
+ time.sleep(5) + + basic_searchable_encryption_client_example( + UNIT_INSPECTION_TEST_DDB_TABLE_NAME, + key_id, + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + ) + + # Cleanup Branch Key + delete_branch_key(key_id, TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, None) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/basic_searchable_encryption_example/test_with_with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/basic_searchable_encryption_example/test_with_with_encrypted_table.py new file mode 100644 index 000000000..d3a97485a --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/basic_searchable_encryption_example/test_with_with_encrypted_table.py @@ -0,0 +1,42 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test basic searchable encryption example.""" +import time + +import pytest + +from ....src.create_keystore_key_example import keystore_create_key +from ....src.searchable_encryption.basic_searchable_encryption_example.with_encrypted_table import ( + basic_searchable_encryption_table_example, +) +from ...cleanup import delete_branch_key +from ..searchable_encryption_test_utils import ( + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, + UNIT_INSPECTION_TEST_DDB_TABLE_NAME, +) + +pytestmark = [pytest.mark.examples] + + +def test_basic_searchable_encryption_table_example(): + """Test basic searchable encryption example.""" + # Create new branch key for test + key_id = keystore_create_key( + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, TEST_LOGICAL_KEYSTORE_NAME, TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN + ) + + # Key creation is eventually consistent, so wait 5 seconds to decrease the likelihood + # our test fails due to eventual consistency issues. + time.sleep(5) + + basic_searchable_encryption_table_example( + UNIT_INSPECTION_TEST_DDB_TABLE_NAME, + key_id, + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + ) + + # Cleanup Branch Key + delete_branch_key(key_id, TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, None) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/beacon_styles_searchable_encryption_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/beacon_styles_searchable_encryption_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/beacon_styles_searchable_encryption_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/beacon_styles_searchable_encryption_example/test_with_with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/beacon_styles_searchable_encryption_example/test_with_with_encrypted_client.py new file mode 100644 index 000000000..41823c622 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/beacon_styles_searchable_encryption_example/test_with_with_encrypted_client.py @@ -0,0 +1,42 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test beacon styles searchable encryption example.""" +import time + +import pytest + +from ....src.create_keystore_key_example import keystore_create_key +from ....src.searchable_encryption.beacon_styles_searchable_encryption_example.with_encrypted_client import ( + beacon_styles_client_example, +) +from ...cleanup import delete_branch_key +from ..searchable_encryption_test_utils import ( + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, + UNIT_INSPECTION_TEST_DDB_TABLE_NAME, +) + +pytestmark = [pytest.mark.examples] + + +def test_beacon_styles_client_example(): + """Test beacon styles searchable encryption example.""" + # Create new branch key for test + key_id = keystore_create_key( + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, TEST_LOGICAL_KEYSTORE_NAME, TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN + ) + + # Key creation is eventually consistent, so wait 5 seconds to decrease the likelihood + # our test fails due to eventual consistency issues. + time.sleep(5) + + beacon_styles_client_example( + UNIT_INSPECTION_TEST_DDB_TABLE_NAME, + key_id, + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + ) + + # Cleanup Branch Key + delete_branch_key(key_id, TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, None) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/beacon_styles_searchable_encryption_example/test_with_with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/beacon_styles_searchable_encryption_example/test_with_with_encrypted_table.py new file mode 100644 index 000000000..a5df96cf3 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/beacon_styles_searchable_encryption_example/test_with_with_encrypted_table.py @@ -0,0 +1,42 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test beacon styles searchable encryption example.""" +import time + +import pytest + +from ....src.create_keystore_key_example import keystore_create_key +from ....src.searchable_encryption.beacon_styles_searchable_encryption_example.with_encrypted_table import ( + beacon_styles_table_example, +) +from ...cleanup import delete_branch_key +from ..searchable_encryption_test_utils import ( + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, + UNIT_INSPECTION_TEST_DDB_TABLE_NAME, +) + +pytestmark = [pytest.mark.examples] + + +def test_beacon_styles_table_example(): + """Test beacon styles searchable encryption example.""" + # Create new branch key for test + key_id = keystore_create_key( + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, TEST_LOGICAL_KEYSTORE_NAME, TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN + ) + + # Key creation is eventually consistent, so wait 5 seconds to decrease the likelihood + # our test fails due to eventual consistency issues. 
+ time.sleep(5) + + beacon_styles_table_example( + UNIT_INSPECTION_TEST_DDB_TABLE_NAME, + key_id, + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + ) + + # Cleanup Branch Key + delete_branch_key(key_id, TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, None) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/client/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/client/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/client/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/client/test_complex_example_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/client/test_complex_example_client.py new file mode 100644 index 000000000..b50813150 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/client/test_complex_example_client.py @@ -0,0 +1,36 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test the complex searchable encryption example with the EncryptedClient.""" +import time + +import pytest + +from .....src import create_keystore_key_example +from .....src.searchable_encryption.complex_example.client import example +from ...searchable_encryption_test_utils import ( + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, +) +from ..complex_searchable_encryption_example_test_utils import ( + TEST_DDB_TABLE_NAME, +) + +pytestmark = [pytest.mark.examples] + + +def test_complex_example_client(): + """Test function for encrypt and decrypt using the AWS KMS Keyring example.""" + key_id = create_keystore_key_example.keystore_create_key( + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + ) + + # Key creation is eventually consistent, so wait 5 seconds to decrease the likelihood + # our test fails due to eventual consistency issues. 
+ time.sleep(5) + + example.run_example( + TEST_DDB_TABLE_NAME, key_id, TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME + ) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/complex_searchable_encryption_example_test_utils.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/complex_searchable_encryption_example_test_utils.py new file mode 100644 index 000000000..0efc26888 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/complex_searchable_encryption_example_test_utils.py @@ -0,0 +1,3 @@ +"""Test constants for the complex searchable encryption example.""" + +TEST_DDB_TABLE_NAME = "ComplexBeaconTestTable" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/table/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/table/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/table/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/table/test_complex_example_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/table/test_complex_example_table.py new file mode 100644 index 000000000..50f084c39 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/complex_example/table/test_complex_example_table.py @@ -0,0 +1,36 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test the complex searchable encryption example with the EncryptedTable.""" +import time + +import pytest + +from .....src import create_keystore_key_example +from .....src.searchable_encryption.complex_example.table import example +from ...searchable_encryption_test_utils import ( + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, +) +from ..complex_searchable_encryption_example_test_utils import ( + TEST_DDB_TABLE_NAME, +) + +pytestmark = [pytest.mark.examples] + + +def test_complex_example_table(): + """Test function for encrypt and decrypt using the AWS KMS Keyring example.""" + key_id = create_keystore_key_example.keystore_create_key( + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + ) + + # Key creation is eventually consistent, so wait 5 seconds to decrease the likelihood + # our test fails due to eventual consistency issues. 
+ time.sleep(5) + + example.run_example( + TEST_DDB_TABLE_NAME, key_id, TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME + ) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/compound_beacon_searchable_encryption_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/compound_beacon_searchable_encryption_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/compound_beacon_searchable_encryption_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/compound_beacon_searchable_encryption_example/test_with_with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/compound_beacon_searchable_encryption_example/test_with_with_encrypted_client.py new file mode 100644 index 000000000..351e9f417 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/compound_beacon_searchable_encryption_example/test_with_with_encrypted_client.py @@ -0,0 +1,42 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test compound beacon searchable encryption example.""" +import time + +import pytest + +from ....src.create_keystore_key_example import keystore_create_key +from ....src.searchable_encryption.compound_beacon_searchable_encryption_example.with_encrypted_client import ( + compound_beacon_client_example, +) +from ...cleanup import delete_branch_key +from ..searchable_encryption_test_utils import ( + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, + UNIT_INSPECTION_TEST_DDB_TABLE_NAME, +) + +pytestmark = [pytest.mark.examples] + + +def test_compound_beacon_client_example(): + """Test compound beacon searchable encryption example.""" + # Create new branch key for test + key_id = keystore_create_key( + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, TEST_LOGICAL_KEYSTORE_NAME, TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN + ) + + # Key creation is eventually consistent, so wait 5 seconds to decrease the likelihood + # our test fails due to eventual consistency issues. + time.sleep(5) + + compound_beacon_client_example( + UNIT_INSPECTION_TEST_DDB_TABLE_NAME, + key_id, + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + ) + + # Cleanup Branch Key + delete_branch_key(key_id, TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, None) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/compound_beacon_searchable_encryption_example/test_with_with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/compound_beacon_searchable_encryption_example/test_with_with_encrypted_table.py new file mode 100644 index 000000000..b7ea7b1ae --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/compound_beacon_searchable_encryption_example/test_with_with_encrypted_table.py @@ -0,0 +1,42 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test compound beacon searchable encryption example.""" +import time + +import pytest + +from ....src.create_keystore_key_example import keystore_create_key +from ....src.searchable_encryption.compound_beacon_searchable_encryption_example.with_encrypted_table import ( + compound_beacon_table_example, +) +from ...cleanup import delete_branch_key +from ..searchable_encryption_test_utils import ( + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, + UNIT_INSPECTION_TEST_DDB_TABLE_NAME, +) + +pytestmark = [pytest.mark.examples] + + +def test_compound_beacon_table_example(): + """Test compound beacon searchable encryption example.""" + # Create new branch key for test + key_id = keystore_create_key( + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, TEST_LOGICAL_KEYSTORE_NAME, TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN + ) + + # Key creation is eventually consistent, so wait 5 seconds to decrease the likelihood + # our test fails due to eventual consistency issues. + time.sleep(5) + + compound_beacon_table_example( + UNIT_INSPECTION_TEST_DDB_TABLE_NAME, + key_id, + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + ) + + # Cleanup Branch Key + delete_branch_key(key_id, TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, None) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/searchable_encryption_test_utils.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/searchable_encryption_test_utils.py new file mode 100644 index 000000000..16a9081a4 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/searchable_encryption_test_utils.py @@ -0,0 +1,14 @@ +"""Searchable encryption test constants.""" + +# Our tests require access to DDB Tables with these name +SIMPLE_BEACON_TEST_DDB_TABLE_NAME = "SimpleBeaconTestTable" +UNIT_INSPECTION_TEST_DDB_TABLE_NAME = "UnitInspectionTestTable" + +# The branch key must have been created using this KMS key +# Note: This is a public resource that anyone can access. +# This MUST NOT be used to encrypt any production data. +TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN = "arn:aws:kms:us-west-2:370957321024:key/9d989aa2-2f9c-438c-a745-cc57d3ad0126" + +# Our tests require access to DDB Table with this name configured as a branch keystore +TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME = "KeyStoreDdbTable" +TEST_LOGICAL_KEYSTORE_NAME = "KeyStoreDdbTable" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/virtual_beacon_searchable_encryption_example/__init__.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/virtual_beacon_searchable_encryption_example/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/virtual_beacon_searchable_encryption_example/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/virtual_beacon_searchable_encryption_example/test_with_with_encrypted_client.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/virtual_beacon_searchable_encryption_example/test_with_with_encrypted_client.py new file mode 100644 index 000000000..254792c51 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/virtual_beacon_searchable_encryption_example/test_with_with_encrypted_client.py @@ -0,0 +1,42 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test virtual beacon searchable encryption example.""" +import time + +import pytest + +from ....src.create_keystore_key_example import keystore_create_key +from ....src.searchable_encryption.virtual_beacon_searchable_encryption_example.with_encrypted_client import ( + virtual_beacon_client_example, +) +from ...cleanup import delete_branch_key +from ..searchable_encryption_test_utils import ( + SIMPLE_BEACON_TEST_DDB_TABLE_NAME, + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, +) + +pytestmark = [pytest.mark.examples] + + +def test_virtual_beacon_client_example(): + """Test virtual beacon searchable encryption example.""" + # Create new branch key for test + key_id = keystore_create_key( + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, TEST_LOGICAL_KEYSTORE_NAME, TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN + ) + + # Key creation is eventually consistent, so wait 5 seconds to decrease the likelihood + # our test fails due to eventual consistency issues. + time.sleep(5) + + virtual_beacon_client_example( + SIMPLE_BEACON_TEST_DDB_TABLE_NAME, + key_id, + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + ) + + # Cleanup Branch Key + delete_branch_key(key_id, TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, None) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/virtual_beacon_searchable_encryption_example/test_with_with_encrypted_table.py b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/virtual_beacon_searchable_encryption_example/test_with_with_encrypted_table.py new file mode 100644 index 000000000..9b1828b71 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/searchable_encryption/virtual_beacon_searchable_encryption_example/test_with_with_encrypted_table.py @@ -0,0 +1,42 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test virtual beacon searchable encryption example.""" +import time + +import pytest + +from ....src.create_keystore_key_example import keystore_create_key +from ....src.searchable_encryption.virtual_beacon_searchable_encryption_example.with_encrypted_table import ( + virtual_beacon_table_example, +) +from ...cleanup import delete_branch_key +from ..searchable_encryption_test_utils import ( + SIMPLE_BEACON_TEST_DDB_TABLE_NAME, + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + TEST_LOGICAL_KEYSTORE_NAME, +) + +pytestmark = [pytest.mark.examples] + + +def test_virtual_beacon_table_example(): + """Test virtual beacon searchable encryption example.""" + # Create new branch key for test + key_id = keystore_create_key( + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, TEST_LOGICAL_KEYSTORE_NAME, TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN + ) + + # Key creation is eventually consistent, so wait 5 seconds to decrease the likelihood + # our test fails due to eventual consistency issues. + time.sleep(5) + + virtual_beacon_table_example( + SIMPLE_BEACON_TEST_DDB_TABLE_NAME, + key_id, + TEST_BRANCH_KEY_WRAPPING_KMS_KEY_ARN, + TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, + ) + + # Cleanup Branch Key + delete_branch_key(key_id, TEST_BRANCH_KEYSTORE_DDB_TABLE_NAME, None) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/test_create_keystore_key_example.py b/Examples/runtimes/python/DynamoDBEncryption/test/test_create_keystore_key_example.py new file mode 100644 index 000000000..10a3c2cad --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/test_create_keystore_key_example.py @@ -0,0 +1,20 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test create key store key example.""" +import pytest + +from ..src.create_keystore_key_example import keystore_create_key +from .cleanup import delete_branch_key +from .test_utils import TEST_KEYSTORE_KMS_KEY_ID, TEST_KEYSTORE_NAME, TEST_LOGICAL_KEYSTORE_NAME + +pytestmark = [pytest.mark.examples] + + +def test_create_keystore_key_example(): + """Test create_key_store_key_example.""" + key_id = keystore_create_key(TEST_KEYSTORE_NAME, TEST_LOGICAL_KEYSTORE_NAME, TEST_KEYSTORE_KMS_KEY_ID) + + assert key_id is not None + + # Cleanup Branch Key + delete_branch_key(key_id, TEST_KEYSTORE_NAME, None) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/test_create_keystore_table_example.py b/Examples/runtimes/python/DynamoDBEncryption/test/test_create_keystore_table_example.py new file mode 100644 index 000000000..fd030a0f7 --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/test_create_keystore_table_example.py @@ -0,0 +1,14 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Test create key store table example.""" +import pytest + +from ..src.create_keystore_table_example import keystore_create_table +from .test_utils import TEST_KEYSTORE_KMS_KEY_ID, TEST_KEYSTORE_NAME, TEST_LOGICAL_KEYSTORE_NAME + +pytestmark = [pytest.mark.examples] + + +def test_create_keystore_table_example(): + """Test create_key_store_table_example.""" + keystore_create_table(TEST_KEYSTORE_NAME, TEST_LOGICAL_KEYSTORE_NAME, TEST_KEYSTORE_KMS_KEY_ID) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/test_get_encrypted_data_key_description_example.py b/Examples/runtimes/python/DynamoDBEncryption/test/test_get_encrypted_data_key_description_example.py new file mode 100644 index 000000000..2ed488ebe --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/test_get_encrypted_data_key_description_example.py @@ -0,0 +1,27 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test get encrypted data key description example.""" +import pytest + +from ..src.get_encrypted_data_key_description_example import get_encrypted_data_key_description +from .test_utils import ( + TEST_DDB_TABLE_NAME, + TEST_KMS_KEY_ID, +) + +pytestmark = [pytest.mark.examples] + + +def test_get_encrypted_data_key_description(): + """Test get encrypted data key description example.""" + get_encrypted_data_key_description( + TEST_DDB_TABLE_NAME, + "partition_key", + "BasicPutGetExample", + "sort_key", + "0", + "aws-kms", + TEST_KMS_KEY_ID, + None, + None, + ) diff --git a/Examples/runtimes/python/DynamoDBEncryption/test/test_utils.py b/Examples/runtimes/python/DynamoDBEncryption/test/test_utils.py new file mode 100644 index 000000000..388c03f8d --- /dev/null +++ b/Examples/runtimes/python/DynamoDBEncryption/test/test_utils.py @@ -0,0 +1,21 @@ +"""Test constants.""" + +TEST_KEYSTORE_NAME = "KeyStoreDdbTable" +TEST_LOGICAL_KEYSTORE_NAME = "KeyStoreDdbTable" +TEST_KEYSTORE_KMS_KEY_ID = "arn:aws:kms:us-west-2:370957321024:key/9d989aa2-2f9c-438c-a745-cc57d3ad0126" +TEST_PARTITION_ID = "91c1b6a2-6fc3-4539-ad5e-938d597ed730" + +TEST_AWS_ACCOUNT_ID = "658956600833" +TEST_AWS_REGION = "us-west-2" + +# These are public KMS Keys that MUST only be used for testing, and MUST NOT be used for any production data +TEST_KMS_KEY_ID = "arn:aws:kms:us-west-2:658956600833:key/b3537ef1-d8dc-4780-9f5a-55776cbb2f7f" +TEST_MRK_KEY_ID = "arn:aws:kms:us-west-2:658956600833:key/mrk-80bd8ecdcd4342aebd84b7dc9da498a7" +TEST_KMS_RSA_KEY_ID = "arn:aws:kms:us-west-2:658956600833:key/8b432da4-dde4-4bc3-a794-c7d68cbab5a6" +TEST_MRK_REPLICA_KEY_ID_US_EAST_1 = "arn:aws:kms:us-east-1:658956600833:key/mrk-80bd8ecdcd4342aebd84b7dc9da498a7" +TEST_MRK_REPLICA_KEY_ID_EU_WEST_1 = "arn:aws:kms:eu-west-1:658956600833:key/mrk-80bd8ecdcd4342aebd84b7dc9da498a7" +TEST_KMS_ECDH_KEY_ID_P256_SENDER = "arn:aws:kms:us-west-2:370957321024:key/eabdf483-6be2-4d2d-8ee4-8c2583d416e9" +TEST_KMS_ECDH_KEY_ID_P256_RECIPIENT = "arn:aws:kms:us-west-2:370957321024:key/0265c8e9-5b6a-4055-8f70-63719e09fda5" + +# Our tests require access to DDB Table with this name +TEST_DDB_TABLE_NAME = "DynamoDbEncryptionInterceptorTestTable" diff --git a/Examples/runtimes/python/Migration/.gitignore b/Examples/runtimes/python/Migration/.gitignore new file mode 100644 index 000000000..61d5202d8 --- /dev/null +++ b/Examples/runtimes/python/Migration/.gitignore @@ -0,0 +1,17 @@ +# Python build artifacts +__pycache__ +**/__pycache__ +*.pyc +src/**.egg-info/ +build +poetry.lock 
+**/poetry.lock +dist + +# Dafny-generated Python +**/internaldafny/generated/*.py + +# Python test artifacts +.tox +.pytest_cache + diff --git a/Examples/runtimes/python/Migration/README.md b/Examples/runtimes/python/Migration/README.md new file mode 100644 index 000000000..364315fd1 --- /dev/null +++ b/Examples/runtimes/python/Migration/README.md @@ -0,0 +1,26 @@ +# DynamoDb Encryption Client to AWS Database Encryption SDK for DynamoDb Migration + +This project demonstrates how to safely upgrade +from the DynamoDb Encryption Client (v3.3.0) to the AWS Database Encryption SDK for DynamoDb (v3.x). + +## Getting Started + +### Development Requirements + +- Python 3.11+ + +### Building and Running + +Each example includes a runnable `main` method +and a description of the required command line arguments. +To run a given example, inspect its particular setup requirements, +create and/or grant access to any required AWS resources, +and run the example as specified in the file. + +## Security + +See [CONTRIBUTING](CONTRIBUTING.md#security-issue-notifications) for more information. + +## License + +This project is licensed under the Apache-2.0 License. diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/README.md b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/README.md new file mode 100644 index 000000000..505ab11b8 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/README.md @@ -0,0 +1,45 @@ +# DynamoDb Encryption Client to AWS Database Encryption SDK for DynamoDb Migration + +This project demonstrates the three steps necessary to migrate to the AWS Database Encryption SDK for DynamoDb +if you are currently using the DynamoDb Encryption Client. + +[Step 0](./ddbec/README.md) demonstrates the starting state for your system. + +## Step 1 + +In Step 1, you update your system to do the following: + +- continue to read items in the old format +- continue to write items in the old format +- prepare to read items in the new format + +When you deploy changes in Step 1, you should not expect any behavior change in your system, +and your dataset still consists of data written in the old format. + +You must ensure that the changes in Step 1 have been deployed to all of your readers before you proceed to Step 2. + +## Step 2 + +In Step 2, you update your system to do the following: + +- continue to read items in the old format +- start writing items in the new format +- continue to read items in the new format + +When you deploy changes in Step 2, you are introducing a new encryption format to your system, +and must make sure that all your readers are updated with the changes from Step 1. + +Before you move on to the next step, you will need to re-encrypt all old items in your dataset +to use the newest format. How you will want to do this, and how long you may want to remain in this step, +depends on your system and your desired security properties for old and new items.
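+
+For illustration only, the following is a minimal, hypothetical sketch of how the Step 2 state might be wired up
+using the `setup_awsdbe_client_with_legacy_override` helper defined in `awsdbe/client/common.py` in this project.
+The import path and the exact legacy policy value shown here are assumptions and may differ in your environment:
+
+```python
+# Hypothetical Step 2 wiring: write items in the new format, but keep decrypting the old format.
+# The module path and the policy value below are assumptions, not part of this project's tested code.
+from ddbec_to_awsdbe.src.awsdbe.client.common import setup_awsdbe_client_with_legacy_override
+
+encrypted_client = setup_awsdbe_client_with_legacy_override(
+    kms_key_id="arn:aws:kms:us-west-2:111122223333:key/EXAMPLE",  # your KMS key ARN
+    ddb_table_name="MyMigrationTable",  # your DynamoDB table name
+    policy="FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT",  # assumed policy value for Step 2
+)
+
+# put_item on this client writes items in the new format, while get_item can still
+# decrypt items that were written in the old format.
+```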
+ +## Step 3 + +Once all old items are re-encrypted to use the new format, +you may update your system to do the following: + +- continue to write items in the new format +- continue to read items in the new format +- do not accept reading items in the old format + +Once you have deployed these changes to your system, you have completed migration. diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/client/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/client/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/client/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/client/common.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/client/common.py new file mode 100644 index 000000000..2e74ece9c --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/client/common.py @@ -0,0 +1,235 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Common Utilities for Migration Examples.""" +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsMrkMultiKeyringInput, + DBEAlgorithmSuiteId, +) +from aws_cryptographic_material_providers.mpl.references import IKeyring +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, + LegacyOverride, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + +# Import from legacy DynamoDB Encryption Client +from dynamodb_encryption_sdk.encrypted.client import EncryptedClient as LegacyEncryptedClient +from dynamodb_encryption_sdk.material_providers.aws_kms import AwsKmsCryptographicMaterialsProvider + + +def setup_pure_awsdbe_client(kms_key_id: str, ddb_table_name: str): + """ + Set up a pure AWS Database Encryption SDK EncryptedClient without legacy override. 
+ + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :returns EncryptedClient for DynamoDB + """ + # 1. Create a Keyring. This Keyring will be responsible for protecting the data keys that protect your data. + # For this example, we will create a AWS KMS Keyring with the AWS KMS Key we want to use. + # We will use the `CreateMrkMultiKeyring` method to create this keyring, + # as it will correctly handle both single region and Multi-Region KMS Keys. + mat_prov: AwsCryptographicMaterialProviders = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + kms_mrk_multi_keyring_input: CreateAwsKmsMrkMultiKeyringInput = CreateAwsKmsMrkMultiKeyringInput( + generator=kms_key_id, + ) + kms_mrk_multi_keyring: IKeyring = mat_prov.create_aws_kms_mrk_multi_keyring(input=kms_mrk_multi_keyring_input) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowedUnsignedAttributesPrefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attributeActionsOnEncrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowedUnsignedAttributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we have designed our DynamoDb table such that any attribute name with + # the ":" prefix should be considered unauthenticated. + unsignAttrPrefix: str = ":" + + # 4. 
Create the DynamoDb Encryption configuration for the table we will be writing to. + # without the legacy override + table_configs = {} + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=kms_mrk_multi_keyring, + allowed_unsigned_attribute_prefix=unsignAttrPrefix, + # Specifying an algorithm suite is not required, + # but is done here to demonstrate how to do so. + # We suggest using the + # `ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384` suite, + # which includes AES-GCM with key derivation, signing, and key commitment. + # This is also the default algorithm suite if one is not specified in this config. + # For more information on supported algorithm suites, see: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/supported-algorithms.html + algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384, + ) + table_configs[ddb_table_name] = table_config + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 5. Create the EncryptedClient + return EncryptedClient( + client=boto3.client("dynamodb"), + encryption_config=tables_config, + ) + + +def setup_awsdbe_client_with_legacy_override(kms_key_id: str, ddb_table_name: str, policy: str): + """ + Set up an AWS Database Encryption SDK EncryptedClient with legacy override. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :param policy: The policy required for the Legacy Override configuration + :returns EncryptedClient for DynamoDB + + """ + # 0. Create AWS SDK DynamoDB Client + ddb_client = boto3.client("dynamodb") + + # 1. Create the legacy EncryptedClient + cmp = AwsKmsCryptographicMaterialsProvider(key_id=kms_key_id) + legacy_encrypted_client = LegacyEncryptedClient( + client=ddb_client, + materials_provider=cmp, + ) + + # 2. Configure our legacy behavior, inputting the DynamoDBEncryptor, attribute actions + # created above, and legacy policy. + legacy_override = LegacyOverride( + encryptor=legacy_encrypted_client, + attribute_actions_on_encrypt={ + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + }, + policy=policy, + ) + + # 3. Create a Keyring. This Keyring will be responsible for protecting the data keys that protect your data. + # For this example, we will create a AWS KMS Keyring with the AWS KMS Key we want to use. + # We will use the `CreateMrkMultiKeyring` method to create this keyring, + # as it will correctly handle both single region and Multi-Region KMS Keys. + mat_prov: AwsCryptographicMaterialProviders = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + kms_mrk_multi_keyring_input: CreateAwsKmsMrkMultiKeyringInput = CreateAwsKmsMrkMultiKeyringInput( + generator=kms_key_id, + ) + kms_mrk_multi_keyring: IKeyring = mat_prov.create_aws_kms_mrk_multi_keyring(input=kms_mrk_multi_keyring_input) + + # 4. Configure which attributes are encrypted and/or signed when writing new items. 
+ # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + } + + # 5. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowedUnsignedAttributesPrefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attributeActionsOnEncrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowedUnsignedAttributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we have designed our DynamoDb table such that any attribute name with + # the ":" prefix should be considered unauthenticated. + unsignAttrPrefix: str = ":" + + # 6. Create the DynamoDb Encryption configuration for the table we will be writing to. + # without the legacy override + table_configs = {} + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=kms_mrk_multi_keyring, + legacy_override=legacy_override, + allowed_unsigned_attribute_prefix=unsignAttrPrefix, + # Specifying an algorithm suite is not required, + # but is done here to demonstrate how to do so. + # We suggest using the + # `ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384` suite, + # which includes AES-GCM with key derivation, signing, and key commitment. + # This is also the default algorithm suite if one is not specified in this config. 
+ # For more information on supported algorithm suites, see: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/supported-algorithms.html + algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384, + ) + table_configs[ddb_table_name] = table_config + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 7. Create the EncryptedClient + return EncryptedClient( + client=boto3.client("dynamodb"), + encryption_config=tables_config, + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/client/migration_step_1.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/client/migration_step_1.py new file mode 100644 index 000000000..892cb2ca5 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/client/migration_step_1.py @@ -0,0 +1,87 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +""" +Migration Step 1. + +This is an example demonstrating how to start using the +AWS Database Encryption SDK with a pre-existing table used with DynamoDB Encryption Client. +In this example, you configure a EncryptedClient to do the following: + - Read items encrypted in the old format + - Continue to encrypt items in the old format on write + - Read items encrypted in the new format +While this step configures your client to be ready to start reading items encrypted, +we do not yet expect to be reading any items in the new format. +Before you move on to step 2, ensure that these changes have successfully been deployed +to all of your readers. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" +from aws_dbesdk_dynamodb.structures.dynamodb import LegacyPolicy + +from .common import setup_awsdbe_client_with_legacy_override + + +def migration_step_1_with_client(kms_key_id: str, ddb_table_name: str, sort_read_value: int = 1): + """ + Migration Step 1: Using the AWS Database Encryption SDK with Legacy Override. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :param sort_read_value: The sort key value to read + + """ + # 1. Create a EncryptedClient with legacy override. + # For Legacy Policy, use `FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT`. + # With this policy, you will continue to read and write items using the old format, + # but will be able to start reading new items in the new format as soon as they appear + policy = LegacyPolicy.FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT + encrypted_client = setup_awsdbe_client_with_legacy_override( + kms_key_id=kms_key_id, ddb_table_name=ddb_table_name, policy=policy + ) + + # 2. 
+ Put an item in the old format since we are using a legacy override + # with FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT policy + item_to_encrypt = { + "partition_key": {"S": "MigrationExampleForPython"}, + "sort_key": {"N": str(1)}, + "attribute1": {"S": "encrypt and sign me!"}, + "attribute2": {"S": "sign me!"}, + ":attribute3": {"S": "ignore me!"}, + } + + put_item_request = { + "TableName": ddb_table_name, + "Item": item_to_encrypt, + } + + put_item_response = encrypted_client.put_item(**put_item_request) + # Demonstrate that PutItem succeeded + assert put_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 3. Get an item back from the table using the EncryptedClient. + # If this is an item written in the old format (e.g. any item written + # during Step 0 or 1), then we will attempt to decrypt the item + # using the legacy behavior. + # If this is an item written in the new format (e.g. any item written + # during Step 2 or after), then we will attempt to decrypt the item using + # the non-legacy behavior. + key_to_get = {"partition_key": {"S": "MigrationExampleForPython"}, "sort_key": {"N": str(sort_read_value)}} + + get_item_request = {"TableName": ddb_table_name, "Key": key_to_get} + get_item_response = encrypted_client.get_item(**get_item_request) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + decrypted_item = get_item_response["Item"] + # Demonstrate we get the expected item back + assert decrypted_item["partition_key"]["S"] == "MigrationExampleForPython" + assert decrypted_item["sort_key"]["N"] == str(sort_read_value) + assert decrypted_item["attribute1"]["S"] == "encrypt and sign me!" + assert decrypted_item["attribute2"]["S"] == "sign me!" + assert decrypted_item[":attribute3"]["S"] == "ignore me!" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/client/migration_step_2.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/client/migration_step_2.py new file mode 100644 index 000000000..776825585 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/client/migration_step_2.py @@ -0,0 +1,88 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +""" +Migration Step 2. + +This is an example demonstrating how to update your configuration +to start writing items using the latest encryption format, but still continue +to read any items written using the old encryption format. + +Once you deploy this change to your system, you will have a dataset +containing items in both the old and new format. +Because the changes in Step 1 have been deployed to all our readers, +we can be sure that our entire system is ready to read this new data. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (N) +""" + +from aws_dbesdk_dynamodb.structures.dynamodb import LegacyPolicy + +# Import from new AWS Database Encryption SDK +from .common import setup_awsdbe_client_with_legacy_override + + +def migration_step_2_with_client(kms_key_id: str, ddb_table_name: str, sort_read_value: int = 2): + """ + Migration Step 2: Using pure AWS DBESDK and legacy override together with EncryptedClient.
+ + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :param sort_read_value: The sort key value to read + + """ + # 1. Create a EncryptedClient with legacy override. + # When configuring our legacy behavior, use `FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT`. + # With this policy, you will continue to read items in both formats, + # but will only write new items using the new format. + encrypted_client = setup_awsdbe_client_with_legacy_override( + kms_key_id, ddb_table_name, policy=LegacyPolicy.FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT + ) + + # 2. Put an item into your table using the DB ESDK Client. + # This item will be encrypted in the latest format, using the + # configuration from your modelled class to decide + # which attribute to encrypt and/or sign. + item_to_encrypt = { + "partition_key": {"S": "MigrationExampleForPython"}, + "sort_key": {"N": str(2)}, + "attribute1": {"S": "encrypt and sign me!"}, + "attribute2": {"S": "sign me!"}, + ":attribute3": {"S": "ignore me!"}, + } + + put_item_request = { + "TableName": ddb_table_name, + "Item": item_to_encrypt, + } + + put_item_response = encrypted_client.put_item(**put_item_request) + # Demonstrate that PutItem succeeded + assert put_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 3. Get an item back from the table using the Client. + # If this is an item written in the old format (e.g. any item written + # during Step 0 or 1), then we will attempt to decrypt the item + # using the legacy behavior. + # If this is an item written in the new format (e.g. any item written + # during Step 2 or after), then we will attempt to decrypt the item using + # the non-legacy behavior. + key_to_get = {"partition_key": {"S": "MigrationExampleForPython"}, "sort_key": {"N": str(sort_read_value)}} + + get_item_request = {"TableName": ddb_table_name, "Key": key_to_get} + get_item_response = encrypted_client.get_item(**get_item_request) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + decrypted_item = get_item_response["Item"] + # Demonstrate we get the expected item back + assert decrypted_item["partition_key"]["S"] == "MigrationExampleForPython" + assert decrypted_item["sort_key"]["N"] == str(sort_read_value) + assert decrypted_item["attribute1"]["S"] == "encrypt and sign me!" + assert decrypted_item["attribute2"]["S"] == "sign me!" + assert decrypted_item[":attribute3"]["S"] == "ignore me!" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/client/migration_step_3.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/client/migration_step_3.py new file mode 100644 index 000000000..df4ae758c --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/client/migration_step_3.py @@ -0,0 +1,73 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +""" +Migration Step 3. + +This is an example demonstrating how to update your configuration +to stop accepting reading items encrypted using the old format. +In order to proceed with this step, you will need to re-encrypt all +old items in your table. + +Once you complete Step 3, you can be sure that all items being read by your system +ensure the security properties configured for the new format. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. 
+This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" + +from .common import setup_pure_awsdbe_client + + +def migration_step_3_with_client(kms_key_id: str, ddb_table_name: str, sort_read_value: int = 3): + """ + Migration Step 3: Using only pure AWS DBESDK (no legacy override) with EncryptedClient. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :param sort_read_value: The sort key value to read + """ + # 1. Create the EncryptedClient. + # Do not configure any legacy behavior. + encrypted_client = setup_pure_awsdbe_client(kms_key_id, ddb_table_name) + + # 2. Put an item into your table using the Client. + # This item will be encrypted in the latest format, using the + # configuration from your modelled class to decide + # which attribute to encrypt and/or sign. + item = { + "partition_key": {"S": "MigrationExampleForPython"}, + "sort_key": {"N": str(3)}, + "attribute1": {"S": "encrypt and sign me!"}, + "attribute2": {"S": "sign me!"}, + ":attribute3": {"S": "ignore me!"}, + } + + put_item_response = encrypted_client.put_item(TableName=ddb_table_name, Item=item) + # Demonstrate that PutItem succeeded + assert put_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 3. Get an item back from the table using the Client. + # If this is an item written in the old format (e.g. any item written + # during Step 0 or 1), then we fail to return the item. + # If this is an item written in the new format (e.g. any item written + # during Step 2 or after), then we will attempt to decrypt the item using + # the non-legacy behavior. + key_to_get = {"partition_key": {"S": "MigrationExampleForPython"}, "sort_key": {"N": str(sort_read_value)}} + + get_item_request = {"TableName": ddb_table_name, "Key": key_to_get} + get_item_response = encrypted_client.get_item(**get_item_request) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + decrypted_item = get_item_response["Item"] + # Demonstrate we get the expected item back + assert decrypted_item["partition_key"]["S"] == "MigrationExampleForPython" + assert decrypted_item["sort_key"]["N"] == str(sort_read_value) + assert decrypted_item["attribute1"]["S"] == "encrypt and sign me!" + assert decrypted_item["attribute2"]["S"] == "sign me!" + assert decrypted_item[":attribute3"]["S"] == "ignore me!" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/paginator/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/paginator/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/paginator/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/paginator/migration_step_1.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/paginator/migration_step_1.py new file mode 100644 index 000000000..ec89382b7 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/paginator/migration_step_1.py @@ -0,0 +1,94 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 + +""" +Migration Step 1. + +This is an example demonstrating how to start using the +AWS Database Encryption SDK with a pre-existing table used with DynamoDB Encryption Client. +In this example, you configure a EncryptedPaginator to do the following: + - Read items encrypted in the old format + - Continue to encrypt items in the old format on write + - Read items encrypted in the new format +While this step configures your paginator to be ready to start reading items encrypted, +we do not yet expect to be reading any items in the new format. +Before you move on to step 2, ensure that these changes have successfully been deployed +to all of your readers. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (N) +""" +from aws_dbesdk_dynamodb.structures.dynamodb import LegacyPolicy + +from ..client.common import setup_awsdbe_client_with_legacy_override + + +def migration_step_1_with_paginator(kms_key_id: str, ddb_table_name: str, sort_read_value: int = 1): + """ + Migration Step 1: Using the AWS Database Encryption SDK EncryptedPaginator with Legacy Override. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + """ + # 1. Create a EncryptedClient with legacy override. + # For Legacy Policy, use `FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT`. + # With this policy, you will continue to read and write items using the old format, + # but will be able to start reading new items in the new format as soon as they appear + policy = LegacyPolicy.FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT + encrypted_client = setup_awsdbe_client_with_legacy_override( + kms_key_id=kms_key_id, ddb_table_name=ddb_table_name, policy=policy + ) + + # 2. Put an item in the old format since we are using a legacy override + # with FORCE_LEGACY_ENCRYPT_ALLOW_DECRYPT policy + item_to_encrypt = { + "partition_key": {"S": "PaginatorMigrationExampleForPython"}, + "sort_key": {"N": "1"}, + "attribute1": {"S": "encrypt and sign me!"}, + "attribute2": {"S": "sign me!"}, + ":attribute3": {"S": "ignore me!"}, + } + + put_item_request = { + "TableName": ddb_table_name, + "Item": item_to_encrypt, + } + + put_item_response = encrypted_client.put_item(**put_item_request) + # Demonstrate that PutItem succeeded + assert put_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 3. Get the EncryptedPaginator from the EncryptedClient + encrypted_paginator = encrypted_client.get_paginator("query") + + # 4. Use the EncryptedPaginator to paginate through the items in the table + # If the items were written in the old format (e.g. any item written + # during Step 0 or 1), then we will attempt to decrypt the item + # using the legacy behavior. + # If the items were written in the new format (e.g. any item written + # during Step 2 or after), then we will attempt to decrypt the item using + # the non-legacy behavior. + items = [] + for page in encrypted_paginator.paginate( + TableName=ddb_table_name, + KeyConditionExpression="partition_key = :partition_key AND sort_key = :sort_key", + ExpressionAttributeValues={ + ":partition_key": {"S": "PaginatorMigrationExampleForPython"}, + ":sort_key": {"N": str(sort_read_value)}, + }, + ): + for item in page["Items"]: + items.append(item) + + # 5. 
Verify the decrypted items + assert len(items) == 1 # We should have only one item with above key condition + item = next((i for i in items if i["sort_key"]["N"] == str(sort_read_value)), None) + assert item is not None + assert item["partition_key"]["S"] == "PaginatorMigrationExampleForPython" + assert item["attribute1"]["S"] == "encrypt and sign me!" + assert item["attribute2"]["S"] == "sign me!" + assert item[":attribute3"]["S"] == "ignore me!" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/paginator/migration_step_2.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/paginator/migration_step_2.py new file mode 100644 index 000000000..6ad018cc5 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/paginator/migration_step_2.py @@ -0,0 +1,94 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +""" +Migration Step 2. + +This is an example demonstrating how to update your configuration +to start writing items using the latest encryption format, but still continue +to read any items written using the old encryption format. + +Once you deploy this change to your system, you will have a dataset +containing items in both the old and new format. +Because the changes in Step 1 have been deployed to all our readers, +we can be sure that our entire system is ready to read this new data. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (N) +""" + +from aws_dbesdk_dynamodb.structures.dynamodb import LegacyPolicy + +# Import from new AWS Database Encryption SDK +from ..client.common import setup_awsdbe_client_with_legacy_override + + +def migration_step_2_with_paginator(kms_key_id: str, ddb_table_name: str, sort_read_value: int = 2): + """ + Migration Step 2: Using the AWS Database Encryption SDK EncryptedPaginator with legacy override. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + """ + # 1. Create a EncryptedClient with legacy override. + # When configuring our legacy behavior, use `FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT`. + # With this policy, you will continue to read items in both formats, + # but will only write new items using the new format. + encrypted_client = setup_awsdbe_client_with_legacy_override( + kms_key_id, ddb_table_name, policy=LegacyPolicy.FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT + ) + + # 2. Put an item into your table using the EncryptedClient. + # This item will be encrypted in the latest format, using the + # configuration to decide which attribute to encrypt and/or sign. + item_to_encrypt = { + "partition_key": {"S": "PaginatorMigrationExampleForPython"}, + "sort_key": {"N": "2"}, + "attribute1": {"S": "encrypt and sign me!"}, + "attribute2": {"S": "sign me!"}, + ":attribute3": {"S": "ignore me!"}, + } + + put_item_request = { + "TableName": ddb_table_name, + "Item": item_to_encrypt, + } + + put_item_response = encrypted_client.put_item(**put_item_request) + # Demonstrate that PutItem succeeded + assert put_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 3. Get the EncryptedPaginator from the EncryptedClient + encrypted_paginator = encrypted_client.get_paginator("query") + + # 4. 
Use the EncryptedPaginator to paginate through the items in the table + # If the items were written in the old format (e.g. any item written + # during Step 0 or 1), then we will attempt to decrypt the item + # using the legacy behavior. + # If the items were written in the new format (e.g. any item written + # during Step 2 or after), then we will attempt to decrypt the item using + # the non-legacy behavior. + items = [] + for page in encrypted_paginator.paginate( + TableName=ddb_table_name, + KeyConditionExpression="partition_key = :partition_key AND sort_key = :sort_key", + ExpressionAttributeValues={ + ":partition_key": {"S": "PaginatorMigrationExampleForPython"}, + ":sort_key": {"N": str(sort_read_value)}, + }, + ): + for item in page["Items"]: + items.append(item) + + # 5. Verify the decrypted items + assert len(items) == 1 # We should have only one item with above key condition + item = next((i for i in items if i["sort_key"]["N"] == str(sort_read_value)), None) + assert item is not None + assert item["partition_key"]["S"] == "PaginatorMigrationExampleForPython" + assert item["attribute1"]["S"] == "encrypt and sign me!" + assert item["attribute2"]["S"] == "sign me!" + assert item[":attribute3"]["S"] == "ignore me!" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/paginator/migration_step_3.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/paginator/migration_step_3.py new file mode 100644 index 000000000..25192ba91 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/paginator/migration_step_3.py @@ -0,0 +1,85 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +""" +Migration Step 3. + +This is an example demonstrating how to update your configuration +to stop accepting reading items encrypted using the old format. +In order to proceed with this step, you will need to re-encrypt all +old items in your table. + +Once you complete Step 3, you can be sure that all items being read by your system +ensure the security properties configured for the new format. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (N) +""" + +from ..client.common import setup_pure_awsdbe_client + + +def migration_step_3_with_paginator(kms_key_id: str, ddb_table_name: str, sort_read_value: int = 3): + """ + Migration Step 3: Using only pure AWS DBESDK (no legacy override) with EncryptedPaginator. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + """ + # 1. Create the EncryptedClient. + # Do not configure any legacy behavior. + encrypted_client = setup_pure_awsdbe_client(kms_key_id, ddb_table_name) + + # 2. Put an item into your table using the Client. + # This item will be encrypted in the latest format, using the + # configuration from your modelled class to decide + # which attribute to encrypt and/or sign. 
+ item = { + "partition_key": {"S": "PaginatorMigrationExampleForPython"}, + "sort_key": {"N": "3"}, + "attribute1": {"S": "encrypt and sign me!"}, + "attribute2": {"S": "sign me!"}, + ":attribute3": {"S": "ignore me!"}, + } + + put_item_response = encrypted_client.put_item(TableName=ddb_table_name, Item=item) + # Demonstrate that PutItem succeeded + assert put_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 3. Get the EncryptedPaginator from the EncryptedClient + encrypted_paginator = encrypted_client.get_paginator("query") + + # 4. Use the EncryptedPaginator to paginate through the items in the table + # If the items were written in the old format (e.g. any item written + # during Step 0 or 1), then we will fail to decrypt those items. + # If the items were written in the new format (e.g. any item written + # during Step 2 or 3), then we will attempt to decrypt the item using + # the non-legacy behavior. + items = [] + for page in encrypted_paginator.paginate( + TableName=ddb_table_name, + KeyConditionExpression="partition_key = :partition_key AND sort_key = :sort_key", + ExpressionAttributeValues={ + ":partition_key": {"S": "PaginatorMigrationExampleForPython"}, + ":sort_key": {"N": str(sort_read_value)}, + }, + ): + for item in page["Items"]: + items.append(item) + + # 5. Verify the decrypted items + assert len(items) == 1 # We should have only one item with above key condition + item = next((i for i in items if i["sort_key"]["N"] == str(sort_read_value)), None) + assert item is not None + assert item["partition_key"]["S"] == "PaginatorMigrationExampleForPython" + assert item["attribute1"]["S"] == "encrypt and sign me!" + assert item["attribute2"]["S"] == "sign me!" + assert item[":attribute3"]["S"] == "ignore me!" + + # Note: If we tried to query for items with sort_key = 1 or sort_key = 2 that were + # written with the legacy format in previous migration steps and haven't been + # re-encrypted, the operation would fail with a verification exception. diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/resource/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/resource/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/resource/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/resource/common.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/resource/common.py new file mode 100644 index 000000000..f2dc49eac --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/resource/common.py @@ -0,0 +1,235 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Common Utilities for Migration Examples.""" +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsMrkMultiKeyringInput, + DBEAlgorithmSuiteId, +) +from aws_cryptographic_material_providers.mpl.references import IKeyring +from aws_dbesdk_dynamodb.encrypted.resource import EncryptedResource +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, + LegacyOverride, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + +# Import from legacy DynamoDB Encryption Client +from dynamodb_encryption_sdk.encrypted.resource import EncryptedResource as LegacyEncryptedResource +from dynamodb_encryption_sdk.material_providers.aws_kms import AwsKmsCryptographicMaterialsProvider + + +def setup_pure_awsdbe_resource(kms_key_id: str, ddb_table_name: str): + """ + Set up a pure AWS Database Encryption SDK EncryptedResource without legacy override. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :returns EncryptedResource for DynamoDB + """ + # 1. Create a Keyring. This Keyring will be responsible for protecting the data keys that protect your data. + # For this example, we will create a AWS KMS Keyring with the AWS KMS Key we want to use. + # We will use the `CreateMrkMultiKeyring` method to create this keyring, + # as it will correctly handle both single region and Multi-Region KMS Keys. + mat_prov: AwsCryptographicMaterialProviders = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + kms_mrk_multi_keyring_input: CreateAwsKmsMrkMultiKeyringInput = CreateAwsKmsMrkMultiKeyringInput( + generator=kms_key_id, + ) + kms_mrk_multi_keyring: IKeyring = mat_prov.create_aws_kms_mrk_multi_keyring(input=kms_mrk_multi_keyring_input) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowedUnsignedAttributesPrefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. 
+ # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attributeActionsOnEncrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowedUnsignedAttributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we have designed our DynamoDb table such that any attribute name with + # the ":" prefix should be considered unauthenticated. + unsignAttrPrefix: str = ":" + + # 4. Create the DynamoDb Encryption configuration for the table we will be writing to. + # without the legacy override + table_configs = {} + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=kms_mrk_multi_keyring, + allowed_unsigned_attribute_prefix=unsignAttrPrefix, + # Specifying an algorithm suite is not required, + # but is done here to demonstrate how to do so. + # We suggest using the + # `ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384` suite, + # which includes AES-GCM with key derivation, signing, and key commitment. + # This is also the default algorithm suite if one is not specified in this config. + # For more information on supported algorithm suites, see: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/supported-algorithms.html + algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384, + ) + table_configs[ddb_table_name] = table_config + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 5. Create the EncryptedResource + return EncryptedResource( + resource=boto3.resource("dynamodb"), + encryption_config=tables_config, + ) + + +def setup_awsdbe_resource_with_legacy_override(kms_key_id: str, ddb_table_name: str, policy: str): + """ + Set up an AWS Database Encryption SDK EncryptedResource with legacy override. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :param policy: The policy required for the Legacy Override configuration + :returns EncryptedResource for DynamoDB + + """ + # 0. Create AWS SDK DynamoDB Resource + ddb_resource = boto3.resource("dynamodb") + + # 1. Create the legacy EncryptedResource + cmp = AwsKmsCryptographicMaterialsProvider(key_id=kms_key_id) + legacy_encrypted_resource = LegacyEncryptedResource( + resource=ddb_resource, + materials_provider=cmp, + ) + + # 2. Configure our legacy behavior, inputting the DynamoDBEncryptor, attribute actions + # created above, and legacy policy. 
+ legacy_override = LegacyOverride( + encryptor=legacy_encrypted_resource, + attribute_actions_on_encrypt={ + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + }, + policy=policy, + ) + + # 3. Create a Keyring. This Keyring will be responsible for protecting the data keys that protect your data. + # For this example, we will create a AWS KMS Keyring with the AWS KMS Key we want to use. + # We will use the `CreateMrkMultiKeyring` method to create this keyring, + # as it will correctly handle both single region and Multi-Region KMS Keys. + mat_prov: AwsCryptographicMaterialProviders = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + kms_mrk_multi_keyring_input: CreateAwsKmsMrkMultiKeyringInput = CreateAwsKmsMrkMultiKeyringInput( + generator=kms_key_id, + ) + kms_mrk_multi_keyring: IKeyring = mat_prov.create_aws_kms_mrk_multi_keyring(input=kms_mrk_multi_keyring_input) + + # 4. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + } + + # 5. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowedUnsignedAttributesPrefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attributeActionsOnEncrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowedUnsignedAttributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. 
+ # + # For this example, we have designed our DynamoDb table such that any attribute name with + # the ":" prefix should be considered unauthenticated. + unsignAttrPrefix: str = ":" + + # 6. Create the DynamoDb Encryption configuration for the table we will be writing to. + # with the legacy override + table_configs = {} + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=kms_mrk_multi_keyring, + legacy_override=legacy_override, + allowed_unsigned_attribute_prefix=unsignAttrPrefix, + # Specifying an algorithm suite is not required, + # but is done here to demonstrate how to do so. + # We suggest using the + # `ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384` suite, + # which includes AES-GCM with key derivation, signing, and key commitment. + # This is also the default algorithm suite if one is not specified in this config. + # For more information on supported algorithm suites, see: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/supported-algorithms.html + algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384, + ) + table_configs[ddb_table_name] = table_config + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 7. Create the EncryptedResource + return EncryptedResource( + resource=boto3.resource("dynamodb"), + encryption_config=tables_config, + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/resource/migration_step_1.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/resource/migration_step_1.py new file mode 100644 index 000000000..e399d28de --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/resource/migration_step_1.py @@ -0,0 +1,105 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +""" +Migration Step 1. + +This is an example demonstrating how to start using the +AWS Database Encryption SDK with a pre-existing table used with DynamoDB Encryption Client. +In this example, you configure a EncryptedResource to do the following: + - Read items encrypted in the old format + - Continue to encrypt items in the old format on write + - Read items encrypted in the new format +While this step configures your resource to be ready to start reading items encrypted, +we do not yet expect to be reading any items in the new format. +Before you move on to step 2, ensure that these changes have successfully been deployed +to all of your readers. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (N) +""" +from aws_dbesdk_dynamodb.structures.dynamodb import LegacyPolicy + +from .common import setup_awsdbe_resource_with_legacy_override + + +def migration_step_1_with_resource(kms_key_id: str, ddb_table_name: str, sort_read_value: int = 1): + """ + Migration Step 1: Using the AWS Database Encryption SDK with Legacy Override. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :param sort_read_value: The sort key value to read + + """ + # 1. 
Create a EncryptedResource with legacy override. + # For Legacy Policy, use `FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT`. + # With this policy, you will continue to read and write items using the old format, + # but will be able to start reading new items in the new format as soon as they appear + policy = LegacyPolicy.FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT + encrypted_resource = setup_awsdbe_resource_with_legacy_override( + kms_key_id=kms_key_id, ddb_table_name=ddb_table_name, policy=policy + ) + + # 2. Write a batch of items to the table using the old format since we are using + # a legacy override with FORCE_LEGACY_ENCRYPT_ALLOW_DECRYPT policy + items = [ + { + "partition_key": "PythonEncryptedResourceMigrationExample-1", + "sort_key": 1, + "attribute1": "encrypt and sign me!", + "attribute2": "sign me!", + ":attribute3": "ignore me!", + }, + { + "partition_key": "PythonEncryptedResourceMigrationExample-2", + "sort_key": 1, + "attribute1": "encrypt and sign me!", + "attribute2": "sign me!", + ":attribute3": "ignore me!", + }, + ] + + batch_write_items_put_request = { + "RequestItems": { + ddb_table_name: [{"PutRequest": {"Item": item}} for item in items], + }, + } + + batch_write_items_put_response = encrypted_resource.batch_write_item(**batch_write_items_put_request) + + # Demonstrate that BatchWriteItem succeeded + assert batch_write_items_put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 3. Read the items back from the table. + # If this is an item written in the old format (e.g. any item written + # during Step 0 or 1), then we will attempt to decrypt the item + # using the legacy behavior. + # If this is an item written in the new format (e.g. any item written + # during Step 2 or after), then we will attempt to decrypt the item using + # the non-legacy behavior. + batch_get_items_request = { + "RequestItems": { + ddb_table_name: { + "Keys": [{"partition_key": item["partition_key"], "sort_key": sort_read_value} for item in items], + } + }, + } + + batch_get_items_response = encrypted_resource.batch_get_item(**batch_get_items_request) + + # Demonstrate that BatchGetItem succeeded with the expected result + assert batch_get_items_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + for item in batch_get_items_response["Responses"][ddb_table_name]: + assert ( + item["partition_key"] == "PythonEncryptedResourceMigrationExample-1" + or item["partition_key"] == "PythonEncryptedResourceMigrationExample-2" + ) + assert item["sort_key"] == sort_read_value + assert item["attribute1"] == "encrypt and sign me!" + assert item["attribute2"] == "sign me!" + assert item[":attribute3"] == "ignore me!" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/resource/migration_step_2.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/resource/migration_step_2.py new file mode 100644 index 000000000..cf7ba3a33 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/resource/migration_step_2.py @@ -0,0 +1,106 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +""" +Migration Step 2. + +This is an example demonstrating how to update your configuration +to start writing items using the latest encryption format, but still continue +to read any items written using the old encryption format. + +Once you deploy this change to your system, you will have a dataset +containing items in both the old and new format. 
+Because the changes in Step 1 have been deployed to all our readers, +we can be sure that our entire system is ready to read this new data. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (N) +""" + +from aws_dbesdk_dynamodb.structures.dynamodb import LegacyPolicy + +# Import from new AWS Database Encryption SDK +from .common import setup_awsdbe_resource_with_legacy_override + + +def migration_step_2_with_resource(kms_key_id: str, ddb_table_name: str, sort_read_value: int = 2): + """ + Migration Step 2: Using pure AWS DBESDK and legacy override together with EncryptedResource. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :param sort_read_value: The sort key value to read + + """ + # 1. Create a EncryptedResource with legacy override. + # When configuring our legacy behavior, use `FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT`. + # With this policy, you will continue to read items in both formats, + # but will only write new items using the new format. + encrypted_resource = setup_awsdbe_resource_with_legacy_override( + kms_key_id, ddb_table_name, policy=LegacyPolicy.FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT + ) + + # 2. Write a batch of items to the table. + # These items will be encrypted in the latest format, using the + # configuration from your modelled class to decide + # which attribute to encrypt and/or sign. + items = [ + { + "partition_key": "PythonEncryptedResourceMigrationExample-1", + "sort_key": 2, + "attribute1": "encrypt and sign me!", + "attribute2": "sign me!", + ":attribute3": "ignore me!", + }, + { + "partition_key": "PythonEncryptedResourceMigrationExample-2", + "sort_key": 2, + "attribute1": "encrypt and sign me!", + "attribute2": "sign me!", + ":attribute3": "ignore me!", + }, + ] + + batch_write_items_put_request = { + "RequestItems": { + ddb_table_name: [{"PutRequest": {"Item": item}} for item in items], + }, + } + + batch_write_items_put_response = encrypted_resource.batch_write_item(**batch_write_items_put_request) + + # Demonstrate that BatchWriteItem succeeded + assert batch_write_items_put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 3. Read the items back from the table. + # If this is an item written in the old format (e.g. any item written + # during Step 0 or 1), then we will attempt to decrypt the item + # using the legacy behavior. + # If this is an item written in the new format (e.g. any item written + # during Step 2 or after), then we will attempt to decrypt the item using + # the non-legacy behavior. 
+ batch_get_items_request = { + "RequestItems": { + ddb_table_name: { + "Keys": [{"partition_key": item["partition_key"], "sort_key": sort_read_value} for item in items], + } + }, + } + + batch_get_items_response = encrypted_resource.batch_get_item(**batch_get_items_request) + + # Demonstrate that BatchGetItem succeeded with the expected result + assert batch_get_items_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + for item in batch_get_items_response["Responses"][ddb_table_name]: + assert ( + item["partition_key"] == "PythonEncryptedResourceMigrationExample-1" + or item["partition_key"] == "PythonEncryptedResourceMigrationExample-2" + ) + assert item["sort_key"] == sort_read_value + assert item["attribute1"] == "encrypt and sign me!" + assert item["attribute2"] == "sign me!" + assert item[":attribute3"] == "ignore me!" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/resource/migration_step_3.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/resource/migration_step_3.py new file mode 100644 index 000000000..fc72fcf29 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/resource/migration_step_3.py @@ -0,0 +1,96 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +""" +Migration Step 3. + +This is an example demonstrating how to update your configuration +to stop accepting reading items encrypted using the old format. +In order to proceed with this step, you will need to re-encrypt all +old items in your table. + +Once you complete Step 3, you can be sure that all items being read by your system +ensure the security properties configured for the new format. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (N) +""" + +from .common import setup_pure_awsdbe_resource + + +def migration_step_3_with_resource(kms_key_id: str, ddb_table_name: str, sort_read_value: int = 3): + """ + Migration Step 3: Using only pure AWS DBESDK (no legacy override) with EncryptedResource. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :param sort_read_value: The sort key value to read + """ + # 1. Create the EncryptedResource. + # Do not configure any legacy behavior. + encrypted_resource = setup_pure_awsdbe_resource(kms_key_id, ddb_table_name) + + # 2. Write a batch of items to the table. + # These items will be encrypted in the latest format, using the + # configuration from your modelled class to decide + # which attribute to encrypt and/or sign. 
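+ # These items use sort_key=3, so reads at earlier sort_read_values exercise items
+ # written by earlier steps (and will fail for old-format items, as noted below).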
+ items = [ + { + "partition_key": "PythonEncryptedResourceMigrationExample-1", + "sort_key": 3, + "attribute1": "encrypt and sign me!", + "attribute2": "sign me!", + ":attribute3": "ignore me!", + }, + { + "partition_key": "PythonEncryptedResourceMigrationExample-2", + "sort_key": 3, + "attribute1": "encrypt and sign me!", + "attribute2": "sign me!", + ":attribute3": "ignore me!", + }, + ] + + batch_write_items_put_request = { + "RequestItems": { + ddb_table_name: [{"PutRequest": {"Item": item}} for item in items], + }, + } + + batch_write_items_put_response = encrypted_resource.batch_write_item(**batch_write_items_put_request) + + # Demonstrate that BatchWriteItem succeeded + assert batch_write_items_put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 3. Read the items back from the table. + # If this is an item written in the old format (e.g. any item written + # during Step 0 or 1), then we fail to return the item. + # If this is an item written in the new format (e.g. any item written + # during Step 2 or after), then we will attempt to decrypt the item using + # the non-legacy behavior. + batch_get_items_request = { + "RequestItems": { + ddb_table_name: { + "Keys": [{"partition_key": item["partition_key"], "sort_key": sort_read_value} for item in items], + } + }, + } + + batch_get_items_response = encrypted_resource.batch_get_item(**batch_get_items_request) + + # Demonstrate that BatchGetItem succeeded with the expected result + assert batch_get_items_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + for item in batch_get_items_response["Responses"][ddb_table_name]: + assert ( + item["partition_key"] == "PythonEncryptedResourceMigrationExample-1" + or item["partition_key"] == "PythonEncryptedResourceMigrationExample-2" + ) + assert item["sort_key"] == sort_read_value + assert item["attribute1"] == "encrypt and sign me!" + assert item["attribute2"] == "sign me!" + assert item[":attribute3"] == "ignore me!" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/table/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/table/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/table/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/table/common.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/table/common.py new file mode 100644 index 000000000..1f76897d4 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/table/common.py @@ -0,0 +1,235 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Common Utilities for Migration Examples.""" +import boto3 +from aws_cryptographic_material_providers.mpl import AwsCryptographicMaterialProviders +from aws_cryptographic_material_providers.mpl.config import MaterialProvidersConfig +from aws_cryptographic_material_providers.mpl.models import ( + CreateAwsKmsMrkMultiKeyringInput, + DBEAlgorithmSuiteId, +) +from aws_cryptographic_material_providers.mpl.references import IKeyring +from aws_dbesdk_dynamodb.encrypted.table import EncryptedTable +from aws_dbesdk_dynamodb.structures.dynamodb import ( + DynamoDbTableEncryptionConfig, + DynamoDbTablesEncryptionConfig, + LegacyOverride, +) +from aws_dbesdk_dynamodb.structures.structured_encryption import ( + CryptoAction, +) + +# Import from legacy DynamoDB Encryption Client +from dynamodb_encryption_sdk.encrypted.table import EncryptedTable as LegacyEncryptedTable +from dynamodb_encryption_sdk.material_providers.aws_kms import AwsKmsCryptographicMaterialsProvider + + +def setup_pure_awsdbe_table(kms_key_id: str, ddb_table_name: str): + """ + Set up a pure AWS Database Encryption SDK EncryptedTable without legacy override. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :returns EncryptedTable for DynamoDB + """ + # 1. Create a Keyring. This Keyring will be responsible for protecting the data keys that protect your data. + # For this example, we will create a AWS KMS Keyring with the AWS KMS Key we want to use. + # We will use the `CreateMrkMultiKeyring` method to create this keyring, + # as it will correctly handle both single region and Multi-Region KMS Keys. + mat_prov: AwsCryptographicMaterialProviders = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + kms_mrk_multi_keyring_input: CreateAwsKmsMrkMultiKeyringInput = CreateAwsKmsMrkMultiKeyringInput( + generator=kms_key_id, + ) + kms_mrk_multi_keyring: IKeyring = mat_prov.create_aws_kms_mrk_multi_keyring(input=kms_mrk_multi_keyring_input) + + # 2. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + } + + # 3. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowedUnsignedAttributesPrefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. 
+ # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attributeActionsOnEncrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowedUnsignedAttributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. + # + # For this example, we have designed our DynamoDb table such that any attribute name with + # the ":" prefix should be considered unauthenticated. + unsignAttrPrefix: str = ":" + + # 4. Create the DynamoDb Encryption configuration for the table we will be writing to. + # without the legacy override + table_configs = {} + table_config = DynamoDbTableEncryptionConfig( + logical_table_name=ddb_table_name, + partition_key_name="partition_key", + sort_key_name="sort_key", + attribute_actions_on_encrypt=attribute_actions_on_encrypt, + keyring=kms_mrk_multi_keyring, + allowed_unsigned_attribute_prefix=unsignAttrPrefix, + # Specifying an algorithm suite is not required, + # but is done here to demonstrate how to do so. + # We suggest using the + # `ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384` suite, + # which includes AES-GCM with key derivation, signing, and key commitment. + # This is also the default algorithm suite if one is not specified in this config. + # For more information on supported algorithm suites, see: + # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/supported-algorithms.html + algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384, + ) + table_configs[ddb_table_name] = table_config + tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs) + + # 5. Create the DB-ESDK EncryptedTable + return EncryptedTable( + table=boto3.resource("dynamodb").Table(ddb_table_name), + encryption_config=tables_config, + ) + + +def setup_awsdbe_table_with_legacy_override(kms_key_id: str, ddb_table_name: str, policy: str): + """ + Set up an AWS Database Encryption SDK EncryptedTable with legacy override. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :param policy: The policy required for the Legacy Override configuration + :returns EncryptedTable for DynamoDB + + """ + # 0. Create AWS SDK DynamoDB Client + ddb_table = boto3.resource("dynamodb").Table(ddb_table_name) + + # 1. Create the legacy EncryptedTable + cmp = AwsKmsCryptographicMaterialsProvider(key_id=kms_key_id) + legacy_encrypted_table = LegacyEncryptedTable( + table=ddb_table, + materials_provider=cmp, + ) + + # 2. Configure our legacy behavior, inputting the DynamoDBEncryptor, attribute actions + # created above, and legacy policy. 
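+ # The LegacyOverride wraps the legacy EncryptedTable so the DB-ESDK can fall back to the
+ # old encryption format for reads and/or writes, as directed by the supplied policy.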
+ legacy_override = LegacyOverride( + encryptor=legacy_encrypted_table, + attribute_actions_on_encrypt={ + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + }, + policy=policy, + ) + + # 3. Create a Keyring. This Keyring will be responsible for protecting the data keys that protect your data. + # For this example, we will create a AWS KMS Keyring with the AWS KMS Key we want to use. + # We will use the `CreateMrkMultiKeyring` method to create this keyring, + # as it will correctly handle both single region and Multi-Region KMS Keys. + mat_prov: AwsCryptographicMaterialProviders = AwsCryptographicMaterialProviders(config=MaterialProvidersConfig()) + kms_mrk_multi_keyring_input: CreateAwsKmsMrkMultiKeyringInput = CreateAwsKmsMrkMultiKeyringInput( + generator=kms_key_id, + ) + kms_mrk_multi_keyring: IKeyring = mat_prov.create_aws_kms_mrk_multi_keyring(input=kms_mrk_multi_keyring_input) + + # 4. Configure which attributes are encrypted and/or signed when writing new items. + # For each attribute that may exist on the items we plan to write to our DynamoDbTable, + # we must explicitly configure how they should be treated during item encryption: + # - ENCRYPT_AND_SIGN: The attribute is encrypted and included in the signature + # - SIGN_ONLY: The attribute not encrypted, but is still included in the signature + # - DO_NOTHING: The attribute is not encrypted and not included in the signature + attribute_actions_on_encrypt = { + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + } + + # 5. Configure which attributes we expect to be included in the signature + # when reading items. There are two options for configuring this: + # + # - (Recommended) Configure `allowedUnsignedAttributesPrefix`: + # When defining your DynamoDb schema and deciding on attribute names, + # choose a distinguishing prefix (such as ":") for all attributes that + # you do not want to include in the signature. + # This has two main benefits: + # - It is easier to reason about the security and authenticity of data within your item + # when all unauthenticated data is easily distinguishable by their attribute name. + # - If you need to add new unauthenticated attributes in the future, + # you can easily make the corresponding update to your `attributeActionsOnEncrypt` + # and immediately start writing to that new attribute, without + # any other configuration update needed. + # Once you configure this field, it is not safe to update it. + # + # - Configure `allowedUnsignedAttributes`: You may also explicitly list + # a set of attributes that should be considered unauthenticated when encountered + # on read. Be careful if you use this configuration. Do not remove an attribute + # name from this configuration, even if you are no longer writing with that attribute, + # as old items may still include this attribute, and our configuration needs to know + # to continue to exclude this attribute from the signature scope. + # If you add new attribute names to this field, you must first deploy the update to this + # field to all readers in your host fleet before deploying the update to start writing + # with that new attribute. 
+ #
+ # For this example, we have designed our DynamoDb table such that any attribute name with
+ # the ":" prefix should be considered unauthenticated.
+ unsignAttrPrefix: str = ":"
+
+ # 6. Create the DynamoDb Encryption configuration for the table we will be writing to,
+ # this time including the legacy override created above
+ table_configs = {}
+ table_config = DynamoDbTableEncryptionConfig(
+ logical_table_name=ddb_table_name,
+ partition_key_name="partition_key",
+ sort_key_name="sort_key",
+ attribute_actions_on_encrypt=attribute_actions_on_encrypt,
+ keyring=kms_mrk_multi_keyring,
+ legacy_override=legacy_override,
+ allowed_unsigned_attribute_prefix=unsignAttrPrefix,
+ # Specifying an algorithm suite is not required,
+ # but is done here to demonstrate how to do so.
+ # We suggest using the
+ # `ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384` suite,
+ # which includes AES-GCM with key derivation, signing, and key commitment.
+ # This is also the default algorithm suite if one is not specified in this config.
+ # For more information on supported algorithm suites, see:
+ # https://docs.aws.amazon.com/database-encryption-sdk/latest/devguide/supported-algorithms.html
+ algorithm_suite_id=DBEAlgorithmSuiteId.ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384,
+ )
+ table_configs[ddb_table_name] = table_config
+ tables_config = DynamoDbTablesEncryptionConfig(table_encryption_configs=table_configs)
+
+ # 7. Create the DB-ESDK EncryptedTable
+ return EncryptedTable(
+ table=boto3.resource("dynamodb").Table(ddb_table_name),
+ encryption_config=tables_config,
+ )
diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/table/migration_step_1.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/table/migration_step_1.py new file mode 100644 index 000000000..cdf514d8b --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/table/migration_step_1.py @@ -0,0 +1,81 @@
+# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+
+"""
+Migration Step 1.
+
+This is an example demonstrating how to start using the
+AWS Database Encryption SDK with a pre-existing table used with the DynamoDB Encryption Client.
+In this example, you configure an EncryptedTable to do the following:
+ - Read items encrypted in the old format
+ - Continue to encrypt items in the old format on write
+ - Read items encrypted in the new format
+While this step configures your client to be ready to start reading items encrypted in the new format,
+we do not yet expect to be reading any items in the new format.
+Before you move on to step 2, ensure that these changes have successfully been deployed
+to all of your readers.
+
+Running this example requires access to the DDB Table whose name
+is provided in CLI arguments.
+This table must be configured with the following
+primary key configuration:
+ - Partition key is named "partition_key" with type (S)
+ - Sort key is named "sort_key" with type (S)
+"""
+from aws_dbesdk_dynamodb.structures.dynamodb import LegacyPolicy
+
+from .common import setup_awsdbe_table_with_legacy_override
+
+
+def migration_step_1_with_table(kms_key_id: str, ddb_table_name: str, sort_read_value: int = 1):
+ """
+ Migration Step 1: Using the AWS Database Encryption SDK with Legacy Override.
+
+ :param kms_key_id: The ARN of the KMS key to use for encryption
+ :param ddb_table_name: The name of the DynamoDB table
+ :param sort_read_value: The sort key value to read
+
+ """
+ # 1. Create an EncryptedTable with legacy override.
+ # For Legacy Policy, use `FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT`.
+ # With this policy, you will continue to read and write items using the old format,
+ # but will be able to start reading new items in the new format as soon as they appear.
+ policy = LegacyPolicy.FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT
+ encrypted_table = setup_awsdbe_table_with_legacy_override(
+ kms_key_id=kms_key_id, ddb_table_name=ddb_table_name, policy=policy
+ )
+
+ # 2. Put an item in the old format since we are using a legacy override
+ # with the FORCE_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT policy
+ item_to_encrypt = {
+ "partition_key": "MigrationExampleForPythonTable",
+ "sort_key": 1,
+ "attribute1": "encrypt and sign me!",
+ "attribute2": "sign me!",
+ ":attribute3": "ignore me!",
+ }
+
+ put_item_response = encrypted_table.put_item(Item=item_to_encrypt)
+
+ # Demonstrate that PutItem succeeded
+ assert put_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200
+
+ # 3. Get an item back from the table using the EncryptedTable
+ # If this is an item written in the old format (e.g. any item written
+ # during Step 0 or 1), then we will attempt to decrypt the item
+ # using the legacy behavior.
+ # If this is an item written in the new format (e.g. any item written
+ # during Step 2 or after), then we will attempt to decrypt the item using
+ # the non-legacy behavior.
+ key_to_get = {"partition_key": "MigrationExampleForPythonTable", "sort_key": sort_read_value}
+ get_item_response = encrypted_table.get_item(Key=key_to_get)
+
+ # Demonstrate that GetItem succeeded and returned the decrypted item
+ assert get_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200
+ decrypted_item = get_item_response["Item"]
+ # Demonstrate we get the expected item back
+ assert decrypted_item["partition_key"] == "MigrationExampleForPythonTable"
+ assert decrypted_item["sort_key"] == sort_read_value
+ assert decrypted_item["attribute1"] == "encrypt and sign me!"
+ assert decrypted_item["attribute2"] == "sign me!"
+ assert decrypted_item[":attribute3"] == "ignore me!"
diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/table/migration_step_2.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/table/migration_step_2.py new file mode 100644 index 000000000..8795d0e3c --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/table/migration_step_2.py @@ -0,0 +1,82 @@
+# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+
+"""
+Migration Step 2.
+
+This is an example demonstrating how to update your configuration
+to start writing items using the latest encryption format, but still continue
+to read any items written using the old encryption format.
+
+Once you deploy this change to your system, you will have a dataset
+containing items in both the old and new format.
+Because the changes in Step 1 have been deployed to all our readers,
+we can be sure that our entire system is ready to read this new data.
+
+Running this example requires access to the DDB Table whose name
+is provided in CLI arguments.
+This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" + +from aws_dbesdk_dynamodb.structures.dynamodb import LegacyPolicy + +# Import from new AWS Database Encryption SDK +from .common import setup_awsdbe_table_with_legacy_override + + +def migration_step_2_with_table(kms_key_id: str, ddb_table_name: str, sort_read_value: int = 2): + """ + Migration Step 2: Using pure AWS DBESDK and legacy override together. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :param sort_read_value: The sort key value to read + + """ + # 1. Create a EncryptedTable with legacy override. + # When configuring our legacy behavior, use `FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT`. + # With this policy, you will continue to read items in both formats, + # but will only write new items using the new format. + encrypted_table = setup_awsdbe_table_with_legacy_override( + kms_key_id, ddb_table_name, policy=LegacyPolicy.FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT + ) + + # 2. Put an item into your table using the EncryptedTable. + # This item will be encrypted in the latest format, using the + # configuration from your modelled class to decide + # which attribute to encrypt and/or sign. + item_to_encrypt = { + "partition_key": "MigrationExampleForPythonTable", + "sort_key": 2, + "attribute1": "encrypt and sign me!", + "attribute2": "sign me!", + ":attribute3": "ignore me!", + } + + put_item_response = encrypted_table.put_item(Item=item_to_encrypt) + + # Demonstrate that PutItem succeeded + assert put_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 3. Get an item back from the table using the EncryptedTable. + # If this is an item written in the old format (e.g. any item written + # during Step 0 or 1), then we will attempt to decrypt the item + # using the legacy behavior. + # If this is an item written in the new format (e.g. any item written + # during Step 2 or after), then we will attempt to decrypt the item using + # the non-legacy behavior. + key_to_get = {"partition_key": "MigrationExampleForPythonTable", "sort_key": sort_read_value} + get_item_response = encrypted_table.get_item(Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + decrypted_item = get_item_response["Item"] + # Demonstrate we get the expected item back + assert decrypted_item["partition_key"] == "MigrationExampleForPythonTable" + assert decrypted_item["sort_key"] == sort_read_value + assert decrypted_item["attribute1"] == "encrypt and sign me!" + assert decrypted_item["attribute2"] == "sign me!" + assert decrypted_item[":attribute3"] == "ignore me!" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/table/migration_step_3.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/table/migration_step_3.py new file mode 100644 index 000000000..79e4b2c71 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/awsdbe/table/migration_step_3.py @@ -0,0 +1,72 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +""" +Migration Step 3. + +This is an example demonstrating how to update your configuration +to stop accepting reading items encrypted using the old format. 
+In order to proceed with this step, you will need to re-encrypt all +old items in your table. + +Once you complete Step 3, you can be sure that all items being read by your system +ensure the security properties configured for the new format. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" + +from .common import setup_pure_awsdbe_table + + +def migration_step_3_with_table(kms_key_id: str, ddb_table_name: str, sort_read_value: int = 3): + """ + Migration Step 3: Using only pure AWS DBESDK (no legacy override). + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :param sort_read_value: The sort key value to read + """ + # 1. Create the EncryptedTable + # Do not configure any legacy behavior. + encrypted_table = setup_pure_awsdbe_table(kms_key_id, ddb_table_name) + + # 2. Put an item into your table using the EncryptedTable. + # This item will be encrypted in the latest format, using the + # configuration from your modelled class to decide + # which attribute to encrypt and/or sign. + item_to_encrypt = { + "partition_key": "MigrationExampleForPythonTable", + "sort_key": 3, + "attribute1": "encrypt and sign me!", + "attribute2": "sign me!", + ":attribute3": "ignore me!", + } + + put_item_response = encrypted_table.put_item(Item=item_to_encrypt) + + # Demonstrate that PutItem succeeded + assert put_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 3. Get an item back from the table using the EncryptedTable. + # If this is an item written in the old format (e.g. any item written + # during Step 0 or 1), then we fail to return the item. + # If this is an item written in the new format (e.g. any item written + # during Step 2 or after), then we will attempt to decrypt the item using + # the non-legacy behavior. + key_to_get = {"partition_key": "MigrationExampleForPythonTable", "sort_key": sort_read_value} + get_item_response = encrypted_table.get_item(Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + decrypted_item = get_item_response["Item"] + # Demonstrate we get the expected item back + assert decrypted_item["partition_key"] == "MigrationExampleForPythonTable" + assert decrypted_item["sort_key"] == sort_read_value + assert decrypted_item["attribute1"] == "encrypt and sign me!" + assert decrypted_item["attribute2"] == "sign me!" + assert decrypted_item[":attribute3"] == "ignore me!" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/README.md b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/README.md new file mode 100644 index 000000000..7da2dc662 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/README.md @@ -0,0 +1,13 @@ +# Step 0 + +In Step 0, your system is in the starting state using the legacy DynamoDB Encryption Client: + +- reads items in the old format using the DynamoDB Encryption Client +- writes items in the old format using the DynamoDB Encryption Client +- cannot read items in the new AWS Database Encryption SDK format + +This represents the baseline configuration before beginning the migration process. +Your dataset consists only of data written in the old format. 
+ +When operating in this state, your system is fully dependent on the legacy DynamoDB Encryption Client library +and its associated encryption format. diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/client/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/client/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/client/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/client/migration_step_0.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/client/migration_step_0.py new file mode 100644 index 000000000..7f694de29 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/client/migration_step_0.py @@ -0,0 +1,83 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +""" +Migration Step 0. + +This is an example demonstrating use with the DynamoDb Encryption Client. +and is the starting state for our migration to the AWS Database Encryption SDK for DynamoDb. +In this example we configure an AWS SDK Client configured to encrypt and decrypt +items. The encryption and decryption of data is configured to use a KMS Key as the root of trust. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" + +import boto3 + +# Import from legacy DynamoDB Encryption Client +from dynamodb_encryption_sdk.encrypted.client import EncryptedClient +from dynamodb_encryption_sdk.identifiers import CryptoAction +from dynamodb_encryption_sdk.material_providers.aws_kms import AwsKmsCryptographicMaterialsProvider +from dynamodb_encryption_sdk.structures import AttributeActions + + +def migration_step_0_with_client(kms_key_id: str, ddb_table_name: str, sort_read_value: int = 0): + """ + Migration Step 0: Using the DynamoDb Encryption Client with EncryptedClient. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :param sort_read_value: The sort key value to read + + """ + # 1. Create the MaterialProvider that protects your data keys. For this example, + # we create a KmsCryptographicMaterialsProvider which protects data keys using a single kmsKey. + cmp = AwsKmsCryptographicMaterialsProvider(key_id=kms_key_id) + + # 2. 
Create the DynamoDBEncryptor using the Material Provider created above + actions = AttributeActions( + default_action=CryptoAction.ENCRYPT_AND_SIGN, + attribute_actions={ + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + }, + ) + + # 3. Create a legacy EncryptedClient. + ddb_client = boto3.client("dynamodb") + encrypted_client = EncryptedClient(client=ddb_client, materials_provider=cmp, attribute_actions=actions) + + # 4. Put an example item into our DynamoDb table. + # This item will be encrypted client-side before it is sent to DynamoDb. + item = { + "partition_key": {"S": "MigrationExampleForPython"}, + "sort_key": {"N": str(0)}, + "attribute1": {"S": "encrypt and sign me!"}, + "attribute2": {"S": "sign me!"}, + ":attribute3": {"S": "ignore me!"}, + } + + encrypted_client.put_item(TableName=ddb_table_name, Item=item) + + # 5. Get this item back from DynamoDb. + # The item will be decrypted client-side, and the original item returned. + key = {"partition_key": {"S": "MigrationExampleForPython"}, "sort_key": {"N": str(sort_read_value)}} + + get_item_response = encrypted_client.get_item(TableName=ddb_table_name, Key=key) + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + decrypted_item = get_item_response["Item"] + # Demonstrate we get the expected item back + assert decrypted_item["partition_key"]["S"] == "MigrationExampleForPython" + assert decrypted_item["sort_key"]["N"] == str(sort_read_value) + assert decrypted_item["attribute1"]["S"] == "encrypt and sign me!" + assert decrypted_item["attribute2"]["S"] == "sign me!" + assert decrypted_item[":attribute3"]["S"] == "ignore me!" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/paginator/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/paginator/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/paginator/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/paginator/migration_step_0.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/paginator/migration_step_0.py new file mode 100644 index 000000000..302376d1d --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/paginator/migration_step_0.py @@ -0,0 +1,95 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +""" +Migration Step 0. + +This is an example demonstrating use with the DynamoDb Encryption Client. +and is the starting state for our migration to the AWS Database Encryption SDK for DynamoDb. +In this example we configure an EncryptedClient which provides an encrypted paginator +configured to encrypt and decrypt items. The encryption and decryption of data is +configured to use a KMS Key as the root of trust. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. 
+This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (N) +""" + +import boto3 + +# Import from legacy DynamoDB Encryption Client +from dynamodb_encryption_sdk.encrypted.client import EncryptedClient +from dynamodb_encryption_sdk.identifiers import CryptoAction +from dynamodb_encryption_sdk.material_providers.aws_kms import AwsKmsCryptographicMaterialsProvider +from dynamodb_encryption_sdk.structures import AttributeActions + + +def migration_step_0_with_paginator(kms_key_id: str, ddb_table_name: str, sort_read_value: int = 0): + """ + Migration Step 0: Using the DynamoDb Encryption Client with EncryptedClient's paginator. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + """ + # 1. Create the MaterialProvider that protects your data keys. For this example, + # we create a KmsCryptographicMaterialsProvider which protects data keys using a single kmsKey. + cmp = AwsKmsCryptographicMaterialsProvider(key_id=kms_key_id) + + # 2. Create the AttributeActions to configure encryption and signing + actions = AttributeActions( + default_action=CryptoAction.ENCRYPT_AND_SIGN, + attribute_actions={ + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + }, + ) + + # 3. Create a legacy EncryptedClient. + ddb_client = boto3.client("dynamodb") + encrypted_client = EncryptedClient(client=ddb_client, materials_provider=cmp, attribute_actions=actions) + + # 4. Put an example item into our DynamoDb table. + # This item will be encrypted client-side before it is sent to DynamoDb. + item = { + "partition_key": {"S": "PaginatorMigrationExampleForPython"}, + "sort_key": {"N": "0"}, + "attribute1": {"S": "encrypt and sign me!"}, + "attribute2": {"S": "sign me!"}, + ":attribute3": {"S": "ignore me!"}, + } + + put_item_response = encrypted_client.put_item(TableName=ddb_table_name, Item=item) + # Demonstrate that PutItem succeeded + assert put_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 5. Get a paginator from the encrypted client + # The paginator will automatically decrypt items as they are returned. + encrypted_paginator = encrypted_client.get_paginator("query") + + # 6. Use the paginator to get items from the table + items = [] + for page in encrypted_paginator.paginate( + TableName=ddb_table_name, + KeyConditionExpression="partition_key = :partition_key AND sort_key = :sort_key", + ExpressionAttributeValues={ + ":partition_key": {"S": "PaginatorMigrationExampleForPython"}, + ":sort_key": {"N": str(sort_read_value)}, + }, + ): + for item in page["Items"]: + items.append(item) + + # 7. Verify the decrypted items + assert len(items) == 1 # We should have only one item with above key condition + item = next((i for i in items if i["sort_key"]["N"] == str(sort_read_value)), None) + assert item is not None + assert item["partition_key"]["S"] == "PaginatorMigrationExampleForPython" + assert item["attribute1"]["S"] == "encrypt and sign me!" + assert item["attribute2"]["S"] == "sign me!" + assert item[":attribute3"]["S"] == "ignore me!" 
diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/resource/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/resource/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/resource/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/resource/migration_step_0.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/resource/migration_step_0.py new file mode 100644 index 000000000..d4ef3adf9 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/resource/migration_step_0.py @@ -0,0 +1,111 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +""" +Migration Step 0. + +This is an example demonstrating use with the DynamoDb Encryption Client. +and is the starting state for our migration to the AWS Database Encryption SDK for DynamoDb. +In this example we configure an EncryptedResource configured to encrypt and decrypt +items. The encryption and decryption of data is configured to use a KMS Key as the root of trust. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. +This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (N) +""" + +import boto3 + +# Import from legacy DynamoDB Encryption Resource +from dynamodb_encryption_sdk.encrypted.resource import EncryptedResource +from dynamodb_encryption_sdk.identifiers import CryptoAction +from dynamodb_encryption_sdk.material_providers.aws_kms import AwsKmsCryptographicMaterialsProvider +from dynamodb_encryption_sdk.structures import AttributeActions + + +def migration_step_0_with_resource(kms_key_id: str, ddb_table_name: str, sort_read_value: int = 0): + """ + Migration Step 0: Using the DynamoDb Encryption Client with EncryptedResource. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :param sort_read_value: The sort key value to read + + """ + # 1. Create the MaterialProvider that protects your data keys. For this example, + # we create a KmsCryptographicMaterialsProvider which protects data keys using a single kmsKey. + cmp = AwsKmsCryptographicMaterialsProvider(key_id=kms_key_id) + + # 2. Create the DynamoDBEncryptor using the Material Provider created above + actions = AttributeActions( + default_action=CryptoAction.ENCRYPT_AND_SIGN, + attribute_actions={ + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + }, + ) + + # 3. Create a legacy EncryptedResource. + encrypted_resource = EncryptedResource( + resource=boto3.resource("dynamodb"), materials_provider=cmp, attribute_actions=actions + ) + + # 4. Write a batch of items to the table. + # These items will be encrypted client-side before they are sent to DynamoDB. 
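+ # Note that the resource-based examples use plain Python values (str/int); boto3's
+ # resource layer handles DynamoDB type serialization, unlike the client example above,
+ # which passes explicit {"S": ...}/{"N": ...} attribute values.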
+ items = [ + { + "partition_key": "PythonEncryptedResourceMigrationExample-1", + "sort_key": 0, + "attribute1": "encrypt and sign me!", + "attribute2": "sign me!", + ":attribute3": "ignore me!", + }, + { + "partition_key": "PythonEncryptedResourceMigrationExample-2", + "sort_key": 0, + "attribute1": "encrypt and sign me!", + "attribute2": "sign me!", + ":attribute3": "ignore me!", + }, + ] + + batch_write_items_put_request = { + "RequestItems": { + ddb_table_name: [{"PutRequest": {"Item": item}} for item in items], + }, + } + + batch_write_items_put_response = encrypted_resource.batch_write_item(**batch_write_items_put_request) + + # Demonstrate that BatchWriteItem succeeded + assert batch_write_items_put_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 5. Read the items back from the table. + # The items will be decrypted client-side, and the original items returned. + batch_get_items_request = { + "RequestItems": { + ddb_table_name: { + "Keys": [{"partition_key": item["partition_key"], "sort_key": sort_read_value} for item in items], + } + }, + } + + batch_get_items_response = encrypted_resource.batch_get_item(**batch_get_items_request) + + # Demonstrate that BatchGetItem succeeded with the expected result + assert batch_get_items_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + for item in batch_get_items_response["Responses"][ddb_table_name]: + assert ( + item["partition_key"] == "PythonEncryptedResourceMigrationExample-1" + or item["partition_key"] == "PythonEncryptedResourceMigrationExample-2" + ) + assert item["sort_key"] == sort_read_value + assert item["attribute1"] == "encrypt and sign me!" + assert item["attribute2"] == "sign me!" + assert item[":attribute3"] == "ignore me!" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/table/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/table/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/table/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/table/migration_step_0.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/table/migration_step_0.py new file mode 100644 index 000000000..b23f982cf --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/src/ddbec/table/migration_step_0.py @@ -0,0 +1,88 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +""" +Migration Step 0. + +This is an example demonstrating use with the DynamoDb Encryption Client. +and is the starting state for our migration to the AWS Database Encryption SDK for DynamoDb. +In this example we configure an EncryptedTable configured to encrypt and decrypt +items. The encryption and decryption of data is configured to use a KMS Key as the root of trust. + +Running this example requires access to the DDB Table whose name +is provided in CLI arguments. 
+This table must be configured with the following +primary key configuration: + - Partition key is named "partition_key" with type (S) + - Sort key is named "sort_key" with type (S) +""" + +import boto3 + +# Import from legacy DynamoDB Encryption Table +from dynamodb_encryption_sdk.encrypted.table import EncryptedTable +from dynamodb_encryption_sdk.identifiers import CryptoAction +from dynamodb_encryption_sdk.material_providers.aws_kms import AwsKmsCryptographicMaterialsProvider +from dynamodb_encryption_sdk.structures import AttributeActions + + +def migration_step_0_with_table(kms_key_id: str, ddb_table_name: str, sort_read_value: int = 0): + """ + Migration Step 0: Using the DynamoDb Encryption Client with EncryptedTable. + + :param kms_key_id: The ARN of the KMS key to use for encryption + :param ddb_table_name: The name of the DynamoDB table + :param sort_read_value: The sort key value to read + + """ + # 1. Create the MaterialProvider that protects your data keys. For this example, + # we create a KmsCryptographicMaterialsProvider which protects data keys using a single kmsKey. + cmp = AwsKmsCryptographicMaterialsProvider(key_id=kms_key_id) + + # 2. Create the DynamoDBEncryptor using the Material Provider created above + actions = AttributeActions( + default_action=CryptoAction.ENCRYPT_AND_SIGN, + attribute_actions={ + "partition_key": CryptoAction.SIGN_ONLY, + "sort_key": CryptoAction.SIGN_ONLY, + "attribute1": CryptoAction.ENCRYPT_AND_SIGN, + "attribute2": CryptoAction.SIGN_ONLY, + ":attribute3": CryptoAction.DO_NOTHING, + }, + ) + + # 3. Create a legacy EncryptedTable. + encrypted_table = EncryptedTable( + table=boto3.resource("dynamodb").Table(ddb_table_name), materials_provider=cmp, attribute_actions=actions + ) + + # 4. Put an example item into our DynamoDb table. + # This item will be encrypted client-side before it is sent to DynamoDb. + item_to_encrypt = { + "partition_key": "MigrationExampleForPythonTable", + "sort_key": 0, + "attribute1": "encrypt and sign me!", + "attribute2": "sign me!", + ":attribute3": "ignore me!", + } + + put_item_response = encrypted_table.put_item(Item=item_to_encrypt) + + # Demonstrate that PutItem succeeded + assert put_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + + # 5. Get this item back from DynamoDb. + # The item will be decrypted client-side, and the original item returned. + key_to_get = {"partition_key": "MigrationExampleForPythonTable", "sort_key": sort_read_value} + + get_item_response = encrypted_table.get_item(Key=key_to_get) + + # Demonstrate that GetItem succeeded and returned the decrypted item + assert get_item_response["ResponseMetadata"]["HTTPStatusCode"] == 200 + decrypted_item = get_item_response["Item"] + # Demonstrate we get the expected item back + assert decrypted_item["partition_key"] == "MigrationExampleForPythonTable" + assert decrypted_item["sort_key"] == sort_read_value + assert decrypted_item["attribute1"] == "encrypt and sign me!" + assert decrypted_item["attribute2"] == "sign me!" + assert decrypted_item[":attribute3"] == "ignore me!" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/client/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/client/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/client/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/client/test_migration_step_1.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/client/test_migration_step_1.py new file mode 100644 index 000000000..db50e0bda --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/client/test_migration_step_1.py @@ -0,0 +1,58 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test for Migration Step 1. + +This test validates the compatibility between different stages of migration +and ensures that step 1 (using AWS DBESDK with legacy override) can read data +from all other migration steps. +""" +import pytest + +from ....src.awsdbe.client import ( + migration_step_1, + migration_step_2, + migration_step_3, +) +from ....src.ddbec.client import migration_step_0 +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_migration_step_1_with_client(): + """Test migration step 1 compatibility with different data formats.""" + # Successfully executes Step 1 + migration_step_1.migration_step_1_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + + # Given: Step 0 has succeeded + migration_step_0.migration_step_0_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + # When: Execute Step 1 with sort_read_value=0 + # Then: Success (i.e. can read values in old format) + migration_step_1.migration_step_1_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + + # Given: Step 2 has succeeded + migration_step_2.migration_step_2_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + # When: Execute Step 1 with sort_read_value=2 + # Then: Success (i.e. can read values in new format) + migration_step_1.migration_step_1_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + + # Given: Step 3 has succeeded + migration_step_3.migration_step_3_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) + # When: Execute Step 1 with sort_read_value=3 + # Then: Success (i.e. 
can read values in new format) + migration_step_1.migration_step_1_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/client/test_migration_step_2.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/client/test_migration_step_2.py new file mode 100644 index 000000000..5734db996 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/client/test_migration_step_2.py @@ -0,0 +1,58 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test for Migration Step 2. + +This test validates the compatibility between different stages of migration +and ensures that step 2 (using AWS DBESDK with FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT) +can read data from all other migration steps. +""" +import pytest + +from ....src.awsdbe.client import ( + migration_step_1, + migration_step_2, + migration_step_3, +) +from ....src.ddbec.client import migration_step_0 +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_migration_step_2_with_client(): + """Test migration step 2 compatibility with different data formats.""" + # Successfully executes Step 2 + migration_step_2.migration_step_2_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + + # Given: Step 0 has succeeded + migration_step_0.migration_step_0_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + # When: Execute Step 2 with sort_read_value=0 + # Then: Success (i.e. can read values in old format) + migration_step_2.migration_step_2_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + + # Given: Step 1 has succeeded + migration_step_1.migration_step_1_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + # When: Execute Step 2 with sort_read_value=1 + # Then: Success (i.e. can read values in old format) + migration_step_2.migration_step_2_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + + # Given: Step 3 has succeeded + migration_step_3.migration_step_3_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) + # When: Execute Step 2 with sort_read_value=3 + # Then: Success (i.e. can read values in new format) + migration_step_2.migration_step_2_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/client/test_migration_step_3.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/client/test_migration_step_3.py new file mode 100644 index 000000000..eab2b18b3 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/client/test_migration_step_3.py @@ -0,0 +1,63 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test for Migration Step 3. + +This test validates the compatibility between different stages of migration +and ensures that step 3 (using only AWS DBESDK) behaves correctly with data +from different migration stages. 
+""" +import pytest +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.errors import ( + DynamoDbItemEncryptor, +) + +from ....src.awsdbe.client import ( + migration_step_1, + migration_step_2, + migration_step_3, +) +from ....src.ddbec.client import migration_step_0 +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_migration_step_3_with_client(): + """Test migration step 3 compatibility with different data formats.""" + # Successfully executes Step 3 + migration_step_3.migration_step_3_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) + + # Given: Step 0 has succeeded + migration_step_0.migration_step_0_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + # When: Execute Step 3 with sort_read_value=0 + # Then: throws DynamoDbItemEncryptor Exception (i.e. cannot read values in old format) + with pytest.raises(DynamoDbItemEncryptor): + migration_step_3.migration_step_3_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + + # Given: Step 1 has succeeded + migration_step_1.migration_step_1_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + # When: Execute Step 3 with sort_read_value=1 + # Then: throws DynamoDbItemEncryptor Exception (i.e. cannot read values in old format) + with pytest.raises(DynamoDbItemEncryptor): + migration_step_3.migration_step_3_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + + # Given: Step 2 has succeeded + migration_step_2.migration_step_2_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + # When: Execute Step 3 with sort_read_value=2 + # Then: Success (i.e. can read values in new format) + migration_step_3.migration_step_3_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/paginator/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/paginator/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/paginator/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/paginator/test_migration_step_1.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/paginator/test_migration_step_1.py new file mode 100644 index 000000000..dc3c5d615 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/paginator/test_migration_step_1.py @@ -0,0 +1,58 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test for Migration Step 1. + +This test validates the compatibility between different stages of migration +and ensures that step 1 (using AWS DBESDK with legacy override) can read data +from all other migration steps. 
+""" +import pytest + +from ....src.awsdbe.paginator import ( + migration_step_1, + migration_step_2, + migration_step_3, +) +from ....src.ddbec.paginator import migration_step_0 +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_migration_step_1_with_paginator(): + """Test migration step 1 compatibility with different data formats.""" + # Successfully executes Step 1 + migration_step_1.migration_step_1_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + + # Given: Step 0 has succeeded + migration_step_0.migration_step_0_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + # When: Execute Step 1 with sort_read_value=0 + # Then: Success (i.e. can read values in old format) + migration_step_1.migration_step_1_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + + # Given: Step 2 has succeeded + migration_step_2.migration_step_2_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + # When: Execute Step 1 with sort_read_value=2 + # Then: Success (i.e. can read values in new format) + migration_step_1.migration_step_1_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + + # Given: Step 3 has succeeded + migration_step_3.migration_step_3_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) + # When: Execute Step 1 with sort_read_value=3 + # Then: Success (i.e. can read values in new format) + migration_step_1.migration_step_1_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/paginator/test_migration_step_2.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/paginator/test_migration_step_2.py new file mode 100644 index 000000000..9f08252a7 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/paginator/test_migration_step_2.py @@ -0,0 +1,58 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test for Migration Step 2. + +This test validates the compatibility between different stages of migration +and ensures that step 2 (using AWS DBESDK with FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT) +can read data from all other migration steps. +""" +import pytest + +from ....src.awsdbe.paginator import ( + migration_step_1, + migration_step_2, + migration_step_3, +) +from ....src.ddbec.paginator import migration_step_0 +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_migration_step_2_with_paginator(): + """Test migration step 2 compatibility with different data formats.""" + # Successfully executes Step 2 + migration_step_2.migration_step_2_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + + # Given: Step 0 has succeeded + migration_step_0.migration_step_0_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + # When: Execute Step 2 with sort_read_value=0 + # Then: Success (i.e. 
can read values in old format) + migration_step_2.migration_step_2_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + + # Given: Step 1 has succeeded + migration_step_1.migration_step_1_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + # When: Execute Step 2 with sort_read_value=1 + # Then: Success (i.e. can read values in old format) + migration_step_2.migration_step_2_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + + # Given: Step 3 has succeeded + migration_step_3.migration_step_3_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) + # When: Execute Step 2 with sort_read_value=3 + # Then: Success (i.e. can read values in new format) + migration_step_2.migration_step_2_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/paginator/test_migration_step_3.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/paginator/test_migration_step_3.py new file mode 100644 index 000000000..c884d3143 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/paginator/test_migration_step_3.py @@ -0,0 +1,63 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test for Migration Step 3. + +This test validates the compatibility between different stages of migration +and ensures that step 3 (using only AWS DBESDK) behaves correctly with data +from different migration stages. +""" +import pytest +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.errors import ( + DynamoDbItemEncryptor, +) + +from ....src.awsdbe.paginator import ( + migration_step_1, + migration_step_2, + migration_step_3, +) +from ....src.ddbec.paginator import migration_step_0 +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_migration_step_3_with_paginator(): + """Test migration step 3 compatibility with different data formats.""" + # Successfully executes Step 3 + migration_step_3.migration_step_3_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) + + # Given: Step 0 has succeeded + migration_step_0.migration_step_0_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + # When: Execute Step 3 with sort_read_value=0 + # Then: throws DynamoDbItemEncryptor Exception (i.e. cannot read values in old format) + with pytest.raises(DynamoDbItemEncryptor): + migration_step_3.migration_step_3_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + + # Given: Step 1 has succeeded + migration_step_1.migration_step_1_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + # When: Execute Step 3 with sort_read_value=1 + # Then: throws DynamoDbItemEncryptor Exception (i.e. 
cannot read values in old format) + with pytest.raises(DynamoDbItemEncryptor): + migration_step_3.migration_step_3_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + + # Given: Step 2 has succeeded + migration_step_2.migration_step_2_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + # When: Execute Step 3 with sort_read_value=2 + # Then: Success (i.e. can read values in new format) + migration_step_3.migration_step_3_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/resource/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/resource/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/resource/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/resource/test_migration_step_1.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/resource/test_migration_step_1.py new file mode 100644 index 000000000..fde9845a8 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/resource/test_migration_step_1.py @@ -0,0 +1,58 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test for Migration Step 1. + +This test validates the compatibility between different stages of migration +and ensures that step 1 (using AWS DBESDK with legacy override) can read data +from all other migration steps. +""" +import pytest + +from ....src.awsdbe.resource import ( + migration_step_1, + migration_step_2, + migration_step_3, +) +from ....src.ddbec.resource import migration_step_0 +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_migration_step_1_with_resource(): + """Test migration step 1 compatibility with different data formats.""" + # Successfully executes Step 1 + migration_step_1.migration_step_1_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + + # Given: Step 0 has succeeded + migration_step_0.migration_step_0_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + # When: Execute Step 1 with sort_read_value=0 + # Then: Success (i.e. can read values in old format) + migration_step_1.migration_step_1_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + + # Given: Step 2 has succeeded + migration_step_2.migration_step_2_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + # When: Execute Step 1 with sort_read_value=2 + # Then: Success (i.e. can read values in new format) + migration_step_1.migration_step_1_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + + # Given: Step 3 has succeeded + migration_step_3.migration_step_3_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) + # When: Execute Step 1 with sort_read_value=3 + # Then: Success (i.e. 
can read values in new format) + migration_step_1.migration_step_1_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/resource/test_migration_step_2.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/resource/test_migration_step_2.py new file mode 100644 index 000000000..c79a1b84e --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/resource/test_migration_step_2.py @@ -0,0 +1,58 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test for Migration Step 2. + +This test validates the compatibility between different stages of migration +and ensures that step 2 (using AWS DBESDK with FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT) +can read data from all other migration steps. +""" +import pytest + +from ....src.awsdbe.resource import ( + migration_step_1, + migration_step_2, + migration_step_3, +) +from ....src.ddbec.resource import migration_step_0 +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_migration_step_2_with_resource(): + """Test migration step 2 compatibility with different data formats.""" + # Successfully executes Step 2 + migration_step_2.migration_step_2_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + + # Given: Step 0 has succeeded + migration_step_0.migration_step_0_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + # When: Execute Step 2 with sort_read_value=0 + # Then: Success (i.e. can read values in old format) + migration_step_2.migration_step_2_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + + # Given: Step 1 has succeeded + migration_step_1.migration_step_1_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + # When: Execute Step 2 with sort_read_value=1 + # Then: Success (i.e. can read values in old format) + migration_step_2.migration_step_2_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + + # Given: Step 3 has succeeded + migration_step_3.migration_step_3_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) + # When: Execute Step 2 with sort_read_value=3 + # Then: Success (i.e. can read values in new format) + migration_step_2.migration_step_2_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/resource/test_migration_step_3.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/resource/test_migration_step_3.py new file mode 100644 index 000000000..72d05242d --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/resource/test_migration_step_3.py @@ -0,0 +1,63 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test for Migration Step 3. + +This test validates the compatibility between different stages of migration +and ensures that step 3 (using only AWS DBESDK) behaves correctly with data +from different migration stages. 
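+As the assertions below show, step 3 has no legacy override, so decrypting items written by steps 0 or 1 is expected to raise; items written by step 2 decrypt normally.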
+""" +import pytest +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.errors import ( + DynamoDbItemEncryptor, +) + +from ....src.awsdbe.resource import ( + migration_step_1, + migration_step_2, + migration_step_3, +) +from ....src.ddbec.resource import migration_step_0 +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_migration_step_3_with_resource(): + """Test migration step 3 compatibility with different data formats.""" + # Successfully executes Step 3 + migration_step_3.migration_step_3_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) + + # Given: Step 0 has succeeded + migration_step_0.migration_step_0_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + # When: Execute Step 3 with sort_read_value=0 + # Then: throws DynamoDbItemEncryptor Exception (i.e. cannot read values in old format) + with pytest.raises(DynamoDbItemEncryptor): + migration_step_3.migration_step_3_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + + # Given: Step 1 has succeeded + migration_step_1.migration_step_1_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + # When: Execute Step 3 with sort_read_value=1 + # Then: throws DynamoDbItemEncryptor Exception (i.e. cannot read values in old format) + with pytest.raises(DynamoDbItemEncryptor): + migration_step_3.migration_step_3_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + + # Given: Step 2 has succeeded + migration_step_2.migration_step_2_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + # When: Execute Step 3 with sort_read_value=2 + # Then: Success (i.e. can read values in new format) + migration_step_3.migration_step_3_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/table/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/table/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/table/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/table/test_migration_step_1.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/table/test_migration_step_1.py new file mode 100644 index 000000000..8fed113b2 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/table/test_migration_step_1.py @@ -0,0 +1,58 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test for Migration Step 1. + +This test validates the compatibility between different stages of migration +and ensures that step 1 (using AWS DBESDK with legacy override) can read data +from all other migration steps. 
+""" +import pytest + +from ....src.awsdbe.table import ( + migration_step_1, + migration_step_2, + migration_step_3, +) +from ....src.ddbec.table import migration_step_0 +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_migration_step_1_with_table(): + """Test migration step 1 compatibility with different data formats.""" + # Successfully executes Step 1 + migration_step_1.migration_step_1_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + + # Given: Step 0 has succeeded + migration_step_0.migration_step_0_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + # When: Execute Step 1 with sort_read_value=0 + # Then: Success (i.e. can read values in old format) + migration_step_1.migration_step_1_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + + # Given: Step 2 has succeeded + migration_step_2.migration_step_2_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + # When: Execute Step 1 with sort_read_value=2 + # Then: Success (i.e. can read values in new format) + migration_step_1.migration_step_1_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + + # Given: Step 3 has succeeded + migration_step_3.migration_step_3_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) + # When: Execute Step 1 with sort_read_value=3 + # Then: Success (i.e. can read values in new format) + migration_step_1.migration_step_1_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/table/test_migration_step_2.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/table/test_migration_step_2.py new file mode 100644 index 000000000..27330d236 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/table/test_migration_step_2.py @@ -0,0 +1,58 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test for Migration Step 2. + +This test validates the compatibility between different stages of migration +and ensures that step 2 (using AWS DBESDK with FORBID_LEGACY_ENCRYPT_ALLOW_LEGACY_DECRYPT) +can read data from all other migration steps. +""" +import pytest + +from ....src.awsdbe.table import ( + migration_step_1, + migration_step_2, + migration_step_3, +) +from ....src.ddbec.table import migration_step_0 +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_migration_step_2_with_table(): + """Test migration step 2 compatibility with different data formats.""" + # Successfully executes Step 2 + migration_step_2.migration_step_2_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + + # Given: Step 0 has succeeded + migration_step_0.migration_step_0_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + # When: Execute Step 2 with sort_read_value=0 + # Then: Success (i.e. 
can read values in old format) + migration_step_2.migration_step_2_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + + # Given: Step 1 has succeeded + migration_step_1.migration_step_1_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + # When: Execute Step 2 with sort_read_value=1 + # Then: Success (i.e. can read values in old format) + migration_step_2.migration_step_2_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + + # Given: Step 3 has succeeded + migration_step_3.migration_step_3_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) + # When: Execute Step 2 with sort_read_value=3 + # Then: Success (i.e. can read values in new format) + migration_step_2.migration_step_2_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/table/test_migration_step_3.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/table/test_migration_step_3.py new file mode 100644 index 000000000..993ba82ca --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/awsdbe/table/test_migration_step_3.py @@ -0,0 +1,63 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test for Migration Step 3. + +This test validates the compatibility between different stages of migration +and ensures that step 3 (using only AWS DBESDK) behaves correctly with data +from different migration stages. +""" +import pytest +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_transforms.errors import ( + DynamoDbItemEncryptor, +) + +from ....src.awsdbe.table import ( + migration_step_1, + migration_step_2, + migration_step_3, +) +from ....src.ddbec.table import migration_step_0 +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_migration_step_3_with_table(): + """Test migration step 3 compatibility with different data formats.""" + # Successfully executes Step 3 + migration_step_3.migration_step_3_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) + + # Given: Step 0 has succeeded + migration_step_0.migration_step_0_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + # When: Execute Step 3 with sort_read_value=0 + # Then: throws DynamoDbItemEncryptor Exception (i.e. cannot read values in old format) + with pytest.raises(DynamoDbItemEncryptor): + migration_step_3.migration_step_3_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + + # Given: Step 1 has succeeded + migration_step_1.migration_step_1_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + # When: Execute Step 3 with sort_read_value=1 + # Then: throws DynamoDbItemEncryptor Exception (i.e. 
cannot read values in old format) + with pytest.raises(DynamoDbItemEncryptor): + migration_step_3.migration_step_3_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + + # Given: Step 2 has succeeded + migration_step_2.migration_step_2_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + # When: Execute Step 3 with sort_read_value=2 + # Then: Success (i.e. can read values in new format) + migration_step_3.migration_step_3_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/client/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/client/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/client/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/client/test_migration_step_0.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/client/test_migration_step_0.py new file mode 100644 index 000000000..9d908b669 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/client/test_migration_step_0.py @@ -0,0 +1,61 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test for Migration Step 0. + +This test validates the compatibility between different stages of migration +and ensures that step 0 (using legacy DynamoDB Encryption Client) behaves correctly +with data from different migration stages. +""" +import pytest +from dynamodb_encryption_sdk.exceptions import DecryptionError + +from ....src.awsdbe.client import ( + migration_step_1, + migration_step_2, + migration_step_3, +) +from ....src.ddbec.client import migration_step_0 +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_migration_step_0_with_client(): + """Test migration step 0 compatibility with different data formats.""" + # Successfully executes Step 0 + migration_step_0.migration_step_0_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + + # Given: Step 1 has succeeded + migration_step_1.migration_step_1_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + # When: Execute Step 0 with sort_read_value=1 + # Then: Success (i.e. 
can read values in old format) + migration_step_0.migration_step_0_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + + # Given: Step 2 has succeeded + migration_step_2.migration_step_2_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + # When: Execute Step 0 with sort_read_value=2 + # Then: throws DecryptionError Exception (i.e. cannot read values in new format) + with pytest.raises(DecryptionError): # The exact exception may vary in Python implementation + migration_step_0.migration_step_0_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + + # Given: Step 3 has succeeded + migration_step_3.migration_step_3_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) + # When: Execute Step 0 with sort_read_value=3 + # Then: throws DecryptionError Exception (i.e. cannot read values in new format) + with pytest.raises(DecryptionError): # The exact exception may vary in Python implementation + migration_step_0.migration_step_0_with_client( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/paginator/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/paginator/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/paginator/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/paginator/test_migration_step_0.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/paginator/test_migration_step_0.py new file mode 100644 index 000000000..7383d1b32 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/paginator/test_migration_step_0.py @@ -0,0 +1,61 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test for Migration Step 0. + +This test validates the compatibility between different stages of migration +and ensures that step 0 (using legacy DynamoDB Encryption Client) behaves correctly +with data from different migration stages. +""" +import pytest +from dynamodb_encryption_sdk.exceptions import DecryptionError + +from ....src.awsdbe.paginator import ( + migration_step_1, + migration_step_2, + migration_step_3, +) +from ....src.ddbec.paginator import migration_step_0 +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_migration_step_0_with_paginator(): + """Test migration step 0 compatibility with different data formats.""" + # Successfully executes Step 0 + migration_step_0.migration_step_0_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + + # Given: Step 1 has succeeded + migration_step_1.migration_step_1_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + # When: Execute Step 0 with sort_read_value=1 + # Then: Success (i.e. 
can read values in old format) + migration_step_0.migration_step_0_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + + # Given: Step 2 has succeeded + migration_step_2.migration_step_2_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + # When: Execute Step 0 with sort_read_value=2 + # Then: throws DecryptionError Exception (i.e. cannot read values in new format) + with pytest.raises(DecryptionError): # The exact exception may vary in Python implementation + migration_step_0.migration_step_0_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + + # Given: Step 3 has succeeded + migration_step_3.migration_step_3_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) + # When: Execute Step 0 with sort_read_value=3 + # Then: throws DecryptionError Exception (i.e. cannot read values in new format) + with pytest.raises(DecryptionError): # The exact exception may vary in Python implementation + migration_step_0.migration_step_0_with_paginator( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/resource/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/resource/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/resource/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/resource/test_migration_step_0.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/resource/test_migration_step_0.py new file mode 100644 index 000000000..4265c368a --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/resource/test_migration_step_0.py @@ -0,0 +1,61 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test for Migration Step 0. + +This test validates the compatibility between different stages of migration +and ensures that step 0 (using legacy DynamoDB Encryption Client) behaves correctly +with data from different migration stages. +""" +import pytest +from dynamodb_encryption_sdk.exceptions import DecryptionError + +from ....src.awsdbe.resource import ( + migration_step_1, + migration_step_2, + migration_step_3, +) +from ....src.ddbec.resource import migration_step_0 +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_migration_step_0_with_resource(): + """Test migration step 0 compatibility with different data formats.""" + # Successfully executes Step 0 + migration_step_0.migration_step_0_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + + # Given: Step 1 has succeeded + migration_step_1.migration_step_1_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + # When: Execute Step 0 with sort_read_value=1 + # Then: Success (i.e. 
can read values in old format) + migration_step_0.migration_step_0_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + + # Given: Step 2 has succeeded + migration_step_2.migration_step_2_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + # When: Execute Step 0 with sort_read_value=2 + # Then: throws DecryptionError Exception (i.e. cannot read values in new format) + with pytest.raises(DecryptionError): # The exact exception may vary in Python implementation + migration_step_0.migration_step_0_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + + # Given: Step 3 has succeeded + migration_step_3.migration_step_3_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) + # When: Execute Step 0 with sort_read_value=3 + # Then: throws DecryptionError Exception (i.e. cannot read values in new format) + with pytest.raises(DecryptionError): # The exact exception may vary in Python implementation + migration_step_0.migration_step_0_with_resource( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/table/__init__.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/table/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/table/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/table/test_migration_step_0.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/table/test_migration_step_0.py new file mode 100644 index 000000000..6cc410fc5 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/ddbec/table/test_migration_step_0.py @@ -0,0 +1,61 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Test for Migration Step 0. + +This test validates the compatibility between different stages of migration +and ensures that step 0 (using legacy DynamoDB Encryption Client with EncryptedTable) behaves correctly +with data from different migration stages. +""" +import pytest +from dynamodb_encryption_sdk.exceptions import DecryptionError + +from ....src.awsdbe.table import ( + migration_step_1, + migration_step_2, + migration_step_3, +) +from ....src.ddbec.table import migration_step_0 +from ...test_utils import TEST_DDB_TABLE_NAME, TEST_KMS_KEY_ID + +pytestmark = [pytest.mark.examples] + + +def test_migration_step_0_with_table(): + """Test migration step 0 compatibility with different data formats.""" + # Successfully executes Step 0 + migration_step_0.migration_step_0_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=0 + ) + + # Given: Step 1 has succeeded + migration_step_1.migration_step_1_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + # When: Execute Step 0 with sort_read_value=1 + # Then: Success (i.e. 
can read values in old format) + migration_step_0.migration_step_0_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=1 + ) + + # Given: Step 2 has succeeded + migration_step_2.migration_step_2_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + # When: Execute Step 0 with sort_read_value=2 + # Then: throws DecryptionError Exception (i.e. cannot read values in new format) + with pytest.raises(DecryptionError): # The exact exception may vary in Python implementation + migration_step_0.migration_step_0_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=2 + ) + + # Given: Step 3 has succeeded + migration_step_3.migration_step_3_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) + # When: Execute Step 0 with sort_read_value=3 + # Then: throws DecryptionError Exception (i.e. cannot read values in new format) + with pytest.raises(DecryptionError): # The exact exception may vary in Python implementation + migration_step_0.migration_step_0_with_table( + kms_key_id=TEST_KMS_KEY_ID, ddb_table_name=TEST_DDB_TABLE_NAME, sort_read_value=3 + ) diff --git a/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/test_utils.py b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/test_utils.py new file mode 100644 index 000000000..b7d58f186 --- /dev/null +++ b/Examples/runtimes/python/Migration/ddbec_to_awsdbe/test/test_utils.py @@ -0,0 +1,11 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Test constants.""" + +# This is a public KMS Key that MUST only be used for testing, and MUST NOT be used for any production data +TEST_KMS_KEY_ID = "arn:aws:kms:us-west-2:658956600833:key/b3537ef1-d8dc-4780-9f5a-55776cbb2f7f" +# Personal Testing Resource +# TEST_KMS_KEY_ID = "arn:aws:kms:us-west-2:452750982249:key/773d55bf-c816-48a2-96cd-b386f7980d08" + +# Our tests require access to DDB Table with this name +TEST_DDB_TABLE_NAME = "DynamoDbEncryptionInterceptorTestTable" diff --git a/Examples/runtimes/python/pyproject.toml b/Examples/runtimes/python/pyproject.toml new file mode 100644 index 000000000..e292743d5 --- /dev/null +++ b/Examples/runtimes/python/pyproject.toml @@ -0,0 +1,49 @@ +[tool.poetry] +name = "aws-dbesdk-dynamodb-examples" +version = "0.1.0" +description = "" +authors = ["AWS Crypto Tools "] + +[tool.poetry.dependencies] +python = "^3.11.0" +aws-dbesdk-dynamodb = { path = "../../../DynamoDbEncryption/runtimes/python", develop = false, extras = ["legacy-ddbec"]} + +[tool.poetry.group.test.dependencies] +pytest = "^7.4.0" +tox = "^3" + +[build-system] +requires = ["poetry-core<2.0.0"] +build-backend = "poetry.core.masonry.api" + +# Package linting + +[tool.poetry.group.linting] +optional = true + +[tool.poetry.group.linting.dependencies] +ruff = "^0.11.5" +black = "^25.1.0" + +[tool.ruff] +line-length=120 +indent-width=4 +target-version = "py311" + +[tool.ruff.lint] +# Choose linting tools +select = [ + # pycodestyle: spacing, line length + "E", + # pyflakes: unused imports/variables + "F", + # isort: import sorting + "I", + # pydocstyle: docstring style + "D", +] +# Ignore incompatible linting options +ignore=[ + "D203", # `incorrect-blank-line-before-class`; incompatible with `no-blank-line-before-class` (D211) + "D212", # `multi-line-summary-first-line`; incompatible with `multi-line-summary-second-line` (D213) +] diff --git 
a/Examples/runtimes/python/tox.ini b/Examples/runtimes/python/tox.ini new file mode 100644 index 000000000..eaf7153ac --- /dev/null +++ b/Examples/runtimes/python/tox.ini @@ -0,0 +1,18 @@ +[tox] +isolated_build = True +envlist = + py{311,312,313}-{dynamodbencryption,legacymigration} + +[testenv:base-command] +commands = poetry run pytest -s -v -l {posargs} + +[testenv] +skip_install = true +allowlist_externals = poetry +passenv = AWS_* +commands_pre = + poetry lock + poetry install --with test --no-root +commands = + dynamodbencryption: {[testenv:base-command]commands} DynamoDBEncryption/test/ + legacymigration: {[testenv:base-command]commands} Migration/ddbec_to_awsdbe/test/ \ No newline at end of file diff --git a/Makefile b/Makefile index c1806faab..7bede0e44 100644 --- a/Makefile +++ b/Makefile @@ -51,7 +51,7 @@ format_java_misc-check: setup_prettier npx prettier --plugin=prettier-plugin-java . --check setup_prettier: - npm i --no-save prettier@3 prettier-plugin-java@2.5 + npm i --no-save prettier@3.5.3 prettier-plugin-java@2.5 # Generate the top-level project.properties file using smithy-dafny. # This is for the benefit of the nightly Dafny CI, diff --git a/README.md b/README.md index 73375e7e6..13faf30d9 100644 --- a/README.md +++ b/README.md @@ -68,6 +68,7 @@ You need an Amazon Web Services (AWS) account to use the DB-ESDK for DynamoDB as - .NET - Dafny - Rust +- Python # Contributing diff --git a/TestVectors/Makefile b/TestVectors/Makefile index f918bfb7d..08a7f4097 100644 --- a/TestVectors/Makefile +++ b/TestVectors/Makefile @@ -2,6 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 CORES=2 +ENABLE_EXTERN_PROCESSING=1 TRANSPILE_TESTS_IN_RUST=1 include ../SharedMakefile.mk @@ -128,4 +129,65 @@ _remove_wrapped_client_rust: $(MAKE) _sed_file SED_FILE_PATH=$(REMOVE_WRAPPED_CLIENT_AFTER_POLYMORPH_RUST_DYNAMODB) SED_BEFORE_STRING=$(REMOVE_WRAPPED_CLIENT_AFTER_POLYMORPH_RUST_FROM_1) SED_AFTER_STRING=$(REMOVE_WRAPPED_CLIENT_AFTER_POLYMORPH_RUST_TO_1) $(MAKE) _sed_file SED_FILE_PATH=$(REMOVE_WRAPPED_CLIENT_AFTER_POLYMORPH_RUST_DYNAMODB) SED_BEFORE_STRING=$(REMOVE_WRAPPED_CLIENT_AFTER_POLYMORPH_RUST_FROM_2) SED_AFTER_STRING=$(REMOVE_WRAPPED_CLIENT_AFTER_POLYMORPH_RUST_TO_2) $(MAKE) _sed_file SED_FILE_PATH=$(REMOVE_WRAPPED_CLIENT_AFTER_POLYMORPH_RUST_DYNAMODB_STRUCTURED_ENCRYPTION) SED_BEFORE_STRING=$(REMOVE_WRAPPED_CLIENT_AFTER_POLYMORPH_RUST_FROM_1) SED_AFTER_STRING=$(REMOVE_WRAPPED_CLIENT_AFTER_POLYMORPH_RUST_TO_1) - $(MAKE) _sed_file SED_FILE_PATH=$(REMOVE_WRAPPED_CLIENT_AFTER_POLYMORPH_RUST_DYNAMODB_STRUCTURED_ENCRYPTION) SED_BEFORE_STRING=$(REMOVE_WRAPPED_CLIENT_AFTER_POLYMORPH_RUST_FROM_2) SED_AFTER_STRING=$(REMOVE_WRAPPED_CLIENT_AFTER_POLYMORPH_RUST_TO_2) \ No newline at end of file + $(MAKE) _sed_file SED_FILE_PATH=$(REMOVE_WRAPPED_CLIENT_AFTER_POLYMORPH_RUST_DYNAMODB_STRUCTURED_ENCRYPTION) SED_BEFORE_STRING=$(REMOVE_WRAPPED_CLIENT_AFTER_POLYMORPH_RUST_FROM_2) SED_AFTER_STRING=$(REMOVE_WRAPPED_CLIENT_AFTER_POLYMORPH_RUST_TO_2) + +# Python + +PYTHON_MODULE_NAME=aws_dbesdk_dynamodb_test_vectors + +TRANSLATION_RECORD_PYTHON := \ + --translation-record ../submodules/MaterialProviders/StandardLibrary/runtimes/python/src/smithy_dafny_standard_library/internaldafny/generated/dafny_src-py.dtr \ + --translation-record ../submodules/MaterialProviders/ComAmazonawsKms/runtimes/python/src/aws_cryptography_internal_kms/internaldafny/generated/dafny_src-py.dtr \ + --translation-record 
../submodules/MaterialProviders/ComAmazonawsDynamodb/runtimes/python/src/aws_cryptography_internal_dynamodb/internaldafny/generated/dafny_src-py.dtr \ + --translation-record ../submodules/MaterialProviders/AwsCryptographyPrimitives/runtimes/python/src/aws_cryptography_primitives/internaldafny/generated/dafny_src-py.dtr \ + --translation-record ../submodules/MaterialProviders/AwsCryptographicMaterialProviders/runtimes/python/src/aws_cryptographic_material_providers/internaldafny/generated/dafny_src-py.dtr \ + --translation-record ../submodules/MaterialProviders/TestVectorsAwsCryptographicMaterialProviders/runtimes/python/src/aws_cryptography_materialproviders_test_vectors/internaldafny/generated/dafny_src-py.dtr \ + --translation-record ../DynamoDbEncryption/runtimes/python/src/aws_dbesdk_dynamodb/internaldafny/generated/dafny_src-py.dtr + +PYTHON_DEPENDENCY_MODULE_NAMES := \ + --dependency-library-name=aws.cryptography.primitives=aws_cryptography_primitives \ + --dependency-library-name=com.amazonaws.kms=aws_cryptography_internal_kms \ + --dependency-library-name=com.amazonaws.dynamodb=aws_cryptography_internal_dynamodb \ + --dependency-library-name=aws.cryptography.materialProviders=aws_cryptographic_material_providers \ + --dependency-library-name=aws.cryptography.keyStore=aws_cryptographic_material_providers \ + --dependency-library-name=aws.cryptography.materialProvidersTestVectorKeys=aws_cryptography_materialproviders_test_vectors \ + --dependency-library-name=aws.cryptography.dbEncryptionSdk.structuredEncryption=aws_dbesdk_dynamodb \ + --dependency-library-name=aws.cryptography.dbEncryptionSdk.dynamoDb=aws_dbesdk_dynamodb \ + --dependency-library-name=aws.cryptography.dbEncryptionSdk.dynamoDb.itemEncryptor=aws_dbesdk_dynamodb \ + --dependency-library-name=aws.cryptography.dbEncryptionSdk.dynamoDb.transforms=aws_dbesdk_dynamodb \ + +# Constants for languages that drop extern names (Python, Go) + +INDEX_FILE_PATH=dafny/DDBEncryption/src/LibraryIndex.dfy +INDEX_FILE_WITH_EXTERN_STRING="module {:extern \"software.amazon.cryptography.dbencryptionsdk.dynamodb.internaldafny.wrapped\"} WrappedDynamoDbEncryption refines WrappedAbstractAwsCryptographyDynamoDbEncryptionService" +INDEX_FILE_WITHOUT_EXTERN_STRING="module WrappedDynamoDbEncryption refines WrappedAbstractAwsCryptographyDynamoDbEncryptionService" + +ITEMENCRYPTOR_INDEX_FILE_PATH=dafny/WrappedDynamoDbItemEncryptor/src/Index.dfy +ITEMENCRYPTOR_INDEX_FILE_WITH_EXTERN_STRING="module {:extern \"software.amazon.cryptography.dbencryptionsdk.dynamodb.itemencryptor.internaldafny.wrapped\" } WrappedItemEncryptor refines WrappedAbstractAwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorService {" +ITEMENCRYPTOR_INDEX_FILE_WITHOUT_EXTERN_STRING="module WrappedItemEncryptor refines WrappedAbstractAwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorService {" + +_sed_index_file_add_extern: + $(MAKE) _sed_file SED_FILE_PATH=$(INDEX_FILE_PATH) SED_BEFORE_STRING=$(INDEX_FILE_WITHOUT_EXTERN_STRING) SED_AFTER_STRING=$(INDEX_FILE_WITH_EXTERN_STRING) + $(MAKE) _sed_file SED_FILE_PATH=$(ITEMENCRYPTOR_INDEX_FILE_PATH) SED_BEFORE_STRING=$(ITEMENCRYPTOR_INDEX_FILE_WITHOUT_EXTERN_STRING) SED_AFTER_STRING=$(ITEMENCRYPTOR_INDEX_FILE_WITH_EXTERN_STRING) + +_sed_index_file_remove_extern: + $(MAKE) _sed_file SED_FILE_PATH=$(INDEX_FILE_PATH) SED_BEFORE_STRING=$(INDEX_FILE_WITH_EXTERN_STRING) SED_AFTER_STRING=$(INDEX_FILE_WITHOUT_EXTERN_STRING) + $(MAKE) _sed_file SED_FILE_PATH=$(ITEMENCRYPTOR_INDEX_FILE_PATH) 
SED_BEFORE_STRING=$(ITEMENCRYPTOR_INDEX_FILE_WITH_EXTERN_STRING) SED_AFTER_STRING=$(ITEMENCRYPTOR_INDEX_FILE_WITHOUT_EXTERN_STRING) + +_sed_types_file_remove_extern: + echo "no types file" + +_sed_types_file_add_extern: + echo "no types file" + +test_python_client_interface: + rm -rf runtimes/python/.tox + python3 -m tox -c runtimes/python -e client + +test_python_resource_interface: + rm -rf runtimes/python/.tox + python3 -m tox -c runtimes/python -e resource + +test_python_table_interface: + rm -rf runtimes/python/.tox + python3 -m tox -c runtimes/python -e table diff --git a/TestVectors/dafny/DDBEncryption/src/WriteManifest.dfy b/TestVectors/dafny/DDBEncryption/src/WriteManifest.dfy index 1e668a2de..2f6e3a112 100644 --- a/TestVectors/dafny/DDBEncryption/src/WriteManifest.dfy +++ b/TestVectors/dafny/DDBEncryption/src/WriteManifest.dfy @@ -218,11 +218,11 @@ module {:options "-functionSyntax:4"} WriteManifest { const DoNothing : CryptoAction := 3 const A : string := "A" - const B : string := "퀀" // Ud000" - const C : string := "﹌" // Ufe4c" - const D : string := "𐀁" // U10001 - const E : string := "𐀂" // U10002 - same high surrogate as D - const F : string := "𠀂" // U20002 - different high surrogate as D + const B : string := "\ud000" // "Ud000" <-> "퀀" + const C : string := "\ufe4c" // "Ufe4c" <-> "﹌" + const D : string := "\u10001" // "U10001" <-> "𐀁" (surrogate pair: "\uD800\uDC01") + const E : string := "\u10002" // "U10002" <-> "𐀂" (same high surrogate as D: "\uD800\uDC02") + const F : string := "\u20002" // "U20002" <-> "𠀂" (different high surrogate as D: "\uD840\uDC02") lemma CheckLengths() ensures |A| == 1 diff --git a/TestVectors/runtimes/python/.gitignore b/TestVectors/runtimes/python/.gitignore new file mode 100644 index 000000000..aaf44d4cd --- /dev/null +++ b/TestVectors/runtimes/python/.gitignore @@ -0,0 +1,17 @@ +# Python build artifacts +__pycache__ +**/__pycache__ +*.pyc +src/**.egg-info/ +build +poetry.lock +**/poetry.lock +dist + +# Dafny-generated Python +**/internaldafny/generated + +# Python test artifacts +.tox +.pytest_cache + diff --git a/TestVectors/runtimes/python/README.md b/TestVectors/runtimes/python/README.md new file mode 100644 index 000000000..6d46a446c --- /dev/null +++ b/TestVectors/runtimes/python/README.md @@ -0,0 +1,24 @@ +The Python AWS DBESDK for DynamoDB has the following encrypted interfaces for boto3 clients: + +- `EncryptedClient` + - Tested by TestVectors through `client/` +- `EncryptedPaginator` + - Can't write items; will not be tested via TestVectors +- `EncryptedResource` + - Tested by TestVectors through `resource/` +- `EncryptedTable` + - Tested by TestVectors through `table/` +- `EncryptedTablesManager` + - Can't write items by itself; provides EncryptedTables, which are tested via `table/` + +The Python AWS DBESDK for DynamoDB's `ItemEncryptor` interface provides the following APIs: + +- encrypt_python_item / decrypt_python_item + - Standard dictionary JSON; e.g. `{"key": "value"}` + - Tested explicitly via `test/resource/` and `test/table/`, which call `CreateWrappedDictItemEncryptor` and exercise its operations +- encrypt_dynamodb_item / decrypt_dynamodb_item + - DynamoDB JSON; e.g. `{"key": {"S": "value"}}` + - Tested implicitly via `test/resource/` and `test/table/` calling `CreateWrappedDictItemEncryptor`. Calls to the dict-formatted APIs pass through the DynamoDB-formatted APIs. +- encrypt_item / decrypt_item + - DBESDK EncryptItemInput; e.g.
`EncryptItemInput({"key": {"S": "value"}})` + - Tested explicitly via `test/client/` calling `CreateWrappedDynamoDbItemEncryptor` diff --git a/TestVectors/runtimes/python/pyproject.toml b/TestVectors/runtimes/python/pyproject.toml new file mode 100644 index 000000000..e1c0a23c1 --- /dev/null +++ b/TestVectors/runtimes/python/pyproject.toml @@ -0,0 +1,24 @@ +[tool.poetry] +name = "aws-dbesdk-dynamodb-test-vectors" +version = "0.1.0" +description = "" +authors = ["AWS Crypto Tools "] +packages = [ + { include = "aws_dbesdk_dynamodb_test_vectors", from = "src" }, +] +# Include all of the following .gitignored files in package distributions, +# even though it is not included in version control +include = ["**/internaldafny/generated/*.py"] + +[tool.poetry.dependencies] +python = "^3.11.0" +aws-dbesdk-dynamodb = { path = "../../../DynamoDbEncryption/runtimes/python", develop = false} +aws-cryptography-internal-mpl-testvectors = { path = "../../../submodules/MaterialProviders/TestVectorsAwsCryptographicMaterialProviders/runtimes/python", develop = false} + +[tool.poetry.group.test.dependencies] +pytest = "^7.4.0" +tox = "^3" + +[build-system] +requires = ["poetry-core<2.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/__init__.py b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/__init__.py new file mode 100644 index 000000000..13c1dc9fe --- /dev/null +++ b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/__init__.py @@ -0,0 +1,18 @@ +import sys +""" +DBESDK's Dafny code parses the TestVectors JSON recursively. +i.e. GetTests(Json) = (Json[0], GetTests(Json[1:])) +The decrypt_X_33a.json file has 2832 test vectors. +By default, Python has a recursion limit of 1000. +DBESDK exceeds Python's recursion limit when parsing the JSON, which needs >1 call per test vector. +(Other Crypto Tools languages are limited by memory; Python's explicit limit on function calls is unique.) +When using this internal Crypto Tools TestVectors library, set recursion limit to 10,000. +(This value is totally arbitrary and should be increased if this isn't enough.) 
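+(Note: sys.setrecursionlimit only raises Python's recursion guard; very deep recursion can still exhaust the interpreter's C stack, so prefer a modest increase over an arbitrarily large one.)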
+""" +sys.setrecursionlimit(10000) + +# Initialize generated Dafny +from .internaldafny.generated import module_ + +# Initialize externs +from .internaldafny import extern diff --git a/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/CreateInterceptedDDBClient.py b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/CreateInterceptedDDBClient.py new file mode 100644 index 000000000..7f3ed8d31 --- /dev/null +++ b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/CreateInterceptedDDBClient.py @@ -0,0 +1,28 @@ +import boto3 +import aws_dbesdk_dynamodb_test_vectors.internaldafny.generated.CreateInterceptedDDBClient +import aws_cryptography_internal_dynamodb.internaldafny.extern +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy import aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbTablesEncryptionConfig +from aws_dbesdk_dynamodb.encrypted.client import EncryptedClient +from smithy_dafny_standard_library.internaldafny.generated import Wrappers +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.errors import _smithy_error_to_dafny_error +from aws_dbesdk_dynamodb_test_vectors.waiting_boto3_ddb_client import WaitingLocalDynamoClient + +class default__: + @staticmethod + def CreateVanillaDDBClient(): + try: + return aws_cryptography_internal_dynamodb.internaldafny.extern.Com_Amazonaws_Dynamodb.default__.DynamoDBClient(WaitingLocalDynamoClient()) + except Exception as e: + return Wrappers.Result_Failure(_smithy_error_to_dafny_error(e)) + + @staticmethod + def CreateInterceptedDDBClient(dafny_encryption_config): + try: + native_encryption_config = aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbTablesEncryptionConfig(dafny_encryption_config) + boto3_client = WaitingLocalDynamoClient() + encrypted_client = EncryptedClient(client = boto3_client, encryption_config = native_encryption_config) + return aws_cryptography_internal_dynamodb.internaldafny.extern.Com_Amazonaws_Dynamodb.default__.DynamoDBClient(encrypted_client) + except Exception as e: + return Wrappers.Result_Failure(_smithy_error_to_dafny_error(e)) + +aws_dbesdk_dynamodb_test_vectors.internaldafny.generated.CreateInterceptedDDBClient.default__ = default__ diff --git a/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/CreateInterceptedDDBResource.py b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/CreateInterceptedDDBResource.py new file mode 100644 index 000000000..98af0d1aa --- /dev/null +++ b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/CreateInterceptedDDBResource.py @@ -0,0 +1,146 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import boto3 +import types +import aws_dbesdk_dynamodb_test_vectors.internaldafny.generated.CreateInterceptedDDBClient +import aws_cryptography_internal_dynamodb.internaldafny.extern +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy import aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbTablesEncryptionConfig +from aws_dbesdk_dynamodb.encrypted.resource import EncryptedResource +from smithy_dafny_standard_library.internaldafny.generated import Wrappers +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.errors import _smithy_error_to_dafny_error +from aws_dbesdk_dynamodb_test_vectors.waiting_boto3_ddb_client import WaitingLocalDynamoClient +from aws_dbesdk_dynamodb.internal.resource_to_client import ResourceShapeToClientShapeConverter +from aws_dbesdk_dynamodb.internal.client_to_resource import ClientShapeToResourceShapeConverter + +class DynamoDBClientWrapperForDynamoDBResource: + """ + Internal-only wrapper class for DBESDK TestVectors. + + TestVectors Dafny code only knows how to interact with DynamoDB clients. + However, Python DDBEC and DBESDK have an EncryptedResource class that wraps boto3 DynamoDB Resources. + These classes create EncryptedTables that wrap boto3 DynamoDB Table Resources. + This class interfaces between Dafny TestVectors' DynamoDB client-calling code + and Python DBESDK's EncryptedResource/EncryptedTable classes. + + This class defers to a boto3 client for create_table and delete_table, + which are not supported on boto3 DynamoDB Table resources. + + TODO: Transact operations are not supported on tables; decide how these should be handled. + """ + + def __init__(self, resource, client): + self._resource = resource + self._client = client + self._client_shape_to_resource_shape_converter = ClientShapeToResourceShapeConverter() + self._resource_shape_to_client_shape_converter = ResourceShapeToClientShapeConverter() + + def batch_write_item(self, **kwargs): + # The input here is from the DBESDK TestVectors, which is in the shape of a client request. + # Convert the client request to a resource request to be passed to the table. + resource_input = self._client_shape_to_resource_shape_converter.batch_write_item_request(kwargs) + resource_output = self._resource.batch_write_item(**resource_input) + client_output = self._resource_shape_to_client_shape_converter.batch_write_item_response(resource_output) + return client_output + + def batch_get_item(self, **kwargs): + resource_input = self._client_shape_to_resource_shape_converter.batch_get_item_request(kwargs) + resource_output = self._resource.batch_get_item(**resource_input) + client_output = self._resource_shape_to_client_shape_converter.batch_get_item_response(resource_output) + return client_output + + def scan(self, **kwargs): + # Resources don't have scan, but EncryptedResources can provide EncryptedTables that do support scan. + # This path tests that the EncryptedTables provided by EncryptedResources can be used for scan. + table_name = kwargs["TableName"] + # Note: Any ConditionExpression strings are not converted to boto3 Condition objects + # and are passed as-is to the resource. + # They absolutely could be converted, but that is tested in the boto3 Table tests. + # Not doing this conversion here expands test coverage to both cases.
+        table_input = self._client_shape_to_resource_shape_converter.scan_request(kwargs)
+        encrypted_table = self._resource.Table(table_name)
+        table_output = encrypted_table.scan(**table_input)
+        table_shape_converter = ResourceShapeToClientShapeConverter(table_name=table_name)
+        client_output = table_shape_converter.scan_response(table_output)
+        return client_output
+
+    def put_item(self, **kwargs):
+        # Resources don't have put_item, but EncryptedResources can provide EncryptedTables that do support put_item.
+        # This path tests that the EncryptedTables provided by EncryptedResources can be used for put_item.
+        table_name = kwargs["TableName"]
+        table_input = self._client_shape_to_resource_shape_converter.put_item_request(kwargs)
+        encrypted_table = self._resource.Table(table_name)
+        table_output = encrypted_table.put_item(**table_input)
+        table_shape_converter = ResourceShapeToClientShapeConverter(table_name=table_name)
+        client_output = table_shape_converter.put_item_response(table_output)
+        return client_output
+
+    def get_item(self, **kwargs):
+        # Resources don't have get_item, but EncryptedResources can provide EncryptedTables that do support get_item.
+        # This path tests that the EncryptedTables provided by EncryptedResources can be used for get_item.
+        table_name = kwargs["TableName"]
+        table_input = self._client_shape_to_resource_shape_converter.get_item_request(kwargs)
+        encrypted_table = self._resource.Table(table_name)
+        table_output = encrypted_table.get_item(**table_input)
+        table_shape_converter = ResourceShapeToClientShapeConverter(table_name=table_name)
+        client_output = table_shape_converter.get_item_response(table_output)
+        return client_output
+
+    def query(self, **kwargs):
+        # Resources don't have query, but EncryptedResources can provide EncryptedTables that do support query.
+        # This path tests that the EncryptedTables provided by EncryptedResources can be used for query.
+        table_name = kwargs["TableName"]
+        # Note: Any ConditionExpression strings are not converted to boto3 Condition objects
+        # and are passed as-is to the resource.
+        # They absolutely could be converted, but that is tested in the boto3 Table tests.
+        # Not doing this conversion here expands test coverage to both cases.
+        table_input = self._client_shape_to_resource_shape_converter.query_request(kwargs)
+        encrypted_table = self._resource.Table(table_name)
+        table_output = encrypted_table.query(**table_input)
+        table_shape_converter = ResourceShapeToClientShapeConverter(table_name=table_name)
+        client_output = table_shape_converter.query_response(table_output)
+        return client_output
+
+    def transact_get_items(self, **kwargs):
+        raise NotImplementedError("transact_get_items not supported on resources")
+
+    def transact_write_items(self, **kwargs):
+        raise NotImplementedError("transact_write_items not supported on resources")
+
+    def delete_table(self, **kwargs):
+        # Resources don't have delete_table. Plus, DBESDK doesn't intercept DeleteTable calls.
+        # TestVectors only use this to ensure a new, clean table is created for each test.
+        # Defer to the underlying boto3 client to delete the table.
+        return self._client.delete_table(**kwargs)
+
+    def create_table(self, **kwargs):
+        # Resources don't have create_table. Plus, DBESDK doesn't intercept CreateTable calls.
+        # TestVectors only use this to ensure a new, clean table is created for each test.
+        # Defer to the underlying boto3 client to create a table.
+ return self._client.create_table(**kwargs) + + +class default__: + @staticmethod + def CreateVanillaDDBClient(): + try: + return aws_cryptography_internal_dynamodb.internaldafny.extern.Com_Amazonaws_Dynamodb.default__.DynamoDBClient(WaitingLocalDynamoClient()) + except Exception as e: + return Wrappers.Result_Failure(_smithy_error_to_dafny_error(e)) + + @staticmethod + def CreateInterceptedDDBClient(dafny_encryption_config): + try: + native_encryption_config = aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbTablesEncryptionConfig(dafny_encryption_config) + boto3_client = WaitingLocalDynamoClient() + table_config_names = list(native_encryption_config.table_encryption_configs.keys()) + if len(table_config_names) > 1: + raise ValueError("TODO more than 1 table; need EncryptedTablesManager") + # For TestVectors, use local DynamoDB endpoint + resource = boto3.resource('dynamodb', endpoint_url="http://localhost:8000") + encrypted_resource = EncryptedResource(resource = resource, encryption_config = native_encryption_config) + wrapped_encrypted_resource = DynamoDBClientWrapperForDynamoDBResource(resource = encrypted_resource, client = boto3_client) + return aws_cryptography_internal_dynamodb.internaldafny.extern.Com_Amazonaws_Dynamodb.default__.DynamoDBClient(wrapped_encrypted_resource) + except Exception as e: + return Wrappers.Result_Failure(_smithy_error_to_dafny_error(e)) + +aws_dbesdk_dynamodb_test_vectors.internaldafny.generated.CreateInterceptedDDBClient.default__ = default__ diff --git a/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/CreateInterceptedDDBTable.py b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/CreateInterceptedDDBTable.py new file mode 100644 index 000000000..852b5b5d2 --- /dev/null +++ b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/CreateInterceptedDDBTable.py @@ -0,0 +1,298 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0
+import boto3
+import aws_dbesdk_dynamodb_test_vectors.internaldafny.generated.CreateInterceptedDDBClient
+import aws_cryptography_internal_dynamodb.internaldafny.extern
+from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy import aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbTablesEncryptionConfig
+from aws_dbesdk_dynamodb.encrypted.table import (
+    EncryptedTable,
+)
+from aws_dbesdk_dynamodb.internal.resource_to_client import ResourceShapeToClientShapeConverter
+from aws_dbesdk_dynamodb.internal.client_to_resource import ClientShapeToResourceShapeConverter
+from smithy_dafny_standard_library.internaldafny.generated import Wrappers
+from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.errors import _smithy_error_to_dafny_error
+from aws_dbesdk_dynamodb_test_vectors.waiting_boto3_ddb_client import WaitingLocalDynamoClient
+
+from boto3.dynamodb.conditions import Key, Attr
+from decimal import Decimal
+
+import json
+import os
+from typing import Any, Dict
+
+def load_test_data() -> Dict[str, Any]:
+    """Load the ExpressionAttributeValues from the data.json file."""
+    # data.json is expected to be in the current working directory the tests run from
+    current_dir = os.getcwd()
+    # Build the path to the data.json file
+    data_file = os.path.join(current_dir, 'data.json')
+
+    with open(data_file, 'r') as f:
+        return json.load(f)["Values"]
+
+expression_attribute_values_from_json = load_test_data()
+
+def get_test_value(name) -> Any:
+    """
+    Get a test value from the Values section of data.json.
+
+    Args:
+        name: The name of the value to retrieve (e.g. ":zero", ":one", etc.)
+
+    Returns:
+        The value from the Values section
+
+    Raises:
+        KeyError: If the requested value name is not found
+    """
+    if name not in expression_attribute_values_from_json:
+        raise KeyError(f"Value '{name}' not found in test data")
+    value = expression_attribute_values_from_json[name]
+    if isinstance(value, dict):
+        if "N" in value:
+            return Decimal(value["N"])
+        elif "SS" in value:
+            return set(value["SS"])
+        elif "L" in value:
+            return list(value["L"])
+        else:
+            raise KeyError(f"Unknown ExpressionAttributeValue type: {value}")
+    return value
+
+
+# When querying, DBESDK DDB TestVectors will pass the Table the query as a string.
+# The Table could accept this string as-is and process it correctly.
+# However, EncryptedTables have extra logic to process boto3 Conditions.
+# This extra logic should be tested as much as possible.
+# This map converts some known query strings to equivalent Conditions.
+# TestVectors will pass the query string (map key) to the Table;
+# the wrapper's scan and query logic below will look up the query string in this map:
+# - Entry found: Query with the replaced Condition
+# - Not found: Query with the original string. The Table accepts strings.
+# This map contains all query strings in the TestVectors' data.json as of commit
+# 4f18689f79243c9a5ab0f3a23108671defddeac4
+# If any query strings are added to TestVectors, they COULD be added here;
+# if they are not added, the Table will accept the string as-is.
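+# Illustrative sketch of the lookup (the first expression below is a real map entry;
+# the second is a hypothetical string that is NOT in the map):
+#   FilterExpression="RecNum = :zero"        -> replaced with Attr("RecNum").eq(get_test_value(":zero"))
+#   FilterExpression="SomeAttr = :someValue" -> passed through to the Table unchanged as a string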
+known_filter_expression_string_to_condition_map = { + # "Basic" queries + "RecNum = :zero": Attr("RecNum").eq(get_test_value(":zero")), + "RecNum <= :zero": Attr("RecNum").lte(get_test_value(":zero")), + "RecNum > :zero": Attr("RecNum").gt(get_test_value(":zero")), + "RecNum >= :zero": Attr("RecNum").gte(get_test_value(":zero")), + "RecNum <> :zero": Attr("RecNum").ne(get_test_value(":zero")), + "RecNum = :one": Attr("RecNum").eq(get_test_value(":one")), + "Nine between :zeroD and :three": Attr("Nine").between(get_test_value(":zeroD"), get_test_value(":three")), + "Nine between :nineD and :nine": Attr("Nine").between(get_test_value(":nineD"), get_test_value(":nine")), + "Nine between :nine and :three": Attr("Nine").between(get_test_value(":nine"), get_test_value(":three")), + "Nine between :nine and :nine": Attr("Nine").between(get_test_value(":nine"), get_test_value(":nine")), + "NumberTest = :NumberTest": Attr("NumberTest").eq(get_test_value(":NumberTest")), + "RecNum in (:zero, :one)": Attr("RecNum").is_in([get_test_value(":zero"), get_test_value(":one")]), + "Two = :two": Attr("Two").eq(get_test_value(":two")), + "Two = :two or Three = :three or Four = :four OR Five = :five": Attr("Two").eq(get_test_value(":two")) | Attr("Three").eq(get_test_value(":three")) | Attr("Four").eq(get_test_value(":four")) | Attr("Five").eq(get_test_value(":five")), + "Two = :two and Three = :three and Four = :four and Five = :five": Attr("Two").eq(get_test_value(":two")) & Attr("Three").eq(get_test_value(":three")) & Attr("Four").eq(get_test_value(":four")) & Attr("Five").eq(get_test_value(":five")), + "Two in (:two, :three, :four, :five)": Attr("Two").is_in([get_test_value(":two"), get_test_value(":three"), get_test_value(":four"), get_test_value(":five")]), + "Five in (:two, :three, :four, :five)": Attr("Five").is_in([get_test_value(":two"), get_test_value(":three"), get_test_value(":four"), get_test_value(":five")]), + "Five in (:strset)": Attr("Five").is_in([get_test_value(":strset")]), + "Five in (:strlist)": Attr("Five").is_in([get_test_value(":strlist")]), + "contains(One, :oneA)": Attr("One").contains(get_test_value(":oneA")), + "contains(One, :oneB)": Attr("One").contains(get_test_value(":oneB")), + # Hard-coding returning the input string for these cases. + # These conditions test undocumented behavior in DynamoDB that can't be expressed with boto3 Conditions. + # The undocumented behavior is that `contains`' first parameter can be a value, + # and does not need to be an attribute name. + # DynamoDB documentation names `contains`' first argument as `path`, + # and only ever documents accepting an attribute name for `path`. + # However, testing with an AWS SDK reveals that `path` can be a value; + # i.e. a hardcoded string or an attribute value, + # so this expression is valid. + # But I can't find a way to express this via boto3 Conditions, + # where Contains requires an attribute name. + # For these strings, do not attempt to convert to boto3 conditions, + # and just return the input string. + # The input string is still passed to the table and tested. 
+ "contains(:oneA, One)": "contains(:oneA, One)", + "contains(:oneB, One)": "contains(:oneB, One)", + "contains(:strset, One)": "contains(:strset, One)", + + # "Complex" queries + "Comp1 := :cmp1a": Attr("Comp1").eq(get_test_value(":cmp1a")), + "begins_with(Comp1, :cmp1c)": Attr("Comp1").begins_with(get_test_value(":cmp1c")), + "cmp1c < Comp1": Attr("cmp1c").lt(get_test_value(":cmp1c")), + "cmp1c = Comp1": Attr("cmp1c").eq(get_test_value(":cmp1c")), + "begins_with(Comp1, :cmp1d)": Attr("Comp1").begins_with(get_test_value(":cmp1d")), + "contains(Comp1, :cmp1c)": Attr("Comp1").contains(get_test_value(":cmp1c")), + "contains(Comp1, :cmp1d)": Attr("Comp1").contains(get_test_value(":cmp1d")), + "Comp1 = :cmp1b": Attr("Comp1").eq(get_test_value(":cmp1b")), + + # Another query that can't be translated to boto3 Conditions, + # since attribute values aren't attribute names. + # Pass the original string through. + ":cmp1c <= Comp1": ":cmp1c <= Comp1", +} + +# KeyConditionExpression strings expect Keys, not Attrs. +known_key_condition_expression_string_to_condition_map = { + "RecNum = :zero": Key("RecNum").eq(get_test_value(":zero")), + "RecNum = :one": Key("RecNum").eq(get_test_value(":one")), +} + +class DynamoDBClientWrapperForDynamoDBTable: + """ + DBESDK TestVectors-internal wrapper class. + Converts boto3 DynamoDB client-formatted inputs to Table-formatted inputs, + and converts Table-formatted outputs to boto3 DynamoDB client-formatted outputs. + + TestVectors Dafny code only knows how to interact with DynamoDB clients. + However, Python DDBEC and DBESDK have this EncryptedTable class. + This class interfaces between Dafny TestVectors' DynamoDB client-calling code + and Python DBESDK's EncryptedTable class. + + This class defers to a boto3 client for create_table and delete_table, + which are not supported on boto3 DynamoDB Table tables. + """ + + def __init__(self, table, client): + self._table = table + self._client = client + self._client_shape_to_resource_shape_converter = ClientShapeToResourceShapeConverter() + self._resource_shape_to_client_shape_converter = ResourceShapeToClientShapeConverter(table_name = self._table._table.table_name) + + def put_item(self, **kwargs): + table_input = self._client_shape_to_resource_shape_converter.put_item_request(kwargs) + table_output = self._table.put_item(**table_input) + client_output = self._resource_shape_to_client_shape_converter.put_item_response(table_output) + return client_output + + def get_item(self, **kwargs): + table_input = self._client_shape_to_resource_shape_converter.get_item_request(kwargs) + table_output = self._table.get_item(**table_input) + client_output = self._resource_shape_to_client_shape_converter.get_item_response(table_output) + return client_output + + def batch_write_item(self, **kwargs): + # The table doesn't support batch_write_item, but supports batch_writer. + # Translate the batch_write_item request to batch_writer requests. 
+ table_input = self._client_shape_to_resource_shape_converter.batch_write_item_request(kwargs) + with self._table.batch_writer() as batch_writer: + for _, items in table_input["RequestItems"].items(): + for item in items: + if "PutRequest" in item: + batch_writer.put_item(item["PutRequest"]["Item"]) + elif "DeleteRequest" in item: + batch_writer.delete_item(item["DeleteRequest"]["Key"]) + else: + raise ValueError(f"Unknown request type: {item}") + # An empty dict is valid output: + # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/client/batch_write_item.html + client_output = {} + return client_output + + def batch_get_item(self, **kwargs): + raise NotImplementedError("batch_get_item not supported on table interface; remove tests calling this") + + def scan(self, **kwargs): + table_input = self._client_shape_to_resource_shape_converter.scan_request(kwargs) + # To exhaustively test Tables, + # convert the string-based KeyConditionExpression and FilterExpression + # into the boto3.conditions.Key and boto3.conditions.Attr resource-formatted queries. + if "KeyConditionExpression" in table_input: + if table_input["KeyConditionExpression"] in known_key_condition_expression_string_to_condition_map: + table_input["KeyConditionExpression"] = known_key_condition_expression_string_to_condition_map[table_input["KeyConditionExpression"]] + # boto3 Conditions cannot accept any externally-provided ExpressionAttributeValues + # if the KeyConditionExpression is not a string. + # If the KeyConditionExpression was replaced, remove the now-useless ExpressionAttributeValues. + if "ExpressionAttributeValues" in table_input and not isinstance(table_input["KeyConditionExpression"], str): + del table_input["ExpressionAttributeValues"] + else: + # Pass the original string through. + # The table will accept the string as-is. + pass + if "FilterExpression" in table_input: + if table_input["FilterExpression"] in known_filter_expression_string_to_condition_map: + # Turn the query into the resource-formatted query + table_input["FilterExpression"] = known_filter_expression_string_to_condition_map[table_input["FilterExpression"]] + # boto3 Conditions cannot accept any externally-provided ExpressionAttributeValues + # if the FilterExpression is not a string. + # If the FilterExpression was replaced, remove the now-useless ExpressionAttributeValues. + if "ExpressionAttributeValues" in table_input and not isinstance(table_input["FilterExpression"], str): + del table_input["ExpressionAttributeValues"] + else: + # Pass the original string through. + # The table will accept the string as-is. + pass + table_output = self._table.scan(**table_input) + client_output = self._resource_shape_to_client_shape_converter.scan_response(table_output) + return client_output + + def transact_get_items(self, **kwargs): + raise NotImplementedError("transact_get_items not supported on table interface; remove tests calling this") + + def transact_write_items(self, **kwargs): + raise NotImplementedError("transact_write_items not supported on table interface; remove tests calling this") + + def query(self, **kwargs): + table_input = self._client_shape_to_resource_shape_converter.query_request(kwargs) + # To exhaustively test Tables, + # convert the string-based KeyConditionExpression and FilterExpression + # into the boto3.conditions.Key and boto3.conditions.Attr resource-formatted queries. 
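+        # For example, the KeyConditionExpression string "RecNum = :zero" is replaced below with
+        # Key("RecNum").eq(get_test_value(":zero")) from known_key_condition_expression_string_to_condition_map,
+        # and the now-unused string-based ExpressionAttributeValues are removed.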
+ if "KeyConditionExpression" in table_input: + if table_input["KeyConditionExpression"] in known_key_condition_expression_string_to_condition_map: + table_input["KeyConditionExpression"] = known_key_condition_expression_string_to_condition_map[table_input["KeyConditionExpression"]] + # boto3 Conditions cannot accept any externally-provided ExpressionAttributeValues + # if the KeyConditionExpression is not a string. + # If the KeyConditionExpression was replaced, remove the now-useless ExpressionAttributeValues. + if "ExpressionAttributeValues" in table_input and not isinstance(table_input["KeyConditionExpression"], str): + del table_input["ExpressionAttributeValues"] + else: + # Pass the original string through. + # The table will accept the string as-is. + pass + if "FilterExpression" in table_input: + if table_input["FilterExpression"] in known_filter_expression_string_to_condition_map: + # Turn the query into the resource-formatted query + table_input["FilterExpression"] = known_filter_expression_string_to_condition_map[table_input["FilterExpression"]] + # boto3 Conditions cannot accept any externally-provided ExpressionAttributeValues + # if the FilterExpression is not a string. + # If the FilterExpression was replaced, remove the now-useless ExpressionAttributeValues. + if "ExpressionAttributeValues" in table_input and not isinstance(table_input["FilterExpression"], str): + del table_input["ExpressionAttributeValues"] + else: + # Pass the original string through. + # The table will accept the string as-is. + pass + table_output = self._table.query(**table_input) + client_output = self._resource_shape_to_client_shape_converter.query_response(table_output) + return client_output + + def delete_table(self, **kwargs): + return self._client.delete_table(**kwargs) + + def create_table(self, **kwargs): + return self._client.create_table(**kwargs) + +class default__: + @staticmethod + def CreateVanillaDDBClient(): + try: + return aws_cryptography_internal_dynamodb.internaldafny.extern.Com_Amazonaws_Dynamodb.default__.DynamoDBClient(WaitingLocalDynamoClient()) + except Exception as e: + return Wrappers.Result_Failure(_smithy_error_to_dafny_error(e)) + + @staticmethod + def CreateInterceptedDDBClient(dafny_encryption_config): + try: + native_encryption_config = aws_cryptography_dbencryptionsdk_dynamodb_DynamoDbTablesEncryptionConfig(dafny_encryption_config) + boto3_client = WaitingLocalDynamoClient() + table_config_names = list(native_encryption_config.table_encryption_configs.keys()) + if len(table_config_names) > 1: + # If needed, >1 table could be supported by setting up an EncryptedTablesManager + raise ValueError(">1 table not supported") + # For TestVectors, use local DynamoDB endpoint + table = boto3.resource('dynamodb', endpoint_url="http://localhost:8000").Table(table_config_names[0]) + encrypted_table = EncryptedTable(table = table, encryption_config = native_encryption_config) + wrapped_encrypted_table = DynamoDBClientWrapperForDynamoDBTable(table = encrypted_table, client = boto3_client) + return aws_cryptography_internal_dynamodb.internaldafny.extern.Com_Amazonaws_Dynamodb.default__.DynamoDBClient(wrapped_encrypted_table) + except Exception as e: + return Wrappers.Result_Failure(_smithy_error_to_dafny_error(e)) + +aws_dbesdk_dynamodb_test_vectors.internaldafny.generated.CreateInterceptedDDBClient.default__ = default__ diff --git a/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/CreateWrappedDictItemEncryptor.py 
b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/CreateWrappedDictItemEncryptor.py
new file mode 100644
index 000000000..2c1232d5c
--- /dev/null
+++ b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/CreateWrappedDictItemEncryptor.py
@@ -0,0 +1,78 @@
+# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+import aws_dbesdk_dynamodb_test_vectors.internaldafny.generated.CreateWrappedItemEncryptor
+from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.dafny_to_smithy import aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DynamoDbItemEncryptorConfig
+from aws_dbesdk_dynamodb.encrypted.item import ItemEncryptor
+from smithy_dafny_standard_library.internaldafny.generated import Wrappers
+from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.errors import _smithy_error_to_dafny_error
+from aws_dbesdk_dynamodb_test_vectors.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.shim import DynamoDbItemEncryptorShim
+from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models import (
+    DecryptItemOutput,
+    EncryptItemOutput,
+)
+from aws_dbesdk_dynamodb.transform import (
+    dict_to_ddb,
+    ddb_to_dict,
+)
+
+class DynamoDBFormatToDictFormatWrapper:
+    """
+    Crypto Tools internal wrapper class to test Python dictionary-formatted ItemEncryptor paths.
+
+    Dafny TestVectors provide DynamoDB-formatted items to ItemEncryptors' encrypt_item and decrypt_item methods.
+    However, the legacy Python DDBEC ItemEncryptor also supports Python dictionary-formatted items.
+    This class converts Dafny TestVectors' DynamoDB-formatted items into Python dictionaries
+    and passes them to Python DBESDK's ItemEncryptor dictionary-formatted encryption methods.
+    This improves the test coverage of the ItemEncryptor.
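+
+    As an illustrative sketch (the attribute names and values below are made up, not taken from TestVectors):
+    a DynamoDB-formatted item such as {"partition_key": {"S": "id"}, "count": {"N": "1"}}
+    corresponds to the Python dictionary {"partition_key": "id", "count": Decimal("1")}
+    when converted with ddb_to_dict, and dict_to_ddb performs the reverse conversion.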
+ """ + def __init__(self, item_encryptor): + self._item_encryptor = item_encryptor + + def encrypt_item(self, encrypt_item_input): + # Convert DynamoDB-formatted item to dict-formatted item + dynamodb_plaintext_item = encrypt_item_input.plaintext_item + python_plaintext_item = ddb_to_dict(dynamodb_plaintext_item) + # Call native ItemEncryptor wrapper dict-formatted encryption method + encrypt_item_output_dict = self._item_encryptor.encrypt_python_item(python_plaintext_item) + python_encrypted_item = encrypt_item_output_dict.encrypted_item + # Convert dict-formatted encrypted item to DynamoDB-formatted encrypted item + dynamodb_encrypted_item = dict_to_ddb(python_encrypted_item) + encrypt_item_output_dynamodb = EncryptItemOutput( + encrypted_item = dynamodb_encrypted_item, + parsed_header = encrypt_item_output_dict.parsed_header + ) + # Surface DynamoDB-formatted encrypted item to Dafny TestVectors + return encrypt_item_output_dynamodb + + def decrypt_item(self, decrypt_item_input): + # Convert DynamoDB-formatted item to dict-formatted item + dynamodb_encrypted_item = decrypt_item_input.encrypted_item + python_encrypted_item = ddb_to_dict(dynamodb_encrypted_item) + # Call native ItemEncryptor wrapper dict-formatted encryption method + decrypt_item_output_dict = self._item_encryptor.decrypt_python_item(python_encrypted_item) + python_plaintext_item = decrypt_item_output_dict.plaintext_item + # Convert dict-formatted plaintext item to DynamoDB-formatted plaintext item + dynamodb_plaintext_item = dict_to_ddb(python_plaintext_item) + decrypt_item_output_dynamodb = DecryptItemOutput( + plaintext_item = dynamodb_plaintext_item, + parsed_header = decrypt_item_output_dict.parsed_header + ) + # Surface DynamoDB-formatted plaintext item to Dafny TestVectors + return decrypt_item_output_dynamodb + +class default__: + @staticmethod + def CreateWrappedItemEncryptor(dafny_encryption_config): + try: + native_encryption_config = aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DynamoDbItemEncryptorConfig(dafny_encryption_config) + item_encryptor = ItemEncryptor( + item_encryptor_config = native_encryption_config, + ) + wrapped_item_encryptor = DynamoDBFormatToDictFormatWrapper( + item_encryptor + ) + return Wrappers.Result_Success(DynamoDbItemEncryptorShim(wrapped_item_encryptor)) + except Exception as e: + return Wrappers.Result_Failure(_smithy_error_to_dafny_error(e)) + +aws_dbesdk_dynamodb_test_vectors.internaldafny.generated.CreateWrappedItemEncryptor.default__ = default__ diff --git a/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/CreateWrappedDynamoDbItemEncryptor.py b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/CreateWrappedDynamoDbItemEncryptor.py new file mode 100644 index 000000000..a690214e2 --- /dev/null +++ b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/CreateWrappedDynamoDbItemEncryptor.py @@ -0,0 +1,23 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 +import aws_dbesdk_dynamodb_test_vectors.internaldafny.generated.CreateWrappedItemEncryptor +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.dafny_to_smithy import aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DynamoDbItemEncryptorConfig +from aws_dbesdk_dynamodb.encrypted.item import ItemEncryptor +from smithy_dafny_standard_library.internaldafny.generated import Wrappers +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.errors import _smithy_error_to_dafny_error +from aws_dbesdk_dynamodb_test_vectors.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.shim import DynamoDbItemEncryptorShim + + +class default__: + @staticmethod + def CreateWrappedItemEncryptor(dafny_encryption_config): + try: + native_encryption_config = aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DynamoDbItemEncryptorConfig(dafny_encryption_config) + item_encryptor = ItemEncryptor( + item_encryptor_config = native_encryption_config, + ) + return Wrappers.Result_Success(DynamoDbItemEncryptorShim(item_encryptor)) + except Exception as e: + return Wrappers.Result_Failure(_smithy_error_to_dafny_error(e)) + +aws_dbesdk_dynamodb_test_vectors.internaldafny.generated.CreateWrappedItemEncryptor.default__ = default__ diff --git a/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/__init__.py b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/__init__.py new file mode 100644 index 000000000..148dc5eea --- /dev/null +++ b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/internaldafny/extern/__init__.py @@ -0,0 +1,4 @@ +# from . import ( +# CreateInterceptedDDBClient, +# CreateWrappedItemEncryptor, +# ) \ No newline at end of file diff --git a/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/__init__.py b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/__init__.py new file mode 100644 index 000000000..09be6133b --- /dev/null +++ b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. diff --git a/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/shim.py b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/shim.py new file mode 100644 index 000000000..ee4651238 --- /dev/null +++ b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb/shim.py @@ -0,0 +1,72 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
+ +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes import ( + CreateDynamoDbEncryptionBranchKeyIdSupplierInput_CreateDynamoDbEncryptionBranchKeyIdSupplierInput as DafnyCreateDynamoDbEncryptionBranchKeyIdSupplierInput, + CreateDynamoDbEncryptionBranchKeyIdSupplierOutput_CreateDynamoDbEncryptionBranchKeyIdSupplierOutput as DafnyCreateDynamoDbEncryptionBranchKeyIdSupplierOutput, + GetEncryptedDataKeyDescriptionInput_GetEncryptedDataKeyDescriptionInput as DafnyGetEncryptedDataKeyDescriptionInput, + GetEncryptedDataKeyDescriptionOutput_GetEncryptedDataKeyDescriptionOutput as DafnyGetEncryptedDataKeyDescriptionOutput, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.errors +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.errors import ( + CollectionOfErrors, + OpaqueError, + ServiceError, + _smithy_error_to_dafny_error, +) +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny +from typing import Any + + +import smithy_dafny_standard_library.internaldafny.generated.Wrappers as Wrappers +import aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.client as client_impl + + +class DynamoDbEncryptionShim( + aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbTypes.IDynamoDbEncryptionClient +): + def __init__(self, _impl: client_impl): + self._impl = _impl + + def CreateDynamoDbEncryptionBranchKeyIdSupplier(self, input): + try: + smithy_client_request: ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.CreateDynamoDbEncryptionBranchKeyIdSupplierInput + ) = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_CreateDynamoDbEncryptionBranchKeyIdSupplierInput( + input + ) + smithy_client_response = ( + self._impl.create_dynamo_db_encryption_branch_key_id_supplier( + smithy_client_request + ) + ) + return Wrappers.Result_Success( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_CreateDynamoDbEncryptionBranchKeyIdSupplierOutput( + smithy_client_response + ) + ) + except Exception as e: + return Wrappers.Result_Failure(_smithy_error_to_dafny_error(e)) + + def GetEncryptedDataKeyDescription(self, input): + try: + smithy_client_request: ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.models.GetEncryptedDataKeyDescriptionInput + ) = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_GetEncryptedDataKeyDescriptionInput( + input + ) + smithy_client_response = self._impl.get_encrypted_data_key_description( + smithy_client_request + ) + return Wrappers.Result_Success( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_GetEncryptedDataKeyDescriptionOutput( + smithy_client_response + ) + ) + except Exception as e: + return 
Wrappers.Result_Failure(_smithy_error_to_dafny_error(e)) diff --git a/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/__init__.py b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/__init__.py new file mode 100644 index 000000000..09be6133b --- /dev/null +++ b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. diff --git a/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/shim.py b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/shim.py new file mode 100644 index 000000000..683f2f802 --- /dev/null +++ b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/smithygenerated/aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor/shim.py @@ -0,0 +1,66 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +# Do not modify this file. This file is machine generated, and any changes to it will be overwritten. + +from aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes import ( + DecryptItemInput_DecryptItemInput as DafnyDecryptItemInput, + DecryptItemOutput_DecryptItemOutput as DafnyDecryptItemOutput, + EncryptItemInput_EncryptItemInput as DafnyEncryptItemInput, + EncryptItemOutput_EncryptItemOutput as DafnyEncryptItemOutput, +) +import aws_dbesdk_dynamodb.internaldafny.generated.module_ +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.dafny_to_smithy +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.errors +from aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.errors import ( + CollectionOfErrors, + OpaqueError, + ServiceError, + _smithy_error_to_dafny_error, +) +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.smithy_to_dafny +from typing import Any + + +import smithy_dafny_standard_library.internaldafny.generated.Wrappers as Wrappers +import aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes +import aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.client as client_impl + + +class DynamoDbItemEncryptorShim( + aws_dbesdk_dynamodb.internaldafny.generated.AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes.IDynamoDbItemEncryptorClient +): + def __init__(self, _impl: client_impl): + self._impl = _impl + + def EncryptItem(self, input): + try: + smithy_client_request: ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models.EncryptItemInput + ) = 
aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_EncryptItemInput( + input + ) + smithy_client_response = self._impl.encrypt_item(smithy_client_request) + return Wrappers.Result_Success( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_EncryptItemOutput( + smithy_client_response + ) + ) + except Exception as e: + return Wrappers.Result_Failure(_smithy_error_to_dafny_error(e)) + + def DecryptItem(self, input): + try: + smithy_client_request: ( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.models.DecryptItemInput + ) = aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.dafny_to_smithy.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DecryptItemInput( + input + ) + smithy_client_response = self._impl.decrypt_item(smithy_client_request) + return Wrappers.Result_Success( + aws_dbesdk_dynamodb.smithygenerated.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor.smithy_to_dafny.aws_cryptography_dbencryptionsdk_dynamodb_itemencryptor_DecryptItemOutput( + smithy_client_response + ) + ) + except Exception as e: + return Wrappers.Result_Failure(_smithy_error_to_dafny_error(e)) diff --git a/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/waiting_boto3_ddb_client.py b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/waiting_boto3_ddb_client.py new file mode 100644 index 000000000..ea99a8796 --- /dev/null +++ b/TestVectors/runtimes/python/src/aws_dbesdk_dynamodb_test_vectors/waiting_boto3_ddb_client.py @@ -0,0 +1,44 @@ +import boto3 + +class WaitingLocalDynamoClient: + """ + boto3 DynamoDB client wrapper that wraps `create_table` and `delete_table` methods + and connects to localhost:8000. + If overridden methods are called on this client, they will block returning until + the table is created/deleted. + This is the expected behavior of SDK clients in our Dafny code. + All other methods besides these are unchanged and will call the boto3 client directly. 
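+
+    Illustrative usage (the table name and non-key arguments are placeholders):
+        client = WaitingLocalDynamoClient()
+        client.create_table(TableName="ExampleTable", ...)  # returns once the 'table_exists' waiter succeeds
+        client.delete_table(TableName="ExampleTable")       # returns once the 'table_not_exists' waiter succeeds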
+ """ + def __init__(self): + self._client = boto3.client("dynamodb", endpoint_url="http://localhost:8000") + + def __getattr__(self, name): + if hasattr(self._client, name): + original_method = getattr(self._client, name) + + if name == 'create_table': + return self._create_table_with_wait(original_method) + elif name == 'delete_table': + return self._delete_table_with_wait(original_method) + + return original_method + + raise AttributeError(f"'{type(self).__name__}' object has no attribute '{name}'") + + def _create_table_with_wait(self, create_method): + def wrapper(*args, **kwargs): + response = create_method(*args, **kwargs) # Call the original create_table method + table_name = kwargs.get('TableName') + waiter = self._client.get_waiter('table_exists') + waiter.wait(TableName=table_name) + return response + return wrapper + + def _delete_table_with_wait(self, delete_method): + def wrapper(*args, **kwargs): + response = delete_method(*args, **kwargs) # Call the original delete_table method + table_name = kwargs.get('TableName') + waiter = self._client.get_waiter('table_not_exists') + waiter.wait(TableName=table_name) + return response + return wrapper diff --git a/TestVectors/runtimes/python/test/__init__.py b/TestVectors/runtimes/python/test/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/TestVectors/runtimes/python/test/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/TestVectors/runtimes/python/test/client/__init__.py b/TestVectors/runtimes/python/test/client/__init__.py new file mode 100644 index 000000000..fa977e22f --- /dev/null +++ b/TestVectors/runtimes/python/test/client/__init__.py @@ -0,0 +1,3 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +"""Stub to allow relative imports of examples from tests.""" diff --git a/TestVectors/runtimes/python/test/client/test_dafny_wrapper.py b/TestVectors/runtimes/python/test/client/test_dafny_wrapper.py new file mode 100644 index 000000000..73a6607ca --- /dev/null +++ b/TestVectors/runtimes/python/test/client/test_dafny_wrapper.py @@ -0,0 +1,26 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +Wrapper file for executing Dafny tests from pytest. +This allows us to import modules required by Dafny-generated tests +before executing Dafny-generated tests. +pytest will find and execute the `test_dafny` method below, +which will execute the `internaldafny_test_executor.py` file in the `dafny` directory. +""" + +import sys + +# Different from standard test_dafny_wrapper due to weird test structure. +test_dir = '/'.join(__file__.split("/")[:-2]) + +sys.path.append(test_dir + "/internaldafny/extern") +sys.path.append(test_dir + "/internaldafny/generated") + +# Import extern to use an EncryptedClient as the wrapped DBESDK client. +import aws_dbesdk_dynamodb_test_vectors.internaldafny.extern.CreateInterceptedDDBClient +# Import extern to use the ItemEncryptor with DDB JSON-formatted items. +# (EncryptedClients use DDB JSON-formatted items by default.) 
+import aws_dbesdk_dynamodb_test_vectors.internaldafny.extern.CreateWrappedDynamoDbItemEncryptor
+
+def test_dafny():
+    from ..internaldafny.generated import __main__
\ No newline at end of file
diff --git a/TestVectors/runtimes/python/test/internaldafny/__init__.py b/TestVectors/runtimes/python/test/internaldafny/__init__.py
new file mode 100644
index 000000000..f94fd12a2
--- /dev/null
+++ b/TestVectors/runtimes/python/test/internaldafny/__init__.py
@@ -0,0 +1,2 @@
+# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
diff --git a/TestVectors/runtimes/python/test/resource/__init__.py b/TestVectors/runtimes/python/test/resource/__init__.py
new file mode 100644
index 000000000..fa977e22f
--- /dev/null
+++ b/TestVectors/runtimes/python/test/resource/__init__.py
@@ -0,0 +1,3 @@
+# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+"""Stub to allow relative imports of examples from tests."""
diff --git a/TestVectors/runtimes/python/test/resource/test_dafny_wrapper.py b/TestVectors/runtimes/python/test/resource/test_dafny_wrapper.py
new file mode 100644
index 000000000..f19423f2d
--- /dev/null
+++ b/TestVectors/runtimes/python/test/resource/test_dafny_wrapper.py
@@ -0,0 +1,64 @@
+# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+"""
+Wrapper file for executing Dafny tests from pytest.
+This allows us to import modules required by Dafny-generated tests
+before executing Dafny-generated tests.
+pytest will find and execute the `test_dafny` method below,
+which will execute the `internaldafny_test_executor.py` file in the `dafny` directory.
+"""
+
+import sys
+from functools import partial
+
+# Different from standard test_dafny_wrapper due to weird test structure.
+test_dir = '/'.join(__file__.split("/")[:-2])
+
+sys.path.append(test_dir + "/internaldafny/extern")
+sys.path.append(test_dir + "/internaldafny/generated")
+
+# Import extern to use an EncryptedResource as the wrapped DBESDK client.
+import aws_dbesdk_dynamodb_test_vectors.internaldafny.extern.CreateInterceptedDDBResource
+# Import extern to use the ItemEncryptor with Python dictionary-formatted items.
+# (EncryptedResources use Python dictionary-formatted items.)
+import aws_dbesdk_dynamodb_test_vectors.internaldafny.extern.CreateWrappedDictItemEncryptor
+
+# Remove invalid tests.
+# Supported operations on Resources that are also supported by DBESDK are:
+# - batch_get_item
+# - batch_write_item
+# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/service-resource/index.html
+#
+# However, Resources can provide Tables.
+# Operations that are unsupported on Resources but are supported by the provided Tables are:
+# - put_item
+# - get_item
+# - query
+# - scan
+# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/table/index.html#DynamoDB.Table
+# These operations will be tested on EncryptedResources via provided EncryptedTables.
+#
+# Operations that are unsupported on both Resources and Tables but are supported by DBESDK are:
+# - transact_get_items
+# - transact_write_items
+# Remove any tests that call unsupported operations by overriding the test method to do nothing.
+# If more tests that call these operations are added, remove them below.
+# If the list below becomes unmaintainable, or if other languages add clients with unsupported operations,
+# refactor the Dafny code to conditionally call tests based on whether the client supports the operation under test.
+
+def EmptyTest(*args, **kwargs):
+    print(f"Skipping test {kwargs['test_name']} because {kwargs['reason']}")
+
+aws_dbesdk_dynamodb_test_vectors.internaldafny.generated.DdbEncryptionTestVectors.TestVectorConfig.BasicIoTestTransactWriteItems = partial(
+    EmptyTest,
+    test_name="BasicIoTestTransactWriteItems",
+    reason="neither DDB resources nor DDB tables support transact_write_items"
+)
+aws_dbesdk_dynamodb_test_vectors.internaldafny.generated.DdbEncryptionTestVectors.TestVectorConfig.BasicIoTestTransactGetItems = partial(
+    EmptyTest,
+    test_name="BasicIoTestTransactGetItems",
+    reason="neither DDB resources nor DDB tables support transact_get_items"
+)
+
+def test_dafny():
+    from ..internaldafny.generated import __main__
\ No newline at end of file
diff --git a/TestVectors/runtimes/python/test/table/__init__.py b/TestVectors/runtimes/python/test/table/__init__.py
new file mode 100644
index 000000000..fa977e22f
--- /dev/null
+++ b/TestVectors/runtimes/python/test/table/__init__.py
@@ -0,0 +1,3 @@
+# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+"""Stub to allow relative imports of examples from tests."""
diff --git a/TestVectors/runtimes/python/test/table/test_dafny_wrapper.py b/TestVectors/runtimes/python/test/table/test_dafny_wrapper.py
new file mode 100644
index 000000000..390937d9a
--- /dev/null
+++ b/TestVectors/runtimes/python/test/table/test_dafny_wrapper.py
@@ -0,0 +1,63 @@
+# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0
+"""
+Wrapper file for executing Dafny tests from pytest.
+This allows us to import modules required by Dafny-generated tests
+before executing Dafny-generated tests.
+pytest will find and execute the `test_dafny` method below,
+which will execute the `internaldafny_test_executor.py` file in the `dafny` directory.
+"""
+
+import sys
+from functools import partial
+# Different from standard test_dafny_wrapper due to weird test structure.
+test_dir = '/'.join(__file__.split("/")[:-2])
+
+sys.path.append(test_dir + "/internaldafny/extern")
+sys.path.append(test_dir + "/internaldafny/generated")
+
+# These imports set up the tests to use:
+# - An EncryptedTable with a shim to make it appear to Dafny-generated code as a DBESDK client
+# - A DictItemEncryptor with a shim to take in DDB-formatted JSON and return DDB-formatted JSON
+import aws_dbesdk_dynamodb_test_vectors.internaldafny.extern.CreateInterceptedDDBTable
+import aws_dbesdk_dynamodb_test_vectors.internaldafny.extern.CreateWrappedDictItemEncryptor
+
+# Remove invalid tests.
+# Supported operations on Tables that are also supported by DBESDK are:
+# - put_item
+# - get_item
+# - query
+# - scan
+# - update_item
+# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb/table/index.html#DynamoDB.Table
+#
+# Operations that are unsupported on Tables but are supported by DBESDK are:
+# - transact_get_items
+# - transact_write_items
+# - batch_get_item
+# Remove any tests that call unsupported operations by overriding the test method to do nothing.
+# If more tests that call these operations are added, remove them below.
+# If the list below becomes unmaintainable, or if other languages add clients with unsupported operations, +# refactor the Dafny code to conditionally call tests based on whether the client supports the operation under test. + +def EmptyTest(*args, **kwargs): + print(f"Skipping test {kwargs['test_name']} because {kwargs['reason']}") + +aws_dbesdk_dynamodb_test_vectors.internaldafny.generated.DdbEncryptionTestVectors.TestVectorConfig.BasicIoTestTransactGetItems = partial( + EmptyTest, + test_name="BasicIoTestTransactGetItems", + reason="DDB tables do not support transact_get_items" +) +aws_dbesdk_dynamodb_test_vectors.internaldafny.generated.DdbEncryptionTestVectors.TestVectorConfig.BasicIoTestTransactWriteItems = partial( + EmptyTest, + test_name="BasicIoTestTransactWriteItems", + reason="DDB tables do not support transact_write_items" +) +aws_dbesdk_dynamodb_test_vectors.internaldafny.generated.DdbEncryptionTestVectors.TestVectorConfig.BasicIoTestBatchGetItems = partial( + EmptyTest, + test_name="BasicIoTestBatchGetItems", + reason="DDB tables do not support batch_get_item" +) + +def test_dafny(): + from ..internaldafny.generated import __main__ \ No newline at end of file diff --git a/TestVectors/runtimes/python/tox.ini b/TestVectors/runtimes/python/tox.ini new file mode 100644 index 000000000..4f64adc59 --- /dev/null +++ b/TestVectors/runtimes/python/tox.ini @@ -0,0 +1,26 @@ +[tox] +isolated_build = True +envlist = + py{311,312,313} + +[testenv] +skip_install = true +allowlist_externals = poetry +passenv = AWS_* +commands_pre = + poetry lock + poetry install +commands = + poetry run pytest test/ -s -v + +[testenv:client] +commands = + poetry run pytest test/client -s -v + +[testenv:resource] +commands = + poetry run pytest test/resource -s -v + +[testenv:table] +commands = + poetry run pytest test/table -s -v diff --git a/submodules/MaterialProviders b/submodules/MaterialProviders index f033b9157..1fa8a4a67 160000 --- a/submodules/MaterialProviders +++ b/submodules/MaterialProviders @@ -1 +1 @@ -Subproject commit f033b915701eaa53d97019af61b96a51fed43483 +Subproject commit 1fa8a4a67485f32b01adb3b250e783a59c58bf1e diff --git a/submodules/smithy-dafny b/submodules/smithy-dafny index 2f83e28ad..feacf4a60 160000 --- a/submodules/smithy-dafny +++ b/submodules/smithy-dafny @@ -1 +1 @@ -Subproject commit 2f83e28ad9532b24c93d2229476c9a268355d338 +Subproject commit feacf4a60854532f16da2ef7c8a5d310116dc951