diff --git a/.github/workflows/pull.yml b/.github/workflows/pull.yml index 566c62f731..d40b936536 100644 --- a/.github/workflows/pull.yml +++ b/.github/workflows/pull.yml @@ -10,10 +10,10 @@ jobs: steps: - name: Populate Dafny versions list id: populate-dafny-versions-list - run: echo "dafny-versions-list=['4.7.0','4.9.1']" >> $GITHUB_OUTPUT + run: echo "dafny-versions-list=['4.7.0','4.9.1','nightly-2025-06-29-f5ca47b']" >> $GITHUB_OUTPUT - name: Populate Dafny versions list for "only new versions" languages (Python) id: populate-only-new-dafny-versions-list - run: echo "only-new-dafny-versions-list=['4.9.1']" >> $GITHUB_OUTPUT + run: echo "only-new-dafny-versions-list=['4.9.1','nightly-2025-06-29-f5ca47b']" >> $GITHUB_OUTPUT outputs: dafny-version-list: ${{ steps.populate-dafny-versions-list.outputs.dafny-versions-list }} only-new-dafny-version-list: ${{ steps.populate-only-new-dafny-versions-list.outputs.only-new-dafny-versions-list }} diff --git a/.gitmodules b/.gitmodules index c1d0ecb64c..0cd030a60a 100644 --- a/.gitmodules +++ b/.gitmodules @@ -4,4 +4,3 @@ [submodule "TestModels/dafny-dependencies/dafny"] path = TestModels/dafny-dependencies/dafny url = https://github.com/dafny-lang/dafny.git - branch = actions-and-streaming-stdlibs \ No newline at end of file diff --git a/SmithyDafnyMakefile.mk b/SmithyDafnyMakefile.mk index 76d3d60526..5e8a437a74 100644 --- a/SmithyDafnyMakefile.mk +++ b/SmithyDafnyMakefile.mk @@ -30,6 +30,7 @@ # of Model directories # AWS_SDK_CMD -- the `--aws-sdk` command to generate AWS SDK style interfaces. # STD_LIBRARY -- path from this file to the StandardLibrary Dafny project. +# STREAMING_SUPPORT -- path from this file to the StreamingSupport Dafny project. # SMITHY_DEPS -- path from this file to smithy dependencies, such as custom traits. # GRADLEW -- the gradlew to use when building Java runtimes. 
# defaults to $(SMITHY_DAFNY_ROOT)/codegen/gradlew @@ -98,7 +99,7 @@ verify: --log-format csv \ --verification-time-limit $(VERIFY_TIMEOUT) \ --resource-limit $(MAX_RESOURCE_COUNT) \ - $(if $(USE_DAFNY_STANDARD_LIBRARIES), --library:$(PROJECT_ROOT)/$(STD_LIBRARY)/bin/DafnyStandardLibraries-smithy-dafny-subset.doo, ) \ + $(if $(USES_STREAMING), --library:$(PROJECT_ROOT)/$(STREAMING_SUPPORT)/bin/DafnyStandardLibraries-smithy-dafny-subset.doo, ) \ $(DAFNY_OPTIONS) \ % @@ -115,7 +116,7 @@ verify_single: --log-format text \ --verification-time-limit $(VERIFY_TIMEOUT) \ --resource-limit $(MAX_RESOURCE_COUNT) \ - $(if $(USE_DAFNY_STANDARD_LIBRARIES), --library:$(PROJECT_ROOT)/$(STD_LIBRARY)/bin/DafnyStandardLibraries-smithy-dafny-subset.doo, ) \ + $(if $(USES_STREAMING), --library:$(PROJECT_ROOT)/$(STREAMING_SUPPORT)/bin/DafnyStandardLibraries-smithy-dafny-subset.doo, ) \ $(DAFNY_OPTIONS) \ $(if ${PROC},-proc:*$(PROC)*,) \ $(FILE) @@ -215,7 +216,7 @@ transpile_implementation: $(DAFNY_OTHER_FILES) \ $(TRANSPILE_MODULE_NAME) \ $(if $(strip $(STD_LIBRARY)) , --library:$(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy, ) \ - $(if $(USE_DAFNY_STANDARD_LIBRARIES) , --library:$(PROJECT_ROOT)/$(STD_LIBRARY)/bin/DafnyStandardLibraries-smithy-dafny-subset.doo, ) \ + $(if $(USES_STREAMING) , --library:$(PROJECT_ROOT)/$(STREAMING_SUPPORT)/bin/DafnyStandardLibraries-smithy-dafny-subset.doo, ) \ $(TRANSLATION_RECORD) \ $(TRANSPILE_DEPENDENCIES) @@ -255,7 +256,7 @@ transpile_test: $(DAFNY_OPTIONS) \ $(DAFNY_OTHER_FILES) \ $(if $(strip $(STD_LIBRARY)) , --library:$(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy, ) \ - $(if $(USE_DAFNY_STANDARD_LIBRARIES) , --library:$(PROJECT_ROOT)/$(STD_LIBRARY)/bin/DafnyStandardLibraries-smithy-dafny-subset.doo, ) \ + $(if $(USES_STREAMING) , --library:$(PROJECT_ROOT)/$(STREAMING_SUPPORT)/bin/DafnyStandardLibraries-smithy-dafny-subset.doo, ) \ $(TRANSLATION_RECORD) \ $(SOURCE_TRANSLATION_RECORD) \ $(TRANSPILE_MODULE_NAME) \ @@ -264,7 +265,8 @@ transpile_test: # 
If we are not the StandardLibrary, transpile the StandardLibrary. # Transpile all other dependencies transpile_dependencies: - $(if $(strip $(STD_LIBRARY)), $(MAKE) -C $(PROJECT_ROOT)/$(STD_LIBRARY) transpile_implementation_$(LANG) USE_DAFNY_STANDARD_LIBRARIES=$(USE_DAFNY_STANDARD_LIBRARIES), ) + $(if $(strip $(STD_LIBRARY)), $(MAKE) -C $(PROJECT_ROOT)/$(STD_LIBRARY) transpile_implementation_$(LANG) USES_STREAMING=$(USES_STREAMING), ) + $(if $(USES_STREAMING), $(MAKE) -C $(PROJECT_ROOT)/$(STREAMING_SUPPORT) transpile_implementation_$(LANG) USES_STREAMING=$(USES_STREAMING), ) $(patsubst %, $(MAKE) -C $(PROJECT_ROOT)/% transpile_implementation_$(LANG);, $(PROJECT_DEPENDENCIES)) transpile_dependencies_test: @@ -337,7 +339,8 @@ _polymorph_wrapped: $(POLYMORPH_OPTIONS)"; _polymorph_dependencies: - $(if $(strip $(STD_LIBRARY)), $(MAKE) -C $(PROJECT_ROOT)/$(STD_LIBRARY) polymorph_$(POLYMORPH_LANGUAGE_TARGET) LIBRARY_ROOT=$(PROJECT_ROOT)/$(STD_LIBRARY) USE_DAFNY_STANDARD_LIBRARIES=$(USE_DAFNY_STANDARD_LIBRARIES), ) + $(if $(strip $(STD_LIBRARY)), $(MAKE) -C $(PROJECT_ROOT)/$(STD_LIBRARY) polymorph_$(POLYMORPH_LANGUAGE_TARGET) LIBRARY_ROOT=$(PROJECT_ROOT)/$(STD_LIBRARY) USES_STREAMING=$(USES_STREAMING), ) + $(if $(USES_STREAMING), $(MAKE) -C $(PROJECT_ROOT)/$(STREAMING_SUPPORT) polymorph_$(POLYMORPH_LANGUAGE_TARGET) LIBRARY_ROOT=$(PROJECT_ROOT)/$(STREAMING_SUPPORT) USES_STREAMING=$(USES_STREAMING), ) @$(foreach dependency, \ $(PROJECT_DEPENDENCIES), \ $(MAKE) -C $(PROJECT_ROOT)/$(dependency) polymorph_$(POLYMORPH_LANGUAGE_TARGET); \ @@ -359,7 +362,8 @@ polymorph_code_gen: _polymorph_code_gen: OUTPUT_DAFNY=\ --output-dafny $(if $(DIR_STRUCTURE_V2), $(LIBRARY_ROOT)/dafny/$(SERVICE)/Model, $(LIBRARY_ROOT)/Model) _polymorph_code_gen: INPUT_DAFNY=\ - --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy + --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy \ + $(if $(USES_STREAMING), --include-dafny $(PROJECT_ROOT)/$(STREAMING_SUPPORT)/src/Index.dfy, ) 
_polymorph_code_gen: OUTPUT_DOTNET=\ $(if $(DIR_STRUCTURE_V2), --output-dotnet $(LIBRARY_ROOT)/runtimes/net/Generated/$(SERVICE)/, --output-dotnet $(LIBRARY_ROOT)/runtimes/net/Generated/) _polymorph_code_gen: OUTPUT_JAVA=--output-java $(LIBRARY_ROOT)/runtimes/java/src/main/smithy-generated @@ -386,7 +390,8 @@ polymorph_dafny: _polymorph_dafny: OUTPUT_DAFNY=\ --output-dafny $(if $(DIR_STRUCTURE_V2), $(LIBRARY_ROOT)/dafny/$(SERVICE)/Model, $(LIBRARY_ROOT)/Model) _polymorph_dafny: INPUT_DAFNY=\ - --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy + --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy \ + $(if $(USES_STREAMING), --include-dafny $(PROJECT_ROOT)/$(STREAMING_SUPPORT)/src/Index.dfy, ) _polymorph_dafny: _polymorph dafny: polymorph_dafny verify @@ -409,7 +414,8 @@ polymorph_dotnet: _polymorph_dotnet: OUTPUT_DOTNET=\ $(if $(DIR_STRUCTURE_V2), --output-dotnet $(LIBRARY_ROOT)/runtimes/net/Generated/$(SERVICE)/, --output-dotnet $(LIBRARY_ROOT)/runtimes/net/Generated/) _polymorph_dotnet: INPUT_DAFNY=\ - --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy + --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy \ + $(if $(USES_STREAMING), --include-dafny $(PROJECT_ROOT)/$(STREAMING_SUPPORT)/src/Index.dfy, ) _polymorph_dotnet: _polymorph # Generates java code for all namespaces in this project @@ -427,7 +433,8 @@ polymorph_java: _polymorph_java: OUTPUT_JAVA=--output-java $(LIBRARY_ROOT)/runtimes/java/src/main/smithy-generated _polymorph_java: OUTPUT_JAVA_TEST=--output-java-test $(LIBRARY_ROOT)/runtimes/java/src/test/smithy-generated _polymorph_java: INPUT_DAFNY=\ - --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy + --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy \ + $(if $(USES_STREAMING), --include-dafny $(PROJECT_ROOT)/$(STREAMING_SUPPORT)/src/Index.dfy, ) _polymorph_java: _polymorph # Generates python code for all namespaces in this project @@ -593,6 +600,7 @@ transpile_dependencies_java: 
transpile_dependencies # Locally deploy all other dependencies mvn_local_deploy_dependencies: $(if $(strip $(STD_LIBRARY)), $(MAKE) -C $(PROJECT_ROOT)/$(STD_LIBRARY) mvn_local_deploy, ) + $(if $(USES_STREAMING), $(MAKE) -C $(PROJECT_ROOT)/$(STREAMING_SUPPORT) mvn_local_deploy, ) $(patsubst %, $(MAKE) -C $(PROJECT_ROOT)/% mvn_local_deploy;, $(PROJECT_DEPENDENCIES)) # The Java MUST all exist already through the transpile step. diff --git a/TestModels/SharedMakefile.mk b/TestModels/SharedMakefile.mk index 2a1269bcb5..71523876e3 100644 --- a/TestModels/SharedMakefile.mk +++ b/TestModels/SharedMakefile.mk @@ -7,6 +7,7 @@ PROJECT_ROOT := $(abspath $(dir $(abspath $(lastword $(MAKEFILE_LIST))))) SMITHY_DAFNY_ROOT := $(PROJECT_ROOT)/.. STD_LIBRARY := dafny-dependencies/StandardLibrary +STREAMING_SUPPORT := dafny-dependencies/StreamingSupport include $(SMITHY_DAFNY_ROOT)/SmithyDafnyMakefile.mk @@ -16,7 +17,8 @@ include $(SMITHY_DAFNY_ROOT)/SmithyDafnyMakefile.mk _polymorph_code_gen: OUTPUT_DAFNY=\ --output-dafny $(if $(DIR_STRUCTURE_V2), $(LIBRARY_ROOT)/dafny/$(SERVICE)/Model, $(LIBRARY_ROOT)/Model) _polymorph_code_gen: INPUT_DAFNY=\ - --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy + --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy \ + $(if $(USES_STREAMING), --include-dafny $(PROJECT_ROOT)/$(STREAMING_SUPPORT)/src/Index.dfy, ) _polymorph_code_gen: OUTPUT_DOTNET=\ $(if $(DIR_STRUCTURE_V2), --output-dotnet $(LIBRARY_ROOT)/runtimes/net/Generated/$(SERVICE)/, --output-dotnet $(LIBRARY_ROOT)/runtimes/net/Generated/) _polymorph_code_gen: OUTPUT_JAVA=--output-java $(LIBRARY_ROOT)/runtimes/java/src/main/smithy-generated @@ -45,7 +47,8 @@ polymorph_dafny: _polymorph_dafny: OUTPUT_DAFNY=\ --output-dafny $(if $(DIR_STRUCTURE_V2), $(LIBRARY_ROOT)/dafny/$(SERVICE)/Model, $(LIBRARY_ROOT)/Model) _polymorph_dafny: INPUT_DAFNY=\ - --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy + --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy \ + $(if 
$(USES_STREAMING), --include-dafny $(PROJECT_ROOT)/$(STREAMING_SUPPORT)/src/Index.dfy, ) _polymorph_dafny: _polymorph _polymorph_dafny: OUTPUT_DAFNY_WRAPPED=\ --output-dafny $(if $(DIR_STRUCTURE_V2), $(LIBRARY_ROOT)/dafny/$(SERVICE)/Model, $(LIBRARY_ROOT)/Model) \ @@ -55,7 +58,8 @@ _polymorph_dafny: _polymorph_wrapped _polymorph_java: OUTPUT_JAVA=--output-java $(LIBRARY_ROOT)/runtimes/java/src/main/smithy-generated _polymorph_java: OUTPUT_JAVA_TEST=--output-java-test $(LIBRARY_ROOT)/runtimes/java/src/test/smithy-generated _polymorph_java: INPUT_DAFNY=\ - --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy + --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy \ + $(if $(USES_STREAMING), --include-dafny $(PROJECT_ROOT)/$(STREAMING_SUPPORT)/src/Index.dfy, ) _polymorph_java: _polymorph _polymorph_java: OUTPUT_JAVA_WRAPPED=--output-java $(LIBRARY_ROOT)/runtimes/java/src/main/smithy-generated _polymorph_java: _polymorph_wrapped @@ -63,7 +67,8 @@ _polymorph_java: _polymorph_wrapped _polymorph_dotnet: OUTPUT_DOTNET=\ $(if $(DIR_STRUCTURE_V2), --output-dotnet $(LIBRARY_ROOT)/runtimes/net/Generated/$(SERVICE)/, --output-dotnet $(LIBRARY_ROOT)/runtimes/net/Generated/) _polymorph_dotnet: INPUT_DAFNY=\ - --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy + --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy \ + $(if $(USES_STREAMING), --include-dafny $(PROJECT_ROOT)/$(STREAMING_SUPPORT)/src/Index.dfy, ) _polymorph_dotnet: _polymorph _polymorph_dotnet: OUTPUT_DOTNET_WRAPPED=\ $(if $(DIR_STRUCTURE_V2), --output-dotnet $(LIBRARY_ROOT)/runtimes/net/Generated/Wrapped/$(SERVICE)/, --output-dotnet $(LIBRARY_ROOT)/runtimes/net/Generated/Wrapped) @@ -77,7 +82,8 @@ _polymorph_python: _polymorph_wrapped _polymorph_rust: OUTPUT_RUST_WRAPPED=--output-rust $(LIBRARY_ROOT)/runtimes/rust _polymorph_rust: INPUT_DAFNY=\ - --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy + --include-dafny $(PROJECT_ROOT)/$(STD_LIBRARY)/src/Index.dfy \ + $(if 
$(USES_STREAMING), --include-dafny $(PROJECT_ROOT)/$(STREAMING_SUPPORT)/src/Index.dfy, ) # For several TestModels we've just manually written the code generation target, # So we just want to ensure we can transpile and pass the tests in CI. # For those, make polymorph_rust should just be a no-op. diff --git a/TestModels/Streaming/Makefile b/TestModels/Streaming/Makefile index 0aca2882f2..b120520f60 100644 --- a/TestModels/Streaming/Makefile +++ b/TestModels/Streaming/Makefile @@ -3,7 +3,7 @@ CORES=2 -USE_DAFNY_STANDARD_LIBRARIES=1 +USES_STREAMING=1 ENABLE_EXTERN_PROCESSING=1 @@ -20,8 +20,9 @@ SERVICE_DEPS_SimpleStreaming := SMITHY_DEPS=dafny-dependencies/Model/traits.smithy -# This project has no dependencies -# DEPENDENT-MODELS:= +PROJECT_DEPENDENCIES := + +PROJECT_INDEX := # Python diff --git a/TestModels/Streaming/Model/SimpleStreamingTypes.dfy b/TestModels/Streaming/Model/SimpleStreamingTypes.dfy index c047e70e0d..aed5b4754d 100644 --- a/TestModels/Streaming/Model/SimpleStreamingTypes.dfy +++ b/TestModels/Streaming/Model/SimpleStreamingTypes.dfy @@ -2,12 +2,13 @@ // SPDX-License-Identifier: Apache-2.0 // Do not modify this file. This file is machine generated, and any changes to it will be overwritten. include "../../dafny-dependencies/StandardLibrary/src/Index.dfy" +include "../../dafny-dependencies/StreamingSupport/src/Index.dfy" module {:extern "simple.streaming.internaldafny.types" } SimpleStreamingTypes { import opened Wrappers import opened StandardLibrary.UInt import opened UTF8 - import opened Std.Streams + import opened StandardLibrary.Streams // Generic helpers for verification of mock/unit tests. 
datatype DafnyCallEvent = DafnyCallEvent(input: I, output: O) @@ -17,17 +18,17 @@ module {:extern "simple.streaming.internaldafny.types" } SimpleStreamingTypes nameonly number: int32 ) datatype BinaryOfOutput = | BinaryOfOutput ( - nameonly binary: ByteStream + nameonly binary: DataStream ) datatype ChunksInput = | ChunksInput ( - nameonly bytesIn: ByteStream , + nameonly bytesIn: DataStream , nameonly chunkSize: CountingInteger ) datatype ChunksOutput = | ChunksOutput ( - nameonly bytesOut: ByteStream + nameonly bytesOut: DataStream ) datatype CountBitsInput = | CountBitsInput ( - nameonly bits: ByteStream + nameonly bits: DataStream ) datatype CountBitsOutput = | CountBitsOutput ( nameonly sum: int32 @@ -79,12 +80,8 @@ module {:extern "simple.streaming.internaldafny.types" } SimpleStreamingTypes returns (output: Result) requires && ValidState() - // TODO: smithy-dafny isn't yet generating the `input.bits.Valid()` part. - requires - && input.bits.Valid() && History !in input.bits.Repr modifies Modifies - {History} , - History`CountBits , - input.bits.Repr + History`CountBits // Dafny will skip type parameters when generating a default decreases clause. decreases Modifies - {History} ensures @@ -106,8 +103,6 @@ module {:extern "simple.streaming.internaldafny.types" } SimpleStreamingTypes && ValidState() ensures BinaryOfEnsuresPublicly(input, output) ensures History.BinaryOf == old(History.BinaryOf) + [DafnyCallEvent(input, output)] - // TODO: smithy-dafny isn't yet generating this - ensures output.Success? 
==> output.value.binary.Valid() && fresh(output.value.binary.Repr) predicate ChunksEnsuresPublicly(input: ChunksInput , output: Result) // The public method to be called by library consumers @@ -123,14 +118,12 @@ module {:extern "simple.streaming.internaldafny.types" } SimpleStreamingTypes && ValidState() ensures ChunksEnsuresPublicly(input, output) ensures History.Chunks == old(History.Chunks) + [DafnyCallEvent(input, output)] - // TODO: smithy-dafny isn't yet generating this - ensures output.Success? ==> output.value.bytesOut.Valid() && fresh(output.value.bytesOut.Repr) } datatype SimpleStreamingConfig = | SimpleStreamingConfig ( ) - type StreamingBlob = ByteStream + type StreamingBlob = DataStream datatype Error = // Local Error structures are listed here | OverflowError ( @@ -180,7 +173,7 @@ abstract module AbstractSimpleStreamingService import opened Wrappers import opened StandardLibrary.UInt import opened UTF8 - import opened Std.Streams + import opened StandardLibrary.Streams import opened Types = SimpleStreamingTypes import Operations : AbstractSimpleStreamingOperations function method DefaultSimpleStreamingConfig(): SimpleStreamingConfig @@ -220,12 +213,8 @@ abstract module AbstractSimpleStreamingService returns (output: Result) requires && ValidState() - // TODO: smithy-dafny isn't yet generating the `input.bits.Valid()` parts. - requires - && input.bits.Valid() && History !in input.bits.Repr modifies Modifies - {History} , - History`CountBits , - input.bits.Repr + History`CountBits // Dafny will skip type parameters when generating a default decreases clause. 
decreases Modifies - {History} ensures @@ -233,10 +222,6 @@ abstract module AbstractSimpleStreamingService ensures CountBitsEnsuresPublicly(input, output) ensures History.CountBits == old(History.CountBits) + [DafnyCallEvent(input, output)] { - // TODO: It's not clear how to actually ensure this, - // since the internal config is not visible to the trait - // so it can't really be a precondition there. - assume {:axiom} Operations.ModifiesInternalConfig(config) !! input.bits.Repr; output := Operations.CountBits(config, input); History.CountBits := History.CountBits + [DafnyCallEvent(input, output)]; } @@ -256,8 +241,6 @@ abstract module AbstractSimpleStreamingService && ValidState() ensures BinaryOfEnsuresPublicly(input, output) ensures History.BinaryOf == old(History.BinaryOf) + [DafnyCallEvent(input, output)] - // TODO: smithy-dafny isn't yet generating this - ensures output.Success? ==> output.value.binary.Valid() && fresh(output.value.binary.Repr) { output := Operations.BinaryOf(config, input); History.BinaryOf := History.BinaryOf + [DafnyCallEvent(input, output)]; @@ -289,7 +272,7 @@ abstract module AbstractSimpleStreamingOperations { import opened Wrappers import opened StandardLibrary.UInt import opened UTF8 - import opened Std.Streams + import opened StandardLibrary.Streams import opened Types = SimpleStreamingTypes type InternalConfig predicate ValidInternalConfig?(config: InternalConfig) @@ -301,12 +284,8 @@ abstract module AbstractSimpleStreamingOperations { method CountBits ( config: InternalConfig , input: CountBitsInput ) returns (output: Result) requires - // TODO: smithy-dafny isn't yet generating the `input.bits.Valid()` part. && ValidInternalConfig?(config) - && input.bits.Valid() - && ModifiesInternalConfig(config) !! input.bits.Repr - // TODO: smithy-dafny isn't yet generating the `input.bits.Repr` part. 
- modifies ModifiesInternalConfig(config), input.bits.Repr + modifies ModifiesInternalConfig(config) // Dafny will skip type parameters when generating a default decreases clause. decreases ModifiesInternalConfig(config) ensures @@ -328,8 +307,6 @@ abstract module AbstractSimpleStreamingOperations { ensures && ValidInternalConfig?(config) ensures BinaryOfEnsuresPublicly(input, output) - // TODO: smithy-dafny isn't yet generating this - ensures output.Success? ==> output.value.binary.Valid() && fresh(output.value.binary.Repr) predicate ChunksEnsuresPublicly(input: ChunksInput , output: Result) @@ -340,8 +317,6 @@ abstract module AbstractSimpleStreamingOperations { returns (output: Result) requires && ValidInternalConfig?(config) - // TODO: smithy-dafny isn't yet generating this - && input.bytesIn.Valid() modifies ModifiesInternalConfig(config) // Dafny will skip type parameters when generating a default decreases clause. decreases ModifiesInternalConfig(config) diff --git a/TestModels/Streaming/codegen-patches/dafny/dafny-4.9.0.patch b/TestModels/Streaming/codegen-patches/dafny/dafny-4.9.0.patch deleted file mode 100644 index 4736142a23..0000000000 --- a/TestModels/Streaming/codegen-patches/dafny/dafny-4.9.0.patch +++ /dev/null @@ -1,94 +0,0 @@ -diff --git b/TestModels/Streaming/Model/SimpleStreamingTypes.dfy a/TestModels/Streaming/Model/SimpleStreamingTypes.dfy -index af6ad336f..50140cc6c 100644 ---- b/TestModels/Streaming/Model/SimpleStreamingTypes.dfy -+++ a/TestModels/Streaming/Model/SimpleStreamingTypes.dfy -@@ -79,8 +79,12 @@ module SimpleStreamingTypes - returns (output: Result) - requires - && ValidState() -+ // TODO: smithy-dafny isn't yet generating the `input.bits.Valid()` part. -+ requires -+ && input.bits.Valid() && History !in input.bits.Repr - modifies Modifies - {History} , -- History`CountBits -+ History`CountBits , -+ input.bits.Repr - // Dafny will skip type parameters when generating a default decreases clause. 
- decreases Modifies - {History} - ensures -@@ -102,6 +106,8 @@ module SimpleStreamingTypes - && ValidState() - ensures BinaryOfEnsuresPublicly(input, output) - ensures History.BinaryOf == old(History.BinaryOf) + [DafnyCallEvent(input, output)] -+ // TODO: smithy-dafny isn't yet generating this -+ ensures output.Success? ==> output.value.binary.Valid() && fresh(output.value.binary.Repr) - - predicate ChunksEnsuresPublicly(input: ChunksInput , output: Result) - // The public method to be called by library consumers -@@ -212,8 +218,12 @@ abstract module AbstractSimpleStreamingService - returns (output: Result) - requires - && ValidState() -+ // TODO: smithy-dafny isn't yet generating the `input.bits.Valid()` parts. -+ requires -+ && input.bits.Valid() && History !in input.bits.Repr - modifies Modifies - {History} , -- History`CountBits -+ History`CountBits , -+ input.bits.Repr - // Dafny will skip type parameters when generating a default decreases clause. - decreases Modifies - {History} - ensures -@@ -221,6 +231,10 @@ abstract module AbstractSimpleStreamingService - ensures CountBitsEnsuresPublicly(input, output) - ensures History.CountBits == old(History.CountBits) + [DafnyCallEvent(input, output)] - { -+ // TODO: It's not clear how to actually ensure this, -+ // since the internal config is not visible to the trait -+ // so it can't really be a precondition there. -+ assume {:axiom} Operations.ModifiesInternalConfig(config) !! input.bits.Repr; - output := Operations.CountBits(config, input); - History.CountBits := History.CountBits + [DafnyCallEvent(input, output)]; - } -@@ -240,6 +254,8 @@ abstract module AbstractSimpleStreamingService - && ValidState() - ensures BinaryOfEnsuresPublicly(input, output) - ensures History.BinaryOf == old(History.BinaryOf) + [DafnyCallEvent(input, output)] -+ // TODO: smithy-dafny isn't yet generating this -+ ensures output.Success? 
==> output.value.binary.Valid() && fresh(output.value.binary.Repr) - { - output := Operations.BinaryOf(config, input); - History.BinaryOf := History.BinaryOf + [DafnyCallEvent(input, output)]; -@@ -283,8 +299,12 @@ abstract module AbstractSimpleStreamingOperations { - method CountBits ( config: InternalConfig , input: CountBitsInput ) - returns (output: Result) - requires -- && ValidInternalConfig?(config) -- modifies ModifiesInternalConfig(config) -+ // TODO: smithy-dafny isn't yet generating the `input.bits.Valid()` part. -+ && ValidInternalConfig?(config) -+ && input.bits.Valid() -+ && ModifiesInternalConfig(config) !! input.bits.Repr -+ // TODO: smithy-dafny isn't yet generating the `input.bits.Repr` part. -+ modifies ModifiesInternalConfig(config), input.bits.Repr - // Dafny will skip type parameters when generating a default decreases clause. - decreases ModifiesInternalConfig(config) - ensures -@@ -306,6 +326,8 @@ abstract module AbstractSimpleStreamingOperations { - ensures - && ValidInternalConfig?(config) - ensures BinaryOfEnsuresPublicly(input, output) -+ // TODO: smithy-dafny isn't yet generating this -+ ensures output.Success? ==> output.value.binary.Valid() && fresh(output.value.binary.Repr) - - - predicate ChunksEnsuresPublicly(input: ChunksInput , output: Result) -@@ -316,6 +338,8 @@ abstract module AbstractSimpleStreamingOperations { - returns (output: Result) - requires - && ValidInternalConfig?(config) -+ // TODO: smithy-dafny isn't yet generating this -+ && input.bytesIn.Valid() - modifies ModifiesInternalConfig(config) - // Dafny will skip type parameters when generating a default decreases clause. 
- decreases ModifiesInternalConfig(config) diff --git a/TestModels/Streaming/dfyconfig.toml b/TestModels/Streaming/dfyconfig.toml new file mode 100644 index 0000000000..5ab3faf93a --- /dev/null +++ b/TestModels/Streaming/dfyconfig.toml @@ -0,0 +1,7 @@ +[options] +function-syntax = 3 +unicode-char = false + +library = [ + "../dafny-dependencies/StandardLibrary/bin/DafnyStandardLibraries-smithy-dafny-subset.doo" +] \ No newline at end of file diff --git a/TestModels/Streaming/runtimes/java/build.gradle.kts b/TestModels/Streaming/runtimes/java/build.gradle.kts new file mode 100644 index 0000000000..c49dcab232 --- /dev/null +++ b/TestModels/Streaming/runtimes/java/build.gradle.kts @@ -0,0 +1,81 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import java.io.File +import java.io.FileInputStream +import java.util.Properties +import java.net.URI +import javax.annotation.Nullable + +tasks.wrapper { + gradleVersion = "7.6" +} + +plugins { + `java-library` + `maven-publish` +} + +var props = Properties().apply { + load(FileInputStream(File(rootProject.rootDir, "../../project.properties"))) +} +var dafnyVersion = props.getProperty("dafnyVersion") + +group = "simple.streaming" +version = "1.0-SNAPSHOT" +description = "SimpleStreaming" + +java { + toolchain.languageVersion.set(JavaLanguageVersion.of(8)) + sourceSets["main"].java { + srcDir("src/main/java") + srcDir("src/main/dafny-generated") + srcDir("src/main/smithy-generated") + } + sourceSets["test"].java { + srcDir("src/test/java") + srcDir("src/test/dafny-generated") + srcDir("src/test/smithy-generated") + } +} + +repositories { + mavenCentral() + mavenLocal() +} + +dependencies { + implementation("org.dafny:DafnyRuntime:${dafnyVersion}") + implementation("software.amazon.smithy.dafny:conversion:0.1.1") + implementation("software.amazon.cryptography:StandardLibrary:1.0-SNAPSHOT") + 
implementation("software.amazon.cryptography:StreamingSupport:1.0-SNAPSHOT") + testImplementation("org.testng:testng:7.5") +} + +publishing { + publications.create("mavenLocal") { + groupId = group as String? + artifactId = description + from(components["java"]) + } + publications.create("maven") { + groupId = group as String? + artifactId = description + from(components["java"]) + } + repositories { mavenLocal() } +} + +tasks.withType() { + options.encoding = "UTF-8" +} + +tasks { + register("runTests", JavaExec::class.java) { + mainClass.set("TestsFromDafny") + classpath = sourceSets["test"].runtimeClasspath + } +} + +tasks.named("test") { + useTestNG() +} diff --git a/TestModels/Streaming/runtimes/java/src/main/java/Dafny/simple/streaming/__default.java b/TestModels/Streaming/runtimes/java/src/main/java/Dafny/simple/streaming/__default.java new file mode 100644 index 0000000000..2a5ee30cc6 --- /dev/null +++ b/TestModels/Streaming/runtimes/java/src/main/java/Dafny/simple/streaming/__default.java @@ -0,0 +1,5 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +package simple.streaming.internaldafny; + +public class __default extends _ExternBase___default {} diff --git a/TestModels/Streaming/runtimes/java/src/main/java/Dafny/simple/streaming/types/__default.java b/TestModels/Streaming/runtimes/java/src/main/java/Dafny/simple/streaming/types/__default.java new file mode 100644 index 0000000000..0272702b3e --- /dev/null +++ b/TestModels/Streaming/runtimes/java/src/main/java/Dafny/simple/streaming/types/__default.java @@ -0,0 +1,5 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +package simple.streaming.internaldafny.types; + +public class __default extends _ExternBase___default {} diff --git a/TestModels/Streaming/runtimes/java/src/test/java/simple/streaming/internaldafny/wrapped/__default.java b/TestModels/Streaming/runtimes/java/src/test/java/simple/streaming/internaldafny/wrapped/__default.java new file mode 100644 index 0000000000..ad89539025 --- /dev/null +++ b/TestModels/Streaming/runtimes/java/src/test/java/simple/streaming/internaldafny/wrapped/__default.java @@ -0,0 +1,32 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +package simple.streaming.internaldafny.wrapped; + +import Wrappers_Compile.Result; +import simple.streaming.SimpleStreaming; +import simple.streaming.ToNative; +import simple.streaming.internaldafny.types.Error; +import simple.streaming.internaldafny.types.ISimpleStreamingClient; +import simple.streaming.internaldafny.types.SimpleStreamingConfig; +import simple.streaming.wrapped.TestSimpleStreaming; + +public class __default extends _ExternBase___default { + + public static Result WrappedSimpleStreaming( + SimpleStreamingConfig config + ) { + simple.streaming.model.SimpleStreamingConfig wrappedConfig = + ToNative.SimpleStreamingConfig(config); + simple.streaming.SimpleStreaming impl = SimpleStreaming + .builder() + .SimpleStreamingConfig(wrappedConfig) + .build(); + TestSimpleStreaming wrappedClient = TestSimpleStreaming + .builder() + .impl(impl) + .build(); + return simple.streaming.internaldafny.__default.CreateSuccessOfClient( + wrappedClient + ); + } +} diff --git a/TestModels/Streaming/runtimes/python/pyproject.toml b/TestModels/Streaming/runtimes/python/pyproject.toml index b2a6f3351b..c36504276d 100644 --- a/TestModels/Streaming/runtimes/python/pyproject.toml +++ b/TestModels/Streaming/runtimes/python/pyproject.toml @@ -16,6 +16,7 @@ python = "^3.11.0" smithy-python = { path = 
"../../../../codegen/smithy-dafny-codegen-modules/smithy-python/python-packages/smithy-python", develop = false} smithy-dafny-standard-library = {path = "../../../dafny-dependencies/StandardLibrary/runtimes/python", develop = false} +smithy-dafny-streaming-support = {path = "../../../dafny-dependencies/StreamingSupport/runtimes/python", develop = false} DafnyRuntimePython = "^4.4.0" [tool.poetry.group.test.dependencies] diff --git a/TestModels/Streaming/src/Chunker.dfy b/TestModels/Streaming/src/Chunker.dfy index 8bcdedd95f..20057c6c1e 100644 --- a/TestModels/Streaming/src/Chunker.dfy +++ b/TestModels/Streaming/src/Chunker.dfy @@ -1,72 +1,316 @@ include "../Model/SimpleStreamingTypes.dfy" -module Chunker { +// Separate module not just for better code organization, +// but also to work around the conflict between the Dafny standard library +// Std.Wrappers module and the smithy-dafny specific Wrappers module: +// it's not currently possible to use both Result types in the same module. - import Std.BoundedInts +module {:options "--function-syntax:4"} Chunker { import opened Std.Wrappers import opened Types = SimpleStreamingTypes import opened StandardLibrary.UInt - import opened Std.Enumerators - import opened Std.Aggregators - - // An example of a Pipeline, which processes chunks of bytes - // as they flow through a stream. - // Pipelines let you define a stream transformation once - // in a way that allows external code to apply it to either - // push or pull-based streams: - // when a chunk becomes available to the pipeline, - // zero or more chunks are made available downstream. 
+ import opened Std.Actions + import opened Std.BulkActions + import opened Std.Producers + import opened Std.Consumers + import opened Std.Termination + import opened StandardLibrary.Streams + + // "Batched Byte" + type BB = Batched + @AssumeCrossModuleTermination - class Chunker extends Pipeline { + class Chunker extends BulkAction>, OutputterOfNewProducers { const chunkSize: CountingInteger - var chunkBuffer: BoundedInts.bytes + var chunkBuffer: seq - constructor(upstream: Enumerator, chunkSize: CountingInteger) + constructor(chunkSize: CountingInteger) + requires 0 < chunkSize ensures Valid() + ensures fresh(Repr) ensures history == [] { - this.buffer := new Collector(); - this.upstream := upstream; - this.chunkSize := chunkSize; chunkBuffer := []; history := []; - Repr := {this} + upstream.Repr; - new; - assume {:axiom} Valid(); + Repr := {this}; + } + + ghost predicate Valid() + reads this, Repr + ensures Valid() ==> this in Repr + ensures Valid() ==> ValidHistory(history) + decreases Repr, 0 + { + && this in Repr + && 0 < chunkSize + } + + twostate predicate ValidChange() + reads this, Repr + ensures ValidChange() ==> old(Valid()) && Valid() + ensures ValidChange() ==> fresh(Repr - old(Repr)) + ensures ValidChange() ==> old(history) <= history + { + && fresh(Repr - old(Repr)) + && old(Valid()) + && Valid() + && old(history) <= history + } + + twostate lemma ValidImpliesValidChange() + requires old(Valid()) + requires unchanged(old(Repr)) + ensures ValidChange() + {} + + ghost predicate ValidHistory(history: seq<(BB, Producer)>) + decreases Repr + { + true + } + + ghost predicate ValidInput(history: seq<(BB, Producer)>, next: BB) + requires ValidHistory(history) + decreases Repr + { + true + } + + ghost function Decreases(i: BB): ORDINAL + requires Requires(i) + reads Reads(i) + { + 0 + } + + ghost function MaxProduced(): TerminationMetric { + TMTop + } + + lemma AnyInputIsValid(history: seq<(BB, Producer)>, next: BB) + requires Valid() + requires 
Action().ValidHistory(history) + ensures Action().ValidInput(history, next) + {} + + @IsolateAssertions + method Invoke(i: BB) returns (o: Producer) + requires Requires(i) + modifies Modifies(i) + decreases Decreases(i), 0 + ensures Ensures(i, o) + ensures OutputFresh(o) + { + assert Valid(); + var input := new SeqReader([i]); + var output := new SeqWriter(); + ghost var thisTotalProof := new ChunkerTotalProof(this); + ghost var outputTotalProof := new SeqWriterTotalActionProof(output); + label before: + Map(input, output, thisTotalProof, outputTotalProof); + assert |output.values| == 1; + o := output.values[0]; + assert Seq.Last(output.Inputs()) == o; + assert Seq.Last(Inputs()) == i; } - method Process(event: Option, a: Accumulator) + @ResourceLimit("1e9") + @IsolateAssertions + method Map(input: Producer, + output: IConsumer>, + ghost thisTotalProof: TotalActionProof>, + ghost outputTotalProof: TotalActionProof, ()>) requires Valid() - requires a.Valid() - requires Repr !! a.Repr - modifies Repr, a.Repr - ensures a.ValidAndDisjoint() + requires input.Valid() + requires output.Valid() + requires outputTotalProof.Valid() + requires outputTotalProof.Action() == output + requires Repr !! input.Repr !! output.Repr !! 
outputTotalProof.Repr + modifies Repr, input.Repr, output.Repr, outputTotalProof.Repr + ensures ValidChange() + ensures input.ValidChange() + ensures output.ValidChange() + ensures input.Done() + ensures input.NewProduced() == NewInputs() + ensures |input.NewProduced()| == |output.NewInputs()| + ensures output.NewInputs() == NewOutputs() + ensures forall o <- NewOutputs() :: OutputFresh(o) { - assert this in Repr; - assert this !in a.Repr; - match event { - case Some(bits) => { - chunkBuffer := chunkBuffer + bits; - } - case None => return; + assert Valid(); + + var oldProducedCount := input.ProducedCount(); + var batchWriter := new BatchSeqWriter(); + var batchWriterTotalProof := new BatchSeqWriterTotalProof(batchWriter); + label before: + input.ForEach(batchWriter, batchWriterTotalProof); + label after: + assert input.ValidChange@before(); + assert input.ValidChange(); + input.ProducedAndNewProduced@before(); + + var newProducedCount := input.ProducedCount() - oldProducedCount; + assert newProducedCount == input.NewProducedCount(); + if newProducedCount == 0 { + // No-op + assert input.ValidChange(); + assert |batchWriter.Inputs()| == 0; + assert input.NewProduced() == batchWriter.Inputs(); + assert |input.NewProduced()| == 0; + output.ValidImpliesValidChange(); + return; } - while chunkSize as int <= |chunkBuffer| - invariant a.ValidAndDisjoint() - { - a.CanConsumeAll(a.history, chunkBuffer[..chunkSize]); - a.Accept(chunkBuffer[..chunkSize]); - chunkBuffer := chunkBuffer[chunkSize..]; + chunkBuffer := chunkBuffer + batchWriter.elements; + print "chunkBuffer: ", chunkBuffer, "\n"; + + var chunks, leftover := Chunkify(chunkBuffer); + chunkBuffer := leftover; + + var outputProducer: Producer; + match batchWriter.state { + case Failure(error) => + outputProducer := new SeqReader([BatchError(error)]); + case Success(more) => + if !more && 0 < |chunkBuffer| { + // To make it more interesting, produce an error if outputChunks is non empty? 
+ chunks := chunks + Seq.Reverse(chunkBuffer); + chunkBuffer := []; + } + outputProducer := new BatchReader(chunks); } - - if event == None { - if 0 < |chunkBuffer| { - var _ := a.Invoke(chunkBuffer); - } + + var empty := new EmptyProducer(); + var padding: Producer> := new RepeatProducer(newProducedCount - 1, empty); + var producerProducer := new SeqReader([outputProducer]); + var concatenated: Producer> := new ConcatenatedProducer(padding, producerProducer); + assert producerProducer.Remaining() == Some(1); + assert padding.Remaining() == Some(newProducedCount - 1); + assert concatenated.Remaining() == Some(newProducedCount); + label beforeOutput: + concatenated.ForEach(output, outputTotalProof); + assert concatenated.ValidChange@beforeOutput(); + concatenated.ProducedAndNewProduced@beforeOutput(); + + assert |input.NewProduced()| == newProducedCount; + assert |concatenated.NewProduced@beforeOutput()| == newProducedCount; + assert |input.NewProduced()| == |output.NewInputs()|; + history := history + Seq.Zip(input.NewProduced(), output.NewInputs()); + assert input.NewProduced() == NewInputs(); + } + + method Chunkify(data: seq) returns (chunks: seq, leftover: seq) + requires Valid() + { + leftover := data; + chunks := []; + while chunkSize as int <= |leftover| + decreases |leftover| + { + chunks := chunks + Seq.Reverse(leftover[..chunkSize]); + leftover := leftover[chunkSize..]; } } } + + @AssumeCrossModuleTermination + class ChunkerTotalProof extends TotalActionProof> { + + ghost const chunker: Chunker + + ghost constructor(chunker: Chunker) + requires chunker.Valid() + ensures this.chunker == chunker + ensures Valid() + ensures fresh(Repr) + { + this.chunker := chunker; + Repr := {this}; + } + + ghost function Action(): Action> { + chunker + } + + ghost predicate Valid() + reads this, Repr + ensures Valid() ==> this in Repr + decreases Repr, 0 + { + this in Repr + } + + twostate predicate ValidChange() + reads this, Repr + ensures ValidChange() ==> + 
old(Valid()) && Valid() && fresh(Repr - old(Repr)) + { + old(Valid()) && Valid() && fresh(Repr - old(Repr)) + } + + twostate lemma ValidImpliesValidChange() + requires old(Valid()) + requires unchanged(old(Repr)) + ensures ValidChange() + {} + + lemma AnyInputIsValid(history: seq<(BB, Producer)>, next: BB) + requires Valid() + requires Action().ValidHistory(history) + ensures Action().ValidInput(history, next) + {} + } + + + @AssumeCrossModuleTermination + class ChunkingStream extends DataStream { + + const chunkSize: CountingInteger + const original: DataStream + + constructor (original: DataStream, chunkSize: CountingInteger) + { + this.original := original; + this.chunkSize := chunkSize; + } + + function ContentLength(): Option { + None + } + + predicate Replayable() { + original.Replayable() + } + + method Reader() returns (p: Producer) + ensures + && p.Valid() + && fresh(p.Repr) + && p.history == [] + && (ContentLength().Some? ==> p.Remaining() == Some(ContentLength().value as int + 1)) + { + var chunker := new Chunker(chunkSize); + var chunkerTotalProof := new ChunkerTotalProof(chunker); + var originalProducer := original.Reader(); + var chunkerStream := new MappedProducerOfNewProducers(originalProducer, chunker); + p := new FlattenedProducer(chunkerStream); + } + } + + function BitCount(x: uint8): int { + if x == 0 then + 0 + else if x % 2 == 1 then + 1 + BitCount(x / 2) + else + BitCount(x / 2) + } + + function BinaryOfNumber(x: int32): seq { + // Don't actually need to compute the binary properly here + [12 as uint8, 34, 56] + } + } \ No newline at end of file diff --git a/TestModels/Streaming/src/Index.dfy b/TestModels/Streaming/src/Index.dfy index 882a2e30c2..6884eb35ce 100644 --- a/TestModels/Streaming/src/Index.dfy +++ b/TestModels/Streaming/src/Index.dfy @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 include "SimpleStreamingImpl.dfy" -module {:extern "simple.streaming.internaldafny"} SimpleStreaming refines AbstractSimpleStreamingService { 
+module SimpleStreaming refines AbstractSimpleStreamingService { import Operations = SimpleStreamingImpl function method DefaultSimpleStreamingConfig(): SimpleStreamingConfig { diff --git a/TestModels/Streaming/src/SimpleStreamingImpl.dfy b/TestModels/Streaming/src/SimpleStreamingImpl.dfy index a1513822db..bb6d9a3663 100644 --- a/TestModels/Streaming/src/SimpleStreamingImpl.dfy +++ b/TestModels/Streaming/src/SimpleStreamingImpl.dfy @@ -5,11 +5,13 @@ include "../Model/SimpleStreamingTypes.dfy" include "Chunker.dfy" module {:options "/functionSyntax:4" } SimpleStreamingImpl refines AbstractSimpleStreamingOperations { - import Std.Enumerators - import Std.Aggregators + import Std.Actions + import Std.BulkActions + import Std.Producers + import Std.Consumers import Std.Collections.Seq import opened Chunker - + datatype Config = Config type InternalConfig = Config predicate ValidInternalConfig?(config: InternalConfig) @@ -22,30 +24,35 @@ module {:options "/functionSyntax:4" } SimpleStreamingImpl refines AbstractSimpl method CountBits ( config: InternalConfig , input: CountBitsInput ) returns (output: Result) { - var counter := new Aggregators.FoldingAccumulator(0, (sum, byte) => sum + BytesBitCount(byte)); - - Enumerators.ForEach(input.bits, counter); - - // Should really have the FoldingAccumulator fail instead, - // but this is a simpler correct approach. - if 0 <= counter.value < INT32_MAX_LIMIT { - return Success(CountBitsOutput(sum := counter.value as int32)); + var counter := new Consumers.FoldingConsumer(Success(0 as int32), SumBits); + var counterTotalProof := new Consumers.FoldingConsumerTotalActionProof(counter); + + var inputReader := input.bits.Reader(); + inputReader.ForEach(counter, counterTotalProof); + var result := counter.value; + + if result.Success? 
{ + return Success(CountBitsOutput(sum := result.value)); } else { - return Failure(OverflowError(message := "Ah crap")); + return Failure(result.error); } } - function BytesBitCount(b: BoundedInts.bytes): int { - Seq.FoldLeft((sum, byte) => sum + BitCount(byte), 0 as int, b) - } - - function BitCount(x: BoundedInts.uint8): int { - if x == 0 then - 0 - else if x % 2 == 1 then - 1 + BitCount(x / 2) - else - BitCount(x / 2) + function SumBits(sum: Result, batched: BulkActions.Batched): Result { + match batched + case BatchValue(b) => + if sum.Success? then + var next := BitCount(b); + if !(0 <= sum.value as int + next < INT32_MAX_LIMIT) then + Failure(OverflowError(message := "Ah crap")) + else + Success((sum.value as int + next) as int32) + else + sum + case BatchError(error) => + // This could also ensure the first error is kept instead + Failure(error) + case EndOfInput => sum } predicate BinaryOfEnsuresPublicly(input: BinaryOfInput , output: Result) @@ -57,12 +64,10 @@ module {:options "/functionSyntax:4" } SimpleStreamingImpl refines AbstractSimpl returns (output: Result) { - // TODO: Actually compute the binary - var fakeBinary: seq := [[12], [34, 56]]; - var fakeBinaryEnumerator := new Enumerators.SeqEnumerator(fakeBinary); - var fakeBinaryStream := new EnumeratorDataStream(fakeBinaryEnumerator, 3 as BoundedInts.uint64); - - return Success(BinaryOfOutput(binary := fakeBinaryStream)); + var binary := BinaryOfNumber(input.number); + var binaryStream := new SeqDataStream(binary); + + return Success(BinaryOfOutput(binary := binaryStream)); } @@ -72,12 +77,26 @@ module {:options "/functionSyntax:4" } SimpleStreamingImpl refines AbstractSimpl method Chunks ( config: InternalConfig , input: ChunksInput ) returns (output: Result) { - // TODO: for now - assume {:axiom} input.bytesIn.history == []; - var chunker := new Chunker(input.bytesIn, input.chunkSize); - var chunkerStream := new EnumeratorDataStream(chunker, input.bytesIn.ContentLength()); - + var chunkerStream 
:= new ChunkingStream(input.bytesIn, input.chunkSize); + return Success(ChunksOutput(bytesOut := chunkerStream)); } + + method PrintProduced(p: Producers.Producer) + requires p.Valid() + modifies p.Repr + { + while true + invariant fresh(p.Repr - old(p.Repr)) + invariant p.Valid() + decreases p.Decreasing() + { + var next := p.Next(); + if next.None? { break; } + var value := next.value; + + print value, "\n"; + } + } } diff --git a/TestModels/Streaming/src/WrappedSimpleStreamingImpl.dfy b/TestModels/Streaming/src/WrappedSimpleStreamingImpl.dfy index 58bf8aed5d..00827803f0 100644 --- a/TestModels/Streaming/src/WrappedSimpleStreamingImpl.dfy +++ b/TestModels/Streaming/src/WrappedSimpleStreamingImpl.dfy @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 include "../Model/SimpleStreamingTypesWrapped.dfy" -module {:extern "simple.streaming.internaldafny.wrapped"} WrappedSimpleStreamingService refines WrappedAbstractSimpleStreamingService { +module WrappedSimpleStreamingService refines WrappedAbstractSimpleStreamingService { import WrappedService = SimpleStreaming function method WrappedDefaultSimpleStreamingConfig(): SimpleStreamingConfig { SimpleStreamingConfig diff --git a/TestModels/Streaming/test/SimpleStreamingImplTest.dfy b/TestModels/Streaming/test/SimpleStreamingImplTest.dfy index 411dae3b81..6d89223304 100644 --- a/TestModels/Streaming/test/SimpleStreamingImplTest.dfy +++ b/TestModels/Streaming/test/SimpleStreamingImplTest.dfy @@ -6,17 +6,21 @@ include "../src/WrappedSimpleStreamingImpl.dfy" module SimpleStreamingImplTest { import SimpleStreaming import SimpleStreamingImpl - import Std.Enumerators - import Std.Aggregators - import Std.BoundedInts + import Std.Producers + import Std.Consumers + import Std.BulkActions import opened StandardLibrary.UInt - import opened Std.Streams + import opened StandardLibrary.Streams import opened SimpleStreamingTypes import opened Wrappers + + import opened Chunker + method{:test} TestClient(){ var client :- expect 
SimpleStreaming.SimpleStreaming(); TestCountBits(client); TestBinaryOf(client); + TestChunks(client); } method TestCountBits(client: ISimpleStreamingClient) @@ -24,14 +28,13 @@ module SimpleStreamingImplTest { modifies client.Modifies ensures client.ValidState() { - var s: seq := [[0x0], [0x1, 0x2], [0x3], [], [0x4, 0x5]]; - var e := new Enumerators.SeqEnumerator(s); - var stream := new EnumeratorDataStream(e, length := 5 as BoundedInts.uint64); + var s: seq := [0x0, 0x1, 0x2, 0x3, 0x4, 0x5]; + var stream := new SeqDataStream(s); var input: CountBitsInput := CountBitsInput(bits := stream); var ret :- expect client.CountBits(input); - expect ret.sum == 7; + expect ret.sum == 7, ret.sum; } method TestBinaryOf(client: ISimpleStreamingClient) @@ -43,11 +46,13 @@ module SimpleStreamingImplTest { var ret :- expect client.BinaryOf(input); - var collector := new Aggregators.Collector(); + var collector := new BulkActions.BatchSeqWriter(); + var collectorTotalProof := new BulkActions.BatchSeqWriterTotalProof(collector); - Enumerators.ForEach(ret.binary, collector); + var reader := ret.binary.Reader(); + reader.ForEach(collector, collectorTotalProof); - expect collector.values == [[12], [34, 56]]; + expect collector.elements == [12, 34, 56]; } method TestChunks(client: ISimpleStreamingClient) @@ -55,17 +60,19 @@ module SimpleStreamingImplTest { modifies client.Modifies ensures client.ValidState() { - var s: seq := [[0x0], [0x1, 0x2], [0x3], [], [0x4, 0x5]]; - var e := new Enumerators.SeqEnumerator(s); - var stream := new EnumeratorDataStream(e, 5 as BoundedInts.uint64); - var input: ChunksInput := ChunksInput(bytesIn := stream, chunkSize := 2); + var s: bytes := [0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7]; + var stream := new SeqDataStream(s); + var input: ChunksInput := ChunksInput(bytesIn := stream, chunkSize := 3); var ret :- expect client.Chunks(input); - var collector := new Aggregators.Collector(); + var collector := new BulkActions.BatchSeqWriter(); + var 
collectorTotalProof := new BulkActions.BatchSeqWriterTotalProof(collector); - Enumerators.ForEach(ret.bytesOut, collector); + var reader := ret.bytesOut.Reader(); + reader.ForEach(collector, collectorTotalProof); - expect collector.values == [[0x0, 0x1], [0x2, 0x3], [0x4, 0x5]]; + expect collector.elements == [0x2, 0x1, 0x0, 0x5, 0x4, 0x3, 0x7, 0x6], collector.elements; } + } \ No newline at end of file diff --git a/TestModels/Streaming/test/WrappedSimpleStreamingTest.dfy b/TestModels/Streaming/test/WrappedSimpleStreamingTest.dfy index 20e848dbf1..60de022620 100644 --- a/TestModels/Streaming/test/WrappedSimpleStreamingTest.dfy +++ b/TestModels/Streaming/test/WrappedSimpleStreamingTest.dfy @@ -7,9 +7,10 @@ module WrappedSimpleTypesStringTest { import WrappedSimpleStreamingService import SimpleStreamingImplTest import opened Wrappers - method{:test} TestCountBits() { + method{:test} TestWrappedClient() { var client :- expect WrappedSimpleStreamingService.WrappedSimpleStreaming(); SimpleStreamingImplTest.TestCountBits(client); SimpleStreamingImplTest.TestBinaryOf(client); + SimpleStreamingImplTest.TestChunks(client); } } diff --git a/TestModels/aws-sdks/s3/Makefile b/TestModels/aws-sdks/s3/Makefile index ddbb526b31..3f1fb96696 100644 --- a/TestModels/aws-sdks/s3/Makefile +++ b/TestModels/aws-sdks/s3/Makefile @@ -3,7 +3,7 @@ ENABLE_EXTERN_PROCESSING=1 CORES=2 -USE_DAFNY_STANDARD_LIBRARIES=1 +USES_STREAMING=1 include ../../SharedMakefile.mk @@ -34,4 +34,4 @@ INDEX_FILE_WITH_EXTERN_STRING="module {:extern \"software.amazon.cryptography.se INDEX_FILE_WITHOUT_EXTERN_STRING="module Com.Amazonaws.S3 refines AbstractComAmazonawsS3Service {" TRANSLATION_RECORD_PYTHON := \ - --translation-record ../../dafny-dependencies/StandardLibrary/runtimes/python/src/smithy_dafny_standard_library/internaldafny/generated/dafny_src-py.dtr + --translation-record ../../dafny-dependencies/StandardLibrary/runtimes/python/src/smithy_dafny_standard_library/internaldafny/generated/dafny_src-py.dtr 
\ diff --git a/TestModels/aws-sdks/s3/Model/custom-model.json b/TestModels/aws-sdks/s3/Model/custom-model.json index 3acbb908b9..d0661daabf 100644 --- a/TestModels/aws-sdks/s3/Model/custom-model.json +++ b/TestModels/aws-sdks/s3/Model/custom-model.json @@ -42,9 +42,6 @@ { "target": "com.amazonaws.s3#GetObject" }, - { - "target": "com.amazonaws.s3#ListObjectsV2" - }, { "target": "com.amazonaws.s3#PutObject" } @@ -819,214 +816,6 @@ "smithy.api#input": {} } }, - "com.amazonaws.s3#ListObjectsV2": { - "type": "operation", - "input": { - "target": "com.amazonaws.s3#ListObjectsV2Request" - }, - "output": { - "target": "com.amazonaws.s3#ListObjectsV2Output" - }, - "errors": [ - { - "target": "com.amazonaws.s3#NoSuchBucket" - } - ], - "traits": { - "smithy.api#documentation": "

Returns some or all (up to 1,000) of the objects in a bucket with each request. You can\n use the request parameters as selection criteria to return a subset of the objects in a\n bucket. A 200 OK response can contain valid or invalid XML. Make sure to\n design your application to parse the contents of the response and handle it appropriately.\n \n For more information about listing objects, see Listing object keys\n programmatically in the Amazon S3 User Guide. To get a list of your buckets, see ListBuckets.

\n \n

\n Directory buckets - For directory buckets, you must make requests for this API operation to the Zonal endpoint. These endpoints support virtual-hosted-style requests in the format https://bucket_name.s3express-az_id.region.amazonaws.com/key-name\n . Path-style requests are not supported. For more information, see Regional and Zonal endpoints in the\n Amazon S3 User Guide.

\n
\n
\n
Permissions
\n
\n
    \n
  • \n

    \n General purpose bucket permissions - To use this operation, you must have READ access to the bucket. You must have permission to perform\n the s3:ListBucket action. The bucket owner has this permission by default and\n can grant this permission to others. For more information about permissions, see Permissions Related to Bucket Subresource Operations and Managing\n Access Permissions to Your Amazon S3 Resources in the\n Amazon S3 User Guide.

    \n
  • \n
  • \n

    \n Directory bucket permissions - To grant access to this API operation on a directory bucket, we recommend that you use the \n CreateSession\n API operation for session-based authorization. Specifically, you grant the s3express:CreateSession permission to the directory bucket in a bucket policy or an IAM identity-based policy. Then, you make the CreateSession API call on the bucket to obtain a session token. With the session token in your request header, you can make API requests to this operation. After the session token expires, you make another CreateSession API call to generate a new session token for use. \nAmazon Web Services CLI or SDKs create session and refresh the session token automatically to avoid service interruptions when a session expires. For more information about authorization, see \n CreateSession\n .

    \n
  • \n
\n
\n
Sorting order of returned objects
\n
\n
    \n
  • \n

    \n General purpose bucket - For general purpose buckets, ListObjectsV2 returns objects in lexicographical order based on their key names.

    \n
  • \n
  • \n

    \n Directory bucket - For directory buckets, ListObjectsV2 does not return objects in lexicographical order.

    \n
  • \n
\n
\n
HTTP Host header syntax
\n
\n

\n Directory buckets - The HTTP Host header syntax is \n Bucket_name.s3express-az_id.region.amazonaws.com.

\n
\n
\n \n

This section describes the latest revision of this action. We recommend that you use\n this revised API operation for application development. For backward compatibility, Amazon S3\n continues to support the prior version of this API operation, ListObjects.

\n
\n

The following operations are related to ListObjectsV2:

\n ", - "smithy.api#http": { - "method": "GET", - "uri": "/{Bucket}?list-type=2", - "code": 200 - }, - "smithy.api#paginated": { - "inputToken": "ContinuationToken", - "outputToken": "NextContinuationToken", - "pageSize": "MaxKeys" - } - } - }, - "com.amazonaws.s3#ListObjectsV2Output": { - "type": "structure", - "members": { - "IsTruncated": { - "target": "com.amazonaws.s3#IsTruncated", - "traits": { - "smithy.api#documentation": "

Set to false if all of the results were returned. Set to true\n if more keys are available to return. If the number of results exceeds that specified by\n MaxKeys, all of the results might not be returned.

" - } - }, - "Contents": { - "target": "com.amazonaws.s3#ObjectList", - "traits": { - "smithy.api#documentation": "

Metadata about each object returned.

", - "smithy.api#xmlFlattened": {} - } - }, - "Name": { - "target": "com.amazonaws.s3#BucketName", - "traits": { - "smithy.api#documentation": "

The bucket name.

" - } - }, - "Prefix": { - "target": "com.amazonaws.s3#Prefix", - "traits": { - "smithy.api#documentation": "

Keys that begin with the indicated prefix.

\n \n

\n Directory buckets - For directory buckets, only prefixes that end in a delimiter (/) are supported.

\n
" - } - }, - "Delimiter": { - "target": "com.amazonaws.s3#Delimiter", - "traits": { - "smithy.api#documentation": "

Causes keys that contain the same string between the prefix and the first\n occurrence of the delimiter to be rolled up into a single result element in the\n CommonPrefixes collection. These rolled-up keys are not returned elsewhere\n in the response. Each rolled-up result counts as only one return against the\n MaxKeys value.

\n \n

\n Directory buckets - For directory buckets, / is the only supported delimiter.

\n
" - } - }, - "MaxKeys": { - "target": "com.amazonaws.s3#MaxKeys", - "traits": { - "smithy.api#documentation": "

Sets the maximum number of keys returned in the response. By default, the action returns\n up to 1,000 key names. The response might contain fewer keys but will never contain\n more.

" - } - }, - "CommonPrefixes": { - "target": "com.amazonaws.s3#CommonPrefixList", - "traits": { - "smithy.api#documentation": "

All of the keys (up to 1,000) that share the same prefix are grouped together. When counting the total numbers of returns by this API operation, \n this group of keys is considered as one item.

\n

A response can contain CommonPrefixes only if you specify a\n delimiter.

\n

\n CommonPrefixes contains all (if there are any) keys between\n Prefix and the next occurrence of the string specified by a\n delimiter.

\n

\n CommonPrefixes lists keys that act like subdirectories in the directory\n specified by Prefix.

\n

For example, if the prefix is notes/ and the delimiter is a slash\n (/) as in notes/summer/july, the common prefix is\n notes/summer/. All of the keys that roll up into a common prefix count as a\n single return when calculating the number of returns.

\n \n
    \n
  • \n

    \n Directory buckets - For directory buckets, only prefixes that end in a delimiter (/) are supported.

    \n
  • \n
  • \n

    \n Directory buckets - When you query ListObjectsV2 with a delimiter during in-progress multipart uploads, the \n CommonPrefixes response parameter contains the prefixes that are associated with the in-progress multipart uploads. \n For more information about multipart uploads, see Multipart Upload Overview in the Amazon S3 User Guide.

    \n
  • \n
\n
", - "smithy.api#xmlFlattened": {} - } - }, - "EncodingType": { - "target": "com.amazonaws.s3#EncodingType", - "traits": { - "smithy.api#documentation": "

Encoding type used by Amazon S3 to encode object key names in the XML response.

\n

If you specify the encoding-type request parameter, Amazon S3 includes this\n element in the response, and returns encoded key name values in the following response\n elements:

\n

\n Delimiter, Prefix, Key, and StartAfter.

" - } - }, - "KeyCount": { - "target": "com.amazonaws.s3#KeyCount", - "traits": { - "smithy.api#documentation": "

\n KeyCount is the number of keys returned with this request.\n KeyCount will always be less than or equal to the MaxKeys\n field. For example, if you ask for 50 keys, your result will include 50 keys or\n fewer.

" - } - }, - "ContinuationToken": { - "target": "com.amazonaws.s3#Token", - "traits": { - "smithy.api#documentation": "

If ContinuationToken was sent with the request, it is included in the\n response. You can use the returned ContinuationToken for pagination of the list response. You can use this ContinuationToken for pagination of the list results.

" - } - }, - "NextContinuationToken": { - "target": "com.amazonaws.s3#NextToken", - "traits": { - "smithy.api#documentation": "

\n NextContinuationToken is sent when isTruncated is true, which\n means there are more keys in the bucket that can be listed. The next list requests to Amazon S3\n can be continued with this NextContinuationToken.\n NextContinuationToken is obfuscated and is not a real key

" - } - }, - "StartAfter": { - "target": "com.amazonaws.s3#StartAfter", - "traits": { - "smithy.api#documentation": "

If StartAfter was sent with the request, it is included in the response.

\n \n

This functionality is not supported for directory buckets.

\n
" - } - }, - "RequestCharged": { - "target": "com.amazonaws.s3#RequestCharged", - "traits": { - "smithy.api#httpHeader": "x-amz-request-charged" - } - } - }, - "traits": { - "smithy.api#output": {}, - "smithy.api#xmlName": "ListBucketResult" - } - }, - "com.amazonaws.s3#ListObjectsV2Request": { - "type": "structure", - "members": { - "Bucket": { - "target": "com.amazonaws.s3#BucketName", - "traits": { - "smithy.api#documentation": "

\n Directory buckets - When you use this operation with a directory bucket, you must use virtual-hosted-style requests in the format \n Bucket_name.s3express-az_id.region.amazonaws.com. Path-style requests are not supported. Directory bucket names must be unique in the chosen Availability Zone. Bucket names must follow the format \n bucket_base_name--az-id--x-s3 (for example, \n DOC-EXAMPLE-BUCKET--usw2-az1--x-s3). For information about bucket naming\n restrictions, see Directory bucket naming\n rules in the Amazon S3 User Guide.

\n

\n Access points - When you use this action with an access point, you must provide the alias of the access point in place of the bucket name or specify the access point ARN. When using the access point ARN, you must direct requests to the access point hostname. The access point hostname takes the form AccessPointName-AccountId.s3-accesspoint.Region.amazonaws.com. When using this action with an access point through the Amazon Web Services SDKs, you provide the access point ARN in place of the bucket name. For more information about access point ARNs, see Using access points in the Amazon S3 User Guide.

\n \n

Access points and Object Lambda access points are not supported by directory buckets.

\n
\n

\n S3 on Outposts - When you use this action with Amazon S3 on Outposts, you must direct requests to the S3 on Outposts hostname. The S3 on Outposts hostname takes the form \n AccessPointName-AccountId.outpostID.s3-outposts.Region.amazonaws.com. When you use this action with S3 on Outposts through the Amazon Web Services SDKs, you provide the Outposts access point ARN in place of the bucket name. For more information about S3 on Outposts ARNs, see What is S3 on Outposts? in the Amazon S3 User Guide.

", - "smithy.api#httpLabel": {}, - "smithy.api#required": {}, - "smithy.rules#contextParam": { - "name": "Bucket" - } - } - }, - "Delimiter": { - "target": "com.amazonaws.s3#Delimiter", - "traits": { - "smithy.api#documentation": "

A delimiter is a character that you use to group keys.

\n \n
    \n
  • \n

    \n Directory buckets - For directory buckets, / is the only supported delimiter.

    \n
  • \n
  • \n

    \n Directory buckets - When you query ListObjectsV2 with a delimiter during in-progress multipart uploads, the \n CommonPrefixes response parameter contains the prefixes that are associated with the in-progress multipart uploads. \n For more information about multipart uploads, see Multipart Upload Overview in the Amazon S3 User Guide.

    \n
  • \n
\n
", - "smithy.api#httpQuery": "delimiter" - } - }, - "EncodingType": { - "target": "com.amazonaws.s3#EncodingType", - "traits": { - "smithy.api#documentation": "

Encoding type used by Amazon S3 to encode object keys in the response. If using\n url, non-ASCII characters used in an object's key name will be URL encoded.\n For example, the object test_file(3).png will appear as test_file%283%29.png.

", - "smithy.api#httpQuery": "encoding-type" - } - }, - "MaxKeys": { - "target": "com.amazonaws.s3#MaxKeys", - "traits": { - "smithy.api#documentation": "

Sets the maximum number of keys returned in the response. By default, the action returns\n up to 1,000 key names. The response might contain fewer keys but will never contain\n more.

", - "smithy.api#httpQuery": "max-keys" - } - }, - "Prefix": { - "target": "com.amazonaws.s3#Prefix", - "traits": { - "smithy.api#documentation": "

Limits the response to keys that begin with the specified prefix.

\n \n

\n Directory buckets - For directory buckets, only prefixes that end in a delimiter (/) are supported.

\n
", - "smithy.api#httpQuery": "prefix", - "smithy.rules#contextParam": { - "name": "Prefix" - } - } - }, - "ContinuationToken": { - "target": "com.amazonaws.s3#Token", - "traits": { - "smithy.api#documentation": "

\n ContinuationToken indicates to Amazon S3 that the list is being continued on\n this bucket with a token. ContinuationToken is obfuscated and is not a real\n key. You can use this ContinuationToken for pagination of the list results.

", - "smithy.api#httpQuery": "continuation-token" - } - }, - "FetchOwner": { - "target": "com.amazonaws.s3#FetchOwner", - "traits": { - "smithy.api#documentation": "

The owner field is not present in ListObjectsV2 by default. If you want to\n return the owner field with each key in the result, then set the FetchOwner\n field to true.

\n \n

\n Directory buckets - For directory buckets, the bucket owner is returned as the object owner for all objects.

\n
", - "smithy.api#httpQuery": "fetch-owner" - } - }, - "StartAfter": { - "target": "com.amazonaws.s3#StartAfter", - "traits": { - "smithy.api#documentation": "

StartAfter is where you want Amazon S3 to start listing from. Amazon S3 starts listing after this\n specified key. StartAfter can be any key in the bucket.

\n \n

This functionality is not supported for directory buckets.

\n
", - "smithy.api#httpQuery": "start-after" - } - }, - "RequestPayer": { - "target": "com.amazonaws.s3#RequestPayer", - "traits": { - "smithy.api#documentation": "

Confirms that the requester knows that she or he will be charged for the list objects\n request in V2 style. Bucket owners need not specify this parameter in their\n requests.

\n \n

This functionality is not supported for directory buckets.

\n
", - "smithy.api#httpHeader": "x-amz-request-payer" - } - }, - "ExpectedBucketOwner": { - "target": "com.amazonaws.s3#AccountId", - "traits": { - "smithy.api#documentation": "

The account ID of the expected bucket owner. If the account ID that you provide does not match the actual owner of the bucket, the request fails with the HTTP status code 403 Forbidden (access denied).

", - "smithy.api#httpHeader": "x-amz-expected-bucket-owner" - } - }, - "OptionalObjectAttributes": { - "target": "com.amazonaws.s3#OptionalObjectAttributesList", - "traits": { - "smithy.api#documentation": "

Specifies the optional fields that you want returned in the response. Fields that you do\n not specify are not returned.

\n \n

This functionality is not supported for directory buckets.

\n
", - "smithy.api#httpHeader": "x-amz-optional-object-attributes" - } - } - }, - "traits": { - "smithy.api#input": {} - } - }, "com.amazonaws.s3#PutObject": { "type": "operation", "input": { @@ -2205,69 +1994,6 @@ "com.amazonaws.s3#IsTruncated": { "type": "boolean" }, - "com.amazonaws.s3#ObjectList": { - "type": "list", - "member": { - "target": "com.amazonaws.s3#Object" - } - }, - "com.amazonaws.s3#Object": { - "type": "structure", - "members": { - "Key": { - "target": "com.amazonaws.s3#ObjectKey", - "traits": { - "smithy.api#documentation": "

The name that you assign to an object. You use the object key to retrieve the\n object.

" - } - }, - "LastModified": { - "target": "com.amazonaws.s3#LastModified", - "traits": { - "smithy.api#documentation": "

Creation date of the object.

" - } - }, - "ETag": { - "target": "com.amazonaws.s3#ETag", - "traits": { - "smithy.api#documentation": "

The entity tag is a hash of the object. The ETag reflects changes only to the contents\n of an object, not its metadata. The ETag may or may not be an MD5 digest of the object\n data. Whether or not it is depends on how the object was created and how it is encrypted as\n described below:

\n
    \n
  • \n

    Objects created by the PUT Object, POST Object, or Copy operation, or through the\n Amazon Web Services Management Console, and are encrypted by SSE-S3 or plaintext, have ETags that\n are an MD5 digest of their object data.

    \n
  • \n
  • \n

    Objects created by the PUT Object, POST Object, or Copy operation, or through the\n Amazon Web Services Management Console, and are encrypted by SSE-C or SSE-KMS, have ETags that are\n not an MD5 digest of their object data.

    \n
  • \n
  • \n

    If an object is created by either the Multipart Upload or Part Copy operation, the\n ETag is not an MD5 digest, regardless of the method of encryption. If an object is\n larger than 16 MB, the Amazon Web Services Management Console will upload or copy that object as a\n Multipart Upload, and therefore the ETag will not be an MD5 digest.

    \n
  • \n
\n \n

\n Directory buckets - MD5 is not supported by directory buckets.

\n
" - } - }, - "ChecksumAlgorithm": { - "target": "com.amazonaws.s3#ChecksumAlgorithmList", - "traits": { - "smithy.api#documentation": "

The algorithm that was used to create a checksum of the object.

", - "smithy.api#xmlFlattened": {} - } - }, - "Size": { - "target": "com.amazonaws.s3#Size", - "traits": { - "smithy.api#documentation": "

Size in bytes of the object

" - } - }, - "StorageClass": { - "target": "com.amazonaws.s3#ObjectStorageClass", - "traits": { - "smithy.api#documentation": "

The class of storage used to store the object.

\n \n

\n Directory buckets - Only the S3 Express One Zone storage class is supported by directory buckets to store objects.

\n
" - } - }, - "Owner": { - "target": "com.amazonaws.s3#Owner", - "traits": { - "smithy.api#documentation": "

The owner of the object

\n \n

\n Directory buckets - The bucket owner is returned as the object owner.

\n
" - } - }, - "RestoreStatus": { - "target": "com.amazonaws.s3#RestoreStatus", - "traits": { - "smithy.api#documentation": "

Specifies the restoration status of an object. Objects in certain storage classes must\n be restored before they can be retrieved. For more information about these storage classes\n and how to work with archived objects, see Working with archived\n objects in the Amazon S3 User Guide.

\n \n

This functionality is not supported for directory buckets. Only the S3 Express One Zone storage class is supported by directory buckets to store objects.

\n
" - } - } - }, - "traits": { - "smithy.api#documentation": "

An object consists of data and its descriptive metadata.

" - } - }, "com.amazonaws.s3#ChecksumAlgorithmList": { "type": "list", "member": { diff --git a/TestModels/aws-sdks/s3/codegen-patches/java/dafny-4.9.0.patch b/TestModels/aws-sdks/s3/codegen-patches/java/dafny-4.9.0.patch new file mode 100644 index 0000000000..624d1e9782 --- /dev/null +++ b/TestModels/aws-sdks/s3/codegen-patches/java/dafny-4.9.0.patch @@ -0,0 +1,315 @@ +diff --git b/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/Shim.java a/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/Shim.java +index 87bdb89d9..bc7fb494d 100644 +--- b/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/Shim.java ++++ a/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/Shim.java +@@ -3,9 +3,13 @@ + // Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
+ package software.amazon.cryptography.services.s3.internaldafny; + ++import Streams.DataStreamAsRequestBody; + import Wrappers_Compile.Result; + import java.lang.Override; + import java.lang.String; ++ ++import software.amazon.awssdk.core.ResponseInputStream; ++import software.amazon.awssdk.core.sync.RequestBody; + import software.amazon.awssdk.services.s3.S3Client; + import software.amazon.awssdk.services.s3.model.DeleteObjectResponse; + import software.amazon.awssdk.services.s3.model.DeleteObjectsResponse; +@@ -107,7 +111,7 @@ public class Shim implements IS3Client { + software.amazon.awssdk.services.s3.model.GetObjectRequest converted = + ToNative.GetObjectRequest(input); + try { +- GetObjectResponse result = _impl.getObject(converted); ++ ResponseInputStream result = _impl.getObject(converted); + GetObjectOutput dafnyResponse = ToDafny.GetObjectOutput(result); + return Result.create_Success( + GetObjectOutput._typeDescriptor(), +@@ -145,8 +149,9 @@ public class Shim implements IS3Client { + public Result PutObject(PutObjectRequest input) { + software.amazon.awssdk.services.s3.model.PutObjectRequest converted = + ToNative.PutObjectRequest(input); ++ RequestBody body = DataStreamAsRequestBody.of(input._Body.dtor_value()); + try { +- PutObjectResponse result = _impl.putObject(converted); ++ PutObjectResponse result = _impl.putObject(converted, body); + PutObjectOutput dafnyResponse = ToDafny.PutObjectOutput(result); + return Result.create_Success( + PutObjectOutput._typeDescriptor(), +diff --git b/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/ToDafny.java a/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/ToDafny.java +index 8cfc49346..3746cdacc 100644 +--- b/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/ToDafny.java ++++ 
a/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/ToDafny.java +@@ -4,6 +4,8 @@ + package software.amazon.cryptography.services.s3.internaldafny; + + import StandardLibrary_Compile.Streams_Compile.DataStream; ++import Streams.InputStreamAsDataStream; ++import Streams.RequestBodyAsDataStream; + import Wrappers_Compile.Option; + import dafny.DafnyMap; + import dafny.DafnySequence; +@@ -19,6 +21,9 @@ import java.lang.String; + import java.util.List; + import java.util.Map; + import java.util.Objects; ++ ++import software.amazon.awssdk.core.ResponseInputStream; ++import software.amazon.awssdk.core.sync.RequestBody; + import software.amazon.awssdk.services.s3.S3Client; + import software.amazon.awssdk.services.s3.model.DeleteObjectResponse; + import software.amazon.awssdk.services.s3.model.DeleteObjectsResponse; +@@ -352,7 +357,7 @@ public class ToDafny { + } + + public static DafnySequence Errors( +- List nativeValue ++ List nativeValue + ) { + return software.amazon.smithy.dafny.conversion.ToDafny.Aggregate.GenericToSequence( + nativeValue, +@@ -362,7 +367,7 @@ public class ToDafny { + } + + public static ErrorShape ErrorShape( +- software.amazon.awssdk.services.s3.model.ErrorShape nativeValue ++ software.amazon.awssdk.services.s3.model.S3Error nativeValue + ) { + Option> key; + key = +@@ -415,16 +420,13 @@ public class ToDafny { + return new ErrorShape(key, versionId, code, message); + } + +- public static GetObjectOutput GetObjectOutput(GetObjectResponse nativeValue) { ++ public static GetObjectOutput GetObjectOutput(ResponseInputStream responseInputStream) { ++ GetObjectResponse nativeValue = responseInputStream.response(); + Option> body; + body = +- Objects.nonNull(nativeValue.body()) +- ? 
Option.create_Some( +- DafnySequence._typeDescriptor(TypeDescriptor.BYTE), +- ToDafny.DataStream(nativeValue.body().asByteArray()) +- ) +- : Option.create_None( +- DafnySequence._typeDescriptor(TypeDescriptor.BYTE) ++ Option.create_Some( ++ null, ++ new InputStreamAsDataStream(Error._typeDescriptor(), responseInputStream, Error::create_Opaque) + ); + Option deleteMarker; + deleteMarker = +@@ -716,11 +718,11 @@ public class ToDafny { + ); + Option> sSEKMSKeyId; + sSEKMSKeyId = +- Objects.nonNull(nativeValue.sseKMSKeyId()) ++ Objects.nonNull(nativeValue.ssekmsKeyId()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( +- nativeValue.sseKMSKeyId() ++ nativeValue.ssekmsKeyId() + ) + ) + : Option.create_None( +@@ -1252,11 +1254,11 @@ public class ToDafny { + ); + Option> sSEKMSKeyId; + sSEKMSKeyId = +- Objects.nonNull(nativeValue.sseKMSKeyId()) ++ Objects.nonNull(nativeValue.ssekmsKeyId()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( +- nativeValue.sseKMSKeyId() ++ nativeValue.ssekmsKeyId() + ) + ) + : Option.create_None( +@@ -1264,11 +1266,11 @@ public class ToDafny { + ); + Option> sSEKMSEncryptionContext; + sSEKMSEncryptionContext = +- Objects.nonNull(nativeValue.sseKMSEncryptionContext()) ++ Objects.nonNull(nativeValue.ssekmsEncryptionContext()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( +- nativeValue.sseKMSEncryptionContext() ++ nativeValue.ssekmsEncryptionContext() + ) + ) + : Option.create_None( +@@ -1309,7 +1311,8 @@ public class ToDafny { + } + + public static PutObjectRequest PutObjectRequest( +- software.amazon.awssdk.services.s3.model.PutObjectRequest nativeValue ++ software.amazon.awssdk.services.s3.model.PutObjectRequest nativeValue, ++ RequestBody nativeBody + ) { + Option aCL; + aCL = +@@ -1319,16 +1322,10 @@ public class ToDafny { + ToDafny.ObjectCannedACL(nativeValue.acl()) + ) + : Option.create_None(ObjectCannedACL._typeDescriptor()); +- Option> body; +- body = +- Objects.nonNull(nativeValue.body()) +- ? Option.create_Some( +- DafnySequence._typeDescriptor(TypeDescriptor.BYTE), +- ToDafny.DataStream(nativeValue.body().asByteArray()) +- ) +- : Option.create_None( +- DafnySequence._typeDescriptor(TypeDescriptor.BYTE) +- ); ++ Option> body = Option.create_Some( ++ null, ++ new RequestBodyAsDataStream( ++ null, nativeBody, Error::create_Opaque )); + DafnySequence bucket; + bucket = + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( +@@ -1620,11 +1617,11 @@ public class ToDafny { + ); + Option> sSEKMSKeyId; + sSEKMSKeyId = +- Objects.nonNull(nativeValue.sseKMSKeyId()) ++ Objects.nonNull(nativeValue.ssekmsKeyId()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( +- nativeValue.sseKMSKeyId() ++ nativeValue.ssekmsKeyId() + ) + ) + : Option.create_None( +@@ -1632,11 +1629,11 @@ public class ToDafny { + ); + Option> sSEKMSEncryptionContext; + sSEKMSEncryptionContext = +- Objects.nonNull(nativeValue.sseKMSEncryptionContext()) ++ Objects.nonNull(nativeValue.ssekmsEncryptionContext()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( +- nativeValue.sseKMSEncryptionContext() ++ nativeValue.ssekmsEncryptionContext() + ) + ) + : Option.create_None( +diff --git b/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/ToNative.java a/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/ToNative.java +index e02646327..433c3f16c 100644 +--- b/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/ToNative.java ++++ a/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/ToNative.java +@@ -22,7 +22,6 @@ import software.amazon.awssdk.services.s3.model.DeleteObjectResponse; + import software.amazon.awssdk.services.s3.model.DeleteObjectsRequest; + import software.amazon.awssdk.services.s3.model.DeleteObjectsResponse; + import software.amazon.awssdk.services.s3.model.DeletedObject; +-import software.amazon.awssdk.services.s3.model.ErrorShape; + import software.amazon.awssdk.services.s3.model.GetObjectRequest; + import software.amazon.awssdk.services.s3.model.GetObjectResponse; + import software.amazon.awssdk.services.s3.model.IntelligentTieringAccessTier; +@@ -38,6 +37,7 @@ import software.amazon.awssdk.services.s3.model.PutObjectResponse; + import software.amazon.awssdk.services.s3.model.ReplicationStatus; + import software.amazon.awssdk.services.s3.model.RequestCharged; + import software.amazon.awssdk.services.s3.model.RequestPayer; ++import software.amazon.awssdk.services.s3.model.S3Error; + import software.amazon.awssdk.services.s3.model.S3Exception; + import software.amazon.awssdk.services.s3.model.ServerSideEncryption; + import software.amazon.awssdk.services.s3.model.StorageClass; +@@ -269,7 +269,7 @@ 
public class ToNative { + return builder.build(); + } + +- public static List Errors( ++ public static List Errors( + DafnySequence< + ? extends software.amazon.cryptography.services.s3.internaldafny.types.ErrorShape + > dafnyValue +@@ -280,10 +280,10 @@ public class ToNative { + ); + } + +- public static ErrorShape ErrorShape( ++ public static S3Error ErrorShape( + software.amazon.cryptography.services.s3.internaldafny.types.ErrorShape dafnyValue + ) { +- ErrorShape.Builder builder = ErrorShape.builder(); ++ S3Error.Builder builder = S3Error.builder(); + if (dafnyValue.dtor_Code().is_Some()) { + builder.code( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( +@@ -324,13 +324,6 @@ public class ToNative { + ) + ); + } +- if (dafnyValue.dtor_Body().is_Some()) { +- builder.body( +- SdkBytes.fromByteArray( +- (byte[]) (dafnyValue.dtor_Body().dtor_value().toRawArray()) +- ) +- ); +- } + if (dafnyValue.dtor_BucketKeyEnabled().is_Some()) { + builder.bucketKeyEnabled( + (dafnyValue.dtor_BucketKeyEnabled().dtor_value()) +@@ -511,7 +504,7 @@ public class ToNative { + ); + } + if (dafnyValue.dtor_SSEKMSKeyId().is_Some()) { +- builder.sseKMSKeyId( ++ builder.ssekmsKeyId( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSEKMSKeyId().dtor_value() + ) +@@ -866,14 +859,14 @@ public class ToNative { + ); + } + if (dafnyValue.dtor_SSEKMSEncryptionContext().is_Some()) { +- builder.sseKMSEncryptionContext( ++ builder.ssekmsEncryptionContext( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSEKMSEncryptionContext().dtor_value() + ) + ); + } + if (dafnyValue.dtor_SSEKMSKeyId().is_Some()) { +- builder.sseKMSKeyId( ++ builder.ssekmsKeyId( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSEKMSKeyId().dtor_value() + ) +@@ -896,13 +889,6 @@ public class ToNative { + if (dafnyValue.dtor_ACL().is_Some()) { + 
builder.acl(ToNative.ObjectCannedACL(dafnyValue.dtor_ACL().dtor_value())); + } +- if (dafnyValue.dtor_Body().is_Some()) { +- builder.body( +- SdkBytes.fromByteArray( +- (byte[]) (dafnyValue.dtor_Body().dtor_value().toRawArray()) +- ) +- ); +- } + builder.bucket( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Bucket() +@@ -1098,14 +1084,14 @@ public class ToNative { + ); + } + if (dafnyValue.dtor_SSEKMSEncryptionContext().is_Some()) { +- builder.sseKMSEncryptionContext( ++ builder.ssekmsEncryptionContext( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSEKMSEncryptionContext().dtor_value() + ) + ); + } + if (dafnyValue.dtor_SSEKMSKeyId().is_Some()) { +- builder.sseKMSKeyId( ++ builder.ssekmsKeyId( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSEKMSKeyId().dtor_value() + ) diff --git a/TestModels/aws-sdks/s3/dfyconfig.toml b/TestModels/aws-sdks/s3/dfyconfig.toml new file mode 100644 index 0000000000..6c9348c162 --- /dev/null +++ b/TestModels/aws-sdks/s3/dfyconfig.toml @@ -0,0 +1,10 @@ +# Project file for easier development in VS Code. +# Not yet used in the actual Makefile build. + +[options] +unicode-char = false +function-syntax = 3 + +library = [ + "../../dafny-dependencies/StandardLibrary/bin/DafnyStandardLibraries-smithy-dafny-subset.doo" +] diff --git a/TestModels/aws-sdks/s3/runtimes/java/build.gradle.kts b/TestModels/aws-sdks/s3/runtimes/java/build.gradle.kts new file mode 100644 index 0000000000..093eb548e9 --- /dev/null +++ b/TestModels/aws-sdks/s3/runtimes/java/build.gradle.kts @@ -0,0 +1,78 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import java.io.File +import java.io.FileInputStream +import java.util.Properties +import java.net.URI +import javax.annotation.Nullable + +tasks.wrapper { + gradleVersion = "7.6" +} + +plugins { + `java-library` + `maven-publish` +} + +var props = Properties().apply { + load(FileInputStream(File(rootProject.rootDir, "../../project.properties"))) +} +var dafnyVersion = props.getProperty("dafnyVersion") + +group = "com.amazonaws.s3" +version = "1.0-SNAPSHOT" +description = "AmazonS3" + +java { + toolchain.languageVersion.set(JavaLanguageVersion.of(8)) + sourceSets["main"].java { + srcDir("src/main/java") + srcDir("src/main/dafny-generated") + srcDir("src/main/smithy-generated") + } + sourceSets["test"].java { + srcDir("src/test/java") + srcDir("src/test/dafny-generated") + srcDir("src/test/smithy-generated") + } +} + +repositories { + mavenCentral() + mavenLocal() +} + +dependencies { + implementation("org.dafny:DafnyRuntime:${dafnyVersion}") + implementation("software.amazon.smithy.dafny:conversion:0.1.1") + implementation("software.amazon.cryptography:StandardLibrary:1.0-SNAPSHOT") + implementation("software.amazon.cryptography:StreamingSupport:1.0-SNAPSHOT") + implementation(platform("software.amazon.awssdk:bom:2.31.30")) + implementation("software.amazon.awssdk:s3") +} + +publishing { + publications.create("mavenLocal") { + groupId = group as String? + artifactId = description + from(components["java"]) + } + publications.create("maven") { + groupId = group as String? 
+ artifactId = description + from(components["java"]) + } + repositories { mavenLocal() } +} + +tasks.withType() { + options.encoding = "UTF-8" +} + +tasks { + register("runTests", JavaExec::class.java) { + mainClass.set("TestsFromDafny") + classpath = sourceSets["test"].runtimeClasspath + } +} diff --git a/TestModels/aws-sdks/s3/runtimes/java/src/main/java/software/amazon/cryptography/services/s3/internaldafny/__default.java b/TestModels/aws-sdks/s3/runtimes/java/src/main/java/software/amazon/cryptography/services/s3/internaldafny/__default.java new file mode 100644 index 0000000000..3db9f32425 --- /dev/null +++ b/TestModels/aws-sdks/s3/runtimes/java/src/main/java/software/amazon/cryptography/services/s3/internaldafny/__default.java @@ -0,0 +1,37 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +package software.amazon.cryptography.services.s3.internaldafny; + +import static software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence; +import static software.amazon.smithy.dafny.conversion.ToNative.Simple.String; + +import StandardLibraryInterop_Compile.WrappersInterop; +import Wrappers_Compile.Option; +import Wrappers_Compile.Result; +import dafny.DafnySequence; +import software.amazon.awssdk.auth.credentials.ProfileCredentialsProvider; +import software.amazon.awssdk.regions.Region; +import software.amazon.awssdk.regions.providers.DefaultAwsRegionProviderChain; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.cryptography.services.s3.internaldafny.types.Error; +import software.amazon.cryptography.services.s3.internaldafny.types.IS3Client; + +public class __default + extends software.amazon.cryptography.services.s3.internaldafny._ExternBase___default { + + public static Result S3Client() { + try { + Region region = new DefaultAwsRegionProviderChain().getRegion(); + final S3Client nativeClient = S3Client + .builder() + .region(region) + .build(); + + IS3Client 
shim = new Shim(nativeClient, region.toString()); + return CreateSuccessOfClient(shim); + } catch (Exception e) { + Error dafny_error = Error.create_Opaque(e); + return CreateFailureOfError(dafny_error); + } + } +} diff --git a/TestModels/aws-sdks/s3/runtimes/java/src/main/java/software/amazon/cryptography/services/s3/internaldafny/types/__default.java b/TestModels/aws-sdks/s3/runtimes/java/src/main/java/software/amazon/cryptography/services/s3/internaldafny/types/__default.java new file mode 100644 index 0000000000..0ce0f3e725 --- /dev/null +++ b/TestModels/aws-sdks/s3/runtimes/java/src/main/java/software/amazon/cryptography/services/s3/internaldafny/types/__default.java @@ -0,0 +1,5 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +package software.amazon.cryptography.services.s3.internaldafny.types; + +public class __default extends software.amazon.cryptography.services.s3.internaldafny.types._ExternBase___default {} diff --git a/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/Shim.java b/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/Shim.java new file mode 100644 index 0000000000..bc7fb494d0 --- /dev/null +++ b/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/Shim.java @@ -0,0 +1,175 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// Do not modify this file. This file is machine generated, and any changes to it will be overwritten. 
+package software.amazon.cryptography.services.s3.internaldafny; + +import Streams.DataStreamAsRequestBody; +import Wrappers_Compile.Result; +import java.lang.Override; +import java.lang.String; + +import software.amazon.awssdk.core.ResponseInputStream; +import software.amazon.awssdk.core.sync.RequestBody; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.model.DeleteObjectResponse; +import software.amazon.awssdk.services.s3.model.DeleteObjectsResponse; +import software.amazon.awssdk.services.s3.model.GetObjectResponse; +import software.amazon.awssdk.services.s3.model.InvalidObjectStateException; +import software.amazon.awssdk.services.s3.model.NoSuchKeyException; +import software.amazon.awssdk.services.s3.model.PutObjectResponse; +import software.amazon.awssdk.services.s3.model.S3Exception; +import software.amazon.cryptography.services.s3.internaldafny.types.DeleteObjectOutput; +import software.amazon.cryptography.services.s3.internaldafny.types.DeleteObjectRequest; +import software.amazon.cryptography.services.s3.internaldafny.types.DeleteObjectsOutput; +import software.amazon.cryptography.services.s3.internaldafny.types.DeleteObjectsRequest; +import software.amazon.cryptography.services.s3.internaldafny.types.Error; +import software.amazon.cryptography.services.s3.internaldafny.types.GetObjectOutput; +import software.amazon.cryptography.services.s3.internaldafny.types.GetObjectRequest; +import software.amazon.cryptography.services.s3.internaldafny.types.IS3Client; +import software.amazon.cryptography.services.s3.internaldafny.types.PutObjectOutput; +import software.amazon.cryptography.services.s3.internaldafny.types.PutObjectRequest; + +public class Shim implements IS3Client { + + private final S3Client _impl; + + private final String region; + + public Shim(final S3Client impl, final String region) { + this._impl = impl; + this.region = region; + } + + public S3Client impl() { + return this._impl; + } + + public 
String region() { + return this.region; + } + + @Override + public Result DeleteObject( + DeleteObjectRequest input + ) { + software.amazon.awssdk.services.s3.model.DeleteObjectRequest converted = + ToNative.DeleteObjectRequest(input); + try { + DeleteObjectResponse result = _impl.deleteObject(converted); + DeleteObjectOutput dafnyResponse = ToDafny.DeleteObjectOutput(result); + return Result.create_Success( + DeleteObjectOutput._typeDescriptor(), + Error._typeDescriptor(), + dafnyResponse + ); + } catch (S3Exception ex) { + return Result.create_Failure( + DeleteObjectOutput._typeDescriptor(), + Error._typeDescriptor(), + ToDafny.Error(ex) + ); + } catch (Exception ex) { + return Result.create_Failure( + DeleteObjectOutput._typeDescriptor(), + Error._typeDescriptor(), + ToDafny.Error(ex) + ); + } + } + + @Override + public Result DeleteObjects( + DeleteObjectsRequest input + ) { + software.amazon.awssdk.services.s3.model.DeleteObjectsRequest converted = + ToNative.DeleteObjectsRequest(input); + try { + DeleteObjectsResponse result = _impl.deleteObjects(converted); + DeleteObjectsOutput dafnyResponse = ToDafny.DeleteObjectsOutput(result); + return Result.create_Success( + DeleteObjectsOutput._typeDescriptor(), + Error._typeDescriptor(), + dafnyResponse + ); + } catch (S3Exception ex) { + return Result.create_Failure( + DeleteObjectsOutput._typeDescriptor(), + Error._typeDescriptor(), + ToDafny.Error(ex) + ); + } catch (Exception ex) { + return Result.create_Failure( + DeleteObjectsOutput._typeDescriptor(), + Error._typeDescriptor(), + ToDafny.Error(ex) + ); + } + } + + @Override + public Result GetObject(GetObjectRequest input) { + software.amazon.awssdk.services.s3.model.GetObjectRequest converted = + ToNative.GetObjectRequest(input); + try { + ResponseInputStream result = _impl.getObject(converted); + GetObjectOutput dafnyResponse = ToDafny.GetObjectOutput(result); + return Result.create_Success( + GetObjectOutput._typeDescriptor(), + Error._typeDescriptor(), + 
dafnyResponse + ); + } catch (InvalidObjectStateException ex) { + return Result.create_Failure( + GetObjectOutput._typeDescriptor(), + Error._typeDescriptor(), + ToDafny.Error(ex) + ); + } catch (NoSuchKeyException ex) { + return Result.create_Failure( + GetObjectOutput._typeDescriptor(), + Error._typeDescriptor(), + ToDafny.Error(ex) + ); + } catch (S3Exception ex) { + return Result.create_Failure( + GetObjectOutput._typeDescriptor(), + Error._typeDescriptor(), + ToDafny.Error(ex) + ); + } catch (Exception ex) { + return Result.create_Failure( + GetObjectOutput._typeDescriptor(), + Error._typeDescriptor(), + ToDafny.Error(ex) + ); + } + } + + @Override + public Result PutObject(PutObjectRequest input) { + software.amazon.awssdk.services.s3.model.PutObjectRequest converted = + ToNative.PutObjectRequest(input); + RequestBody body = DataStreamAsRequestBody.of(input._Body.dtor_value()); + try { + PutObjectResponse result = _impl.putObject(converted, body); + PutObjectOutput dafnyResponse = ToDafny.PutObjectOutput(result); + return Result.create_Success( + PutObjectOutput._typeDescriptor(), + Error._typeDescriptor(), + dafnyResponse + ); + } catch (S3Exception ex) { + return Result.create_Failure( + PutObjectOutput._typeDescriptor(), + Error._typeDescriptor(), + ToDafny.Error(ex) + ); + } catch (Exception ex) { + return Result.create_Failure( + PutObjectOutput._typeDescriptor(), + Error._typeDescriptor(), + ToDafny.Error(ex) + ); + } + } +} diff --git a/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/ToDafny.java b/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/ToDafny.java new file mode 100644 index 0000000000..3746cdacc9 --- /dev/null +++ b/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/ToDafny.java @@ -0,0 +1,2257 @@ +// Copyright Amazon.com Inc. 
or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// Do not modify this file. This file is machine generated, and any changes to it will be overwritten. +package software.amazon.cryptography.services.s3.internaldafny; + +import StandardLibrary_Compile.Streams_Compile.DataStream; +import Streams.InputStreamAsDataStream; +import Streams.RequestBodyAsDataStream; +import Wrappers_Compile.Option; +import dafny.DafnyMap; +import dafny.DafnySequence; +import dafny.TypeDescriptor; +import java.lang.Boolean; +import java.lang.Byte; +import java.lang.Character; +import java.lang.Exception; +import java.lang.Integer; +import java.lang.Long; +import java.lang.RuntimeException; +import java.lang.String; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import software.amazon.awssdk.core.ResponseInputStream; +import software.amazon.awssdk.core.sync.RequestBody; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.model.DeleteObjectResponse; +import software.amazon.awssdk.services.s3.model.DeleteObjectsResponse; +import software.amazon.awssdk.services.s3.model.GetObjectResponse; +import software.amazon.awssdk.services.s3.model.InvalidObjectStateException; +import software.amazon.awssdk.services.s3.model.NoSuchBucketException; +import software.amazon.awssdk.services.s3.model.NoSuchKeyException; +import software.amazon.awssdk.services.s3.model.PutObjectResponse; +import software.amazon.awssdk.services.s3.model.S3Exception; +import software.amazon.cryptography.services.s3.internaldafny.types.ChecksumAlgorithm; +import software.amazon.cryptography.services.s3.internaldafny.types.ChecksumMode; +import software.amazon.cryptography.services.s3.internaldafny.types.Delete; +import software.amazon.cryptography.services.s3.internaldafny.types.DeleteObjectOutput; +import software.amazon.cryptography.services.s3.internaldafny.types.DeleteObjectRequest; +import 
software.amazon.cryptography.services.s3.internaldafny.types.DeleteObjectsOutput; +import software.amazon.cryptography.services.s3.internaldafny.types.DeleteObjectsRequest; +import software.amazon.cryptography.services.s3.internaldafny.types.DeletedObject; +import software.amazon.cryptography.services.s3.internaldafny.types.Error; +import software.amazon.cryptography.services.s3.internaldafny.types.ErrorShape; +import software.amazon.cryptography.services.s3.internaldafny.types.Error_InvalidObjectState; +import software.amazon.cryptography.services.s3.internaldafny.types.Error_NoSuchBucket; +import software.amazon.cryptography.services.s3.internaldafny.types.Error_NoSuchKey; +import software.amazon.cryptography.services.s3.internaldafny.types.GetObjectOutput; +import software.amazon.cryptography.services.s3.internaldafny.types.GetObjectRequest; +import software.amazon.cryptography.services.s3.internaldafny.types.IS3Client; +import software.amazon.cryptography.services.s3.internaldafny.types.IntelligentTieringAccessTier; +import software.amazon.cryptography.services.s3.internaldafny.types.ObjectCannedACL; +import software.amazon.cryptography.services.s3.internaldafny.types.ObjectIdentifier; +import software.amazon.cryptography.services.s3.internaldafny.types.ObjectLockLegalHoldStatus; +import software.amazon.cryptography.services.s3.internaldafny.types.ObjectLockMode; +import software.amazon.cryptography.services.s3.internaldafny.types.PutObjectOutput; +import software.amazon.cryptography.services.s3.internaldafny.types.PutObjectRequest; +import software.amazon.cryptography.services.s3.internaldafny.types.ReplicationStatus; +import software.amazon.cryptography.services.s3.internaldafny.types.RequestCharged; +import software.amazon.cryptography.services.s3.internaldafny.types.RequestPayer; +import software.amazon.cryptography.services.s3.internaldafny.types.ServerSideEncryption; +import software.amazon.cryptography.services.s3.internaldafny.types.StorageClass; + 
+public class ToDafny { + + public static Delete Delete( + software.amazon.awssdk.services.s3.model.Delete nativeValue + ) { + DafnySequence objects; + objects = ToDafny.ObjectIdentifierList(nativeValue.objects()); + Option quiet; + quiet = + Objects.nonNull(nativeValue.quiet()) + ? Option.create_Some(TypeDescriptor.BOOLEAN, (nativeValue.quiet())) + : Option.create_None(TypeDescriptor.BOOLEAN); + return new Delete(objects, quiet); + } + + public static DeletedObject DeletedObject( + software.amazon.awssdk.services.s3.model.DeletedObject nativeValue + ) { + Option> key; + key = + Objects.nonNull(nativeValue.key()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.key() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> versionId; + versionId = + Objects.nonNull(nativeValue.versionId()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.versionId() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option deleteMarker; + deleteMarker = + Objects.nonNull(nativeValue.deleteMarker()) + ? Option.create_Some( + TypeDescriptor.BOOLEAN, + (nativeValue.deleteMarker()) + ) + : Option.create_None(TypeDescriptor.BOOLEAN); + Option> deleteMarkerVersionId; + deleteMarkerVersionId = + Objects.nonNull(nativeValue.deleteMarkerVersionId()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.deleteMarkerVersionId() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + return new DeletedObject( + key, + versionId, + deleteMarker, + deleteMarkerVersionId + ); + } + + public static DafnySequence DeletedObjects( + List nativeValue + ) { + return software.amazon.smithy.dafny.conversion.ToDafny.Aggregate.GenericToSequence( + nativeValue, + software.amazon.cryptography.services.s3.internaldafny.ToDafny::DeletedObject, + DeletedObject._typeDescriptor() + ); + } + + public static DeleteObjectOutput DeleteObjectOutput( + DeleteObjectResponse nativeValue + ) { + Option deleteMarker; + deleteMarker = + Objects.nonNull(nativeValue.deleteMarker()) + ? Option.create_Some( + TypeDescriptor.BOOLEAN, + (nativeValue.deleteMarker()) + ) + : Option.create_None(TypeDescriptor.BOOLEAN); + Option> versionId; + versionId = + Objects.nonNull(nativeValue.versionId()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.versionId() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option requestCharged; + requestCharged = + Objects.nonNull(nativeValue.requestCharged()) + ? 
Option.create_Some( + RequestCharged._typeDescriptor(), + ToDafny.RequestCharged(nativeValue.requestCharged()) + ) + : Option.create_None(RequestCharged._typeDescriptor()); + return new DeleteObjectOutput(deleteMarker, versionId, requestCharged); + } + + public static DeleteObjectRequest DeleteObjectRequest( + software.amazon.awssdk.services.s3.model.DeleteObjectRequest nativeValue + ) { + DafnySequence bucket; + bucket = + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.bucket() + ); + DafnySequence key; + key = + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.key() + ); + Option> mFA; + mFA = + Objects.nonNull(nativeValue.mfa()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.mfa() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> versionId; + versionId = + Objects.nonNull(nativeValue.versionId()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.versionId() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option requestPayer; + requestPayer = + Objects.nonNull(nativeValue.requestPayer()) + ? Option.create_Some( + RequestPayer._typeDescriptor(), + ToDafny.RequestPayer(nativeValue.requestPayer()) + ) + : Option.create_None(RequestPayer._typeDescriptor()); + Option bypassGovernanceRetention; + bypassGovernanceRetention = + Objects.nonNull(nativeValue.bypassGovernanceRetention()) + ? Option.create_Some( + TypeDescriptor.BOOLEAN, + (nativeValue.bypassGovernanceRetention()) + ) + : Option.create_None(TypeDescriptor.BOOLEAN); + Option> expectedBucketOwner; + expectedBucketOwner = + Objects.nonNull(nativeValue.expectedBucketOwner()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.expectedBucketOwner() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + return new DeleteObjectRequest( + bucket, + key, + mFA, + versionId, + requestPayer, + bypassGovernanceRetention, + expectedBucketOwner + ); + } + + public static DeleteObjectsOutput DeleteObjectsOutput( + DeleteObjectsResponse nativeValue + ) { + Option> deleted; + deleted = + (Objects.nonNull(nativeValue.deleted()) && + nativeValue.deleted().size() > 0) + ? Option.create_Some( + DafnySequence._typeDescriptor(DeletedObject._typeDescriptor()), + ToDafny.DeletedObjects(nativeValue.deleted()) + ) + : Option.create_None( + DafnySequence._typeDescriptor(DeletedObject._typeDescriptor()) + ); + Option requestCharged; + requestCharged = + Objects.nonNull(nativeValue.requestCharged()) + ? Option.create_Some( + RequestCharged._typeDescriptor(), + ToDafny.RequestCharged(nativeValue.requestCharged()) + ) + : Option.create_None(RequestCharged._typeDescriptor()); + Option> errors; + errors = + (Objects.nonNull(nativeValue.errors()) && nativeValue.errors().size() > 0) + ? Option.create_Some( + DafnySequence._typeDescriptor(ErrorShape._typeDescriptor()), + ToDafny.Errors(nativeValue.errors()) + ) + : Option.create_None( + DafnySequence._typeDescriptor(ErrorShape._typeDescriptor()) + ); + return new DeleteObjectsOutput(deleted, requestCharged, errors); + } + + public static DeleteObjectsRequest DeleteObjectsRequest( + software.amazon.awssdk.services.s3.model.DeleteObjectsRequest nativeValue + ) { + DafnySequence bucket; + bucket = + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.bucket() + ); + Delete delete; + delete = ToDafny.Delete(nativeValue.delete()); + Option> mFA; + mFA = + Objects.nonNull(nativeValue.mfa()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.mfa() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option requestPayer; + requestPayer = + Objects.nonNull(nativeValue.requestPayer()) + ? Option.create_Some( + RequestPayer._typeDescriptor(), + ToDafny.RequestPayer(nativeValue.requestPayer()) + ) + : Option.create_None(RequestPayer._typeDescriptor()); + Option bypassGovernanceRetention; + bypassGovernanceRetention = + Objects.nonNull(nativeValue.bypassGovernanceRetention()) + ? Option.create_Some( + TypeDescriptor.BOOLEAN, + (nativeValue.bypassGovernanceRetention()) + ) + : Option.create_None(TypeDescriptor.BOOLEAN); + Option> expectedBucketOwner; + expectedBucketOwner = + Objects.nonNull(nativeValue.expectedBucketOwner()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.expectedBucketOwner() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option checksumAlgorithm; + checksumAlgorithm = + Objects.nonNull(nativeValue.checksumAlgorithm()) + ? 
Option.create_Some( + ChecksumAlgorithm._typeDescriptor(), + ToDafny.ChecksumAlgorithm(nativeValue.checksumAlgorithm()) + ) + : Option.create_None(ChecksumAlgorithm._typeDescriptor()); + return new DeleteObjectsRequest( + bucket, + delete, + mFA, + requestPayer, + bypassGovernanceRetention, + expectedBucketOwner, + checksumAlgorithm + ); + } + + public static DafnySequence Errors( + List nativeValue + ) { + return software.amazon.smithy.dafny.conversion.ToDafny.Aggregate.GenericToSequence( + nativeValue, + software.amazon.cryptography.services.s3.internaldafny.ToDafny::ErrorShape, + ErrorShape._typeDescriptor() + ); + } + + public static ErrorShape ErrorShape( + software.amazon.awssdk.services.s3.model.S3Error nativeValue + ) { + Option> key; + key = + Objects.nonNull(nativeValue.key()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.key() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> versionId; + versionId = + Objects.nonNull(nativeValue.versionId()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.versionId() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> code; + code = + Objects.nonNull(nativeValue.code()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.code() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> message; + message = + Objects.nonNull(nativeValue.message()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.message() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + return new ErrorShape(key, versionId, code, message); + } + + public static GetObjectOutput GetObjectOutput(ResponseInputStream responseInputStream) { + GetObjectResponse nativeValue = responseInputStream.response(); + Option> body; + body = + Option.create_Some( + null, + new InputStreamAsDataStream(Error._typeDescriptor(), responseInputStream, Error::create_Opaque) + ); + Option deleteMarker; + deleteMarker = + Objects.nonNull(nativeValue.deleteMarker()) + ? Option.create_Some( + TypeDescriptor.BOOLEAN, + (nativeValue.deleteMarker()) + ) + : Option.create_None(TypeDescriptor.BOOLEAN); + Option> acceptRanges; + acceptRanges = + Objects.nonNull(nativeValue.acceptRanges()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.acceptRanges() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> expiration; + expiration = + Objects.nonNull(nativeValue.expiration()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.expiration() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> restore; + restore = + Objects.nonNull(nativeValue.restore()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.restore() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> lastModified; + lastModified = + Objects.nonNull(nativeValue.lastModified()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.lastModified() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option contentLength; + contentLength = + Objects.nonNull(nativeValue.contentLength()) + ? Option.create_Some(TypeDescriptor.LONG, (nativeValue.contentLength())) + : Option.create_None(TypeDescriptor.LONG); + Option> eTag; + eTag = + Objects.nonNull(nativeValue.eTag()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.eTag() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> checksumCRC32; + checksumCRC32 = + Objects.nonNull(nativeValue.checksumCRC32()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.checksumCRC32() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> checksumCRC32C; + checksumCRC32C = + Objects.nonNull(nativeValue.checksumCRC32C()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.checksumCRC32C() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> checksumSHA1; + checksumSHA1 = + Objects.nonNull(nativeValue.checksumSHA1()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.checksumSHA1() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> checksumSHA256; + checksumSHA256 = + Objects.nonNull(nativeValue.checksumSHA256()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.checksumSHA256() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option missingMeta; + missingMeta = + Objects.nonNull(nativeValue.missingMeta()) + ? Option.create_Some(TypeDescriptor.INT, (nativeValue.missingMeta())) + : Option.create_None(TypeDescriptor.INT); + Option> versionId; + versionId = + Objects.nonNull(nativeValue.versionId()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.versionId() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> cacheControl; + cacheControl = + Objects.nonNull(nativeValue.cacheControl()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.cacheControl() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> contentDisposition; + contentDisposition = + Objects.nonNull(nativeValue.contentDisposition()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.contentDisposition() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> contentEncoding; + contentEncoding = + Objects.nonNull(nativeValue.contentEncoding()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.contentEncoding() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> contentLanguage; + contentLanguage = + Objects.nonNull(nativeValue.contentLanguage()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.contentLanguage() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> contentRange; + contentRange = + Objects.nonNull(nativeValue.contentRange()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.contentRange() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> contentType; + contentType = + Objects.nonNull(nativeValue.contentType()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.contentType() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> expires; + expires = + Objects.nonNull(nativeValue.expires()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.expires() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> websiteRedirectLocation; + websiteRedirectLocation = + Objects.nonNull(nativeValue.websiteRedirectLocation()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.websiteRedirectLocation() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option serverSideEncryption; + serverSideEncryption = + Objects.nonNull(nativeValue.serverSideEncryption()) + ? Option.create_Some( + ServerSideEncryption._typeDescriptor(), + ToDafny.ServerSideEncryption(nativeValue.serverSideEncryption()) + ) + : Option.create_None(ServerSideEncryption._typeDescriptor()); + Option< + DafnyMap< + ? extends DafnySequence, + ? extends DafnySequence + > + > metadata; + metadata = + (Objects.nonNull(nativeValue.metadata()) && + nativeValue.metadata().size() > 0) + ? Option.create_Some( + DafnyMap._typeDescriptor( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ), + ToDafny.Metadata(nativeValue.metadata()) + ) + : Option.create_None( + DafnyMap._typeDescriptor( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ) + ); + Option> sSECustomerAlgorithm; + sSECustomerAlgorithm = + Objects.nonNull(nativeValue.sseCustomerAlgorithm()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.sseCustomerAlgorithm() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> sSECustomerKeyMD5; + sSECustomerKeyMD5 = + Objects.nonNull(nativeValue.sseCustomerKeyMD5()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.sseCustomerKeyMD5() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> sSEKMSKeyId; + sSEKMSKeyId = + Objects.nonNull(nativeValue.ssekmsKeyId()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.ssekmsKeyId() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option bucketKeyEnabled; + bucketKeyEnabled = + Objects.nonNull(nativeValue.bucketKeyEnabled()) + ? Option.create_Some( + TypeDescriptor.BOOLEAN, + (nativeValue.bucketKeyEnabled()) + ) + : Option.create_None(TypeDescriptor.BOOLEAN); + Option storageClass; + storageClass = + Objects.nonNull(nativeValue.storageClass()) + ? Option.create_Some( + StorageClass._typeDescriptor(), + ToDafny.StorageClass(nativeValue.storageClass()) + ) + : Option.create_None(StorageClass._typeDescriptor()); + Option requestCharged; + requestCharged = + Objects.nonNull(nativeValue.requestCharged()) + ? Option.create_Some( + RequestCharged._typeDescriptor(), + ToDafny.RequestCharged(nativeValue.requestCharged()) + ) + : Option.create_None(RequestCharged._typeDescriptor()); + Option replicationStatus; + replicationStatus = + Objects.nonNull(nativeValue.replicationStatus()) + ? 
Option.create_Some( + ReplicationStatus._typeDescriptor(), + ToDafny.ReplicationStatus(nativeValue.replicationStatus()) + ) + : Option.create_None(ReplicationStatus._typeDescriptor()); + Option partsCount; + partsCount = + Objects.nonNull(nativeValue.partsCount()) + ? Option.create_Some(TypeDescriptor.INT, (nativeValue.partsCount())) + : Option.create_None(TypeDescriptor.INT); + Option tagCount; + tagCount = + Objects.nonNull(nativeValue.tagCount()) + ? Option.create_Some(TypeDescriptor.INT, (nativeValue.tagCount())) + : Option.create_None(TypeDescriptor.INT); + Option objectLockMode; + objectLockMode = + Objects.nonNull(nativeValue.objectLockMode()) + ? Option.create_Some( + ObjectLockMode._typeDescriptor(), + ToDafny.ObjectLockMode(nativeValue.objectLockMode()) + ) + : Option.create_None(ObjectLockMode._typeDescriptor()); + Option> objectLockRetainUntilDate; + objectLockRetainUntilDate = + Objects.nonNull(nativeValue.objectLockRetainUntilDate()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.objectLockRetainUntilDate() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option objectLockLegalHoldStatus; + objectLockLegalHoldStatus = + Objects.nonNull(nativeValue.objectLockLegalHoldStatus()) + ? 
Option.create_Some( + ObjectLockLegalHoldStatus._typeDescriptor(), + ToDafny.ObjectLockLegalHoldStatus( + nativeValue.objectLockLegalHoldStatus() + ) + ) + : Option.create_None(ObjectLockLegalHoldStatus._typeDescriptor()); + return new GetObjectOutput( + body, + deleteMarker, + acceptRanges, + expiration, + restore, + lastModified, + contentLength, + eTag, + checksumCRC32, + checksumCRC32C, + checksumSHA1, + checksumSHA256, + missingMeta, + versionId, + cacheControl, + contentDisposition, + contentEncoding, + contentLanguage, + contentRange, + contentType, + expires, + websiteRedirectLocation, + serverSideEncryption, + metadata, + sSECustomerAlgorithm, + sSECustomerKeyMD5, + sSEKMSKeyId, + bucketKeyEnabled, + storageClass, + requestCharged, + replicationStatus, + partsCount, + tagCount, + objectLockMode, + objectLockRetainUntilDate, + objectLockLegalHoldStatus + ); + } + + public static GetObjectRequest GetObjectRequest( + software.amazon.awssdk.services.s3.model.GetObjectRequest nativeValue + ) { + DafnySequence bucket; + bucket = + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.bucket() + ); + Option> ifMatch; + ifMatch = + Objects.nonNull(nativeValue.ifMatch()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.ifMatch() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> ifModifiedSince; + ifModifiedSince = + Objects.nonNull(nativeValue.ifModifiedSince()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.ifModifiedSince() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> ifNoneMatch; + ifNoneMatch = + Objects.nonNull(nativeValue.ifNoneMatch()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.ifNoneMatch() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> ifUnmodifiedSince; + ifUnmodifiedSince = + Objects.nonNull(nativeValue.ifUnmodifiedSince()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.ifUnmodifiedSince() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + DafnySequence key; + key = + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.key() + ); + Option> range; + range = + Objects.nonNull(nativeValue.range()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.range() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> responseCacheControl; + responseCacheControl = + Objects.nonNull(nativeValue.responseCacheControl()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.responseCacheControl() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> responseContentDisposition; + responseContentDisposition = + Objects.nonNull(nativeValue.responseContentDisposition()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.responseContentDisposition() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> responseContentEncoding; + responseContentEncoding = + Objects.nonNull(nativeValue.responseContentEncoding()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.responseContentEncoding() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> responseContentLanguage; + responseContentLanguage = + Objects.nonNull(nativeValue.responseContentLanguage()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.responseContentLanguage() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> responseContentType; + responseContentType = + Objects.nonNull(nativeValue.responseContentType()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.responseContentType() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> responseExpires; + responseExpires = + Objects.nonNull(nativeValue.responseExpires()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.responseExpires() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> versionId; + versionId = + Objects.nonNull(nativeValue.versionId()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.versionId() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> sSECustomerAlgorithm; + sSECustomerAlgorithm = + Objects.nonNull(nativeValue.sseCustomerAlgorithm()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.sseCustomerAlgorithm() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> sSECustomerKey; + sSECustomerKey = + Objects.nonNull(nativeValue.sseCustomerKey()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.sseCustomerKey() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> sSECustomerKeyMD5; + sSECustomerKeyMD5 = + Objects.nonNull(nativeValue.sseCustomerKeyMD5()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.sseCustomerKeyMD5() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option requestPayer; + requestPayer = + Objects.nonNull(nativeValue.requestPayer()) + ? Option.create_Some( + RequestPayer._typeDescriptor(), + ToDafny.RequestPayer(nativeValue.requestPayer()) + ) + : Option.create_None(RequestPayer._typeDescriptor()); + Option partNumber; + partNumber = + Objects.nonNull(nativeValue.partNumber()) + ? Option.create_Some(TypeDescriptor.INT, (nativeValue.partNumber())) + : Option.create_None(TypeDescriptor.INT); + Option> expectedBucketOwner; + expectedBucketOwner = + Objects.nonNull(nativeValue.expectedBucketOwner()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.expectedBucketOwner() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option checksumMode; + checksumMode = + Objects.nonNull(nativeValue.checksumMode()) + ? Option.create_Some( + ChecksumMode._typeDescriptor(), + ToDafny.ChecksumMode(nativeValue.checksumMode()) + ) + : Option.create_None(ChecksumMode._typeDescriptor()); + return new GetObjectRequest( + bucket, + ifMatch, + ifModifiedSince, + ifNoneMatch, + ifUnmodifiedSince, + key, + range, + responseCacheControl, + responseContentDisposition, + responseContentEncoding, + responseContentLanguage, + responseContentType, + responseExpires, + versionId, + sSECustomerAlgorithm, + sSECustomerKey, + sSECustomerKeyMD5, + requestPayer, + partNumber, + expectedBucketOwner, + checksumMode + ); + } + + public static DafnyMap< + ? extends DafnySequence, + ? extends DafnySequence + > Metadata(Map nativeValue) { + return software.amazon.smithy.dafny.conversion.ToDafny.Aggregate.GenericToMap( + nativeValue, + software.amazon.smithy.dafny.conversion.ToDafny.Simple::CharacterSequence, + software.amazon.smithy.dafny.conversion.ToDafny.Simple::CharacterSequence + ); + } + + public static ObjectIdentifier ObjectIdentifier( + software.amazon.awssdk.services.s3.model.ObjectIdentifier nativeValue + ) { + DafnySequence key; + key = + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.key() + ); + Option> versionId; + versionId = + Objects.nonNull(nativeValue.versionId()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.versionId() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + return new ObjectIdentifier(key, versionId); + } + + public static DafnySequence ObjectIdentifierList( + List nativeValue + ) { + return software.amazon.smithy.dafny.conversion.ToDafny.Aggregate.GenericToSequence( + nativeValue, + software.amazon.cryptography.services.s3.internaldafny.ToDafny::ObjectIdentifier, + ObjectIdentifier._typeDescriptor() + ); + } + + public static PutObjectOutput PutObjectOutput(PutObjectResponse nativeValue) { + Option> expiration; + expiration = + Objects.nonNull(nativeValue.expiration()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.expiration() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> eTag; + eTag = + Objects.nonNull(nativeValue.eTag()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.eTag() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> checksumCRC32; + checksumCRC32 = + Objects.nonNull(nativeValue.checksumCRC32()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.checksumCRC32() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> checksumCRC32C; + checksumCRC32C = + Objects.nonNull(nativeValue.checksumCRC32C()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.checksumCRC32C() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> checksumSHA1; + checksumSHA1 = + Objects.nonNull(nativeValue.checksumSHA1()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.checksumSHA1() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> checksumSHA256; + checksumSHA256 = + Objects.nonNull(nativeValue.checksumSHA256()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.checksumSHA256() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option serverSideEncryption; + serverSideEncryption = + Objects.nonNull(nativeValue.serverSideEncryption()) + ? Option.create_Some( + ServerSideEncryption._typeDescriptor(), + ToDafny.ServerSideEncryption(nativeValue.serverSideEncryption()) + ) + : Option.create_None(ServerSideEncryption._typeDescriptor()); + Option> versionId; + versionId = + Objects.nonNull(nativeValue.versionId()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.versionId() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> sSECustomerAlgorithm; + sSECustomerAlgorithm = + Objects.nonNull(nativeValue.sseCustomerAlgorithm()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.sseCustomerAlgorithm() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> sSECustomerKeyMD5; + sSECustomerKeyMD5 = + Objects.nonNull(nativeValue.sseCustomerKeyMD5()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.sseCustomerKeyMD5() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> sSEKMSKeyId; + sSEKMSKeyId = + Objects.nonNull(nativeValue.ssekmsKeyId()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.ssekmsKeyId() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> sSEKMSEncryptionContext; + sSEKMSEncryptionContext = + Objects.nonNull(nativeValue.ssekmsEncryptionContext()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.ssekmsEncryptionContext() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option bucketKeyEnabled; + bucketKeyEnabled = + Objects.nonNull(nativeValue.bucketKeyEnabled()) + ? Option.create_Some( + TypeDescriptor.BOOLEAN, + (nativeValue.bucketKeyEnabled()) + ) + : Option.create_None(TypeDescriptor.BOOLEAN); + Option requestCharged; + requestCharged = + Objects.nonNull(nativeValue.requestCharged()) + ? 
Option.create_Some( + RequestCharged._typeDescriptor(), + ToDafny.RequestCharged(nativeValue.requestCharged()) + ) + : Option.create_None(RequestCharged._typeDescriptor()); + return new PutObjectOutput( + expiration, + eTag, + checksumCRC32, + checksumCRC32C, + checksumSHA1, + checksumSHA256, + serverSideEncryption, + versionId, + sSECustomerAlgorithm, + sSECustomerKeyMD5, + sSEKMSKeyId, + sSEKMSEncryptionContext, + bucketKeyEnabled, + requestCharged + ); + } + + public static PutObjectRequest PutObjectRequest( + software.amazon.awssdk.services.s3.model.PutObjectRequest nativeValue, + RequestBody nativeBody + ) { + Option aCL; + aCL = + Objects.nonNull(nativeValue.acl()) + ? Option.create_Some( + ObjectCannedACL._typeDescriptor(), + ToDafny.ObjectCannedACL(nativeValue.acl()) + ) + : Option.create_None(ObjectCannedACL._typeDescriptor()); + Option> body = Option.create_Some( + null, + new RequestBodyAsDataStream( + null, nativeBody, Error::create_Opaque )); + DafnySequence bucket; + bucket = + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.bucket() + ); + Option> cacheControl; + cacheControl = + Objects.nonNull(nativeValue.cacheControl()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.cacheControl() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> contentDisposition; + contentDisposition = + Objects.nonNull(nativeValue.contentDisposition()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.contentDisposition() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> contentEncoding; + contentEncoding = + Objects.nonNull(nativeValue.contentEncoding()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.contentEncoding() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> contentLanguage; + contentLanguage = + Objects.nonNull(nativeValue.contentLanguage()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.contentLanguage() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option contentLength; + contentLength = + Objects.nonNull(nativeValue.contentLength()) + ? Option.create_Some(TypeDescriptor.LONG, (nativeValue.contentLength())) + : Option.create_None(TypeDescriptor.LONG); + Option> contentMD5; + contentMD5 = + Objects.nonNull(nativeValue.contentMD5()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.contentMD5() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> contentType; + contentType = + Objects.nonNull(nativeValue.contentType()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.contentType() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option checksumAlgorithm; + checksumAlgorithm = + Objects.nonNull(nativeValue.checksumAlgorithm()) + ? Option.create_Some( + ChecksumAlgorithm._typeDescriptor(), + ToDafny.ChecksumAlgorithm(nativeValue.checksumAlgorithm()) + ) + : Option.create_None(ChecksumAlgorithm._typeDescriptor()); + Option> checksumCRC32; + checksumCRC32 = + Objects.nonNull(nativeValue.checksumCRC32()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.checksumCRC32() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> checksumCRC32C; + checksumCRC32C = + Objects.nonNull(nativeValue.checksumCRC32C()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.checksumCRC32C() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> checksumSHA1; + checksumSHA1 = + Objects.nonNull(nativeValue.checksumSHA1()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.checksumSHA1() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> checksumSHA256; + checksumSHA256 = + Objects.nonNull(nativeValue.checksumSHA256()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.checksumSHA256() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> expires; + expires = + Objects.nonNull(nativeValue.expires()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.expires() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> grantFullControl; + grantFullControl = + Objects.nonNull(nativeValue.grantFullControl()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.grantFullControl() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> grantRead; + grantRead = + Objects.nonNull(nativeValue.grantRead()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.grantRead() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> grantReadACP; + grantReadACP = + Objects.nonNull(nativeValue.grantReadACP()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.grantReadACP() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> grantWriteACP; + grantWriteACP = + Objects.nonNull(nativeValue.grantWriteACP()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.grantWriteACP() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + DafnySequence key; + key = + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.key() + ); + Option< + DafnyMap< + ? extends DafnySequence, + ? extends DafnySequence + > + > metadata; + metadata = + (Objects.nonNull(nativeValue.metadata()) && + nativeValue.metadata().size() > 0) + ? 
Option.create_Some( + DafnyMap._typeDescriptor( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ), + ToDafny.Metadata(nativeValue.metadata()) + ) + : Option.create_None( + DafnyMap._typeDescriptor( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ) + ); + Option serverSideEncryption; + serverSideEncryption = + Objects.nonNull(nativeValue.serverSideEncryption()) + ? Option.create_Some( + ServerSideEncryption._typeDescriptor(), + ToDafny.ServerSideEncryption(nativeValue.serverSideEncryption()) + ) + : Option.create_None(ServerSideEncryption._typeDescriptor()); + Option storageClass; + storageClass = + Objects.nonNull(nativeValue.storageClass()) + ? Option.create_Some( + StorageClass._typeDescriptor(), + ToDafny.StorageClass(nativeValue.storageClass()) + ) + : Option.create_None(StorageClass._typeDescriptor()); + Option> websiteRedirectLocation; + websiteRedirectLocation = + Objects.nonNull(nativeValue.websiteRedirectLocation()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.websiteRedirectLocation() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> sSECustomerAlgorithm; + sSECustomerAlgorithm = + Objects.nonNull(nativeValue.sseCustomerAlgorithm()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.sseCustomerAlgorithm() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> sSECustomerKey; + sSECustomerKey = + Objects.nonNull(nativeValue.sseCustomerKey()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.sseCustomerKey() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> sSECustomerKeyMD5; + sSECustomerKeyMD5 = + Objects.nonNull(nativeValue.sseCustomerKeyMD5()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.sseCustomerKeyMD5() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> sSEKMSKeyId; + sSEKMSKeyId = + Objects.nonNull(nativeValue.ssekmsKeyId()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.ssekmsKeyId() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option> sSEKMSEncryptionContext; + sSEKMSEncryptionContext = + Objects.nonNull(nativeValue.ssekmsEncryptionContext()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.ssekmsEncryptionContext() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option bucketKeyEnabled; + bucketKeyEnabled = + Objects.nonNull(nativeValue.bucketKeyEnabled()) + ? Option.create_Some( + TypeDescriptor.BOOLEAN, + (nativeValue.bucketKeyEnabled()) + ) + : Option.create_None(TypeDescriptor.BOOLEAN); + Option requestPayer; + requestPayer = + Objects.nonNull(nativeValue.requestPayer()) + ? Option.create_Some( + RequestPayer._typeDescriptor(), + ToDafny.RequestPayer(nativeValue.requestPayer()) + ) + : Option.create_None(RequestPayer._typeDescriptor()); + Option> tagging; + tagging = + Objects.nonNull(nativeValue.tagging()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.tagging() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option objectLockMode; + objectLockMode = + Objects.nonNull(nativeValue.objectLockMode()) + ? Option.create_Some( + ObjectLockMode._typeDescriptor(), + ToDafny.ObjectLockMode(nativeValue.objectLockMode()) + ) + : Option.create_None(ObjectLockMode._typeDescriptor()); + Option> objectLockRetainUntilDate; + objectLockRetainUntilDate = + Objects.nonNull(nativeValue.objectLockRetainUntilDate()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.objectLockRetainUntilDate() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + Option objectLockLegalHoldStatus; + objectLockLegalHoldStatus = + Objects.nonNull(nativeValue.objectLockLegalHoldStatus()) + ? Option.create_Some( + ObjectLockLegalHoldStatus._typeDescriptor(), + ToDafny.ObjectLockLegalHoldStatus( + nativeValue.objectLockLegalHoldStatus() + ) + ) + : Option.create_None(ObjectLockLegalHoldStatus._typeDescriptor()); + Option> expectedBucketOwner; + expectedBucketOwner = + Objects.nonNull(nativeValue.expectedBucketOwner()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.expectedBucketOwner() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + return new PutObjectRequest( + aCL, + body, + bucket, + cacheControl, + contentDisposition, + contentEncoding, + contentLanguage, + contentLength, + contentMD5, + contentType, + checksumAlgorithm, + checksumCRC32, + checksumCRC32C, + checksumSHA1, + checksumSHA256, + expires, + grantFullControl, + grantRead, + grantReadACP, + grantWriteACP, + key, + metadata, + serverSideEncryption, + storageClass, + websiteRedirectLocation, + sSECustomerAlgorithm, + sSECustomerKey, + sSECustomerKeyMD5, + sSEKMSKeyId, + sSEKMSEncryptionContext, + bucketKeyEnabled, + requestPayer, + tagging, + objectLockMode, + objectLockRetainUntilDate, + objectLockLegalHoldStatus, + expectedBucketOwner + ); + } + + public static Error Error(InvalidObjectStateException nativeValue) { + Option storageClass; + storageClass = + Objects.nonNull(nativeValue.storageClass()) + ? Option.create_Some( + StorageClass._typeDescriptor(), + ToDafny.StorageClass(nativeValue.storageClass()) + ) + : Option.create_None(StorageClass._typeDescriptor()); + Option accessTier; + accessTier = + Objects.nonNull(nativeValue.accessTier()) + ? Option.create_Some( + IntelligentTieringAccessTier._typeDescriptor(), + ToDafny.IntelligentTieringAccessTier(nativeValue.accessTier()) + ) + : Option.create_None(IntelligentTieringAccessTier._typeDescriptor()); + Option> message; + message = + Objects.nonNull(nativeValue.getMessage()) + ? 
Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.getMessage() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + return new Error_InvalidObjectState(storageClass, accessTier, message); + } + + public static Error Error(NoSuchBucketException nativeValue) { + Option> message; + message = + Objects.nonNull(nativeValue.getMessage()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.getMessage() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + return new Error_NoSuchBucket(message); + } + + public static Error Error(NoSuchKeyException nativeValue) { + Option> message; + message = + Objects.nonNull(nativeValue.getMessage()) + ? Option.create_Some( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR), + software.amazon.smithy.dafny.conversion.ToDafny.Simple.CharacterSequence( + nativeValue.getMessage() + ) + ) + : Option.create_None( + DafnySequence._typeDescriptor(TypeDescriptor.CHAR) + ); + return new Error_NoSuchKey(message); + } + + public static ChecksumAlgorithm ChecksumAlgorithm( + software.amazon.awssdk.services.s3.model.ChecksumAlgorithm nativeValue + ) { + switch (nativeValue) { + case CRC32: + { + return ChecksumAlgorithm.create_CRC32(); + } + case CRC32_C: + { + return ChecksumAlgorithm.create_CRC32C(); + } + case SHA1: + { + return ChecksumAlgorithm.create_SHA1(); + } + case SHA256: + { + return ChecksumAlgorithm.create_SHA256(); + } + default: + { + throw new RuntimeException( + "Cannot convert " + + nativeValue + + " to software.amazon.cryptography.services.s3.internaldafny.types.ChecksumAlgorithm." 
+ ); + } + } + } + + public static ChecksumMode ChecksumMode( + software.amazon.awssdk.services.s3.model.ChecksumMode nativeValue + ) { + switch (nativeValue) { + case ENABLED: + { + return ChecksumMode.create(); + } + default: + { + throw new RuntimeException( + "Cannot convert " + + nativeValue + + " to software.amazon.cryptography.services.s3.internaldafny.types.ChecksumMode." + ); + } + } + } + + public static IntelligentTieringAccessTier IntelligentTieringAccessTier( + software.amazon.awssdk.services.s3.model.IntelligentTieringAccessTier nativeValue + ) { + switch (nativeValue) { + case ARCHIVE_ACCESS: + { + return IntelligentTieringAccessTier.create_ARCHIVE__ACCESS(); + } + case DEEP_ARCHIVE_ACCESS: + { + return IntelligentTieringAccessTier.create_DEEP__ARCHIVE__ACCESS(); + } + default: + { + throw new RuntimeException( + "Cannot convert " + + nativeValue + + " to software.amazon.cryptography.services.s3.internaldafny.types.IntelligentTieringAccessTier." + ); + } + } + } + + public static ObjectCannedACL ObjectCannedACL( + software.amazon.awssdk.services.s3.model.ObjectCannedACL nativeValue + ) { + switch (nativeValue) { + case PRIVATE: + { + return ObjectCannedACL.create_private(); + } + case PUBLIC_READ: + { + return ObjectCannedACL.create_public__read(); + } + case PUBLIC_READ_WRITE: + { + return ObjectCannedACL.create_public__read__write(); + } + case AUTHENTICATED_READ: + { + return ObjectCannedACL.create_authenticated__read(); + } + case AWS_EXEC_READ: + { + return ObjectCannedACL.create_aws__exec__read(); + } + case BUCKET_OWNER_READ: + { + return ObjectCannedACL.create_bucket__owner__read(); + } + case BUCKET_OWNER_FULL_CONTROL: + { + return ObjectCannedACL.create_bucket__owner__full__control(); + } + default: + { + throw new RuntimeException( + "Cannot convert " + + nativeValue + + " to software.amazon.cryptography.services.s3.internaldafny.types.ObjectCannedACL." 
+ ); + } + } + } + + public static ObjectLockLegalHoldStatus ObjectLockLegalHoldStatus( + software.amazon.awssdk.services.s3.model.ObjectLockLegalHoldStatus nativeValue + ) { + switch (nativeValue) { + case ON: + { + return ObjectLockLegalHoldStatus.create_ON(); + } + case OFF: + { + return ObjectLockLegalHoldStatus.create_OFF(); + } + default: + { + throw new RuntimeException( + "Cannot convert " + + nativeValue + + " to software.amazon.cryptography.services.s3.internaldafny.types.ObjectLockLegalHoldStatus." + ); + } + } + } + + public static ObjectLockMode ObjectLockMode( + software.amazon.awssdk.services.s3.model.ObjectLockMode nativeValue + ) { + switch (nativeValue) { + case GOVERNANCE: + { + return ObjectLockMode.create_GOVERNANCE(); + } + case COMPLIANCE: + { + return ObjectLockMode.create_COMPLIANCE(); + } + default: + { + throw new RuntimeException( + "Cannot convert " + + nativeValue + + " to software.amazon.cryptography.services.s3.internaldafny.types.ObjectLockMode." + ); + } + } + } + + public static ReplicationStatus ReplicationStatus( + software.amazon.awssdk.services.s3.model.ReplicationStatus nativeValue + ) { + switch (nativeValue) { + case COMPLETE: + { + return ReplicationStatus.create_COMPLETE(); + } + case PENDING: + { + return ReplicationStatus.create_PENDING(); + } + case FAILED: + { + return ReplicationStatus.create_FAILED(); + } + case REPLICA: + { + return ReplicationStatus.create_REPLICA(); + } + case COMPLETED: + { + return ReplicationStatus.create_COMPLETED(); + } + default: + { + throw new RuntimeException( + "Cannot convert " + + nativeValue + + " to software.amazon.cryptography.services.s3.internaldafny.types.ReplicationStatus." 
+ ); + } + } + } + + public static RequestCharged RequestCharged( + software.amazon.awssdk.services.s3.model.RequestCharged nativeValue + ) { + switch (nativeValue) { + case REQUESTER: + { + return RequestCharged.create(); + } + default: + { + throw new RuntimeException( + "Cannot convert " + + nativeValue + + " to software.amazon.cryptography.services.s3.internaldafny.types.RequestCharged." + ); + } + } + } + + public static RequestPayer RequestPayer( + software.amazon.awssdk.services.s3.model.RequestPayer nativeValue + ) { + switch (nativeValue) { + case REQUESTER: + { + return RequestPayer.create(); + } + default: + { + throw new RuntimeException( + "Cannot convert " + + nativeValue + + " to software.amazon.cryptography.services.s3.internaldafny.types.RequestPayer." + ); + } + } + } + + public static ServerSideEncryption ServerSideEncryption( + software.amazon.awssdk.services.s3.model.ServerSideEncryption nativeValue + ) { + switch (nativeValue) { + case AES256: + { + return ServerSideEncryption.create_AES256(); + } + case AWS_KMS: + { + return ServerSideEncryption.create_aws__kms(); + } + case AWS_KMS_DSSE: + { + return ServerSideEncryption.create_aws__kms__dsse(); + } + default: + { + throw new RuntimeException( + "Cannot convert " + + nativeValue + + " to software.amazon.cryptography.services.s3.internaldafny.types.ServerSideEncryption." 
+ ); + } + } + } + + public static StorageClass StorageClass( + software.amazon.awssdk.services.s3.model.StorageClass nativeValue + ) { + switch (nativeValue) { + case STANDARD: + { + return StorageClass.create_STANDARD(); + } + case REDUCED_REDUNDANCY: + { + return StorageClass.create_REDUCED__REDUNDANCY(); + } + case STANDARD_IA: + { + return StorageClass.create_STANDARD__IA(); + } + case ONEZONE_IA: + { + return StorageClass.create_ONEZONE__IA(); + } + case INTELLIGENT_TIERING: + { + return StorageClass.create_INTELLIGENT__TIERING(); + } + case GLACIER: + { + return StorageClass.create_GLACIER(); + } + case DEEP_ARCHIVE: + { + return StorageClass.create_DEEP__ARCHIVE(); + } + case OUTPOSTS: + { + return StorageClass.create_OUTPOSTS(); + } + case GLACIER_IR: + { + return StorageClass.create_GLACIER__IR(); + } + case SNOW: + { + return StorageClass.create_SNOW(); + } + case EXPRESS_ONEZONE: + { + return StorageClass.create_EXPRESS__ONEZONE(); + } + default: + { + throw new RuntimeException( + "Cannot convert " + + nativeValue + + " to software.amazon.cryptography.services.s3.internaldafny.types.StorageClass." 
+ ); + } + } + } + + public static ChecksumAlgorithm ChecksumAlgorithm(String nativeValue) { + return ChecksumAlgorithm( + software.amazon.awssdk.services.s3.model.ChecksumAlgorithm.fromValue( + nativeValue + ) + ); + } + + public static ChecksumMode ChecksumMode(String nativeValue) { + return ChecksumMode( + software.amazon.awssdk.services.s3.model.ChecksumMode.fromValue( + nativeValue + ) + ); + } + + public static IntelligentTieringAccessTier IntelligentTieringAccessTier( + String nativeValue + ) { + return IntelligentTieringAccessTier( + software.amazon.awssdk.services.s3.model.IntelligentTieringAccessTier.fromValue( + nativeValue + ) + ); + } + + public static ObjectCannedACL ObjectCannedACL(String nativeValue) { + return ObjectCannedACL( + software.amazon.awssdk.services.s3.model.ObjectCannedACL.fromValue( + nativeValue + ) + ); + } + + public static ObjectLockLegalHoldStatus ObjectLockLegalHoldStatus( + String nativeValue + ) { + return ObjectLockLegalHoldStatus( + software.amazon.awssdk.services.s3.model.ObjectLockLegalHoldStatus.fromValue( + nativeValue + ) + ); + } + + public static ObjectLockMode ObjectLockMode(String nativeValue) { + return ObjectLockMode( + software.amazon.awssdk.services.s3.model.ObjectLockMode.fromValue( + nativeValue + ) + ); + } + + public static ReplicationStatus ReplicationStatus(String nativeValue) { + return ReplicationStatus( + software.amazon.awssdk.services.s3.model.ReplicationStatus.fromValue( + nativeValue + ) + ); + } + + public static RequestCharged RequestCharged(String nativeValue) { + return RequestCharged( + software.amazon.awssdk.services.s3.model.RequestCharged.fromValue( + nativeValue + ) + ); + } + + public static RequestPayer RequestPayer(String nativeValue) { + return RequestPayer( + software.amazon.awssdk.services.s3.model.RequestPayer.fromValue( + nativeValue + ) + ); + } + + public static ServerSideEncryption ServerSideEncryption(String nativeValue) { + return ServerSideEncryption( + 
software.amazon.awssdk.services.s3.model.ServerSideEncryption.fromValue( + nativeValue + ) + ); + } + + public static StorageClass StorageClass(String nativeValue) { + return StorageClass( + software.amazon.awssdk.services.s3.model.StorageClass.fromValue( + nativeValue + ) + ); + } + + public static Error Error(S3Exception nativeValue) { + // While this is logically identical to the other Opaque Error case, + // it is semantically distinct. + // An un-modeled Service Error is different from a Java Heap Exhaustion error. + // In the future, Smithy-Dafny MAY allow for this distinction. + // Which would allow Dafny developers to treat the two differently. + return Error.create_OpaqueWithText( + nativeValue, + dafny.DafnySequence.asString(nativeValue.getMessage()) + ); + } + + public static Error Error(Exception nativeValue) { + // While this is logically identical to the other Opaque Error case, + // it is semantically distinct. + // An un-modeled Service Error is different from a Java Heap Exhaustion error. + // In the future, Smithy-Dafny MAY allow for this distinction. + // Which would allow Dafny developers to treat the two differently. + return Error.create_OpaqueWithText( + nativeValue, + dafny.DafnySequence.asString(nativeValue.getMessage()) + ); + } + + public static IS3Client AmazonS3(S3Client nativeValue) { + return new Shim(nativeValue, null); + } +} diff --git a/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/ToNative.java b/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/ToNative.java new file mode 100644 index 0000000000..433c3f16cd --- /dev/null +++ b/TestModels/aws-sdks/s3/runtimes/java/src/main/smithy-generated/software/amazon/cryptography/services/s3/internaldafny/ToNative.java @@ -0,0 +1,1346 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +// Do not modify this file. This file is machine generated, and any changes to it will be overwritten. +package software.amazon.cryptography.services.s3.internaldafny; + +import dafny.DafnyMap; +import dafny.DafnySequence; +import java.lang.Character; +import java.lang.IllegalStateException; +import java.lang.RuntimeException; +import java.lang.String; +import java.lang.Throwable; +import java.util.List; +import java.util.Map; +import software.amazon.awssdk.core.SdkBytes; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.model.ChecksumAlgorithm; +import software.amazon.awssdk.services.s3.model.ChecksumMode; +import software.amazon.awssdk.services.s3.model.Delete; +import software.amazon.awssdk.services.s3.model.DeleteObjectRequest; +import software.amazon.awssdk.services.s3.model.DeleteObjectResponse; +import software.amazon.awssdk.services.s3.model.DeleteObjectsRequest; +import software.amazon.awssdk.services.s3.model.DeleteObjectsResponse; +import software.amazon.awssdk.services.s3.model.DeletedObject; +import software.amazon.awssdk.services.s3.model.GetObjectRequest; +import software.amazon.awssdk.services.s3.model.GetObjectResponse; +import software.amazon.awssdk.services.s3.model.IntelligentTieringAccessTier; +import software.amazon.awssdk.services.s3.model.InvalidObjectStateException; +import software.amazon.awssdk.services.s3.model.NoSuchBucketException; +import software.amazon.awssdk.services.s3.model.NoSuchKeyException; +import software.amazon.awssdk.services.s3.model.ObjectCannedACL; +import software.amazon.awssdk.services.s3.model.ObjectIdentifier; +import software.amazon.awssdk.services.s3.model.ObjectLockLegalHoldStatus; +import software.amazon.awssdk.services.s3.model.ObjectLockMode; +import software.amazon.awssdk.services.s3.model.PutObjectRequest; +import software.amazon.awssdk.services.s3.model.PutObjectResponse; +import 
software.amazon.awssdk.services.s3.model.ReplicationStatus; +import software.amazon.awssdk.services.s3.model.RequestCharged; +import software.amazon.awssdk.services.s3.model.RequestPayer; +import software.amazon.awssdk.services.s3.model.S3Error; +import software.amazon.awssdk.services.s3.model.S3Exception; +import software.amazon.awssdk.services.s3.model.ServerSideEncryption; +import software.amazon.awssdk.services.s3.model.StorageClass; +import software.amazon.cryptography.services.s3.internaldafny.types.DeleteObjectOutput; +import software.amazon.cryptography.services.s3.internaldafny.types.DeleteObjectsOutput; +import software.amazon.cryptography.services.s3.internaldafny.types.Error; +import software.amazon.cryptography.services.s3.internaldafny.types.Error_InvalidObjectState; +import software.amazon.cryptography.services.s3.internaldafny.types.Error_NoSuchBucket; +import software.amazon.cryptography.services.s3.internaldafny.types.Error_NoSuchKey; +import software.amazon.cryptography.services.s3.internaldafny.types.Error_Opaque; +import software.amazon.cryptography.services.s3.internaldafny.types.Error_OpaqueWithText; +import software.amazon.cryptography.services.s3.internaldafny.types.GetObjectOutput; +import software.amazon.cryptography.services.s3.internaldafny.types.IS3Client; +import software.amazon.cryptography.services.s3.internaldafny.types.PutObjectOutput; + +public class ToNative { + + public static ChecksumAlgorithm ChecksumAlgorithm( + software.amazon.cryptography.services.s3.internaldafny.types.ChecksumAlgorithm dafnyValue + ) { + if (dafnyValue.is_CRC32()) { + return ChecksumAlgorithm.CRC32; + } + if (dafnyValue.is_CRC32C()) { + return ChecksumAlgorithm.CRC32_C; + } + if (dafnyValue.is_SHA1()) { + return ChecksumAlgorithm.SHA1; + } + if (dafnyValue.is_SHA256()) { + return ChecksumAlgorithm.SHA256; + } + return ChecksumAlgorithm.fromValue(dafnyValue.toString()); + } + + public static ChecksumMode ChecksumMode( + 
software.amazon.cryptography.services.s3.internaldafny.types.ChecksumMode dafnyValue + ) { + if (dafnyValue.is_ENABLED()) { + return ChecksumMode.ENABLED; + } + return ChecksumMode.fromValue(dafnyValue.toString()); + } + + public static Delete Delete( + software.amazon.cryptography.services.s3.internaldafny.types.Delete dafnyValue + ) { + Delete.Builder builder = Delete.builder(); + builder.objects(ToNative.ObjectIdentifierList(dafnyValue.dtor_Objects())); + if (dafnyValue.dtor_Quiet().is_Some()) { + builder.quiet((dafnyValue.dtor_Quiet().dtor_value())); + } + return builder.build(); + } + + public static DeletedObject DeletedObject( + software.amazon.cryptography.services.s3.internaldafny.types.DeletedObject dafnyValue + ) { + DeletedObject.Builder builder = DeletedObject.builder(); + if (dafnyValue.dtor_DeleteMarker().is_Some()) { + builder.deleteMarker((dafnyValue.dtor_DeleteMarker().dtor_value())); + } + if (dafnyValue.dtor_DeleteMarkerVersionId().is_Some()) { + builder.deleteMarkerVersionId( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_DeleteMarkerVersionId().dtor_value() + ) + ); + } + if (dafnyValue.dtor_Key().is_Some()) { + builder.key( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Key().dtor_value() + ) + ); + } + if (dafnyValue.dtor_VersionId().is_Some()) { + builder.versionId( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_VersionId().dtor_value() + ) + ); + } + return builder.build(); + } + + public static List DeletedObjects( + DafnySequence< + ? 
extends software.amazon.cryptography.services.s3.internaldafny.types.DeletedObject + > dafnyValue + ) { + return software.amazon.smithy.dafny.conversion.ToNative.Aggregate.GenericToList( + dafnyValue, + software.amazon.cryptography.services.s3.internaldafny.ToNative::DeletedObject + ); + } + + public static DeleteObjectResponse DeleteObjectOutput( + DeleteObjectOutput dafnyValue + ) { + DeleteObjectResponse.Builder builder = DeleteObjectResponse.builder(); + if (dafnyValue.dtor_DeleteMarker().is_Some()) { + builder.deleteMarker((dafnyValue.dtor_DeleteMarker().dtor_value())); + } + if (dafnyValue.dtor_RequestCharged().is_Some()) { + builder.requestCharged( + ToNative.RequestCharged(dafnyValue.dtor_RequestCharged().dtor_value()) + ); + } + if (dafnyValue.dtor_VersionId().is_Some()) { + builder.versionId( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_VersionId().dtor_value() + ) + ); + } + return builder.build(); + } + + public static DeleteObjectRequest DeleteObjectRequest( + software.amazon.cryptography.services.s3.internaldafny.types.DeleteObjectRequest dafnyValue + ) { + DeleteObjectRequest.Builder builder = DeleteObjectRequest.builder(); + builder.bucket( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Bucket() + ) + ); + if (dafnyValue.dtor_BypassGovernanceRetention().is_Some()) { + builder.bypassGovernanceRetention( + (dafnyValue.dtor_BypassGovernanceRetention().dtor_value()) + ); + } + if (dafnyValue.dtor_ExpectedBucketOwner().is_Some()) { + builder.expectedBucketOwner( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ExpectedBucketOwner().dtor_value() + ) + ); + } + builder.key( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Key() + ) + ); + if (dafnyValue.dtor_MFA().is_Some()) { + builder.mfa( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_MFA().dtor_value() + ) + ); + } + if 
(dafnyValue.dtor_RequestPayer().is_Some()) { + builder.requestPayer( + ToNative.RequestPayer(dafnyValue.dtor_RequestPayer().dtor_value()) + ); + } + if (dafnyValue.dtor_VersionId().is_Some()) { + builder.versionId( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_VersionId().dtor_value() + ) + ); + } + return builder.build(); + } + + public static DeleteObjectsResponse DeleteObjectsOutput( + DeleteObjectsOutput dafnyValue + ) { + DeleteObjectsResponse.Builder builder = DeleteObjectsResponse.builder(); + if (dafnyValue.dtor_Deleted().is_Some()) { + builder.deleted( + ToNative.DeletedObjects(dafnyValue.dtor_Deleted().dtor_value()) + ); + } + if (dafnyValue.dtor_Errors().is_Some()) { + builder.errors(ToNative.Errors(dafnyValue.dtor_Errors().dtor_value())); + } + if (dafnyValue.dtor_RequestCharged().is_Some()) { + builder.requestCharged( + ToNative.RequestCharged(dafnyValue.dtor_RequestCharged().dtor_value()) + ); + } + return builder.build(); + } + + public static DeleteObjectsRequest DeleteObjectsRequest( + software.amazon.cryptography.services.s3.internaldafny.types.DeleteObjectsRequest dafnyValue + ) { + DeleteObjectsRequest.Builder builder = DeleteObjectsRequest.builder(); + builder.bucket( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Bucket() + ) + ); + if (dafnyValue.dtor_BypassGovernanceRetention().is_Some()) { + builder.bypassGovernanceRetention( + (dafnyValue.dtor_BypassGovernanceRetention().dtor_value()) + ); + } + if (dafnyValue.dtor_ChecksumAlgorithm().is_Some()) { + builder.checksumAlgorithm( + ToNative.ChecksumAlgorithm( + dafnyValue.dtor_ChecksumAlgorithm().dtor_value() + ) + ); + } + builder.delete(ToNative.Delete(dafnyValue.dtor_Delete())); + if (dafnyValue.dtor_ExpectedBucketOwner().is_Some()) { + builder.expectedBucketOwner( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ExpectedBucketOwner().dtor_value() + ) + ); + } + if 
(dafnyValue.dtor_MFA().is_Some()) { + builder.mfa( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_MFA().dtor_value() + ) + ); + } + if (dafnyValue.dtor_RequestPayer().is_Some()) { + builder.requestPayer( + ToNative.RequestPayer(dafnyValue.dtor_RequestPayer().dtor_value()) + ); + } + return builder.build(); + } + + public static List Errors( + DafnySequence< + ? extends software.amazon.cryptography.services.s3.internaldafny.types.ErrorShape + > dafnyValue + ) { + return software.amazon.smithy.dafny.conversion.ToNative.Aggregate.GenericToList( + dafnyValue, + software.amazon.cryptography.services.s3.internaldafny.ToNative::ErrorShape + ); + } + + public static S3Error ErrorShape( + software.amazon.cryptography.services.s3.internaldafny.types.ErrorShape dafnyValue + ) { + S3Error.Builder builder = S3Error.builder(); + if (dafnyValue.dtor_Code().is_Some()) { + builder.code( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Code().dtor_value() + ) + ); + } + if (dafnyValue.dtor_Key().is_Some()) { + builder.key( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Key().dtor_value() + ) + ); + } + if (dafnyValue.dtor_Message().is_Some()) { + builder.message( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Message().dtor_value() + ) + ); + } + if (dafnyValue.dtor_VersionId().is_Some()) { + builder.versionId( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_VersionId().dtor_value() + ) + ); + } + return builder.build(); + } + + public static GetObjectResponse GetObjectOutput(GetObjectOutput dafnyValue) { + GetObjectResponse.Builder builder = GetObjectResponse.builder(); + if (dafnyValue.dtor_AcceptRanges().is_Some()) { + builder.acceptRanges( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_AcceptRanges().dtor_value() + ) + ); + } + if 
(dafnyValue.dtor_BucketKeyEnabled().is_Some()) { + builder.bucketKeyEnabled( + (dafnyValue.dtor_BucketKeyEnabled().dtor_value()) + ); + } + if (dafnyValue.dtor_CacheControl().is_Some()) { + builder.cacheControl( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_CacheControl().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ChecksumCRC32().is_Some()) { + builder.checksumCRC32( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ChecksumCRC32().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ChecksumCRC32C().is_Some()) { + builder.checksumCRC32C( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ChecksumCRC32C().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ChecksumSHA1().is_Some()) { + builder.checksumSHA1( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ChecksumSHA1().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ChecksumSHA256().is_Some()) { + builder.checksumSHA256( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ChecksumSHA256().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ContentDisposition().is_Some()) { + builder.contentDisposition( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ContentDisposition().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ContentEncoding().is_Some()) { + builder.contentEncoding( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ContentEncoding().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ContentLanguage().is_Some()) { + builder.contentLanguage( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ContentLanguage().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ContentLength().is_Some()) { + builder.contentLength((dafnyValue.dtor_ContentLength().dtor_value())); + } + if (dafnyValue.dtor_ContentRange().is_Some()) { + builder.contentRange( + 
software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ContentRange().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ContentType().is_Some()) { + builder.contentType( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ContentType().dtor_value() + ) + ); + } + if (dafnyValue.dtor_DeleteMarker().is_Some()) { + builder.deleteMarker((dafnyValue.dtor_DeleteMarker().dtor_value())); + } + if (dafnyValue.dtor_ETag().is_Some()) { + builder.eTag( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ETag().dtor_value() + ) + ); + } + if (dafnyValue.dtor_Expiration().is_Some()) { + builder.expiration( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Expiration().dtor_value() + ) + ); + } + if (dafnyValue.dtor_Expires().is_Some()) { + builder.expires( + software.amazon.smithy.dafny.conversion.ToNative.Simple.Instant( + dafnyValue.dtor_Expires().dtor_value() + ) + ); + } + if (dafnyValue.dtor_LastModified().is_Some()) { + builder.lastModified( + software.amazon.smithy.dafny.conversion.ToNative.Simple.Instant( + dafnyValue.dtor_LastModified().dtor_value() + ) + ); + } + if (dafnyValue.dtor_Metadata().is_Some()) { + builder.metadata( + ToNative.Metadata(dafnyValue.dtor_Metadata().dtor_value()) + ); + } + if (dafnyValue.dtor_MissingMeta().is_Some()) { + builder.missingMeta((dafnyValue.dtor_MissingMeta().dtor_value())); + } + if (dafnyValue.dtor_ObjectLockLegalHoldStatus().is_Some()) { + builder.objectLockLegalHoldStatus( + ToNative.ObjectLockLegalHoldStatus( + dafnyValue.dtor_ObjectLockLegalHoldStatus().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ObjectLockMode().is_Some()) { + builder.objectLockMode( + ToNative.ObjectLockMode(dafnyValue.dtor_ObjectLockMode().dtor_value()) + ); + } + if (dafnyValue.dtor_ObjectLockRetainUntilDate().is_Some()) { + builder.objectLockRetainUntilDate( + software.amazon.smithy.dafny.conversion.ToNative.Simple.Instant( + 
dafnyValue.dtor_ObjectLockRetainUntilDate().dtor_value() + ) + ); + } + if (dafnyValue.dtor_PartsCount().is_Some()) { + builder.partsCount((dafnyValue.dtor_PartsCount().dtor_value())); + } + if (dafnyValue.dtor_ReplicationStatus().is_Some()) { + builder.replicationStatus( + ToNative.ReplicationStatus( + dafnyValue.dtor_ReplicationStatus().dtor_value() + ) + ); + } + if (dafnyValue.dtor_RequestCharged().is_Some()) { + builder.requestCharged( + ToNative.RequestCharged(dafnyValue.dtor_RequestCharged().dtor_value()) + ); + } + if (dafnyValue.dtor_Restore().is_Some()) { + builder.restore( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Restore().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ServerSideEncryption().is_Some()) { + builder.serverSideEncryption( + ToNative.ServerSideEncryption( + dafnyValue.dtor_ServerSideEncryption().dtor_value() + ) + ); + } + if (dafnyValue.dtor_SSECustomerAlgorithm().is_Some()) { + builder.sseCustomerAlgorithm( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSECustomerAlgorithm().dtor_value() + ) + ); + } + if (dafnyValue.dtor_SSECustomerKeyMD5().is_Some()) { + builder.sseCustomerKeyMD5( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSECustomerKeyMD5().dtor_value() + ) + ); + } + if (dafnyValue.dtor_SSEKMSKeyId().is_Some()) { + builder.ssekmsKeyId( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSEKMSKeyId().dtor_value() + ) + ); + } + if (dafnyValue.dtor_StorageClass().is_Some()) { + builder.storageClass( + ToNative.StorageClass(dafnyValue.dtor_StorageClass().dtor_value()) + ); + } + if (dafnyValue.dtor_TagCount().is_Some()) { + builder.tagCount((dafnyValue.dtor_TagCount().dtor_value())); + } + if (dafnyValue.dtor_VersionId().is_Some()) { + builder.versionId( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_VersionId().dtor_value() + ) + ); + } + if 
(dafnyValue.dtor_WebsiteRedirectLocation().is_Some()) { + builder.websiteRedirectLocation( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_WebsiteRedirectLocation().dtor_value() + ) + ); + } + return builder.build(); + } + + public static GetObjectRequest GetObjectRequest( + software.amazon.cryptography.services.s3.internaldafny.types.GetObjectRequest dafnyValue + ) { + GetObjectRequest.Builder builder = GetObjectRequest.builder(); + builder.bucket( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Bucket() + ) + ); + if (dafnyValue.dtor_ChecksumMode().is_Some()) { + builder.checksumMode( + ToNative.ChecksumMode(dafnyValue.dtor_ChecksumMode().dtor_value()) + ); + } + if (dafnyValue.dtor_ExpectedBucketOwner().is_Some()) { + builder.expectedBucketOwner( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ExpectedBucketOwner().dtor_value() + ) + ); + } + if (dafnyValue.dtor_IfMatch().is_Some()) { + builder.ifMatch( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_IfMatch().dtor_value() + ) + ); + } + if (dafnyValue.dtor_IfModifiedSince().is_Some()) { + builder.ifModifiedSince( + software.amazon.smithy.dafny.conversion.ToNative.Simple.Instant( + dafnyValue.dtor_IfModifiedSince().dtor_value() + ) + ); + } + if (dafnyValue.dtor_IfNoneMatch().is_Some()) { + builder.ifNoneMatch( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_IfNoneMatch().dtor_value() + ) + ); + } + if (dafnyValue.dtor_IfUnmodifiedSince().is_Some()) { + builder.ifUnmodifiedSince( + software.amazon.smithy.dafny.conversion.ToNative.Simple.Instant( + dafnyValue.dtor_IfUnmodifiedSince().dtor_value() + ) + ); + } + builder.key( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Key() + ) + ); + if (dafnyValue.dtor_PartNumber().is_Some()) { + builder.partNumber((dafnyValue.dtor_PartNumber().dtor_value())); + 
} + if (dafnyValue.dtor_Range().is_Some()) { + builder.range( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Range().dtor_value() + ) + ); + } + if (dafnyValue.dtor_RequestPayer().is_Some()) { + builder.requestPayer( + ToNative.RequestPayer(dafnyValue.dtor_RequestPayer().dtor_value()) + ); + } + if (dafnyValue.dtor_ResponseCacheControl().is_Some()) { + builder.responseCacheControl( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ResponseCacheControl().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ResponseContentDisposition().is_Some()) { + builder.responseContentDisposition( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ResponseContentDisposition().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ResponseContentEncoding().is_Some()) { + builder.responseContentEncoding( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ResponseContentEncoding().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ResponseContentLanguage().is_Some()) { + builder.responseContentLanguage( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ResponseContentLanguage().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ResponseContentType().is_Some()) { + builder.responseContentType( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ResponseContentType().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ResponseExpires().is_Some()) { + builder.responseExpires( + software.amazon.smithy.dafny.conversion.ToNative.Simple.Instant( + dafnyValue.dtor_ResponseExpires().dtor_value() + ) + ); + } + if (dafnyValue.dtor_SSECustomerAlgorithm().is_Some()) { + builder.sseCustomerAlgorithm( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSECustomerAlgorithm().dtor_value() + ) + ); + } + if (dafnyValue.dtor_SSECustomerKey().is_Some()) { + builder.sseCustomerKey( + 
software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSECustomerKey().dtor_value() + ) + ); + } + if (dafnyValue.dtor_SSECustomerKeyMD5().is_Some()) { + builder.sseCustomerKeyMD5( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSECustomerKeyMD5().dtor_value() + ) + ); + } + if (dafnyValue.dtor_VersionId().is_Some()) { + builder.versionId( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_VersionId().dtor_value() + ) + ); + } + return builder.build(); + } + + public static IntelligentTieringAccessTier IntelligentTieringAccessTier( + software.amazon.cryptography.services.s3.internaldafny.types.IntelligentTieringAccessTier dafnyValue + ) { + if (dafnyValue.is_ARCHIVE__ACCESS()) { + return IntelligentTieringAccessTier.ARCHIVE_ACCESS; + } + if (dafnyValue.is_DEEP__ARCHIVE__ACCESS()) { + return IntelligentTieringAccessTier.DEEP_ARCHIVE_ACCESS; + } + return IntelligentTieringAccessTier.fromValue(dafnyValue.toString()); + } + + public static Map Metadata( + DafnyMap< + ? extends DafnySequence, + ? 
extends DafnySequence + > dafnyValue + ) { + return software.amazon.smithy.dafny.conversion.ToNative.Aggregate.GenericToMap( + dafnyValue, + software.amazon.smithy.dafny.conversion.ToNative.Simple::String, + software.amazon.smithy.dafny.conversion.ToNative.Simple::String + ); + } + + public static ObjectCannedACL ObjectCannedACL( + software.amazon.cryptography.services.s3.internaldafny.types.ObjectCannedACL dafnyValue + ) { + if (dafnyValue.is_private()) { + return ObjectCannedACL.PRIVATE; + } + if (dafnyValue.is_public__read()) { + return ObjectCannedACL.PUBLIC_READ; + } + if (dafnyValue.is_public__read__write()) { + return ObjectCannedACL.PUBLIC_READ_WRITE; + } + if (dafnyValue.is_authenticated__read()) { + return ObjectCannedACL.AUTHENTICATED_READ; + } + if (dafnyValue.is_aws__exec__read()) { + return ObjectCannedACL.AWS_EXEC_READ; + } + if (dafnyValue.is_bucket__owner__read()) { + return ObjectCannedACL.BUCKET_OWNER_READ; + } + if (dafnyValue.is_bucket__owner__full__control()) { + return ObjectCannedACL.BUCKET_OWNER_FULL_CONTROL; + } + return ObjectCannedACL.fromValue(dafnyValue.toString()); + } + + public static ObjectIdentifier ObjectIdentifier( + software.amazon.cryptography.services.s3.internaldafny.types.ObjectIdentifier dafnyValue + ) { + ObjectIdentifier.Builder builder = ObjectIdentifier.builder(); + builder.key( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Key() + ) + ); + if (dafnyValue.dtor_VersionId().is_Some()) { + builder.versionId( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_VersionId().dtor_value() + ) + ); + } + return builder.build(); + } + + public static List ObjectIdentifierList( + DafnySequence< + ? 
extends software.amazon.cryptography.services.s3.internaldafny.types.ObjectIdentifier + > dafnyValue + ) { + return software.amazon.smithy.dafny.conversion.ToNative.Aggregate.GenericToList( + dafnyValue, + software.amazon.cryptography.services.s3.internaldafny.ToNative::ObjectIdentifier + ); + } + + public static ObjectLockLegalHoldStatus ObjectLockLegalHoldStatus( + software.amazon.cryptography.services.s3.internaldafny.types.ObjectLockLegalHoldStatus dafnyValue + ) { + if (dafnyValue.is_ON()) { + return ObjectLockLegalHoldStatus.ON; + } + if (dafnyValue.is_OFF()) { + return ObjectLockLegalHoldStatus.OFF; + } + return ObjectLockLegalHoldStatus.fromValue(dafnyValue.toString()); + } + + public static ObjectLockMode ObjectLockMode( + software.amazon.cryptography.services.s3.internaldafny.types.ObjectLockMode dafnyValue + ) { + if (dafnyValue.is_GOVERNANCE()) { + return ObjectLockMode.GOVERNANCE; + } + if (dafnyValue.is_COMPLIANCE()) { + return ObjectLockMode.COMPLIANCE; + } + return ObjectLockMode.fromValue(dafnyValue.toString()); + } + + public static PutObjectResponse PutObjectOutput(PutObjectOutput dafnyValue) { + PutObjectResponse.Builder builder = PutObjectResponse.builder(); + if (dafnyValue.dtor_BucketKeyEnabled().is_Some()) { + builder.bucketKeyEnabled( + (dafnyValue.dtor_BucketKeyEnabled().dtor_value()) + ); + } + if (dafnyValue.dtor_ChecksumCRC32().is_Some()) { + builder.checksumCRC32( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ChecksumCRC32().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ChecksumCRC32C().is_Some()) { + builder.checksumCRC32C( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ChecksumCRC32C().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ChecksumSHA1().is_Some()) { + builder.checksumSHA1( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ChecksumSHA1().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ChecksumSHA256().is_Some()) { 
+ builder.checksumSHA256( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ChecksumSHA256().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ETag().is_Some()) { + builder.eTag( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ETag().dtor_value() + ) + ); + } + if (dafnyValue.dtor_Expiration().is_Some()) { + builder.expiration( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Expiration().dtor_value() + ) + ); + } + if (dafnyValue.dtor_RequestCharged().is_Some()) { + builder.requestCharged( + ToNative.RequestCharged(dafnyValue.dtor_RequestCharged().dtor_value()) + ); + } + if (dafnyValue.dtor_ServerSideEncryption().is_Some()) { + builder.serverSideEncryption( + ToNative.ServerSideEncryption( + dafnyValue.dtor_ServerSideEncryption().dtor_value() + ) + ); + } + if (dafnyValue.dtor_SSECustomerAlgorithm().is_Some()) { + builder.sseCustomerAlgorithm( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSECustomerAlgorithm().dtor_value() + ) + ); + } + if (dafnyValue.dtor_SSECustomerKeyMD5().is_Some()) { + builder.sseCustomerKeyMD5( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSECustomerKeyMD5().dtor_value() + ) + ); + } + if (dafnyValue.dtor_SSEKMSEncryptionContext().is_Some()) { + builder.ssekmsEncryptionContext( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSEKMSEncryptionContext().dtor_value() + ) + ); + } + if (dafnyValue.dtor_SSEKMSKeyId().is_Some()) { + builder.ssekmsKeyId( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSEKMSKeyId().dtor_value() + ) + ); + } + if (dafnyValue.dtor_VersionId().is_Some()) { + builder.versionId( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_VersionId().dtor_value() + ) + ); + } + return builder.build(); + } + + public static PutObjectRequest 
PutObjectRequest( + software.amazon.cryptography.services.s3.internaldafny.types.PutObjectRequest dafnyValue + ) { + PutObjectRequest.Builder builder = PutObjectRequest.builder(); + if (dafnyValue.dtor_ACL().is_Some()) { + builder.acl(ToNative.ObjectCannedACL(dafnyValue.dtor_ACL().dtor_value())); + } + builder.bucket( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Bucket() + ) + ); + if (dafnyValue.dtor_BucketKeyEnabled().is_Some()) { + builder.bucketKeyEnabled( + (dafnyValue.dtor_BucketKeyEnabled().dtor_value()) + ); + } + if (dafnyValue.dtor_CacheControl().is_Some()) { + builder.cacheControl( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_CacheControl().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ChecksumAlgorithm().is_Some()) { + builder.checksumAlgorithm( + ToNative.ChecksumAlgorithm( + dafnyValue.dtor_ChecksumAlgorithm().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ChecksumCRC32().is_Some()) { + builder.checksumCRC32( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ChecksumCRC32().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ChecksumCRC32C().is_Some()) { + builder.checksumCRC32C( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ChecksumCRC32C().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ChecksumSHA1().is_Some()) { + builder.checksumSHA1( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ChecksumSHA1().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ChecksumSHA256().is_Some()) { + builder.checksumSHA256( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ChecksumSHA256().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ContentDisposition().is_Some()) { + builder.contentDisposition( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ContentDisposition().dtor_value() + ) + ); + } + if 
(dafnyValue.dtor_ContentEncoding().is_Some()) { + builder.contentEncoding( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ContentEncoding().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ContentLanguage().is_Some()) { + builder.contentLanguage( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ContentLanguage().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ContentLength().is_Some()) { + builder.contentLength((dafnyValue.dtor_ContentLength().dtor_value())); + } + if (dafnyValue.dtor_ContentMD5().is_Some()) { + builder.contentMD5( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ContentMD5().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ContentType().is_Some()) { + builder.contentType( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ContentType().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ExpectedBucketOwner().is_Some()) { + builder.expectedBucketOwner( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_ExpectedBucketOwner().dtor_value() + ) + ); + } + if (dafnyValue.dtor_Expires().is_Some()) { + builder.expires( + software.amazon.smithy.dafny.conversion.ToNative.Simple.Instant( + dafnyValue.dtor_Expires().dtor_value() + ) + ); + } + if (dafnyValue.dtor_GrantFullControl().is_Some()) { + builder.grantFullControl( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_GrantFullControl().dtor_value() + ) + ); + } + if (dafnyValue.dtor_GrantRead().is_Some()) { + builder.grantRead( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_GrantRead().dtor_value() + ) + ); + } + if (dafnyValue.dtor_GrantReadACP().is_Some()) { + builder.grantReadACP( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_GrantReadACP().dtor_value() + ) + ); + } + if (dafnyValue.dtor_GrantWriteACP().is_Some()) { + 
builder.grantWriteACP( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_GrantWriteACP().dtor_value() + ) + ); + } + builder.key( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Key() + ) + ); + if (dafnyValue.dtor_Metadata().is_Some()) { + builder.metadata( + ToNative.Metadata(dafnyValue.dtor_Metadata().dtor_value()) + ); + } + if (dafnyValue.dtor_ObjectLockLegalHoldStatus().is_Some()) { + builder.objectLockLegalHoldStatus( + ToNative.ObjectLockLegalHoldStatus( + dafnyValue.dtor_ObjectLockLegalHoldStatus().dtor_value() + ) + ); + } + if (dafnyValue.dtor_ObjectLockMode().is_Some()) { + builder.objectLockMode( + ToNative.ObjectLockMode(dafnyValue.dtor_ObjectLockMode().dtor_value()) + ); + } + if (dafnyValue.dtor_ObjectLockRetainUntilDate().is_Some()) { + builder.objectLockRetainUntilDate( + software.amazon.smithy.dafny.conversion.ToNative.Simple.Instant( + dafnyValue.dtor_ObjectLockRetainUntilDate().dtor_value() + ) + ); + } + if (dafnyValue.dtor_RequestPayer().is_Some()) { + builder.requestPayer( + ToNative.RequestPayer(dafnyValue.dtor_RequestPayer().dtor_value()) + ); + } + if (dafnyValue.dtor_ServerSideEncryption().is_Some()) { + builder.serverSideEncryption( + ToNative.ServerSideEncryption( + dafnyValue.dtor_ServerSideEncryption().dtor_value() + ) + ); + } + if (dafnyValue.dtor_SSECustomerAlgorithm().is_Some()) { + builder.sseCustomerAlgorithm( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSECustomerAlgorithm().dtor_value() + ) + ); + } + if (dafnyValue.dtor_SSECustomerKey().is_Some()) { + builder.sseCustomerKey( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSECustomerKey().dtor_value() + ) + ); + } + if (dafnyValue.dtor_SSECustomerKeyMD5().is_Some()) { + builder.sseCustomerKeyMD5( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSECustomerKeyMD5().dtor_value() + ) + ); + } + if 
(dafnyValue.dtor_SSEKMSEncryptionContext().is_Some()) { + builder.ssekmsEncryptionContext( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSEKMSEncryptionContext().dtor_value() + ) + ); + } + if (dafnyValue.dtor_SSEKMSKeyId().is_Some()) { + builder.ssekmsKeyId( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_SSEKMSKeyId().dtor_value() + ) + ); + } + if (dafnyValue.dtor_StorageClass().is_Some()) { + builder.storageClass( + ToNative.StorageClass(dafnyValue.dtor_StorageClass().dtor_value()) + ); + } + if (dafnyValue.dtor_Tagging().is_Some()) { + builder.tagging( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_Tagging().dtor_value() + ) + ); + } + if (dafnyValue.dtor_WebsiteRedirectLocation().is_Some()) { + builder.websiteRedirectLocation( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_WebsiteRedirectLocation().dtor_value() + ) + ); + } + return builder.build(); + } + + public static ReplicationStatus ReplicationStatus( + software.amazon.cryptography.services.s3.internaldafny.types.ReplicationStatus dafnyValue + ) { + if (dafnyValue.is_COMPLETE()) { + return ReplicationStatus.COMPLETE; + } + if (dafnyValue.is_PENDING()) { + return ReplicationStatus.PENDING; + } + if (dafnyValue.is_FAILED()) { + return ReplicationStatus.FAILED; + } + if (dafnyValue.is_REPLICA()) { + return ReplicationStatus.REPLICA; + } + if (dafnyValue.is_COMPLETED()) { + return ReplicationStatus.COMPLETED; + } + return ReplicationStatus.fromValue(dafnyValue.toString()); + } + + public static RequestCharged RequestCharged( + software.amazon.cryptography.services.s3.internaldafny.types.RequestCharged dafnyValue + ) { + if (dafnyValue.is_requester()) { + return RequestCharged.REQUESTER; + } + return RequestCharged.fromValue(dafnyValue.toString()); + } + + public static RequestPayer RequestPayer( + 
software.amazon.cryptography.services.s3.internaldafny.types.RequestPayer dafnyValue + ) { + if (dafnyValue.is_requester()) { + return RequestPayer.REQUESTER; + } + return RequestPayer.fromValue(dafnyValue.toString()); + } + + public static ServerSideEncryption ServerSideEncryption( + software.amazon.cryptography.services.s3.internaldafny.types.ServerSideEncryption dafnyValue + ) { + if (dafnyValue.is_AES256()) { + return ServerSideEncryption.AES256; + } + if (dafnyValue.is_aws__kms()) { + return ServerSideEncryption.AWS_KMS; + } + if (dafnyValue.is_aws__kms__dsse()) { + return ServerSideEncryption.AWS_KMS_DSSE; + } + return ServerSideEncryption.fromValue(dafnyValue.toString()); + } + + public static StorageClass StorageClass( + software.amazon.cryptography.services.s3.internaldafny.types.StorageClass dafnyValue + ) { + if (dafnyValue.is_STANDARD()) { + return StorageClass.STANDARD; + } + if (dafnyValue.is_REDUCED__REDUNDANCY()) { + return StorageClass.REDUCED_REDUNDANCY; + } + if (dafnyValue.is_STANDARD__IA()) { + return StorageClass.STANDARD_IA; + } + if (dafnyValue.is_ONEZONE__IA()) { + return StorageClass.ONEZONE_IA; + } + if (dafnyValue.is_INTELLIGENT__TIERING()) { + return StorageClass.INTELLIGENT_TIERING; + } + if (dafnyValue.is_GLACIER()) { + return StorageClass.GLACIER; + } + if (dafnyValue.is_DEEP__ARCHIVE()) { + return StorageClass.DEEP_ARCHIVE; + } + if (dafnyValue.is_OUTPOSTS()) { + return StorageClass.OUTPOSTS; + } + if (dafnyValue.is_GLACIER__IR()) { + return StorageClass.GLACIER_IR; + } + if (dafnyValue.is_SNOW()) { + return StorageClass.SNOW; + } + if (dafnyValue.is_EXPRESS__ONEZONE()) { + return StorageClass.EXPRESS_ONEZONE; + } + return StorageClass.fromValue(dafnyValue.toString()); + } + + public static InvalidObjectStateException Error( + Error_InvalidObjectState dafnyValue + ) { + InvalidObjectStateException.Builder builder = + InvalidObjectStateException.builder(); + if (dafnyValue.dtor_AccessTier().is_Some()) { + builder.accessTier( + 
ToNative.IntelligentTieringAccessTier( + dafnyValue.dtor_AccessTier().dtor_value() + ) + ); + } + if (dafnyValue.dtor_message().is_Some()) { + builder.message( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_message().dtor_value() + ) + ); + } + if (dafnyValue.dtor_StorageClass().is_Some()) { + builder.storageClass( + ToNative.StorageClass(dafnyValue.dtor_StorageClass().dtor_value()) + ); + } + return builder.build(); + } + + public static NoSuchBucketException Error(Error_NoSuchBucket dafnyValue) { + NoSuchBucketException.Builder builder = NoSuchBucketException.builder(); + if (dafnyValue.dtor_message().is_Some()) { + builder.message( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_message().dtor_value() + ) + ); + } + return builder.build(); + } + + public static NoSuchKeyException Error(Error_NoSuchKey dafnyValue) { + NoSuchKeyException.Builder builder = NoSuchKeyException.builder(); + if (dafnyValue.dtor_message().is_Some()) { + builder.message( + software.amazon.smithy.dafny.conversion.ToNative.Simple.String( + dafnyValue.dtor_message().dtor_value() + ) + ); + } + return builder.build(); + } + + public static S3Client AmazonS3(IS3Client dafnyValue) { + return ((Shim) dafnyValue).impl(); + } + + public static RuntimeException Error(Error_Opaque dafnyValue) { + // While the first two cases are logically identical, + // there is a semantic distinction. + // An un-modeled Service Error is different from a Java Heap Exhaustion error. + // In the future, Smithy-Dafny MAY allow for this distinction. + // Which would allow Dafny developers to treat the two differently. 
+ if (dafnyValue.dtor_obj() instanceof S3Exception) { + return (S3Exception) dafnyValue.dtor_obj(); + } else if (dafnyValue.dtor_obj() instanceof RuntimeException) { + return (RuntimeException) dafnyValue.dtor_obj(); + } else if (dafnyValue.dtor_obj() instanceof Throwable) { + return new RuntimeException( + String.format( + "Unknown error thrown while calling Amazon Simple Storage Service. %s", + (Throwable) dafnyValue.dtor_obj() + ) + ); + } + return new IllegalStateException( + String.format( + "Unknown error thrown while calling Amazon Simple Storage Service. %s", + dafnyValue + ) + ); + } + + public static RuntimeException Error(Error_OpaqueWithText dafnyValue) { + // While the first two cases are logically identical, + // there is a semantic distinction. + // An un-modeled Service Error is different from a Java Heap Exhaustion error. + // In the future, Smithy-Dafny MAY allow for this distinction. + // Which would allow Dafny developers to treat the two differently. + if (dafnyValue.dtor_obj() instanceof S3Exception) { + return (S3Exception) dafnyValue.dtor_obj(); + } else if (dafnyValue.dtor_obj() instanceof RuntimeException) { + return (RuntimeException) dafnyValue.dtor_obj(); + } else if (dafnyValue.dtor_obj() instanceof Throwable) { + return new RuntimeException( + String.format( + "Unknown error thrown while calling Amazon Simple Storage Service. %s", + (Throwable) dafnyValue.dtor_obj() + ) + ); + } + return new IllegalStateException( + String.format( + "Unknown error thrown while calling Amazon Simple Storage Service. 
%s", + dafnyValue + ) + ); + } + + public static RuntimeException Error(Error dafnyValue) { + if (dafnyValue.is_InvalidObjectState()) { + return ToNative.Error((Error_InvalidObjectState) dafnyValue); + } + if (dafnyValue.is_NoSuchBucket()) { + return ToNative.Error((Error_NoSuchBucket) dafnyValue); + } + if (dafnyValue.is_NoSuchKey()) { + return ToNative.Error((Error_NoSuchKey) dafnyValue); + } + if (dafnyValue.is_Opaque()) { + return ToNative.Error((Error_Opaque) dafnyValue); + } + if (dafnyValue.is_OpaqueWithText()) { + return ToNative.Error((Error_OpaqueWithText) dafnyValue); + } + // TODO This should indicate a codegen bug; every error Should have been taken care of. + return new IllegalStateException( + String.format( + "Unknown error thrown while calling service. %s", + dafnyValue + ) + ); + } +} diff --git a/TestModels/aws-sdks/s3/runtimes/python/pyproject.toml b/TestModels/aws-sdks/s3/runtimes/python/pyproject.toml index fecc985010..6058c169b9 100644 --- a/TestModels/aws-sdks/s3/runtimes/python/pyproject.toml +++ b/TestModels/aws-sdks/s3/runtimes/python/pyproject.toml @@ -14,6 +14,7 @@ include = ["**/smithygenerated/**/*.py", "**/internaldafny/generated/*.py"] python = "^3.11.0" boto3 = "^1.28.38" smithy-dafny-standard-library = {path = "../../../../dafny-dependencies/StandardLibrary/runtimes/python", develop = false} +smithy-dafny-streaming-support = {path = "../../../../dafny-dependencies/StreamingSupport/runtimes/python", develop = false} # TODO: Depend on PyPi once Smithy-Python publishes their Python package there smithy-python = { path = "../../../../../codegen/smithy-dafny-codegen-modules/smithy-python/python-packages/smithy-python", develop = false} diff --git a/TestModels/aws-sdks/s3/test/TestComAmazonawsS3.dfy b/TestModels/aws-sdks/s3/test/TestComAmazonawsS3.dfy index 7663aae4ec..e2b1a7c723 100644 --- a/TestModels/aws-sdks/s3/test/TestComAmazonawsS3.dfy +++ b/TestModels/aws-sdks/s3/test/TestComAmazonawsS3.dfy @@ -5,11 +5,14 @@ include 
"../src/Index.dfy" module TestComAmazonawsS3 { import Com.Amazonaws.S3 + import opened ComAmazonawsS3Types import opened StandardLibrary.UInt + import opened StandardLibrary.Streams import opened Wrappers - import opened Std.Enumerators - import opened Std.Aggregators - import opened Std.Streams + import opened Std.BulkActions + import opened Std.Producers + import opened Std.Consumers + const testBucket := "s3-dafny-test-bucket" const testObjectKey := "smithy-dafny-test-model-object-key" @@ -21,10 +24,7 @@ module TestComAmazonawsS3 { Key := testObjectKey ) ); - // Note the chunk size has to ensure all but the last chunk is >= 8192 bytes. - // For a small stream like this that means just one chunk. - var s: ByteStream := new SeqByteStream([ 97, 115, 100, 102 ], 10); - expect s is RewindableByteStream; + var s: DataStream := new SeqDataStream([ 97, 115, 100, 102 ]); PutObjectTest( input := S3.Types.PutObjectRequest( Bucket := testBucket, @@ -55,7 +55,7 @@ module TestComAmazonawsS3 { method GetObjectTest( nameonly input: S3.Types.GetObjectRequest, - nameonly expectedBody: BoundedInts.bytes + nameonly expectedBody: bytes ) { var client :- expect S3.S3Client(); @@ -68,11 +68,7 @@ module TestComAmazonawsS3 { var MyBody := ret.value.Body; expect MyBody.Some?; - // TODO: These need to be generated as postconditions on GetObject instead - assume {:axiom} fresh(MyBody.value.Repr); - assume {:axiom} MyBody.value.Valid(); - - var bodyValue := Collect(MyBody.value); + var bodyValue :- expect Collect(MyBody.value); expect bodyValue == expectedBody; } @@ -112,15 +108,15 @@ module TestComAmazonawsS3 { var ret := client.DeleteObject(input); - expect(ret.Success?); + expect(ret.Success?), ret.error; } - method Collect(e: ByteStream) returns (s: BoundedInts.bytes) - requires e.Valid() - modifies e.Repr + method Collect(e: DataStream) returns (s: Result) { - var a := new Collector(); - ForEach(e, a); - return Seq.Flatten(a.values); + var reader := e.Reader(); + var a: BatchSeqWriter 
:= new BatchSeqWriter(); + var aTotalProof := new BatchSeqWriterTotalProof(a); + reader.ForEach(a, aTotalProof); + return Success(a.elements); } } diff --git a/TestModels/dafny-dependencies/StandardLibrary/DafnyStandardLibraries-smithy-dafny-subset.toml b/TestModels/dafny-dependencies/StandardLibrary/DafnyStandardLibraries-smithy-dafny-subset.toml deleted file mode 100644 index 3cc98bcb77..0000000000 --- a/TestModels/dafny-dependencies/StandardLibrary/DafnyStandardLibraries-smithy-dafny-subset.toml +++ /dev/null @@ -1,35 +0,0 @@ -includes = [ - "../dafny/Source/DafnyStandardLibraries/src/Std/Actions/Actions.dfy", - "../dafny/Source/DafnyStandardLibraries/src/Std/Actions/Aggregators.dfy", - "../dafny/Source/DafnyStandardLibraries/src/Std/Actions/Enumerators.dfy", - "../dafny/Source/DafnyStandardLibraries/src/Std/Actions/GenericAction.dfy", - "../dafny/Source/DafnyStandardLibraries/src/Std/BoundedInts.dfy", - "../dafny/Source/DafnyStandardLibraries/src/Std/Collections/Seq.dfy", - "../dafny/Source/DafnyStandardLibraries/src/Std/DynamicArray.dfy", - "../dafny/Source/DafnyStandardLibraries/src/Std/Frames.dfy", - "../dafny/Source/DafnyStandardLibraries/src/Std/Math.dfy", - "../dafny/Source/DafnyStandardLibraries/src/Std/Relations.dfy", - "../dafny/Source/DafnyStandardLibraries/src/Std/Streams/Streams.dfy", - "../dafny/Source/DafnyStandardLibraries/src/Std/Termination.dfy", - "../dafny/Source/DafnyStandardLibraries/src/Std/Wrappers.dfy" -] - -[options] -# Options that help support more values of consuming context options -# (mostly turning on any more restrictive verification) -track-print-effects = true -enforce-determinism = true -reads-clauses-on-methods = true - -# Options that differ from Dafny's -unicode-char = false - -# TODO -allow-warnings = true - -# Options important for sustainable development -# of the libraries themselves. 
-resource-limit = 1000000 -# These give too many false positives right now, but should be enabled in the future -warn-redundant-assumptions = false -warn-contradictory-assumptions = false diff --git a/TestModels/dafny-dependencies/StandardLibrary/Makefile b/TestModels/dafny-dependencies/StandardLibrary/Makefile index 6f0eac3250..9e209eaa6c 100644 --- a/TestModels/dafny-dependencies/StandardLibrary/Makefile +++ b/TestModels/dafny-dependencies/StandardLibrary/Makefile @@ -25,7 +25,7 @@ else endif # Overriding this target just to generate the project.properties file. -polymorph_dafny: $(if $(USE_DAFNY_STANDARD_LIBRARIES), build_dafny_stdlibs, ) +polymorph_dafny: cd $(CODEGEN_CLI_ROOT); \ $(GRADLEW) run --args="\ --library-root $(LIBRARY_ROOT) \ @@ -108,15 +108,11 @@ transpile_implementation: --unicode-char:false \ --function-syntax:3 \ --output $(OUT) \ - $(if $(USE_DAFNY_STANDARD_LIBRARIES), bin/DafnyStandardLibraries-smithy-dafny-subset.doo, ) \ + $(if $(USES_STREAMING), $(PROJECT_ROOT)/$(STREAMING_SUPPORT)/bin/DafnyStandardLibraries-smithy-dafny-subset.doo, ) \ $(DAFNY_OPTIONS) \ $(DAFNY_OTHER_FILES) \ $(TRANSPILE_MODULE_NAME) -build_dafny_stdlibs: - dafny build -t:lib --output bin/DafnyStandardLibraries-smithy-dafny-subset.doo DafnyStandardLibraries-smithy-dafny-subset.toml - rm -rf DafnyStandardLibraries-smithy-dafny-subset-lib - # Override SharedMakefile's build_java to not install # StandardLibrary as a dependency build_java: transpile_java diff --git a/TestModels/dafny-dependencies/StandardLibrary/runtimes/python/src/smithy_dafny_standard_library/internaldafny/extern/streams.py b/TestModels/dafny-dependencies/StandardLibrary/runtimes/python/src/smithy_dafny_standard_library/internaldafny/extern/streams.py deleted file mode 100644 index 7a6eff36bc..0000000000 --- a/TestModels/dafny-dependencies/StandardLibrary/runtimes/python/src/smithy_dafny_standard_library/internaldafny/extern/streams.py +++ /dev/null @@ -1,67 +0,0 @@ - -from _dafny import Seq -from 
smithy_python.interfaces.blobs import ByteStream -from smithy_dafny_standard_library.internaldafny.generated.Std_Streams import ByteStream as DafnyByteStream, RewindableByteStream as DafnyRewindableByteStream -from smithy_dafny_standard_library.internaldafny.generated.Std_Enumerators import Enumerator -from smithy_dafny_standard_library.internaldafny.generated.Std_Wrappers import Option, Option_Some, Option_None - -# Adaptor classes for wrapping up Python-native types as their -# corresponding Dafny interfaces, and vice-versa. -# These are the equivalent of type conversions, -# but avoiding having to load all data into memory at once. - -class DafnyByteStreamAsByteStream(ByteStream): - """Wrapper class adapting a Dafny ByteStream as a native ByteStream.""" - - def __init__(self, dafny_byte_stream): - self.dafny_byte_stream = dafny_byte_stream - - def read(self, size: int = -1) -> bytes: - # TODO: assert size is -1, buffer, - # or define a more specialized Action type for streams. - next = self.dafny_byte_stream.Next() - while next.is_Some and len(next.value) == 0: - next = self.dafny_byte_stream.Next() - # Do NOT return None, because that indicates "no data right now, might be more later" - return bytes(next.value) if next.is_Some else bytes() - - -class RewindableDafnyByteStreamAsByteStream(DafnyByteStreamAsByteStream): - """Wrapper class adapting a Dafny RewindableByteStream as a native ByteStream - that supports tell and seek. 
- """ - - def __init__(self, dafny_byte_stream): - if not isinstance(dafny_byte_stream, DafnyRewindableByteStream): - raise ValueError("Rewindable stream required") - super().__init__(dafny_byte_stream) - - def tell(self) -> int: - return self.dafny_byte_stream.Position() - - def seek(self, offset, whence=0): - match whence: - case 0: - position = offset - case 1: - position = self.dafny_byte_stream.Position() + offset - case 2: - position = self.dafny_byte_stream.ContentLength() + offset - return self.dafny_byte_stream.Seek(position) - - -class StreamingBlobAsDafnyDataStream(DafnyByteStream): - """Wrapper class adapting a native StreamingBlob as a Dafny ByteStream.""" - - def __init__(self, streaming_blob): - self.streaming_blob = streaming_blob - - def Next(self): - return Enumerator.Next(self) - - def Invoke(self, _) -> Option: - next = self.streaming_blob.read() - if next: - return Option_Some(Seq(next)) - else: - return Option_None() diff --git a/TestModels/dafny-dependencies/StandardLibrary/src/StandardLibrary.dfy b/TestModels/dafny-dependencies/StandardLibrary/src/StandardLibrary.dfy index 75119a3919..8e9efdabd1 100644 --- a/TestModels/dafny-dependencies/StandardLibrary/src/StandardLibrary.dfy +++ b/TestModels/dafny-dependencies/StandardLibrary/src/StandardLibrary.dfy @@ -341,7 +341,7 @@ module StandardLibrary { ensures forall i :: 0 <= i < |q| ==> q[i] in s ensures forall k :: k in s ==> k in q ensures forall i :: 0 < i < |q| ==> LexicographicLessOrEqual(q[i-1], q[i], less) - ensures forall i, j | 0 <= i < j < |q| :: q[i] != q[j]; + ensures forall i, j | 0 <= i < j < |q| :: q[i] != q[j] { if s == {} then [] diff --git a/TestModels/dafny-dependencies/StandardLibrary/src/UTF8.dfy b/TestModels/dafny-dependencies/StandardLibrary/src/UTF8.dfy index 7a29161a28..734853de0d 100644 --- a/TestModels/dafny-dependencies/StandardLibrary/src/UTF8.dfy +++ b/TestModels/dafny-dependencies/StandardLibrary/src/UTF8.dfy @@ -22,7 +22,7 @@ module {:extern "UTF8"} UTF8 { // The 
tradeoff of assuming the external implementation of encode and decode is correct is worth the tradeoff // of unlocking being able to express and hence prove so many other specifications - function method {:extern "Encode"} Encode(s: string): (res: Result) + function method {:extern "Encode"} {:axiom} Encode(s: string): (res: Result) // US-ASCII only needs a single UTF-8 byte per character ensures IsASCIIString(s) ==> res.Success? && |res.value| == |s| // The following MUST be true for any correct implementation of Encode @@ -30,7 +30,7 @@ module {:extern "UTF8"} UTF8 { ensures res.Success? ==> Decode(res.value).Success? && Decode(res.value).value == s // Decode return a Result, therefore doesn't need to require utf8 input - function method {:extern "Decode"} Decode(b: seq): (res: Result) + function method {:extern "Decode"} {:axiom} Decode(b: seq): (res: Result) ensures res.Success? ==> ValidUTF8Seq(b) // The next four functions are for the benefit of the extern implementation to call, diff --git a/TestModels/dafny-dependencies/StreamingSupport/DafnyStandardLibraries-smithy-dafny-subset/dfyconfig.toml b/TestModels/dafny-dependencies/StreamingSupport/DafnyStandardLibraries-smithy-dafny-subset/dfyconfig.toml new file mode 100644 index 0000000000..69ec290de4 --- /dev/null +++ b/TestModels/dafny-dependencies/StreamingSupport/DafnyStandardLibraries-smithy-dafny-subset/dfyconfig.toml @@ -0,0 +1,39 @@ +includes = [ + "../../dafny/Source/DafnyStandardLibraries/src/Std/Actions/Actions.dfy", + "../../dafny/Source/DafnyStandardLibraries/src/Std/Actions/BulkActions.dfy", + "../../dafny/Source/DafnyStandardLibraries/src/Std/Actions/Producers.dfy", + "../../dafny/Source/DafnyStandardLibraries/src/Std/Actions/Consumers.dfy", + "../../dafny/Source/DafnyStandardLibraries/src/Std/Actions/GenericAction.dfy", + "../../dafny/Source/DafnyStandardLibraries/src/Std/BoundedInts.dfy", + "../../dafny/Source/DafnyStandardLibraries/src/Std/Collections/Seq.dfy", + 
"../../dafny/Source/DafnyStandardLibraries/src/Std/Collections/Set.dfy", + "../../dafny/Source/DafnyStandardLibraries/src/Std/Collections/Multiset.dfy", + "../../dafny/Source/DafnyStandardLibraries/src/Std/DynamicArray.dfy", + "../../dafny/Source/DafnyStandardLibraries/src/Std/Frames.dfy", + "../../dafny/Source/DafnyStandardLibraries/src/Std/Functions.dfy", + "../../dafny/Source/DafnyStandardLibraries/src/Std/Math.dfy", + "../../dafny/Source/DafnyStandardLibraries/src/Std/Ordinal.dfy", + "../../dafny/Source/DafnyStandardLibraries/src/Std/Relations.dfy", + "../../dafny/Source/DafnyStandardLibraries/src/Std/Termination.dfy", + "../../dafny/Source/DafnyStandardLibraries/src/Std/Wrappers.dfy" +] + +[options] +# Options that help support more values of consuming context options +# (mostly turning on any more restrictive verification) +track-print-effects = true +enforce-determinism = true +reads-clauses-on-methods = true +type-system-refresh = true +general-traits = "datatype" +general-newtypes = true + +# Options that differ from Dafny's +unicode-char = false + +# Options important for sustainable development +# of the libraries themselves. 
+resource-limit = 1000000 +# These give too many false positives right now, but should be enabled in the future +warn-redundant-assumptions = false +warn-contradictory-assumptions = false diff --git a/TestModels/dafny-dependencies/StreamingSupport/Makefile b/TestModels/dafny-dependencies/StreamingSupport/Makefile new file mode 100644 index 0000000000..18d6dee0a1 --- /dev/null +++ b/TestModels/dafny-dependencies/StreamingSupport/Makefile @@ -0,0 +1,33 @@ + +CORES=2 + +USES_STREAMING=1 + +include ../../SharedMakefile.mk + +PYTHON_MODULE_NAME=smithy_dafny_streaming_support + +TRANSLATION_RECORD_PYTHON := \ + --translation-record ../StandardLibrary/runtimes/python/src/smithy_dafny_standard_library/internaldafny/generated/dafny_src-py.dtr + + +transpile_java: | transpile_implementation_java transpile_test_java + +transpile_python: | transpile_implementation_python transpile_test_python + +# Overriding this target just to generate the project.properties file. +polymorph_dafny: build_dafny_stdlibs + cd $(CODEGEN_CLI_ROOT); \ + $(GRADLEW) run --args="\ + --library-root $(LIBRARY_ROOT) \ + --properties-file $(LIBRARY_ROOT)/project.properties \ + --model $(LIBRARY_ROOT)/Model \ + --namespace aws.polymorph \ + --dependent-model $(LIBRARY_ROOT)/../Model \ + "; + +build_dafny_stdlibs: + dafny build -t:lib --output bin/DafnyStandardLibraries-smithy-dafny-subset.doo DafnyStandardLibraries-smithy-dafny-subset/dfyconfig.toml + +_polymorph_dependencies: + echo "Skipping _polymorph_dependencies for StreamingSupport" \ No newline at end of file diff --git a/TestModels/dafny-dependencies/StreamingSupport/Model/streamingsupport.smithy b/TestModels/dafny-dependencies/StreamingSupport/Model/streamingsupport.smithy new file mode 100644 index 0000000000..0054bef6e3 --- /dev/null +++ b/TestModels/dafny-dependencies/StreamingSupport/Model/streamingsupport.smithy @@ -0,0 +1,11 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +namespace aws.polymorph + +// Dummy service just to keep polymorph happy. +service StreamingSupport { + version: "2021-11-01", + resources: [], + operations: [], + errors: [], +} diff --git a/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/build.gradle.kts b/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/build.gradle.kts new file mode 100644 index 0000000000..e4a380c18c --- /dev/null +++ b/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/build.gradle.kts @@ -0,0 +1,92 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import java.io.File +import java.io.FileInputStream +import java.util.Properties +import java.net.URI +import javax.annotation.Nullable + +plugins { + `java-library` + `maven-publish` +} + +var props = Properties().apply { + load(FileInputStream(File(rootProject.rootDir, "../../project.properties"))) +} +var dafnyVersion = props.getProperty("dafnyVersion") + +group = "software.amazon.cryptography" +version = "1.0-SNAPSHOT" +description = "StreamingSupport" + +java { + toolchain.languageVersion.set(JavaLanguageVersion.of(8)) + sourceSets["main"].java { + srcDir("src/main/java") + srcDir("src/main/dafny-generated") + } + sourceSets["test"].java { + srcDir("src/test/dafny-generated") + } +} + +var caUrl: URI? = null +@Nullable +val caUrlStr: String? = System.getenv("CODEARTIFACT_URL_JAVA_CONVERSION") +if (!caUrlStr.isNullOrBlank()) { + caUrl = URI.create(caUrlStr) +} + +var caPassword: String? = null +@Nullable +val caPasswordString: String? = System.getenv("CODEARTIFACT_AUTH_TOKEN") +if (!caPasswordString.isNullOrBlank()) { + caPassword = caPasswordString +} + +repositories { + mavenCentral() + mavenLocal() + if (caUrl != null && caPassword != null) { + maven { + name = "CodeArtifact" + url = caUrl!! + credentials { + username = "aws" + password = caPassword!! 
+ } + } + } +} + +dependencies { + implementation("org.dafny:DafnyRuntime:${dafnyVersion}") + implementation("software.amazon.smithy.dafny:conversion:0.1.1") + implementation("software.amazon.cryptography:StandardLibrary:1.0-SNAPSHOT") + api("software.amazon.awssdk:sdk-core:2.28.28") +} +publishing { + publications.create("mavenLocal") { + groupId = group as String? + artifactId = description + from(components["java"]) + } + publications.create("maven") { + groupId = "software.amazon.cryptography" + artifactId = "StreamingSupport" + from(components["java"]) + } + repositories { mavenLocal() } +} + +tasks.withType() { + options.encoding = "UTF-8" +} + +tasks { + register("runTests", JavaExec::class.java) { + mainClass.set("TestsFromDafny") + classpath = sourceSets["test"].runtimeClasspath + } +} diff --git a/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/ConsumerAsSubscriber.java b/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/ConsumerAsSubscriber.java new file mode 100644 index 0000000000..9199119530 --- /dev/null +++ b/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/ConsumerAsSubscriber.java @@ -0,0 +1,71 @@ +package Streams; + +import Std_Compile.BulkActions_Compile.BatchReader; +import Std_Compile.BulkActions_Compile.Batched; +import Std_Compile.Consumers_Compile.IConsumer; +import dafny.Array; +import dafny.DafnySequence; +import dafny.TypeDescriptor; +import org.reactivestreams.Subscriber; +import org.reactivestreams.Subscription; + +import java.nio.ByteBuffer; +import java.util.concurrent.CompletableFuture; + +public class ConsumerAsSubscriber implements Subscriber { + + private final IConsumer> consumer; + private final CompletableFuture future; + private Subscription subscription; + + private static final TypeDescriptor T_TD = TypeDescriptor.BYTE; + private static final TypeDescriptor E_TD = TypeDescriptor.reference(Throwable.class); + private static 
final TypeDescriptor> BATCHED_TD = + Batched._typeDescriptor(T_TD, E_TD); + + + public ConsumerAsSubscriber(IConsumer> consumer, CompletableFuture future) { + this.consumer = consumer; + this.future = future; + } + + @Override + public void onSubscribe(Subscription s) { + this.subscription = subscription; + subscription.request(1L); + } + + @Override + public void onNext(ByteBuffer o) { + try { + byte[] a = new byte[o.remaining()]; + o.get(a); + Array dafnyArray = Array.wrap(a); + DafnySequence dafnySeq = DafnySequence.unsafeWrapArray(dafnyArray); + BatchReader reader = new BatchReader(T_TD, E_TD); + reader.__ctor(dafnySeq); + reader.ForEach(consumer); + + this.subscription.request(1L); + } catch (RuntimeException e) { + this.subscription.cancel(); + this.future.completeExceptionally(e); + } + } + + @Override + public void onError(Throwable t) { + Batched event = Batched.create_BatchError(T_TD, E_TD, t); + consumer.Accept(event); + + this.future.completeExceptionally(t); + } + + @Override + public void onComplete() { + Batched event = Batched.create_EndOfInput(T_TD, E_TD); + consumer.Accept(event); + + this.future.complete(null); + } +} diff --git a/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/DataStreamAsAsyncRequestBody.java b/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/DataStreamAsAsyncRequestBody.java new file mode 100644 index 0000000000..17a9d2c3b3 --- /dev/null +++ b/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/DataStreamAsAsyncRequestBody.java @@ -0,0 +1,34 @@ +package Streams; + +import StandardLibrary_Compile.Streams_Compile.DataStream; +import Std_Compile.Wrappers_Compile.Option; +import org.reactivestreams.Subscriber; +import software.amazon.awssdk.core.async.AsyncRequestBody; + +import java.math.BigInteger; +import java.nio.ByteBuffer; +import java.util.Optional; + +public class DataStreamAsAsyncRequestBody implements AsyncRequestBody { + 
+ private final DataStream dataStream; + + public DataStreamAsAsyncRequestBody(DataStream dataStream) { + this.dataStream = dataStream; + } + + @Override + public Optional contentLength() { + Option cl = dataStream.ContentLength(); + if (cl.is_Some()) { + return Optional.of(cl.dtor_value().longValueExact()); + } else { + return Optional.empty(); + } + } + + @Override + public void subscribe(Subscriber s) { + s.onSubscribe(null); + } +} diff --git a/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/DataStreamAsRequestBody.java b/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/DataStreamAsRequestBody.java new file mode 100644 index 0000000000..cfd617a13b --- /dev/null +++ b/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/DataStreamAsRequestBody.java @@ -0,0 +1,24 @@ +package Streams; + +import StandardLibrary_Compile.Streams_Compile.DataStream; +import Std_Compile.Producers_Compile.Producer; +import Std_Compile.Wrappers_Compile.Option; +import software.amazon.awssdk.core.sync.RequestBody; +import software.amazon.awssdk.http.ContentStreamProvider; + +import java.math.BigInteger; + +public class DataStreamAsRequestBody { + + public static RequestBody of(DataStream dataStream) { + final ContentStreamProvider provider = () -> { + Producer reader = dataStream.Reader(); + return new ProducerAsInputStream(reader); + }; + final Option contentLength = dataStream.ContentLength(); + + return contentLength.is_Some() + ? 
RequestBody.fromContentProvider(provider, contentLength.dtor_value().longValueExact(), "application/octet-stream") + : RequestBody.fromContentProvider(provider, "application/octet-stream"); + } +} diff --git a/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/InputStreamAsDataStream.java b/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/InputStreamAsDataStream.java new file mode 100644 index 0000000000..84aa4cbf69 --- /dev/null +++ b/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/InputStreamAsDataStream.java @@ -0,0 +1,47 @@ +package Streams; + +import StandardLibrary_Compile.Streams_Compile.DataStream; +import Std_Compile.BulkActions_Compile.Batched; +import Std_Compile.Producers_Compile.Producer; +import Std_Compile.Wrappers_Compile.Option; +import dafny.TypeDescriptor; + +import java.io.IOException; +import java.io.InputStream; +import java.math.BigInteger; +import java.util.function.Function; + +public class InputStreamAsDataStream implements DataStream { + + private final TypeDescriptor e_td; + + private final InputStream inputStream; + private final Function ioExceptionWrapper; + private boolean read = false; + + public InputStreamAsDataStream(TypeDescriptor e_td, InputStream inputStream, Function ioExceptionWrapper) { + this.e_td = e_td; + this.inputStream = inputStream; + this.ioExceptionWrapper = ioExceptionWrapper; + } + + @Override + public Option ContentLength() { + return Option.create_None(null); + } + + @Override + public boolean Replayable() { + return false; + } + + @Override + public Producer> Reader() { + if (read) { + throw new IllegalStateException("Already read"); + } + read = true; + + return new InputStreamAsProducer(e_td, inputStream, ioExceptionWrapper); + } +} diff --git a/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/InputStreamAsProducer.java 
b/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/InputStreamAsProducer.java new file mode 100644 index 0000000000..e1f1eba82c --- /dev/null +++ b/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/InputStreamAsProducer.java @@ -0,0 +1,106 @@ +package Streams; + +import Std_Compile.BulkActions_Compile.BatchArrayWriter; +import Std_Compile.BulkActions_Compile.BatchReader; +import Std_Compile.BulkActions_Compile.Batched; +import Std_Compile.Consumers_Compile.Consumer; +import Std_Compile.Consumers_Compile.IConsumer; +import Std_Compile.Producers_Compile.Producer; +import Std_Compile.Producers_Compile._Companion_Producer; +import Std_Compile.Producers_Compile.__default; +import Std_Compile.Wrappers_Compile.Option; +import dafny.Array; +import dafny.DafnySequence; +import dafny.Tuple0; +import dafny.TypeDescriptor; + +import java.io.IOException; +import java.io.InputStream; +import java.math.BigInteger; +import java.util.function.Function; + +public class InputStreamAsProducer implements Producer> { + + private final InputStream inputStream; + private long totalRead; + private Function ioExceptionWrapper; + private boolean producedEOI = false; + + private static final TypeDescriptor T_TD = TypeDescriptor.BYTE; + private final TypeDescriptor E_TD; + private final TypeDescriptor> BATCHED_TD; + + public InputStreamAsProducer(TypeDescriptor e_td, InputStream inputStream, Function ioExceptionWrapper) { + this.inputStream = inputStream; + this.totalRead = 0; + this.ioExceptionWrapper = ioExceptionWrapper; + + this.E_TD = e_td; + this.BATCHED_TD = Batched._typeDescriptor(T_TD, E_TD); + } + + @Override + public BigInteger ProducedCount() { + return BigInteger.valueOf(totalRead); + } + + @Override + public Option Remaining() { + return Option.create_None(TypeDescriptor.BIG_INTEGER); + } + + @Override + public Option> Next() { + try { + int value = inputStream.read(); + if (value == -1) { + if (producedEOI) { + 
return Option.create_None(BATCHED_TD); + } else { + producedEOI = true; + return Option.create_Some(BATCHED_TD, Batched.create_EndOfInput(T_TD, E_TD)); + } + } else { + Batched batched = Batched.create_BatchValue(T_TD, E_TD, (byte)value); + return Option.create_Some(BATCHED_TD, batched); + } + } catch (IOException e) { + return Option.create_Some(null, Batched.create_BatchError(T_TD, E_TD, ioExceptionWrapper.apply(e))); + } + } + + @Override + public void ForEach(IConsumer> consumer) { + __default.DefaultForEach(BATCHED_TD, this, consumer); + } + + @Override + public void Fill(Consumer> consumer) { + if (consumer instanceof BatchArrayWriter) { + BatchArrayWriter writer = (BatchArrayWriter) consumer; + int n = writer.Capacity().dtor_value().intValueExact(); + byte[] buffer = new byte[n]; + try { + int count = inputStream.read(buffer, 0, n); + if (count == -1) { + consumer.Accept(Batched.create_EndOfInput(T_TD, E_TD)); + } else { + totalRead += count; + Array dafnyArray = Array.wrap(buffer); + BatchReader reader = new BatchReader(T_TD, E_TD); + reader.__ctor(DafnySequence.fromArrayRange(T_TD, dafnyArray, 0, count)); + reader.Fill(consumer); + } + } catch (IOException e) { + writer.Accept(Batched.create_BatchError(T_TD, E_TD, ioExceptionWrapper.apply(e))); + } + } + + __default.DefaultFill(BATCHED_TD, this, consumer); + } + + @Override + public Option> Invoke(Tuple0 tuple0) { + return _Companion_Producer.Next(null, this); + } +} diff --git a/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/ProducerAsInputStream.java b/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/ProducerAsInputStream.java new file mode 100644 index 0000000000..460a2fe43c --- /dev/null +++ b/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/ProducerAsInputStream.java @@ -0,0 +1,71 @@ +package Streams; + +import Std_Compile.BulkActions_Compile.BatchArrayWriter; +import 
Std_Compile.BulkActions_Compile.Batched; +import Std_Compile.Consumers_Compile.IgnoreNConsumer; +import Std_Compile.Producers_Compile.Producer; +import Std_Compile.Wrappers_Compile.Option; +import dafny.TypeDescriptor; + +import java.io.IOException; +import java.io.InputStream; +import java.math.BigInteger; + +public class ProducerAsInputStream extends InputStream { + + private final Producer> producer; + + public ProducerAsInputStream(Producer> producer) { + this.producer = producer; + } + + @Override + public int read() throws IOException { + Option> next = producer.Next(); + if (next.is_Some()) { + Batched batched = next.dtor_value(); + if (batched.is_BatchValue()) { + return next.dtor_value().dtor_value(); + } else if (batched.is_EndOfInput()) { + return -1; + } else if (batched.is_BatchError()) { + throw new IOException(batched.dtor_error()); + } else { + throw new RuntimeException("Unexpected batched value: " + batched); + } + } else { + return -1; + } + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + // TODO: Could optimize to use b directly, + // but that starts to introduce risk. 
+ byte[] storage = new byte[len]; + BatchArrayWriter consumer = new BatchArrayWriter<>( + TypeDescriptor.BYTE, TypeDescriptor.reference(Exception.class) + ); + consumer.__ctor(storage); + producer.Fill(consumer); + if (consumer.state.is_Failure()) { + throw new IOException(consumer.state.dtor_error()); + } + int count = consumer.size.intValueExact(); + if (count == 0) { + return -1; + } else { + System.arraycopy(storage, 0, b, off, count); + return count; + } + } + + @Override + public long skip(long n) throws IOException { + // TODO: Still need to check for errors and handle EOI + IgnoreNConsumer> consumer = new IgnoreNConsumer<>(null); + consumer.__ctor(BigInteger.valueOf(n)); + producer.Fill(consumer); + return consumer.consumedCount.longValueExact(); + } +} diff --git a/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/RequestBodyAsDataStream.java b/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/RequestBodyAsDataStream.java new file mode 100644 index 0000000000..4ac82c90f2 --- /dev/null +++ b/TestModels/dafny-dependencies/StreamingSupport/runtimes/java/src/main/java/Streams/RequestBodyAsDataStream.java @@ -0,0 +1,47 @@ +package Streams; + +import StandardLibrary_Compile.Streams_Compile.DataStream; +import Std_Compile.BulkActions_Compile.Batched; +import Std_Compile.Producers_Compile.Producer; +import Std_Compile.Wrappers_Compile.Option; +import dafny.TypeDescriptor; +import software.amazon.awssdk.core.sync.RequestBody; + +import java.io.IOException; +import java.io.InputStream; +import java.math.BigInteger; +import java.util.Optional; +import java.util.function.Function; + +public class RequestBodyAsDataStream implements DataStream { + + private final TypeDescriptor e_td; + private final Function ioExceptionWrapper; + private final RequestBody requestBody; + + public RequestBodyAsDataStream(TypeDescriptor e_td, RequestBody requestBody, Function ioExceptionWrapper) { + this.e_td = e_td; + 
this.ioExceptionWrapper = ioExceptionWrapper; + this.requestBody = requestBody; + } + + @Override + public Option ContentLength() { + if (requestBody.optionalContentLength().isPresent()) { + return Option.create_Some(TypeDescriptor.BIG_INTEGER, BigInteger.valueOf(requestBody.optionalContentLength().get())); + } else { + return Option.create_None(TypeDescriptor.BIG_INTEGER); + } + } + + @Override + public boolean Replayable() { + return true; + } + + @Override + public Producer> Reader() { + InputStream is = requestBody.contentStreamProvider().newStream(); + return new InputStreamAsProducer<>(e_td, is, ioExceptionWrapper); + } +} diff --git a/TestModels/dafny-dependencies/StreamingSupport/runtimes/python/.gitignore b/TestModels/dafny-dependencies/StreamingSupport/runtimes/python/.gitignore new file mode 100644 index 0000000000..3c31a7deb8 --- /dev/null +++ b/TestModels/dafny-dependencies/StreamingSupport/runtimes/python/.gitignore @@ -0,0 +1,8 @@ +# Python Artifacts +src/**.egg-info/ +.pytest_cache +.tox +build +poetry.lock +**/poetry.lock +dist \ No newline at end of file diff --git a/TestModels/dafny-dependencies/StreamingSupport/runtimes/python/pyproject.toml b/TestModels/dafny-dependencies/StreamingSupport/runtimes/python/pyproject.toml new file mode 100644 index 0000000000..870acd975c --- /dev/null +++ b/TestModels/dafny-dependencies/StreamingSupport/runtimes/python/pyproject.toml @@ -0,0 +1,23 @@ +[tool.poetry] +name = "smithy_dafny_streaming_support" +version = "0.1.0" +description = "" +authors = ["AWS "] +packages = [ + { include = "smithy_dafny_streaming_support", from = "src" }, +] +# Include all of the following .gitignored files in package distributions, +# even though it is not included in version control +include = ["**/internaldafny/generated/*.py"] + +[tool.poetry.dependencies] +python = "^3.11.0" +DafnyRuntimePython = "^4.7.0" +smithy-dafny-standard-library = {path = "../../../StandardLibrary/runtimes/python", develop = false} + 
+[tool.poetry.group.test.dependencies] +pytest = "^7.4.0" + +[build-system] +requires = ["poetry-core<2.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/TestModels/dafny-dependencies/StreamingSupport/runtimes/python/src/smithy_dafny_streaming_support/__init__.py b/TestModels/dafny-dependencies/StreamingSupport/runtimes/python/src/smithy_dafny_streaming_support/__init__.py new file mode 100644 index 0000000000..12ef3d38ad --- /dev/null +++ b/TestModels/dafny-dependencies/StreamingSupport/runtimes/python/src/smithy_dafny_streaming_support/__init__.py @@ -0,0 +1,10 @@ +# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +# Boilerplate code for a Python package with internal Dafny code + +# Initialize generated Dafny +from .internaldafny.generated import module_ + +# Initialize externs +from .internaldafny import extern \ No newline at end of file diff --git a/TestModels/dafny-dependencies/StreamingSupport/runtimes/python/src/smithy_dafny_streaming_support/internaldafny/extern/streams.py b/TestModels/dafny-dependencies/StreamingSupport/runtimes/python/src/smithy_dafny_streaming_support/internaldafny/extern/streams.py new file mode 100644 index 0000000000..e5475f736f --- /dev/null +++ b/TestModels/dafny-dependencies/StreamingSupport/runtimes/python/src/smithy_dafny_streaming_support/internaldafny/extern/streams.py @@ -0,0 +1,131 @@ + +from _dafny import Seq, Array as DafnyArray +from smithy_python.interfaces.blobs import ByteStream +from smithy_dafny_streaming_support.internaldafny.generated.StandardLibrary_Streams import DataStream +from smithy_dafny_standard_library.internaldafny.generated.Std_BulkActions import BatchSeqWriter, BatchArrayWriter, Batched_BatchValue, Batched_EndOfInput, BatchReader +from smithy_dafny_standard_library.internaldafny.generated.Std_Consumers import IgnoreNConsumer +from smithy_dafny_standard_library.internaldafny.generated.Std_Producers import Producer +from 
smithy_dafny_standard_library.internaldafny.generated.Std_Wrappers import Option, Option_Some, Option_None + +# Adaptor classes for wrapping up Python-native types as their +# corresponding Dafny interfaces, and vice-versa. +# These are the equivalent of type conversions, +# but avoiding having to load all data into memory at once. + +class DafnyDataStreamAsByteStream(ByteStream): + """Wrapper class adapting a Dafny DataStream as a native ByteStream.""" + + def __init__(self, data_stream): + self.data_stream = data_stream + self.reader = data_stream.Reader() + + def read(self, size: int = -1) -> bytes: + if size == -1: + writer = BatchSeqWriter() + writer.ctor__() + self.reader.ForEach(writer) + else: + writer = BatchArrayWriter() + writer.ctor__(DafnyArray(None, size)) + self.reader.Fill(writer) + + # TODO: Check for errors. Fine to ignore EOI though. + return bytes(writer.Values()) + + def tell(self) -> int: + return self.reader.ProducedCount() + + def seek(self, offset, whence=0): + # TODO: check whether invalid offsets must raise errors + # TODO: Need to -1 to account for EndOfInput + match whence: + case 0: + new_position = offset + case 1: + new_position = self.reader.ProducedCount() + offset + case 2: + new_position = self.data_stream.ContentLength().value + offset + + if new_position > self.reader.ProducedCount(): + consumer = IgnoreNConsumer() + consumer.ctor__(new_position - self.reader.ProducedCount()) + self.reader.Fill(consumer) + elif new_position < self.reader.ProducedCount(): + self.reader = self.data_stream.Reader() + consumer = IgnoreNConsumer() + consumer.ctor__(new_position) + self.reader.Fill(consumer) + + +class StreamingBlobAsDafnyDataStream(DataStream): + def __init__(self, streaming_blob): + self.streaming_blob = streaming_blob + self.read = False + + def Replayable(self): + False + + def Reader(self): + if self.read: + raise Exception("StreamingBlobAsDafnyDataStream.Reader() called twice") + self.read = True + return 
StreamingBlobAsDafnyProducer(self.streaming_blob) + + +# TODO: Missing some methods like Remaining() +class StreamingBlobAsDafnyProducer(Producer): + """Wrapper class adapting a native StreamingBlob as a Dafny DataStream.""" + + def __init__(self, streaming_blob): + self.streaming_blob = streaming_blob + self.emitted_eoi = False + + def Next(self): + return Producer.Next(self) + + def Invoke(self, _) -> Option: + if self.emitted_eoi: + return Option_None() + + # TODO: error handling + next = self.streaming_blob.read(1) + if next: + return Option_Some(Batched_BatchValue(next[0])) + else: + self.emitted_eoi = True + return Option_Some(Batched_EndOfInput()) + + def ProducedCount(self): + return self.streaming_blob.position + + def ForEach(self, consumer): + if self.emitted_eoi: + return + + # TODO: error handling + while True: + next = self.streaming_blob.read(4096) + if not next: + break + batch = BatchReader() + batch.ctor__(Seq(next)) + batch.ForEach(consumer) + + consumer.Accept(Batched_EndOfInput()) + self.emitted_eoi = True + + def Fill(self, consumer): + if self.emitted_eoi: + return + + # TODO: error handling + size = consumer.Capacity() + next = self.streaming_blob.read(size) + if not next: + self.emitted_eoi = True + eoi = Batched_EndOfInput() + consumer.Accept(eoi) + else: + batch = BatchReader() + batch.ctor__(Seq(next)) + batch.Fill(consumer) diff --git a/TestModels/dafny-dependencies/StreamingSupport/runtimes/python/tox.ini b/TestModels/dafny-dependencies/StreamingSupport/runtimes/python/tox.ini new file mode 100644 index 0000000000..b626d2496c --- /dev/null +++ b/TestModels/dafny-dependencies/StreamingSupport/runtimes/python/tox.ini @@ -0,0 +1,13 @@ +[tox] +isolated_build = True +envlist = + py{311,312} + +[testenv] +skip_install = true +allowlist_externals = poetry +commands_pre = + poetry lock + poetry install +commands = + poetry run pytest -s -v test/ \ No newline at end of file diff --git 
/*******************************************************************************
 * Copyright by the contributors to the Dafny Project
 * SPDX-License-Identifier: MIT
 *******************************************************************************/

// Streaming-data abstraction used by smithy-dafny's @streaming support.
// NOTE(review): generic type arguments (e.g. the element types of Option,
// Producer, and seq) appear to have been lost when this chunk was extracted —
// confirm the signatures below against the original Streams.dfy.
module {:options "--function-syntax:4"} StandardLibrary.Streams {

  import opened Std.Wrappers
  import opened Std.Actions
  import opened Std.BulkActions
  import opened Std.Producers
  import opened Std.Collections.Seq
  import opened Std.Termination

  // A (possibly lazy, possibly unbounded) source of bytes.
  trait DataStream {

    // Total number of bytes, when known up front (None when unknown/unbounded).
    function ContentLength(): Option

    // Whether Reader() may be called more than once to re-read the data.
    predicate Replayable()

    // Produces a fresh reader positioned at the start of the stream.
    // The "+ 1" in the postcondition accounts for the trailing EndOfInput
    // marker that the producer emits after the data itself.
    method Reader() returns (p: Producer>)
      ensures
        && p.Valid()
        && fresh(p.Repr)
        && p.history == []
        && (ContentLength().Some? ==> p.Remaining() == Some(ContentLength().value as int + 1))
  }

  // In-memory DataStream backed by a sequence; trivially replayable.
  class SeqDataStream extends DataStream {

    const s: seq

    constructor (s: seq)
      ensures this.s == s
    {
      this.s := s;
    }

    // Length is always known for an in-memory sequence.
    function ContentLength(): Option {
      Some(|s|)
    }

    predicate Replayable() {
      true
    }

    method Reader() returns (p: Producer>)
      ensures
        && p.Valid()
        && fresh(p.Repr)
        && p.history == []
        && (ContentLength().Some? ==> p.Remaining() == Some(ContentLength().value + 1))
    {
      // Concatenate the data batches with a single EndOfInput marker.
      var data := new BatchReader(s);
      var eoi := new SeqReader([EndOfInput]);
      p := new ConcatenatedProducer(data, eoi);
    }
  }

}
and the makefile structure makes it very difficult to avoid passing --local-service-test // when we don't actually want it for --aws-sdk style projects. @@ -159,9 +160,6 @@ public static void main(String[] args) { cliArguments.javaAwsSdkVersion.ifPresent( engineBuilder::withJavaAwsSdkVersion ); - cliArguments.includeDafnyFile.ifPresent( - engineBuilder::withIncludeDafnyFile - ); cliArguments.libraryName.ifPresent(engineBuilder::withLibraryName); cliArguments.patchFilesDir.ifPresent(engineBuilder::withPatchFilesDir); final CodegenEngine engine = engineBuilder.build(); @@ -335,7 +333,8 @@ private static Options getCliOptionsForBuild() { .builder() .longOpt("include-dafny") .desc(" files to be include in the generated Dafny") - .hasArg() + .hasArgs() + .valueSeparator(',') .build() ) .addOption( @@ -519,7 +518,7 @@ private record CliArguments( Optional javaAwsSdkVersion, DafnyVersion dafnyVersion, Optional propertiesFile, - Optional includeDafnyFile, + List includeDafnyFiles, boolean awsSdkStyle, boolean localServiceTest, Optional patchFilesDir, @@ -664,9 +663,13 @@ static Optional parse(String[] args) throws ParseException { .ofNullable(commandLine.getOptionValue("properties-file")) .map(Paths::get); - Optional includeDafnyFile = Optional - .ofNullable(commandLine.getOptionValue("include-dafny")) - .map(Paths::get); + final String[] includeDafnyFileOptions = Optional + .ofNullable(commandLine.getOptionValues("include-dafny")) + .orElse(new String[0]); + final List includeDafnyFiles = Arrays + .stream(includeDafnyFileOptions) + .map(Paths::get) + .toList(); Optional patchFilesDir = Optional .ofNullable(commandLine.getOptionValue("patch-files-dir")) @@ -702,7 +705,7 @@ static Optional parse(String[] args) throws ParseException { javaAwsSdkVersion, dafnyVersion, propertiesFile, - includeDafnyFile, + includeDafnyFiles, awsSdkStyle, localServiceTest, patchFilesDir, diff --git a/codegen/smithy-dafny-codegen-test/src/test/java/software/amazon/polymorph/TestModelTest.java 
b/codegen/smithy-dafny-codegen-test/src/test/java/software/amazon/polymorph/TestModelTest.java index 40f5cea6eb..dbd275d852 100644 --- a/codegen/smithy-dafny-codegen-test/src/test/java/software/amazon/polymorph/TestModelTest.java +++ b/codegen/smithy-dafny-codegen-test/src/test/java/software/amazon/polymorph/TestModelTest.java @@ -91,14 +91,21 @@ protected Path getTestModelPath(String relativeTestModelPath) { protected void testModels(String relativeTestModelPath) { // The @streaming support depends on our subset of the Dafny standard libraries // which cannot be built for old versions of Dafny. - if ( + // We also don't want to burn a lot of CI cycles testing all models against Dafny 4.10 yet. + DafnyVersion dafnyVersion = CodegenEngine.getDafnyVersionFromDafny(); + boolean needsStreaming = relativeTestModelPath.endsWith("Streaming") || - relativeTestModelPath.endsWith("s3") - ) { - DafnyVersion dafnyVersion = CodegenEngine.getDafnyVersionFromDafny(); - if (dafnyVersion.compareTo(DafnyVersion.parse("4.9.0")) < 0) { - Assumptions.assumeTrue(false); - } + relativeTestModelPath.endsWith("s3") || + relativeTestModelPath.endsWith("StreamingSupport"); + if (dafnyVersion.compareTo(DafnyVersion.parse("4.10.0")) < 0) { + Assumptions.assumeFalse(needsStreaming); + } else { + Assumptions.assumeTrue(needsStreaming); + } + + // StreamingSupport isn't directly testable + if (relativeTestModelPath.endsWith("StreamingSupport")) { + Assumptions.assumeTrue(false); } } } diff --git a/codegen/smithy-dafny-codegen-test/src/test/java/software/amazon/polymorph/smithyjava/JavaTestModels.java b/codegen/smithy-dafny-codegen-test/src/test/java/software/amazon/polymorph/smithyjava/JavaTestModels.java index 4b739521ff..6d5601353a 100644 --- a/codegen/smithy-dafny-codegen-test/src/test/java/software/amazon/polymorph/smithyjava/JavaTestModels.java +++ b/codegen/smithy-dafny-codegen-test/src/test/java/software/amazon/polymorph/smithyjava/JavaTestModels.java @@ -31,7 +31,6 @@ class JavaTestModels 
extends TestModelTest { DISABLED_TESTS.add("SimpleTypes/SimpleTimestamp"); // Need to add for Java DISABLED_TESTS.add("aws-sdks/kms-lite"); // Not written yet - DISABLED_TESTS.add("aws-sdks/s3"); // Not written yet DISABLED_TESTS.add("aws-sdks/sqs"); // Not written yet DISABLED_TESTS.add("aws-sdks/sqs-via-cli"); // Not written yet @@ -43,7 +42,6 @@ class JavaTestModels extends TestModelTest { DISABLED_TESTS.add("SimpleTypes/SimpleDocument"); // Not supported yet DISABLED_TESTS.add("SimpleTypes/SimpleFloat"); // Not supported yet DISABLED_TESTS.add("SimpleTypes/SimpleShort"); // Not supported yet - DISABLED_TESTS.add("Streaming"); // Not supported yet // These are commented out because they should work // They are left here because it can be useful // to have these here so that it is easy to only run a single test locally. @@ -66,6 +64,7 @@ class JavaTestModels extends TestModelTest { // DISABLED_TESTS.add("SimpleTypes/SimpleInteger"); // These work // DISABLED_TESTS.add("SimpleTypes/SimpleLong"); // These work // DISABLED_TESTS.add("SimpleTypes/SimpleString"); // These work + // DISABLED_TESTS.add("Streaming"); // These work // DISABLED_TESTS.add("Union"); // These work // DISABLED_TESTS.add("aws-sdks/ddb"); // These work // DISABLED_TESTS.add("aws-sdks/ddb-lite"); // These work @@ -74,6 +73,7 @@ class JavaTestModels extends TestModelTest { // DISABLED_TESTS.add("aws-sdks/kms"); // These work // DISABLED_TESTS.add("aws-sdks/kmsv2"); // These work // DISABLED_TESTS.add("aws-sdks/lakeformation"); // These work + // DISABLED_TESTS.add("aws-sdks/s3"); // These work // DISABLED_TESTS.add("dafny-dependencies/StandardLibrary"); // These work } diff --git a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/CodegenEngine.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/CodegenEngine.java index 74c902cab3..5bf18bf962 100644 --- a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/CodegenEngine.java +++ 
b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/CodegenEngine.java @@ -100,7 +100,7 @@ public class CodegenEngine { private final boolean updatePatchFiles; // refactor this to only be required if generating Java private final AwsSdkVersion javaAwsSdkVersion; - private final Optional includeDafnyFile; + private final List includeDafnyFiles; private final boolean awsSdkStyle; private final boolean localServiceTest; private final Set generationAspects; @@ -126,7 +126,7 @@ private CodegenEngine( final DafnyVersion dafnyVersion, final Optional propertiesFile, final AwsSdkVersion javaAwsSdkVersion, - final Optional includeDafnyFile, + final List includeDafnyFiles, final boolean awsSdkStyle, final boolean localServiceTest, final Set generationAspects, @@ -145,7 +145,7 @@ private CodegenEngine( this.dafnyVersion = dafnyVersion; this.propertiesFile = propertiesFile; this.javaAwsSdkVersion = javaAwsSdkVersion; - this.includeDafnyFile = includeDafnyFile; + this.includeDafnyFiles = includeDafnyFiles; this.awsSdkStyle = awsSdkStyle; this.localServiceTest = localServiceTest; this.generationAspects = generationAspects; @@ -242,12 +242,12 @@ private void generateProjectPropertiesFile(final Path outputPath) private void generateDafny(final Path outputDir) { // Validated by builder, but check again - assert this.includeDafnyFile.isPresent(); + assert !this.includeDafnyFiles.isEmpty(); final DafnyApiCodegen dafnyApiCodegen = new DafnyApiCodegen( model, serviceShape, outputDir, - this.includeDafnyFile.get(), + this.includeDafnyFiles, this.dependentModelPaths, this.awsSdkStyle ); @@ -364,7 +364,7 @@ private void generateDafnySkeleton(Path outputDir) { model, serviceShape, outputDir, - this.includeDafnyFile.get(), + this.includeDafnyFiles, this.dependentModelPaths, this.awsSdkStyle ); @@ -1014,18 +1014,17 @@ private static CommandResult runCommand(Path workingDir, String... 
args) { } private Path standardLibraryPath() { - final Path includeDafnyFile = - this.includeDafnyFile.orElseThrow(() -> - new IllegalStateException( - "includeDafnyFile required when generating additional aspects (--generate)" - ) - ); + if (this.includeDafnyFiles.isEmpty()) { + throw new IllegalStateException( + "includeDafnyFile required when generating additional aspects (--generate)" + ); + } - // Assumes that includeDafnyFile is at StandardLibrary/src/Index.dfy + // Assumes that includeDafnyFiles.get(0) is at StandardLibrary/src/Index.dfy // TODO be smarter about finding the StandardLibrary path return libraryRoot .resolve("runtimes/net") - .relativize(includeDafnyFile.resolve("../..")); + .relativize(includeDafnyFiles.get(0).resolve("../..")); } private void writeTemplatedFile( @@ -1062,7 +1061,7 @@ public static class Builder { private DafnyVersion dafnyVersion; private Path propertiesFile; private AwsSdkVersion javaAwsSdkVersion = AwsSdkVersion.V2; - private Path includeDafnyFile; + private List includeDafnyFiles; private boolean awsSdkStyle = false; private boolean localServiceTest = false; private Set generationAspects = Collections.emptySet(); @@ -1175,8 +1174,8 @@ public Builder withJavaAwsSdkVersion( /** * Sets a file to be included in the generated Dafny code. 
*/ - public Builder withIncludeDafnyFile(final Path includeDafnyFile) { - this.includeDafnyFile = includeDafnyFile; + public Builder withIncludeDafnyFiles(final List includeDafnyFiles) { + this.includeDafnyFiles = includeDafnyFiles; return this; } @@ -1324,15 +1323,12 @@ public CodegenEngine build() { if ( targetLangOutputDirs.containsKey(TargetLanguage.DAFNY) && - this.includeDafnyFile == null + this.includeDafnyFiles.isEmpty() ) { throw new IllegalStateException( - "includeDafnyFile is required when generating Dafny code" + "includeDafnyFiles is required when generating Dafny code" ); } - final Optional includeDafnyFile = Optional - .ofNullable(this.includeDafnyFile) - .map(path -> path.toAbsolutePath().normalize()); if (this.awsSdkStyle && this.localServiceTest) { throw new IllegalStateException( @@ -1365,7 +1361,7 @@ public CodegenEngine build() { dafnyVersion, propertiesFile, javaAwsSdkVersion, - includeDafnyFile, + includeDafnyFiles, this.awsSdkStyle, this.localServiceTest, this.generationAspects, diff --git a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithydafny/DafnyApiCodegen.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithydafny/DafnyApiCodegen.java index 93e2424d9c..eba95ad43e 100644 --- a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithydafny/DafnyApiCodegen.java +++ b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithydafny/DafnyApiCodegen.java @@ -31,7 +31,7 @@ public class DafnyApiCodegen { private final ServiceShape serviceShape; private final DafnyNameResolver nameResolver; private final Path outputDir; - private final Path includeDafnyFile; + private final List includeDafnyFiles; private static final Logger LOGGER = LoggerFactory.getLogger( DafnyApiCodegen.class ); @@ -52,14 +52,14 @@ public DafnyApiCodegen( final Model model, final ServiceShape serviceShape, final Path outputDir, - final Path includeDafnyFile, + final List 
includeDafnyFiles, final Path[] dependentModelPaths, final boolean awsSdkRequest ) { this.model = model; this.serviceShape = serviceShape; this.outputDir = outputDir; - this.includeDafnyFile = includeDafnyFile; + this.includeDafnyFiles = includeDafnyFiles; this.nameResolver = new DafnyNameResolver( model, @@ -106,7 +106,7 @@ public Map generate() { .of( Stream .concat( - Stream.of(outputDir.relativize(includeDafnyFile)), + includeDafnyFiles.stream().map(outputDir::relativize), nameResolver .dependentModels() // nameResolve.dependentModels() filters dependentModelPaths @@ -237,9 +237,11 @@ public Map generateWrappedAbstractServiceModule( .of( Stream .concat( - Stream.of( - outputDir.relativize(includeDafnyFile), - outputDafny.relativize(outputDir.resolve("../src/Index.dfy")) + Stream.concat( + includeDafnyFiles.stream().map(outputDir::relativize), + Stream.of( + outputDafny.relativize(outputDir.resolve("../src/Index.dfy")) + ) ), nameResolver .dependentModels() @@ -321,9 +323,8 @@ public TokenTree generateBlobTypeDefinition(final ShapeId blobShapeId) { .map(DafnyApiCodegen::generateLengthConstraint); if (blobShape.hasTrait(StreamingTrait.class)) { // TODO: need to handle @length too, - // something like `forall produced | a.CanProduce(produced) :: min <= |Enumerated(produced)| <= max - // (which should have a simpler helper predicate version, especially when allowing for reference types) - return generateTypeSynonym(blobShapeId, "ByteStream"); + // something like `a.ContentLength().Some? 
&& min <= a.ContentLength().value <= max + return generateTypeSynonym(blobShapeId, "DataStream"); } else { return generateSubsetType(blobShapeId, "seq", lengthConstraint); } diff --git a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithydafny/DafnyNameResolver.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithydafny/DafnyNameResolver.java index 80cce84b3e..329d0c123d 100644 --- a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithydafny/DafnyNameResolver.java +++ b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithydafny/DafnyNameResolver.java @@ -90,7 +90,7 @@ public String baseTypeForShape(final ShapeId shapeId) { MAP -> dafnyModulePrefixForShape(shape) + shapeName; case BLOB -> { if (shape.hasTrait(StreamingTrait.class)) { - yield "ByteStream"; + yield "DataStream"; } else { yield dafnyModulePrefixForShape(shape) + shapeName; } @@ -386,7 +386,10 @@ public static Stream modulePreludeStandardImports( .stream() .anyMatch(s -> s.hasTrait(StreamingTrait.class)) ) { - return Streams.concat(basics, Stream.of("import opened Std.Streams")); + return Streams.concat( + basics, + Stream.of("import opened StandardLibrary.Streams") + ); } else { return basics; } diff --git a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/generator/ToDafny.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/generator/ToDafny.java index 9ec737662a..ae2343e8a0 100644 --- a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/generator/ToDafny.java +++ b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/generator/ToDafny.java @@ -39,6 +39,7 @@ import software.amazon.smithy.model.shapes.UnionShape; import software.amazon.smithy.model.traits.EnumDefinition; import software.amazon.smithy.model.traits.EnumTrait; +import software.amazon.smithy.model.traits.StreamingTrait; 
public abstract class ToDafny extends Generator { @@ -478,6 +479,9 @@ protected MethodReference conversionMethodReference(Shape shape) { ) ); } + if (shape.isBlobShape() && shape.hasTrait(StreamingTrait.class)) { + return new MethodReference(thisClassName, "DataStream"); + } // If the target is simple, use SIMPLE_CONVERSION_METHOD_FROM_SHAPE_TYPE if (ModelUtils.isSmithyApiOrSimpleShape(shape)) { return SIMPLE_CONVERSION_METHOD_FROM_SHAPE_TYPE.get(shape.getType()); diff --git a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/generator/ToNative.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/generator/ToNative.java index 71d1e48b2c..1ea196a702 100644 --- a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/generator/ToNative.java +++ b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/generator/ToNative.java @@ -18,6 +18,7 @@ import software.amazon.polymorph.smithyjava.generator.CodegenSubject.AwsSdkVersion; import software.amazon.polymorph.smithyjava.generator.awssdk.v1.ToNativeAwsV1; import software.amazon.polymorph.smithyjava.generator.awssdk.v2.ToNativeAwsV2; +import software.amazon.polymorph.smithyjava.nameresolver.Constants; import software.amazon.polymorph.smithyjava.nameresolver.Dafny; import software.amazon.polymorph.utils.AwsSdkNameResolverHelpers; import software.amazon.polymorph.utils.ModelUtils; @@ -33,6 +34,7 @@ import software.amazon.smithy.model.shapes.UnionShape; import software.amazon.smithy.model.traits.EnumDefinition; import software.amazon.smithy.model.traits.EnumTrait; +import software.amazon.smithy.model.traits.StreamingTrait; public abstract class ToNative extends Generator { @@ -408,6 +410,12 @@ protected MethodReference conversionMethodReference(Shape shape) { "MemberShapes MUST BE de-referenced BEFORE calling ToNative.conversionMethodReference." 
); } + if (shape.isBlobShape() && shape.hasTrait(StreamingTrait.class)) { + return new MethodReference( + software.amazon.polymorph.smithyjava.nameresolver.Constants.DATA_STREAM_AS_REQUEST_BODY_CLASS_NAME, + "of" + ); + } // If the target is simple, use SIMPLE_CONVERSION_METHOD_FROM_SHAPE_TYPE if (ModelUtils.isSmithyApiOrSimpleShape(shape)) { return SIMPLE_CONVERSION_METHOD_FROM_SHAPE_TYPE.get(shape.getType()); diff --git a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/generator/library/ToDafnyLibrary.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/generator/library/ToDafnyLibrary.java index a4aeacd97a..9144372f08 100644 --- a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/generator/library/ToDafnyLibrary.java +++ b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/generator/library/ToDafnyLibrary.java @@ -19,8 +19,10 @@ import java.util.Set; import java.util.stream.Collectors; import javax.lang.model.element.Modifier; +import software.amazon.awssdk.core.sync.RequestBody; import software.amazon.polymorph.smithyjava.MethodReference; import software.amazon.polymorph.smithyjava.generator.ToDafny; +import software.amazon.polymorph.smithyjava.nameresolver.Constants; import software.amazon.polymorph.smithyjava.nameresolver.Dafny; import software.amazon.polymorph.smithyjava.nameresolver.Native; import software.amazon.polymorph.smithyjava.unmodeled.CollectionOfErrors; @@ -87,6 +89,10 @@ TypeSpec toDafny() { toDafnyMethods.add(opaqueWithTextError()); // CollectionError toDafnyMethods.add(collectionError()); + // DataStreams + if (ModelUtils.usesStreaming(subject.model)) { + toDafnyMethods.add(dataStream()); + } // Structures subject .getStructuresInServiceNamespace() @@ -269,6 +275,31 @@ MethodSpec collectionError() { .build(); } + MethodSpec dataStream() { + ClassName dafnyDataStream = + 
software.amazon.polymorph.smithyjava.nameresolver.Constants.DAFNY_DATA_STREAM_CLASS_NAME; + TypeName dafnyDataStreamParameterized = ParameterizedTypeName.get( + dafnyDataStream, + ClassName.get(Byte.class), + subject.dafnyNameResolver.abstractClassForError() + ); + return MethodSpec + .methodBuilder("DataStream") + .returns(dafnyDataStreamParameterized) + .addModifiers(PUBLIC_STATIC) + .addParameter(ClassName.get(RequestBody.class), VAR_INPUT) + .addStatement( + "return new $T(\n" + + " Error._typeDescriptor(),\n" + + " $L,\n" + + " Error::create_Opaque\n" + + ")", + software.amazon.polymorph.smithyjava.nameresolver.Constants.REQUEST_BODY_AS_DATA_STREAM_CLASS_NAME, + VAR_INPUT + ) + .build(); + } + @Override protected MethodSpec modeledStructure(final StructureShape structureShape) { if (structureShape.hasTrait(PositionalTrait.class)) { diff --git a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/generator/library/ToNativeLibrary.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/generator/library/ToNativeLibrary.java index 1e38389f4f..b9031713af 100644 --- a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/generator/library/ToNativeLibrary.java +++ b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/generator/library/ToNativeLibrary.java @@ -138,7 +138,7 @@ TypeSpec toNative() { .stream() .map(this::modeledResource) .forEachOrdered(toNativeMethods::add); - // The Service, it's self + // The Service itself toNativeMethods.add(modeledService(subject.serviceShape)); return TypeSpec .classBuilder(thisClassName) diff --git a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/nameresolver/Constants.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/nameresolver/Constants.java index ab2d8264f9..d8948941c3 100644 --- 
a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/nameresolver/Constants.java +++ b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/nameresolver/Constants.java @@ -6,6 +6,7 @@ import java.nio.charset.StandardCharsets; import java.util.Base64; import java.util.Set; +import software.amazon.awssdk.core.sync.RequestBody; import software.amazon.smithy.model.shapes.ShapeId; import software.amazon.smithy.model.shapes.ShapeType; @@ -57,4 +58,12 @@ public class Constants { public static final ClassName BASE64_DECODER_CLASS_NAME = ClassName.get( Base64.Decoder.class ); + public static final ClassName DAFNY_DATA_STREAM_CLASS_NAME = ClassName.get( + "StandardLibrary_Compile.Streams_Compile", + "DataStream" + ); + public static final ClassName REQUEST_BODY_AS_DATA_STREAM_CLASS_NAME = + ClassName.get("Streams", "RequestBodyAsDataStream"); + public static final ClassName DATA_STREAM_AS_REQUEST_BODY_CLASS_NAME = + ClassName.get("Streams", "DataStreamAsRequestBody"); } diff --git a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/nameresolver/Dafny.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/nameresolver/Dafny.java index a754fb000e..1a72002957 100644 --- a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/nameresolver/Dafny.java +++ b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/nameresolver/Dafny.java @@ -36,6 +36,7 @@ import software.amazon.smithy.model.shapes.StructureShape; import software.amazon.smithy.model.traits.EnumTrait; import software.amazon.smithy.model.traits.ErrorTrait; +import software.amazon.smithy.model.traits.StreamingTrait; /** * Provides a consistent mapping between names of @@ -338,7 +339,13 @@ public TypeName typeForShape(final ShapeId shapeId) { new IllegalStateException("Cannot find shape " + shapeId) ); return switch (shape.getType()) { - case BLOB -> 
Dafny.typeForBlob(); + case BLOB -> { + if (shape.hasTrait(StreamingTrait.class)) { + yield typeForStreamingBlob(); + } else { + yield Dafny.typeForBlob(); + } + } case BOOLEAN -> TypeName.BOOLEAN.box(); case STRING -> typeForString(shape.asStringShape().get()); case TIMESTAMP -> typeForCharacterSequence(); @@ -370,6 +377,14 @@ private static TypeName typeForBlob() { ); } + public TypeName typeForStreamingBlob() { + return ParameterizedTypeName.get( + Constants.DAFNY_DATA_STREAM_CLASS_NAME, + TypeName.BYTE.box(), + abstractClassForError() + ); + } + static TypeName typeForDouble() { return typeForBlob(); } diff --git a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/nameresolver/Native.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/nameresolver/Native.java index 2035d91e4f..2f93fb5962 100644 --- a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/nameresolver/Native.java +++ b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithyjava/nameresolver/Native.java @@ -18,6 +18,7 @@ import java.util.Optional; import java.util.Set; import javax.annotation.Nullable; +import software.amazon.awssdk.core.sync.RequestBody; import software.amazon.polymorph.smithydafny.DafnyNameResolver; import software.amazon.polymorph.smithyjava.NamespaceHelper; import software.amazon.polymorph.smithyjava.generator.CodegenSubject; @@ -159,9 +160,15 @@ public TypeName typeForShape(final ShapeId shapeId) { ); yield shape.hasTrait(BoxTrait.class) ? 
typeName.box() : typeName; } + case BLOB -> { + if (shape.hasTrait(StreamingTrait.class)) { + yield ClassName.get(RequestBody.class); + } else { + yield NATIVE_TYPES_BY_SIMPLE_SHAPE_TYPE.get(shape.getType()); + } + } // For supported simple shapes, just map to native types - case BLOB, - TIMESTAMP, + case TIMESTAMP, BIG_DECIMAL, BIG_INTEGER -> NATIVE_TYPES_BY_SIMPLE_SHAPE_TYPE.get(shape.getType()); case STRING, ENUM -> classForStringOrEnum(shape); diff --git a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithypython/awssdk/shapevisitor/AwsSdkToDafnyShapeVisitor.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithypython/awssdk/shapevisitor/AwsSdkToDafnyShapeVisitor.java index 18686cb43f..8202ba94fe 100644 --- a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithypython/awssdk/shapevisitor/AwsSdkToDafnyShapeVisitor.java +++ b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithypython/awssdk/shapevisitor/AwsSdkToDafnyShapeVisitor.java @@ -77,7 +77,7 @@ protected String getDefault(Shape shape) { public String blobShape(BlobShape shape) { if (shape.hasTrait(StreamingTrait.class)) { writer.addStdlibImport( - "smithy_dafny_standard_library.internaldafny.extern.streams", + "smithy_dafny_streaming_support.internaldafny.extern.streams", "StreamingBlobAsDafnyDataStream" ); return "StreamingBlobAsDafnyDataStream(%1$s)".formatted(dataSource); diff --git a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithypython/awssdk/shapevisitor/DafnyToAwsSdkShapeVisitor.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithypython/awssdk/shapevisitor/DafnyToAwsSdkShapeVisitor.java index 6a2029a1f3..aa2834b58d 100644 --- a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithypython/awssdk/shapevisitor/DafnyToAwsSdkShapeVisitor.java +++ 
b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithypython/awssdk/shapevisitor/DafnyToAwsSdkShapeVisitor.java @@ -77,12 +77,10 @@ protected String getDefault(Shape shape) { public String blobShape(BlobShape shape) { if (shape.hasTrait(StreamingTrait.class)) { writer.addStdlibImport( - "smithy_dafny_standard_library.internaldafny.extern.streams", - "RewindableDafnyByteStreamAsByteStream" + "smithy_dafny_streaming_support.internaldafny.extern.streams", + "DafnyDataStreamAsByteStream" ); - return "RewindableDafnyByteStreamAsByteStream(%1$s)".formatted( - dataSource - ); + return "DafnyDataStreamAsByteStream(%1$s)".formatted(dataSource); } else { return "bytes(%1$s)".formatted(dataSource); } diff --git a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithypython/localservice/shapevisitor/DafnyToLocalServiceShapeVisitor.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithypython/localservice/shapevisitor/DafnyToLocalServiceShapeVisitor.java index 06679418c7..851afe529d 100644 --- a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithypython/localservice/shapevisitor/DafnyToLocalServiceShapeVisitor.java +++ b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithypython/localservice/shapevisitor/DafnyToLocalServiceShapeVisitor.java @@ -95,10 +95,10 @@ protected String getDefault(Shape shape) { public String blobShape(BlobShape shape) { if (shape.hasTrait(StreamingTrait.class)) { writer.addStdlibImport( - "smithy_dafny_standard_library.internaldafny.extern.streams", - "DafnyByteStreamAsByteStream" + "smithy_dafny_streaming_support.internaldafny.extern.streams", + "DafnyDataStreamAsByteStream" ); - return "DafnyByteStreamAsByteStream(%1$s)".formatted(dataSource); + return "DafnyDataStreamAsByteStream(%1$s)".formatted(dataSource); } else { return "bytes(%1$s)".formatted(dataSource); } diff --git 
a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithypython/localservice/shapevisitor/LocalServiceToDafnyShapeVisitor.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithypython/localservice/shapevisitor/LocalServiceToDafnyShapeVisitor.java index bdbf0c627e..5fef44ad73 100644 --- a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithypython/localservice/shapevisitor/LocalServiceToDafnyShapeVisitor.java +++ b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/smithypython/localservice/shapevisitor/LocalServiceToDafnyShapeVisitor.java @@ -96,7 +96,7 @@ protected String getDefault(Shape shape) { public String blobShape(BlobShape shape) { if (shape.hasTrait(StreamingTrait.class)) { writer.addStdlibImport( - "smithy_dafny_standard_library.internaldafny.extern.streams", + "smithy_dafny_streaming_support.internaldafny.extern.streams", "StreamingBlobAsDafnyDataStream" ); return "StreamingBlobAsDafnyDataStream(%1$s)".formatted(dataSource); diff --git a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/utils/ModelUtils.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/utils/ModelUtils.java index 0b8469b54d..1eb72c52f2 100644 --- a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/utils/ModelUtils.java +++ b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/polymorph/utils/ModelUtils.java @@ -26,6 +26,7 @@ import software.amazon.smithy.model.traits.EnumTrait; import software.amazon.smithy.model.traits.ErrorTrait; import software.amazon.smithy.model.traits.RequiredTrait; +import software.amazon.smithy.model.traits.StreamingTrait; import software.amazon.smithy.model.traits.StringTrait; import software.amazon.smithy.model.transform.ModelTransformer; @@ -791,4 +792,11 @@ public static StructureShape getConfigShape( StructureShape.class ); } + + public static boolean usesStreaming(final Model model) { + return model + 
.toSet() + .stream() + .anyMatch(shape -> shape.hasTrait(StreamingTrait.class)); + } } diff --git a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/smithy/dafny/codegen/DafnyClientCodegenPlugin.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/smithy/dafny/codegen/DafnyClientCodegenPlugin.java index 5a1cc18d64..b63e3e91bf 100644 --- a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/smithy/dafny/codegen/DafnyClientCodegenPlugin.java +++ b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/smithy/dafny/codegen/DafnyClientCodegenPlugin.java @@ -85,7 +85,7 @@ public void execute(PluginContext context) { .withPropertiesFile(propertiesFile) .withTargetLangOutputDirs(outputDirs) .withAwsSdkStyle(true) // this plugin only generates AWS SDK-style code - .withIncludeDafnyFile(settings.includeDafnyFile) + .withIncludeDafnyFiles(settings.includeDafnyFiles) .withGenerationAspects( EnumSet.of( CodegenEngine.GenerationAspect.PROJECT_FILES, diff --git a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/smithy/dafny/codegen/DafnyClientCodegenPluginSettings.java b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/smithy/dafny/codegen/DafnyClientCodegenPluginSettings.java index 4d44f39a11..f33464597f 100644 --- a/codegen/smithy-dafny-codegen/src/main/java/software/amazon/smithy/dafny/codegen/DafnyClientCodegenPluginSettings.java +++ b/codegen/smithy-dafny-codegen/src/main/java/software/amazon/smithy/dafny/codegen/DafnyClientCodegenPluginSettings.java @@ -53,7 +53,7 @@ class DafnyClientCodegenPluginSettings { *

* TODO: replace this with something cleaner */ - public final Path includeDafnyFile; + public final List includeDafnyFiles; /** * The Dafny version to generate code compatible with. @@ -67,13 +67,13 @@ private DafnyClientCodegenPluginSettings( final DafnyClientCodegenEdition edition, final ShapeId serviceId, final Set targetLanguages, - final Path includeDafnyFile, + final List includeDafnyFiles, final DafnyVersion dafnyVersion ) { this.edition = edition; this.serviceId = serviceId; this.targetLanguages = targetLanguages; - this.includeDafnyFile = includeDafnyFile; + this.includeDafnyFiles = includeDafnyFiles; this.dafnyVersion = dafnyVersion; } @@ -142,6 +142,9 @@ static Optional fromObject( includeDafnyFileNormalized ); } + final List includeDafnyFilesNormalized = List.of( + includeDafnyFileNormalized + ); // This is now optional since we can get it from dafny itself final DafnyVersion dafnyVersionString = node @@ -155,7 +158,7 @@ static Optional fromObject( edition, serviceId, targetLanguages, - includeDafnyFileNormalized, + includeDafnyFilesNormalized, dafnyVersionString ) ); diff --git a/codegen/smithy-dafny-codegen/src/test/java/software/amazon/polymorph/smithydafny/DafnyApiCodegenTest.java b/codegen/smithy-dafny-codegen/src/test/java/software/amazon/polymorph/smithydafny/DafnyApiCodegenTest.java index fbf8428b87..d19c98e966 100644 --- a/codegen/smithy-dafny-codegen/src/test/java/software/amazon/polymorph/smithydafny/DafnyApiCodegenTest.java +++ b/codegen/smithy-dafny-codegen/src/test/java/software/amazon/polymorph/smithydafny/DafnyApiCodegenTest.java @@ -35,7 +35,7 @@ private static DafnyApiCodegen setupCodegen( model, serviceShape, Path.of(""), - Path.of(""), + List.of(), new Path[0], false );