diff --git a/.doc_gen/metadata/bedrock-runtime_metadata.yaml b/.doc_gen/metadata/bedrock-runtime_metadata.yaml
index 6c98247ca0e..95b1d3f2e6e 100644
--- a/.doc_gen/metadata/bedrock-runtime_metadata.yaml
+++ b/.doc_gen/metadata/bedrock-runtime_metadata.yaml
@@ -423,6 +423,14 @@ bedrock-runtime_Converse_MetaLlama:
             - description: Send a text message to Meta Llama, using Bedrock's Converse API.
               snippet_tags:
                 - javascript.v3.bedrock-runtime.Converse_MetaLlama
+    Swift:
+      versions:
+        - sdk_version: 1
+          github: swift/example_code/bedrock-runtime
+          excerpts:
+            - description: Send a text message to Meta Llama, using Bedrock's Converse API.
+              snippet_tags:
+                - swift.example_code.bedrock-runtime.Converse_MetaLlama
   services:
     bedrock-runtime: {Converse}

@@ -708,6 +716,14 @@ bedrock-runtime_ConverseStream_MetaLlama:
             - description: Send a text message to Meta Llama, using Bedrock's Converse API and process the response stream in real-time.
               snippet_tags:
                 - javascript.v3.bedrock-runtime.ConverseStream_MetaLlama
+    Swift:
+      versions:
+        - sdk_version: 1
+          github: swift/example_code/bedrock-runtime
+          excerpts:
+            - description: Send a text message to Meta Llama, using Bedrock's Converse API and process the response stream in real-time.
+              snippet_tags:
+                - swift.example_code.bedrock-runtime.ConverseStream_MetaLlama
   services:
     bedrock-runtime: {ConverseStream}

diff --git a/swift/example_code/bedrock-runtime/README.md b/swift/example_code/bedrock-runtime/README.md
index f6c653aac19..ac18bf73b7a 100644
--- a/swift/example_code/bedrock-runtime/README.md
+++ b/swift/example_code/bedrock-runtime/README.md
@@ -38,6 +38,11 @@ For prerequisites, see the [README](../../README.md#Prerequisites) in the `swift
 - [Converse](models/anthropic_claude/Sources/Converse/main.swift#L4)
 - [ConverseStream](models/anthropic_claude/Sources/ConverseStream/main.swift#L4)

+### Meta Llama
+
+- [Converse](models/meta_llama/Sources/Converse/main.swift#L4)
+- [ConverseStream](models/meta_llama/Sources/ConverseStream/main.swift#L4)
+
diff --git a/swift/example_code/bedrock-runtime/models/meta_llama/Package.swift b/swift/example_code/bedrock-runtime/models/meta_llama/Package.swift
new file mode 100644
index 00000000000..f5f2c9fc018
--- /dev/null
+++ b/swift/example_code/bedrock-runtime/models/meta_llama/Package.swift
@@ -0,0 +1,38 @@
+// swift-tools-version: 6.1
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+//
+// The swift-tools-version declares the minimum version of Swift required to build this package.
+
+import PackageDescription
+
+let package = Package(
+    name: "MetaLlamaConverse",
+    // Let Xcode know the minimum Apple platforms supported.
+    platforms: [
+        .macOS(.v13),
+        .iOS(.v15)
+    ],
+    dependencies: [
+        // Dependencies declare other packages that this package depends on.
+        .package(url: "https://github.com/awslabs/aws-sdk-swift", from: "1.2.61")
+    ],
+    targets: [
+        // Targets are the basic building blocks of a package, defining a module or a test suite.
+        // Targets can depend on other targets in this package and products from dependencies.
+        .executableTarget(
+            name: "Converse",
+            dependencies: [
+                .product(name: "AWSBedrockRuntime", package: "aws-sdk-swift"),
+            ],
+            path: "Sources/Converse"
+        ),
+        .executableTarget(
+            name: "ConverseStream",
+            dependencies: [
+                .product(name: "AWSBedrockRuntime", package: "aws-sdk-swift"),
+            ],
+            path: "Sources/ConverseStream"
+        )
+    ]
+)
diff --git a/swift/example_code/bedrock-runtime/models/meta_llama/Sources/Converse/main.swift b/swift/example_code/bedrock-runtime/models/meta_llama/Sources/Converse/main.swift
new file mode 100644
index 00000000000..2528e994dd5
--- /dev/null
+++ b/swift/example_code/bedrock-runtime/models/meta_llama/Sources/Converse/main.swift
@@ -0,0 +1,65 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+//
+// snippet-start:[swift.example_code.bedrock-runtime.Converse_MetaLlama]
+// An example demonstrating how to use the Conversation API to send
+// a text message to Meta Llama.
+
+import AWSBedrockRuntime
+
+func converse(_ textPrompt: String) async throws -> String {
+
+    // Create a Bedrock Runtime client in the AWS Region you want to use.
+    let config =
+        try await BedrockRuntimeClient.BedrockRuntimeClientConfiguration(
+            region: "us-east-1"
+        )
+    let client = BedrockRuntimeClient(config: config)
+
+    // Set the model ID.
+    let modelId = "meta.llama3-8b-instruct-v1:0"
+
+    // Start a conversation with the user message.
+    let message = BedrockRuntimeClientTypes.Message(
+        content: [.text(textPrompt)],
+        role: .user
+    )
+
+    // Optionally use inference parameters
+    let inferenceConfig =
+        BedrockRuntimeClientTypes.InferenceConfiguration(
+            maxTokens: 512,
+            stopSequences: ["END"],
+            temperature: 0.5,
+            topp: 0.9
+        )
+
+    // Create the ConverseInput to send to the model
+    let input = ConverseInput(
+        inferenceConfig: inferenceConfig, messages: [message], modelId: modelId)
+
+    // Send the ConverseInput to the model
+    let response = try await client.converse(input: input)
+
+    // Extract and return the response text.
+    if case let .message(msg) = response.output {
+        if case let .text(textResponse) = msg.content![0] {
+            return textResponse
+        } else {
+            return "No text response found in message content"
+        }
+    } else {
+        return "No message found in converse output"
+    }
+}
+
+// snippet-end:[swift.example_code.bedrock-runtime.Converse_MetaLlama]
+
+do {
+    let reply = try await converse(
+        "Describe the purpose of a 'hello world' program in one line."
+    )
+    print(reply)
+} catch {
+    print("An error occurred: \(error)")
+}
diff --git a/swift/example_code/bedrock-runtime/models/meta_llama/Sources/ConverseStream/main.swift b/swift/example_code/bedrock-runtime/models/meta_llama/Sources/ConverseStream/main.swift
new file mode 100644
index 00000000000..33c8255d33c
--- /dev/null
+++ b/swift/example_code/bedrock-runtime/models/meta_llama/Sources/ConverseStream/main.swift
@@ -0,0 +1,75 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+//
+// snippet-start:[swift.example_code.bedrock-runtime.ConverseStream_MetaLlama]
+// An example demonstrating how to use the Conversation API to send a text message
+// to Meta Llama and print the response stream.
+
+import AWSBedrockRuntime
+
+func printConverseStream(_ textPrompt: String) async throws {
+
+    // Create a Bedrock Runtime client in the AWS Region you want to use.
+    let config =
+        try await BedrockRuntimeClient.BedrockRuntimeClientConfiguration(
+            region: "us-east-1"
+        )
+    let client = BedrockRuntimeClient(config: config)
+
+    // Set the model ID.
+    let modelId = "meta.llama3-8b-instruct-v1:0"
+
+    // Start a conversation with the user message.
+    let message = BedrockRuntimeClientTypes.Message(
+        content: [.text(textPrompt)],
+        role: .user
+    )
+
+    // Optionally use inference parameters.
+    let inferenceConfig =
+        BedrockRuntimeClientTypes.InferenceConfiguration(
+            maxTokens: 512,
+            stopSequences: ["END"],
+            temperature: 0.5,
+            topp: 0.9
+        )
+
+    // Create the ConverseStreamInput to send to the model.
+    let input = ConverseStreamInput(
+        inferenceConfig: inferenceConfig, messages: [message], modelId: modelId)
+
+    // Send the ConverseStreamInput to the model.
+    let response = try await client.converseStream(input: input)
+
+    // Extract the streaming response.
+    guard let stream = response.stream else {
+        print("No stream available")
+        return
+    }
+
+    // Extract and print the streamed response text in real-time.
+    for try await event in stream {
+        switch event {
+        case .messagestart(_):
+            print("\nMeta Llama:")
+
+        case .contentblockdelta(let deltaEvent):
+            if case .text(let text) = deltaEvent.delta {
+                print(text, terminator: "")
+            }
+
+        default:
+            break
+        }
+    }
+}
+
+// snippet-end:[swift.example_code.bedrock-runtime.ConverseStream_MetaLlama]
+
+do {
+    try await printConverseStream(
+        "Describe the purpose of a 'hello world' program in two paragraphs."
+    )
+} catch {
+    print("An error occurred: \(error)")
+}
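
The Converse API is stateless, so a multi-turn chat is carried by resending the full message history on every call. Below is a minimal, illustrative sketch (not part of the change above) of how the `converse` function could be adapted for that. It reuses only the `AWSBedrockRuntime` types already used in the new files; the `converseWithHistory` name, the `.assistant` role value, and the omission of `inferenceConfig` are assumptions made for illustration.

import AWSBedrockRuntime

func converseWithHistory(
    _ history: [BedrockRuntimeClientTypes.Message],
    client: BedrockRuntimeClient,
    modelId: String = "meta.llama3-8b-instruct-v1:0"
) async throws -> String {
    // The service keeps no conversation state, so every prior user and
    // assistant turn must be included in `messages` on each request.
    // (`converseWithHistory` is a hypothetical helper, not from the PR.)
    let input = ConverseInput(messages: history, modelId: modelId)
    let response = try await client.converse(input: input)

    // Same response handling as in Sources/Converse/main.swift above.
    if case let .message(msg) = response.output {
        if case let .text(text) = msg.content![0] {
            return text
        }
    }
    return "No text response found in converse output"
}

// Hypothetical usage: a follow-up question that carries the prior assistant reply.
// let history: [BedrockRuntimeClientTypes.Message] = [
//     .init(content: [.text("Describe a 'hello world' program in one line.")], role: .user),
//     .init(content: [.text("It prints a greeting to confirm the toolchain works.")], role: .assistant),
//     .init(content: [.text("Now describe it in one word.")], role: .user)
// ]
// let reply = try await converseWithHistory(history, client: client)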
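
Similarly, the streaming loop in `printConverseStream` prints each delta as it arrives; a caller that needs the whole reply as a value can accumulate the same event cases into a `String`. A rough sketch under the same assumptions (the `collectConverseStream` name and its parameter list are hypothetical, and only the event cases already handled in the example are matched):

import AWSBedrockRuntime

func collectConverseStream(
    _ textPrompt: String,
    client: BedrockRuntimeClient,
    modelId: String = "meta.llama3-8b-instruct-v1:0"
) async throws -> String {
    // Build the same single-turn request used by printConverseStream.
    let message = BedrockRuntimeClientTypes.Message(
        content: [.text(textPrompt)],
        role: .user
    )
    let input = ConverseStreamInput(messages: [message], modelId: modelId)
    let response = try await client.converseStream(input: input)

    guard let stream = response.stream else {
        return ""
    }

    // Append each text delta instead of printing it, then return the full reply.
    var collected = ""
    for try await event in stream {
        if case .contentblockdelta(let deltaEvent) = event,
            case .text(let text) = deltaEvent.delta {
            collected += text
        }
    }
    return collected
}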