Skip to content

Swift: code examples for Meta Llama to use Converse and ConverseStream, Amazon Bedrock #7434

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 3 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions .doc_gen/metadata/bedrock-runtime_metadata.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -415,6 +415,14 @@ bedrock-runtime_Converse_MetaLlama:
- description: Send a text message to Meta Llama, using Bedrock's Converse API.
snippet_tags:
- javascript.v3.bedrock-runtime.Converse_MetaLlama
Swift:
versions:
- sdk_version: 1
github: swift/example_code/bedrock-runtime
excerpts:
- description: Send a text message to Meta Llama, using Bedrock's Converse API.
snippet_tags:
- swift.example_code.bedrock-runtime.Converse_MetaLlama
services:
bedrock-runtime: {Converse}

Expand Down Expand Up @@ -692,6 +700,14 @@ bedrock-runtime_ConverseStream_MetaLlama:
- description: Send a text message to Meta Llama, using Bedrock's Converse API and process the response stream in real-time.
snippet_tags:
- javascript.v3.bedrock-runtime.ConverseStream_MetaLlama
Swift:
versions:
- sdk_version: 1
github: swift/example_code/bedrock-runtime
excerpts:
- description: Send a text message to Meta Llama, using Bedrock's Converse API and process the response stream in real-time.
snippet_tags:
- swift.example_code.bedrock-runtime.ConverseStream_MetaLlama
services:
bedrock-runtime: {ConverseStream}

Expand Down
5 changes: 5 additions & 0 deletions swift/example_code/bedrock-runtime/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,11 @@ For prerequisites, see the [README](../../README.md#Prerequisites) in the `swift
- [Converse](models/amazon-nova/amazon-nova-text/Sources/Converse/main.swift#L4)
- [ConverseStream](models/amazon-nova/amazon-nova-text/Sources/ConverseStream/main.swift#L4)

### Meta Llama

- [Converse](models/meta_llama/Sources/Converse/main.swift#L4)
- [ConverseStream](models/meta_llama/Sources/ConverseStream/main.swift#L4)


<!--custom.examples.start-->
<!--custom.examples.end-->
Expand Down
38 changes: 38 additions & 0 deletions swift/example_code/bedrock-runtime/models/meta_llama/Package.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
// swift-tools-version: 6.1
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
//
// The swift-tools-version declares the minimum version of Swift required to build this package.

import PackageDescription

// The AWS SDK for Swift product that both example executables depend on.
let awsBedrockRuntime: Target.Dependency = .product(
    name: "AWSBedrockRuntime", package: "aws-sdk-swift"
)

// Package manifest for the Meta Llama Converse examples: two executables,
// one for the blocking Converse API and one for the streaming variant.
let package = Package(
    name: "MetaLlamaConverse",
    // Minimum Apple platform versions these examples support.
    platforms: [
        .macOS(.v13),
        .iOS(.v15)
    ],
    dependencies: [
        // The AWS SDK for Swift, which provides the Bedrock Runtime client.
        .package(url: "https://github.com/awslabs/aws-sdk-swift", from: "1.2.61")
    ],
    targets: [
        // Non-streaming Converse example.
        .executableTarget(
            name: "Converse",
            dependencies: [awsBedrockRuntime],
            path: "Sources/Converse"
        ),
        // Streaming ConverseStream example.
        .executableTarget(
            name: "ConverseStream",
            dependencies: [awsBedrockRuntime],
            path: "Sources/ConverseStream"
        )
    ]
)
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
//
// snippet-start:[swift.example_code.bedrock-runtime.Converse_MetaLlama]
// An example demonstrating how to use the Conversation API to send
// a text message to Meta Llama.

import AWSBedrockRuntime

/// Sends a single text prompt to Meta Llama via Amazon Bedrock's Converse API
/// and returns the model's text reply.
///
/// - Parameter textPrompt: The user's prompt text.
/// - Returns: The first text block of the model's reply, or a descriptive
///   fallback string if the response contains no message or no text content.
/// - Throws: Any error raised while configuring the client or calling the
///   Converse API.
func converse(_ textPrompt: String) async throws -> String {

    // Create a Bedrock Runtime client in the AWS Region you want to use.
    let config =
        try await BedrockRuntimeClient.BedrockRuntimeClientConfiguration(
            region: "us-east-1"
        )
    let client = BedrockRuntimeClient(config: config)

    // Set the model ID.
    let modelId = "meta.llama3-8b-instruct-v1:0"

    // Start a conversation with the user message.
    let message = BedrockRuntimeClientTypes.Message(
        content: [.text(textPrompt)],
        role: .user
    )

    // Optionally use inference parameters.
    let inferenceConfig =
        BedrockRuntimeClientTypes.InferenceConfiguration(
            maxTokens: 512,
            stopSequences: ["END"],
            temperature: 0.5,
            topp: 0.9
        )

    // Create the ConverseInput to send to the model.
    let input = ConverseInput(
        inferenceConfig: inferenceConfig, messages: [message], modelId: modelId)

    // Send the ConverseInput to the model.
    let response = try await client.converse(input: input)

    // Extract and return the response text. Do not force-unwrap
    // `msg.content`: the SDK declares it optional, and a nil or empty
    // content array would otherwise crash at runtime.
    guard case let .message(msg) = response.output else {
        return "No message found in converse output"
    }
    guard let firstBlock = msg.content?.first,
          case let .text(textResponse) = firstBlock
    else {
        return "No text response found in message content"
    }
    return textResponse
}

// snippet-end:[swift.example_code.bedrock-runtime.Converse_MetaLlama]

// Entry point: send a sample prompt and print the reply, reporting any error.
do {
    let prompt = "Describe the purpose of a 'hello world' program in one line."
    print(try await converse(prompt))
} catch {
    print("An error occurred: \(error)")
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
//
// snippet-start:[swift.example_code.bedrock-runtime.ConverseStream_MetaLlama]
// An example demonstrating how to use the Conversation API to send a text message
// to Meta Llama and print the response stream.

import AWSBedrockRuntime

/// Sends a text prompt to Meta Llama via Amazon Bedrock's ConverseStream API
/// and prints the streamed reply to standard output as it arrives.
///
/// - Parameter textPrompt: The user's prompt text.
/// - Throws: Any error raised while configuring the client, calling the
///   ConverseStream API, or iterating the response stream.
func printConverseStream(_ textPrompt: String) async throws {

    // Configure a Bedrock Runtime client for the target AWS Region.
    let clientConfig =
        try await BedrockRuntimeClient.BedrockRuntimeClientConfiguration(
            region: "us-east-1"
        )
    let client = BedrockRuntimeClient(config: clientConfig)

    // The Meta Llama model to converse with.
    let modelId = "meta.llama3-8b-instruct-v1:0"

    // Wrap the prompt as a single user message.
    let userMessage = BedrockRuntimeClientTypes.Message(
        content: [.text(textPrompt)],
        role: .user
    )

    // Optional inference parameters controlling generation.
    let inference =
        BedrockRuntimeClientTypes.InferenceConfiguration(
            maxTokens: 512,
            stopSequences: ["END"],
            temperature: 0.5,
            topp: 0.9
        )

    // Build the streaming request and send it to the model.
    let request = ConverseStreamInput(
        inferenceConfig: inference, messages: [userMessage], modelId: modelId)
    let response = try await client.converseStream(input: request)

    // The stream is optional in the SDK's response type; bail out if absent.
    guard let stream = response.stream else {
        print("No stream available")
        return
    }

    // Print each text delta as it arrives; ignore all other event kinds.
    for try await event in stream {
        if case .messagestart = event {
            print("\nMeta Llama:")
        } else if case .contentblockdelta(let deltaEvent) = event,
                  case .text(let text) = deltaEvent.delta {
            print(text, terminator: "")
        }
    }
}

// snippet-end:[swift.example_code.bedrock-runtime.ConverseStream_MetaLlama]

// Entry point: stream the reply to a sample prompt, reporting any error.
do {
    let prompt =
        "Describe the purpose of a 'hello world' program in two paragraphs."
    try await printConverseStream(prompt)
} catch {
    print("An error occurred: \(error)")
}