# client_bedrock.py
# Use the Conversation API to send a text message to Amazon Nova.
import boto3
from dotenv import load_dotenv
from botocore.exceptions import ClientError
load_dotenv()
# Create a Bedrock Runtime client in the AWS Region you want to use.
# Credentials/region fall back to the standard AWS config chain (env vars,
# ~/.aws, etc.), optionally populated by the .env loaded above.
client = boto3.client("bedrock-runtime", region_name="us-east-1")

# Set the model ID — here Amazon Nova Pro (the comment previously said
# "Nova Lite", which did not match the ID below).
model_id = "amazon.nova-pro-v1:0"

# Start a conversation with the user message.
# (Removed a stray trailing apostrophe that was accidentally included in
# the prompt text.)
user_message = "add 2 to 3 and show the result in a sentence"
conversation = [
    {
        "role": "user",
        "content": [{"text": user_message}],
    }
]
print("=========================")
print("==> type: converse_stream\n")
try:
    # Send the message to the model, using a basic inference configuration,
    # and stream the reply so tokens are displayed as they arrive.
    streaming_response = client.converse_stream(
        modelId=model_id,
        messages=conversation,
        inferenceConfig={"maxTokens": 512, "temperature": 0.5, "topP": 0.9},
    )
    # Each contentBlockDelta event carries the next fragment of generated
    # text. Print fragments verbatim — the model's deltas already contain
    # their own spacing, so end="" (not end=" ") avoids injecting spurious
    # spaces — and flush so output truly appears in real time.
    for chunk in streaming_response["stream"]:
        if "contentBlockDelta" in chunk:
            text = chunk["contentBlockDelta"]["delta"]["text"]
            print(text, end="", flush=True)
except Exception as e:
    # ClientError is a subclass of Exception, so the former
    # `except (ClientError, Exception)` tuple was redundant; one broad
    # handler at this script-level boundary reports and exits cleanly.
    print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}")
    raise SystemExit(1)
print("\n")
print("=========================")
print("==> type: converse\n")
try:
    # Send the same conversation again, this time with the non-streaming
    # Converse API, using the same basic inference configuration.
    response = client.converse(
        modelId=model_id,
        messages=conversation,
        inferenceConfig={"maxTokens": 512, "temperature": 0.5, "topP": 0.9},
    )
    # The full reply arrives in one payload; pull the text of the first
    # content block of the assistant message.
    response_text = response["output"]["message"]["content"][0]["text"]
    print(response_text)
except Exception as e:
    # ClientError is a subclass of Exception, so the former
    # `except (ClientError, Exception)` tuple was redundant; one broad
    # handler at this script-level boundary reports and exits cleanly.
    print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}")
    raise SystemExit(1)