Skip to main content

Integrating with Amazon Bedrock

Get an API Key

If you do not have an API key yet, you can get one by registering on the Feedback Intelligence Platform and creating a project.

Install the dependencies

pip install feedbackIntelligence
import io
import json
from copy import deepcopy

import boto3

from feedbackIntelligence.fi import BedrockWrapper

# Create the Bedrock runtime client and wrap it with the Feedback
# Intelligence wrapper so invocations can be reported to the platform.
bedrock_runtime = boto3.client(service_name="bedrock-runtime", region_name="us-east-1")
wrapper = BedrockWrapper(bedrock_client=bedrock_runtime,
                         fi_api_key='your_api_key')  # fixed typo: was 'you_api_key'

Construct the query

# suppose you have the query, prompt and context.

system_prompt = """
I'm going to give you a document.
Then I'm going to ask you a question about it.
I'd like you to first write down exact quotes of parts of the document that would help answer the question,
and then I'd like you to answer the question using facts from the quoted content. Here is the document:
"""

context = """sample context ...."""

query = "What was described in the document?"

# Assemble the chat payload: the system prompt first, then a single user
# turn that combines the retrieved context with the question.
user_message = f"CONTEXT: {context} \n QUERY: {query}"
payload = {
    'messages': [
        {'role': 'system', 'content': system_prompt},
        {'role': 'user', 'content': user_message},
    ],
    'max_tokens': 200,
    'temperature': 0.7,
    'top_p': 1,
    'presence_penalty': 0,
    'frequency_penalty': 0,
}
body = json.dumps(payload)

# Keyword arguments for bedrock_runtime/invoke_model targeting Jamba Instruct.
kwargs = {
    "modelId": "ai21.jamba-instruct-v1:0",
    "contentType": "application/json",
    "accept": "*/*",
    "body": body,
}

Get the response

def response_extractor(resp):
    """Extract the LLM message text from a Bedrock ``invoke_model`` response.

    The Bedrock response body is a single-read stream, so this reads it once,
    re-wraps the raw bytes in a rewound ``io.BytesIO`` (keeping ``resp['body']``
    reusable by later consumers), and returns the first choice's message content.

    Args:
        resp: The raw response dict from ``invoke_model``; ``resp['body']``
            must expose a ``read()`` method yielding the JSON payload bytes.

    Returns:
        str: ``choices[0].message.content`` from the parsed response JSON.
    """
    # Drain the stream once; bytes are immutable, so no defensive copy
    # (the original deepcopy and second BytesIO were dead code).
    body_content = resp['body'].read()

    # Replace the consumed stream with a rewindable in-memory buffer so the
    # same response object can be read again downstream.
    resp['body'] = io.BytesIO(body_content)

    # Parse directly from the bytes we already hold.
    resp_body = json.loads(body_content)

    # Rewind so the next reader starts from the beginning.
    resp['body'].seek(0)
    return resp_body.get("choices")[0]["message"]["content"]

# Replace these placeholders with the project and chat identifiers from
# your Feedback Intelligence dashboard.
project_id = your_project_id
chat_id = your_chat_id

# Invoke the model through the wrapper so the call is tracked.
# Fixed: the original snippet was missing the comma after
# `project_id=project_id`, which made this line a SyntaxError.
response = wrapper.invoke_model(context=context, query=query, prompt=system_prompt,
                                chat_id=chat_id, project_id=project_id,
                                get_resp=response_extractor, **kwargs)