There are multiple ways to integrate Amazon SageMaker with Portkey.
You can use your AWS credentials, or use an assumed role.
In this example we will create a virtual key and use it to interact with Sagemaker.
This helps you restrict access (e.g., to specific models or specific endpoints).
Create a virtual key in the Portkey dashboard in the virtual keys section.
You can select SageMaker as the provider and fill in your deployment details.
Initialize the Portkey SDK with the virtual key. (If you are using the REST API, skip to next step)
// Initialize the Portkey SDK with a SageMaker virtual key.
import Portkey from 'portkey-ai';

const portkey = new Portkey({
  apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
  virtualKey: "VIRTUAL_KEY"  // Replace with your SageMaker virtual key
});
// Initialize the Portkey SDK with a SageMaker virtual key.
import Portkey from 'portkey-ai';

const portkey = new Portkey({
  apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
  virtualKey: "VIRTUAL_KEY"  // Replace with your SageMaker virtual key
});
# Initialize the Portkey client with a SageMaker virtual key.
from portkey_ai import Portkey

portkey = Portkey(
    api_key="PORTKEY_API_KEY",  # Replace with your Portkey API key
    virtual_key="VIRTUAL_KEY",  # Replace with your SageMaker virtual key
)
# Invoke the SageMaker endpoint through Portkey.
# Apart from `url`, every keyword argument is forwarded as-is to the
# SageMaker endpoint, so pass whatever fields your model expects.
response = portkey.post(
    url="endpoints/{endpoint_name}/invocations",  # substitute your endpoint name
    inputs="my_custom_value",
    my_custom_key="my_custom_value",
)
print(response)
# Invoke the SageMaker endpoint through Portkey.
# Apart from `url`, every keyword argument is forwarded as-is to the
# SageMaker endpoint, so pass whatever fields your model expects.
response = portkey.post(
    url="endpoints/{endpoint_name}/invocations",  # substitute your endpoint name
    inputs="my_custom_value",
    my_custom_key="my_custom_value",
)
print(response)
// Invoke the SageMaker endpoint through Portkey.
// JavaScript has no keyword arguments (the original `url=...` form is not
// valid JS): pass the URL first, then a body object whose fields are
// forwarded as-is to the SageMaker endpoint.
const response = await portkey.post(
  "endpoints/{endpoint_name}/invocations", // substitute your endpoint name
  {
    inputs: "my_custom_value",
    my_custom_key: "my_custom_value",
  }
);
console.log(response);
# Invoke the SageMaker endpoint via the REST API with a virtual key.
# Every field in the JSON body is forwarded as-is to the SageMaker
# endpoint. (JSON does not allow comments, so the payload itself must be
# comment-free.)
curl --location 'https://api.portkey.ai/v1/endpoints/{endpoint_name}/invocations' \
  --header 'x-portkey-virtual-key: {VIRTUAL_KEY}' \
  --header 'x-portkey-api-key: {PORTKEY_API_KEY}' \
  --header 'Content-Type: application/json' \
  --data '{
    "inputs": "my_custom_value",
    "my_custom_key": "my_custom_value"
  }'
# Call SageMaker through Portkey using AWS credentials directly (no virtual key).
from portkey_ai import Portkey

portkey = Portkey(
    api_key="PORTKEY_API_KEY",  # Replace with your Portkey API key
    provider="sagemaker",
    aws_region="us-east-1",  # Replace with your AWS region
    aws_access_key_id="AWS_ACCESS_KEY_ID",  # Replace with your AWS access key id
    aws_secret_access_key="AWS_SECRET_ACCESS_KEY",  # Replace with your AWS secret access key
    amzn_sagemaker_inference_component="SAGEMAKER_INFERENCE_COMPONENT",  # Replace with your SageMaker inference component
)

# Apart from `url`, keyword arguments are forwarded as-is to the SageMaker endpoint.
response = portkey.post(
    url="endpoints/{endpoint_name}/invocations",  # substitute your endpoint name
    inputs="my_custom_value",
    my_custom_key="my_custom_value",
)
print(response)
# Call SageMaker through Portkey using AWS credentials directly (no virtual key).
from portkey_ai import Portkey

portkey = Portkey(
    api_key="PORTKEY_API_KEY",  # Replace with your Portkey API key
    provider="sagemaker",
    aws_region="us-east-1",  # Replace with your AWS region
    aws_access_key_id="AWS_ACCESS_KEY_ID",  # Replace with your AWS access key id
    aws_secret_access_key="AWS_SECRET_ACCESS_KEY",  # Replace with your AWS secret access key
    amzn_sagemaker_inference_component="SAGEMAKER_INFERENCE_COMPONENT",  # Replace with your SageMaker inference component
)

# Apart from `url`, keyword arguments are forwarded as-is to the SageMaker endpoint.
response = portkey.post(
    url="endpoints/{endpoint_name}/invocations",  # substitute your endpoint name
    inputs="my_custom_value",
    my_custom_key="my_custom_value",
)
print(response)
// Call SageMaker through Portkey using AWS credentials directly (no virtual key).
// Config keys are camelCase in the JavaScript SDK (matching `apiKey`/`virtualKey`
// elsewhere in these docs) — NOTE(review): confirm against the portkey-ai JS SDK.
import Portkey from 'portkey-ai';

const portkey = new Portkey({
  apiKey: "PORTKEY_API_KEY",
  provider: "sagemaker",
  awsAccessKeyId: "AWS_ACCESS_KEY_ID",
  awsSecretAccessKey: "AWS_SECRET_ACCESS_KEY",
  awsRegion: "us-east-1",
  amznSagemakerInferenceComponent: "SAGEMAKER_INFERENCE_COMPONENT"
});

// JavaScript has no keyword arguments (the original `url=...` form is not
// valid JS): pass the URL first, then a body object whose fields are
// forwarded as-is to the SageMaker endpoint.
const response = await portkey.post(
  "endpoints/{endpoint_name}/invocations", // substitute your endpoint name
  {
    inputs: "my_custom_value",
    my_custom_key: "my_custom_value",
  }
);
console.log(response);
# Call SageMaker via the REST API using AWS credentials directly.
# The URL targets the SageMaker invocation route (the original pointed at
# /v1/chat/completions, which does not match the invocation-style payload
# used throughout these examples). Every field in the JSON body is
# forwarded as-is to the SageMaker endpoint; JSON does not allow comments,
# so the payload itself must be comment-free.
curl https://api.portkey.ai/v1/endpoints/{endpoint_name}/invocations \
  -H "Content-Type: application/json" \
  -H "x-portkey-api-key: $PORTKEY_API_KEY" \
  -H "x-portkey-provider: sagemaker" \
  -H "x-portkey-aws-access-key-id: $AWS_ACCESS_KEY_ID" \
  -H "x-portkey-aws-secret-access-key: $AWS_SECRET_ACCESS_KEY" \
  -H "x-portkey-aws-region: $AWS_REGION" \
  -H "x-portkey-amzn-sagemaker-inference-component: $SAGEMAKER_INFERENCE_COMPONENT" \
  -d '{
    "inputs": "my_custom_value",
    "my_custom_key": "my_custom_value"
  }'