Groq
To route requests to Groq, add the `x-props-provider: groq` header.
Call Groq via the OpenAI SDK
Props AI supports Groq queries via the OpenAI SDK.
We handle the mapping between the OpenAI SDK and the Groq API for you, from formatting the request to handling the response.
import os
from openai import OpenAI

# Configure the OpenAI SDK to talk to Groq through the Props AI proxy.
# NOTE: the original example used `process.env.GROQ_API_KEY`, which is a
# Node.js idiom and raises NameError in Python; environment variables are
# read with os.getenv() instead.
client = OpenAI(
    api_key=os.getenv("GROQ_API_KEY"),
    base_url="https://proxy.getprops.ai",
    default_headers={
        "x-props-key": os.getenv("PROPS_API_KEY"),
        # Tells the proxy which upstream provider to forward the request to.
        "x-props-provider": "groq",
    },
)

chat_completion = client.chat.completions.create(
    messages=[
        {
            "role": "user",
            "content": "Say this is a test",
        }
    ],
    model="llama3-8b-8192",
    # Identifier used by Props AI to attribute usage to an end user/process.
    user="<USER_ID or STRIPE_SUBSCRIPTION_ID or INTERNAL_PROCESS_ID>",
)
Groq SDK
from groq import Groq
import json

# Configure the native Groq SDK to route through the Props AI proxy.
client = Groq(
    base_url="https://proxy.getprops.ai",
    api_key="<GROQ_API_KEY>",
    default_headers={
        "x-props-key": "<PROPS_API_KEY>",
        # Tells the proxy which upstream provider to forward the request to.
        "x-props-provider": "groq",
    },
)

chat_completion = client.chat.completions.create(
    # Fixed typo: the original passed `messas=`, which raises
    # TypeError (unexpected keyword argument); the parameter is `messages`.
    messages=[
        {
            "role": "user",
            "content": "Explain the importance of fast language models",
        }
    ],
    model="llama3-8b-8192",
    # Identifier used by Props AI to attribute usage to an end user/process.
    user="<USER_ID or STRIPE_SUBSCRIPTION_ID or INTERNAL_PROCESS_ID>",
)
REST
# Call Groq through the Props AI proxy via raw HTTP.
# - Authorization carries the Groq API key; x-props-key authenticates with Props AI.
# - x-props-provider tells the proxy to forward the request to Groq.
# - "user" attributes usage to an end user/subscription/process.
# NOTE(review): this example posts to /openai/v1/chat/completions while the
# SDK examples use the bare base URL https://proxy.getprops.ai — confirm the
# correct path with the Props AI proxy documentation.
curl -X POST https://proxy.getprops.ai/openai/v1/chat/completions \
-H "Authorization: Bearer $GROQ_API_KEY" \
-H "Content-Type: application/json" \
-H "x-props-key: $PROPS_API_KEY" \
-H "x-props-provider: groq" \
-d '{
"messages": [
{
"role": "user",
"content": "Explain the importance of fast language models."
}
],
"model": "llama3-8b-8192",
"user": "<USER_ID or STRIPE_SUBSCRIPTION_ID or INTERNAL_PROCESS_ID>"
}'