import os

import requests

# Assumes the OpenAI API key is exported as OPENAI_API_KEY (adjust to your setup).
openai_key = os.environ.get("OPENAI_API_KEY", "")

# Fully OpenAI compatible endpoint
response = requests.post(
    "http://localhost:8080/openai/v1/chat/completions",
    headers={
        "Authorization": f"Bearer {openai_key}",
        "Content-Type": "application/json"
    },
    json={
        "model": "gpt-4o-mini",
        "messages": [{"role": "user", "content": "Hello!"}]
    }
)
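
# Reading the reply: a sketch assuming the compatible endpoint mirrors the
# upstream OpenAI Chat Completions response shape (choices -> message -> content).
print(response.json()["choices"][0]["message"]["content"])
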
# Fully Anthropic compatible endpoint
response = requests.post(
    "http://localhost:8080/anthropic/v1/messages",
    headers={
        "Content-Type": "application/json",
    },
    json={
        "model": "claude-3-sonnet-20240229",
        "max_tokens": 1000,
        "messages": [{"role": "user", "content": "Hello!"}]
    }
)
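
# Anthropic's Messages API returns content as a list of blocks; assuming the
# compatible endpoint mirrors that shape, the reply text is in the first block.
print(response.json()["content"][0]["text"])
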
# Fully Google GenAI compatible endpoint
response = requests.post(
    "http://localhost:8080/genai/v1beta/models/gemini-1.5-flash/generateContent",
    headers={
        "Content-Type": "application/json",
    },
    json={
        "contents": [
            {"parts": [{"text": "Hello!"}]}
        ],
        "generation_config": {
            "max_output_tokens": 1000,
            "temperature": 1
        }
    }
)
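
# The generateContent response nests text under candidates -> content -> parts;
# again a sketch assuming the compatible endpoint mirrors the upstream GenAI shape.
print(response.json()["candidates"][0]["content"]["parts"][0]["text"])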