Use the Adaline Gateway as a proxy with the OpenAI SDK
https://gateway.adaline.ai/v1/openai/
from openai import OpenAI

# Point the OpenAI client at the Adaline gateway instead of api.openai.com.
client = OpenAI(
    api_key="your-openai-api-key",
    base_url="https://gateway.adaline.ai/v1/openai/"
)

# Adaline-specific headers: they attribute this request to a project and
# prompt inside Adaline while the request is proxied through the gateway.
adaline_headers = {
    "adaline-api-key": "your-adaline-api-key",
    "adaline-project-id": "your-project-id",
    "adaline-prompt-id": "your-prompt-id"
}

# Standard chat-completion call; extra_headers forwards the Adaline headers
# on top of the usual OpenAI request headers.
response = client.chat.completions.create(
    model="gpt-4",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "What is machine learning?"}
    ],
    extra_headers=adaline_headers
)

print(response.choices[0].message.content)
from openai import OpenAI

# Point the OpenAI client at the Adaline gateway instead of api.openai.com.
client = OpenAI(
    api_key="your-openai-api-key",
    base_url="https://gateway.adaline.ai/v1/openai/"
)

# Adaline-specific headers: they attribute this request to a project and
# prompt inside Adaline while the request is proxied through the gateway.
headers = {
    "adaline-api-key": "your-adaline-api-key",
    "adaline-project-id": "your-project-id",
    "adaline-prompt-id": "your-prompt-id"
}

stream = client.chat.completions.create(
    model="gpt-4",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Explain quantum computing in simple terms."}
    ],
    stream=True,
    stream_options={
        "include_usage": True
    },
    extra_headers=headers
)

for chunk in stream:
    # BUGFIX: with stream_options={"include_usage": True} the final chunk
    # carries only usage data and has an EMPTY `choices` list, so indexing
    # chunk.choices[0] unconditionally raises IndexError. Guard on
    # chunk.choices before indexing.
    if chunk.choices and chunk.choices[0].delta.content is not None:
        print(chunk.choices[0].delta.content, end="")
from openai import OpenAI

# Point the OpenAI client at the Adaline gateway instead of api.openai.com.
client = OpenAI(
    api_key="your-openai-api-key",
    base_url="https://gateway.adaline.ai/v1/openai/"
)

# Adaline-specific headers: they attribute this request to a project and
# prompt inside Adaline while the request is proxied through the gateway.
adaline_headers = {
    "adaline-api-key": "your-adaline-api-key",
    "adaline-project-id": "your-project-id",
    "adaline-prompt-id": "your-prompt-id"
}

# Embeddings request; extra_headers forwards the Adaline headers on top of
# the usual OpenAI request headers.
response = client.embeddings.create(
    model="text-embedding-3-small",
    input="The quick brown fox jumps over the lazy dog",
    extra_headers=adaline_headers
)

embedding = response.data[0].embedding
print(f"Embedding dimension: {len(embedding)}")
Was this page helpful?