Integrate Proxy with the Azure OpenAI SDK to automatically capture telemetry from your Azure-hosted OpenAI models.
The examples below use Azure OpenAI's Python SDK, but the same approach works with the SDK in most other languages.
from openai import AzureOpenAI

# Point the SDK at the Adaline gateway instead of the Azure endpoint;
# the gateway forwards the request and captures telemetry.
client = AzureOpenAI(
    api_key="your-azure-openai-api-key",
    api_version="2024-02-01",
    azure_endpoint="https://gateway.adaline.ai/v1/azure/",
    azure_deployment="your-deployment-name",
)

# Adaline routing/attribution headers sent alongside the request.
headers = {
    "adaline-api-key": "your-adaline-api-key",
    "adaline-project-id": "your-project-id",
    "adaline-prompt-id": "your-prompt-id",
    # Deployed resource name of the OpenAI LLM in Azure
    "adaline-azure-resource-name": "your-resource-name",
}

# Streaming chat completion; extra_headers attaches the Adaline metadata.
stream = client.chat.completions.create(
    model="gpt-4",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Explain Azure services in detail."},
    ],
    stream=True,
    extra_headers=headers,
)

# Print tokens as they arrive; delta.content is None on non-content chunks.
for chunk in stream:
    if chunk.choices[0].delta.content is not None:
        print(chunk.choices[0].delta.content, end="")
from openai import AzureOpenAI

# Route embedding calls through the Adaline gateway so telemetry is captured.
client = AzureOpenAI(
    api_key="your-azure-openai-api-key",
    api_version="2024-02-01",
    azure_endpoint="https://gateway.adaline.ai/v1/azure/",
    azure_deployment="your-embedding-deployment-name",
)

# Adaline routing/attribution headers sent alongside the request.
headers = {
    "adaline-api-key": "your-adaline-api-key",
    "adaline-project-id": "your-project-id",
    "adaline-prompt-id": "your-prompt-id",
    # Deployed resource name of the OpenAI embedding model in Azure
    "adaline-azure-resource-name": "your-resource-name",
}

# Single-input embedding request; extra_headers attaches the Adaline metadata.
response = client.embeddings.create(
    model="text-embedding-ada-002",
    input="The quick brown fox jumps over the lazy dog",
    extra_headers=headers,
)

embedding = response.data[0].embedding
print(f"Embedding dimension: {len(embedding)}")