Skip to main content

Synchronous Usage

from openai import AzureOpenAI
import dokumetry

# Build an Azure OpenAI client from your key, API version, and endpoint.
client = AzureOpenAI(
    api_key="YOUR_AZURE_OPENAI_API_KEY",
    api_version="2024-02-01",
    azure_endpoint="YOUR_AZURE_OPENAI_API_ENDPOINT",
)

# Name of the model deployment created in your Azure OpenAI resource.
azure_model_deployment = "YOUR_AZURE_OPENAI_DEPLOYMENT"

# Instrument the `client` with Doku: once initialized, every OpenAI call made
# through it is automatically tracked by your Doku Ingester.
dokumetry.init(llm=client, doku_url="YOUR_DOKU_INGESTER_URL", api_key="YOUR_DOKU_TOKEN")

# Issue a chat completion request; dokumetry records it transparently.
response = client.chat.completions.create(
    model=azure_model_deployment,
    messages=[
        {"role": "system", "content": "You are a expert at monitoring LLM Applications"},
        {"role": "user", "content": "Hello! How do I monitor my OpenAI based LLM Applications?"},
    ],
)

# Show the assistant's reply.
print(response.choices[0].message)

Asynchronous Usage

import asyncio
from openai import AsyncAzureOpenAI
import dokumetry

# Build an asynchronous Azure OpenAI client from your key, API version,
# and endpoint.
client = AsyncAzureOpenAI(
  api_key = "YOUR_AZURE_OPENAI_API_KEY",
  api_version = "2024-02-01",
  azure_endpoint = "YOUR_AZURE_OPENAI_API_ENDPOINT"
)

# Name of the model deployment created in your Azure OpenAI resource.
azure_model_deployment = "YOUR_AZURE_OPENAI_DEPLOYMENT"

# Pass the above `client` object along with your Doku Ingester URL and API key and this will make sure that all OpenAI calls are automatically tracked.
dokumetry.init(llm=client, doku_url="YOUR_DOKU_INGESTER_URL", api_key="YOUR_DOKU_TOKEN")

async def main() -> None:
  """Send one chat completion request and print the assistant's reply."""
  chat_completion = await client.chat.completions.create(
    messages=[
      {"role": "system", "content": "You are a expert at monitoring LLM Applications"},
      {"role": "user", "content": "Hello! How do I monitor my OpenAI based LLM Applications?"}
    ],
    model=azure_model_deployment,
  )
  # Print the result so the async example matches the synchronous one;
  # previously the response was assigned but never used.
  print(chat_completion.choices[0].message)

asyncio.run(main())