- Python
- NodeJS
Synchronous Usage
from openai import AzureOpenAI
import dokumetry

# Configure the Azure OpenAI client with your credentials and endpoint.
client = AzureOpenAI(
    api_key="YOUR_AZURE_OPENAI_API_KEY",
    api_version="2024-02-01",
    azure_endpoint="YOUR_AZURE_OPENAI_API_ENDPOINT",
)

azure_model_deployment = "YOUR_AZURE_OPENAI_DEPLOYMENT"

# Pass the above `client` object along with your Doku Ingester URL and API key and this will make sure that all OpenAI calls are automatically tracked.
dokumetry.init(llm=client, doku_url="YOUR_DOKU_INGESTER_URL", api_key="YOUR_DOKU_TOKEN")

# Build the conversation once, then send it to the deployed model.
messages = [
    {"role": "system", "content": "You are a expert at monitoring LLM Applications"},
    {"role": "user", "content": "Hello! How do I monitor my OpenAI based LLM Applications?"},
]

completion = client.chat.completions.create(
    model=azure_model_deployment,
    messages=messages,
)

print(completion.choices[0].message)
Asynchronous Usage
import asyncio
from openai import AsyncAzureOpenAI
import dokumetry

# Configure the asynchronous Azure OpenAI client with your credentials and endpoint.
client = AsyncAzureOpenAI(
    api_key="YOUR_AZURE_OPENAI_API_KEY",
    api_version="2024-02-01",
    azure_endpoint="YOUR_AZURE_OPENAI_API_ENDPOINT",
)

azure_model_deployment = "YOUR_AZURE_OPENAI_DEPLOYMENT"

# Pass the above `client` object along with your Doku Ingester URL and API key and this will make sure that all OpenAI calls are automatically tracked.
dokumetry.init(llm=client, doku_url="YOUR_DOKU_INGESTER_URL", api_key="YOUR_DOKU_TOKEN")

async def main() -> None:
    """Send one chat completion request and print the model's reply."""
    chat_completion = await client.chat.completions.create(
        messages=[
            {"role": "system", "content": "You are a expert at monitoring LLM Applications"},
            {"role": "user", "content": "Hello! How do I monitor my OpenAI based LLM Applications?"}
        ],
        model=azure_model_deployment,
    )
    # Fix: the original example discarded the response. Print it so the
    # async example produces visible output like the synchronous one does.
    print(chat_completion.choices[0].message)

asyncio.run(main())
import OpenAI from "openai";
import DokuMetry from 'dokumetry';

// Point the OpenAI client at your Azure deployment endpoint.
const client = new OpenAI({
  apiKey: "YOUR_AZURE_OPENAI_API_KEY",
  baseURL: `https://YOUR_AZURE_OPENAI_RESOURCE.openai.azure.com/openai/deployments/YOUR_AZURE_OPENAI_DEPLOYMENT`,
  defaultQuery: { 'api-version': "2024-02-01" },
  defaultHeaders: { 'api-key': "YOUR_AZURE_OPENAI_API_KEY" },
});

const azureModelDeployment = 'YOUR_AZURE_OPENAI_DEPLOYMENT';

// Pass the above `client` object along with your Doku Ingester URL and API key and this will make sure that all OpenAI calls are automatically tracked.
DokuMetry.init({llm: client, dokuUrl: "YOUR_DOKU_INGESTER_URL", apiKey: "YOUR_DOKU_TOKEN"})

// Request a single chat completion and log the first choice.
const main = async () => {
  const messages = [
    {"role": "system", "content": "You are a expert at monitoring LLM Applications"},
    {"role": "user", "content": "Hello! How do I monitor my OpenAI based LLM Applications?"}
  ];
  const completion = await client.chat.completions.create({
    messages,
    model: azureModelDeployment,
  });
  console.log(completion.choices[0]);
};

main();

