{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"<center>\n",
" <p style=\"text-align:center\">\n",
" <img alt=\"phoenix logo\" src=\"https://raw.githubusercontent.com/Arize-ai/phoenix-assets/9e6101d95936f4bd4d390efc9ce646dc6937fb2d/images/socal/github-large-banner-phoenix.jpg\" width=\"1000\"/>\n",
" <br>\n",
" <br>\n",
" <a href=\"https://arize.com/docs/phoenix/\">Docs</a>\n",
" |\n",
" <a href=\"https://github.com/Arize-ai/phoenix\">GitHub</a>\n",
" |\n",
" <a href=\"https://arize-ai.slack.com/join/shared_invite/zt-2w57bhem8-hq24MB6u7yE_ZF_ilOYSBw#/shared-invite/email\">Community</a>\n",
" </p>\n",
"</center>\n",
"<h1 align=\"center\">OpenAI Node SDK Tracing</h1>\n",
"\n",
"Let's see how to get started with using the OpenAI Node SDK to trace your LLM calls using Deno. \n",
"\n",
"> Note: that this example requires the OPENAI_API_KEY environment variable to be set and assumes you are running the Phoenix server on localhost:6006."
]
},
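{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a quick, optional sanity check, the cell below verifies that the `OPENAI_API_KEY` environment variable is visible to Deno before any tracing is set up. Feel free to skip it."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"// Sanity check: confirm the API key is visible to Deno before proceeding\n",
"if (Deno.env.get(\"OPENAI_API_KEY\")) {\n",
"  console.log(\"✅ OPENAI_API_KEY found\");\n",
"} else {\n",
"  console.warn(\"⚠️ OPENAI_API_KEY is not set; the OpenAI calls below will fail\");\n",
"}"
]
},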
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import {\n",
" NodeTracerProvider,\n",
" SimpleSpanProcessor,\n",
"} from \"npm:@opentelemetry/sdk-trace-node\";\n",
"import { Resource } from \"npm:@opentelemetry/resources\";\n",
"import { OTLPTraceExporter } from \"npm:@opentelemetry/exporter-trace-otlp-proto\";\n",
"import { SEMRESATTRS_PROJECT_NAME } from \"npm:@arizeai/openinference-semantic-conventions\";\n",
"import { diag, DiagConsoleLogger, DiagLogLevel } from \"npm:@opentelemetry/api\";\n",
"\n",
"// For troubleshooting, set the log level to DiagLogLevel.DEBUG\n",
"diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.INFO);\n",
"\n",
"const provider = new NodeTracerProvider({\n",
" resource: new Resource({\n",
" [SEMRESATTRS_PROJECT_NAME]: \"deno-openai\",\n",
" }),\n",
"});\n",
"\n",
"provider.addSpanProcessor(\n",
" new SimpleSpanProcessor(\n",
" new OTLPTraceExporter({\n",
" url: \"http://localhost:6006/v1/traces\",\n",
" }),\n",
" ),\n",
");\n",
"\n",
"provider.register();\n",
"\n",
"console.log(\"👀 OpenInference initialized\");"
]
},
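{
"cell_type": "markdown",
"metadata": {},
"source": [
"Optionally, you can verify the pipeline end to end by emitting a manual span through the now-registered provider. If the exporter is wired up correctly, a `hello-phoenix` span should appear under the `deno-openai` project in Phoenix."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import { trace } from \"npm:@opentelemetry/api\";\n",
"\n",
"// Grab a tracer from the globally registered provider and emit a test span\n",
"const tracer = trace.getTracer(\"setup-check\");\n",
"const span = tracer.startSpan(\"hello-phoenix\");\n",
"span.end();\n",
"\n",
"console.log(\"🧪 Test span emitted\");"
]
},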
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import OpenAI from 'npm:openai';\n",
"import { OpenAIInstrumentation } from \"npm:@arizeai/openinference-instrumentation-openai\";\n",
"\n",
"const oaiInstrumentor = new OpenAIInstrumentation();\n",
"oaiInstrumentor.manuallyInstrument(OpenAI);"
]
},
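{
"cell_type": "markdown",
"metadata": {},
"source": [
"> Note: `manuallyInstrument` is used here because Deno loads packages via `npm:` specifiers rather than Node's module loader, so OpenTelemetry's automatic module-patching hooks never run. Passing the imported `OpenAI` module to the instrumentation patches it directly."
]
},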
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"const client = new OpenAI({\n",
" apiKey: process.env['OPENAI_API_KEY'], // This is the default and can be omitted\n",
"});\n",
"\n",
"async function main() {\n",
" try {\n",
" const chatCompletion = await client.chat.completions.create({\n",
" messages: [{ role: 'user', content: 'Say this is a test' }],\n",
" model: 'gpt-3.5-turbo',\n",
" });\n",
" console.dir(chatCompletion.choices[0].message);\n",
" } catch (e) {\n",
" console.error(e);\n",
" }\n",
"}\n",
"\n",
"await main();"
]
},
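{
"cell_type": "markdown",
"metadata": {},
"source": [
"`SimpleSpanProcessor` exports each span as soon as it ends, so the trace above should already be visible in Phoenix. As an optional final step, you can flush the provider to make sure nothing is still in flight before closing the notebook."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"// Optional: flush any pending spans to Phoenix before shutting down\n",
"await provider.forceFlush();\n",
"console.log(\"🚀 Spans flushed\");"
]
}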
],
"metadata": {
"language_info": {
"name": "typescript"
}
},
"nbformat": 4,
"nbformat_minor": 2
}