Cloudera Iceberg MCP Server

Official
by cloudera
openai-agentsdk-example.ipynb (5.33 kB)
{ "cells": [ { "cell_type": "code", "execution_count": 1, "id": "fee3e09d-880e-49db-b665-839c77b49808", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Note: you may need to restart the kernel to use updated packages.\n" ] } ], "source": [ "# Dependencies for the MCP server\n", "%pip install impyla python-dotenv \"mcp[cli]\" --quiet" ] }, { "cell_type": "code", "execution_count": 2, "id": "ee20517e-230e-475f-bb92-affc7d405a62", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Note: you may need to restart the kernel to use updated packages.\n" ] } ], "source": [ "# Dependencies for the application\n", "%pip install openai-agents --quiet" ] }, { "cell_type": "code", "execution_count": 3, "id": "9c6ba393-c6b1-411a-84f1-bde6b0390514", "metadata": {}, "outputs": [], "source": [ "# Disable tracing\n", "from agents import set_tracing_disabled\n", "\n", "set_tracing_disabled(True)" ] }, { "cell_type": "code", "execution_count": 4, "id": "47aacc9b-1d5b-4fea-8c37-580afedbd3f7", "metadata": {}, "outputs": [], "source": [ "import asyncio\n", "\n", "from openai import AsyncOpenAI\n", "from agents import Agent, Runner, OpenAIChatCompletionsModel\n", "from agents.mcp import MCPServer, MCPServerStdio" ] }, { "cell_type": "code", "execution_count": 5, "id": "0d007a5f-9a61-4406-9027-dd0f62bf5d84", "metadata": {}, "outputs": [], "source": [ "# Configuring AI Inference service hosted model Endpoint\n", "import json\n", "\n", "API_KEY = json.load(open(\"/tmp/jwt\"))[\"access_token\"]\n", "MODEL_NAME = \"meta/llama-3.1-8b-instruct\" # Update this for your Cloudera AI Inference service Endpoint\n", "BASE_URL = \"https://ml-2dad9e26-62f.cloudera.site/namespaces/serving-default/endpoints/llama3-8b-throughput/v1/\" # Update this for your Cloudera AI Inference service Endpoint\n", "\n", "client = AsyncOpenAI(api_key=API_KEY, base_url=BASE_URL)\n", "model = OpenAIChatCompletionsModel(model=MODEL_NAME, openai_client=client)" ] }, { "cell_type": "code", "execution_count": 6, "id": "d53a0635-9b1f-42b8-b693-188298cec537", "metadata": {}, "outputs": [], "source": [ "async def run(mcp_server: MCPServer):\n", " agent = Agent(\n", " name=\"Assistant\",\n", " instructions=\"Answer questions, use tools that are available for you.\",\n", " model=model,\n", " mcp_servers=[mcp_server],\n", " )\n", "\n", " message = \"What information can I learn form the `airlines_snappy` table? 
I'm looking for a quick answer, don't ask for clarifying questions.\"\n", " print(f\"Running: {message}\")\n", " result = await Runner.run(starting_agent=agent, input=message)\n", " print(f\"\\nAnswer: {result.final_output}\")" ] }, { "cell_type": "code", "execution_count": 7, "id": "20cd1a40-6ace-404c-8356-475e0479a8e9", "metadata": {}, "outputs": [], "source": [ "# Configuring MCP Server and Impala endpoint\n", "async def main():\n", " async with MCPServerStdio(\n", " name=\"Cloudera Iceberg MCP Server\",\n", " params={\n", " \"command\": \"python\",\n", " \"args\": [\"/home/cdsw/iceberg-mcp-server/server.py\"],\n", " \"env\": {\n", " \"IMPALA_HOST\": \"coordinator-default-impala.example.cloudera.site\", # Update this for your Impala host\n", " \"IMPALA_USER\": \"username\",\n", " \"IMPALA_PASSWORD\": \"password\",\n", " },\n", " },\n", " ) as server:\n", " await run(server)" ] }, { "cell_type": "code", "execution_count": 8, "id": "6583fcc7-7403-481b-a2f4-e6872c75ea0d", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Running: What information can I learn form the `airlines_snappy` table? I'm looking for a quick answer, don't ask for clarifying questions.\n", "\n", "Answer: The `airlines_snappy` table contains information about flights, including their departure and arrival times, delays, and various other details. It includes 31 columns, including carrier and flight number, departure and arrival times, airline delays, and other factors that could impact the flight.\n" ] } ], "source": [ "await main()" ] }, { "cell_type": "code", "execution_count": null, "id": "b27ff1ad-3853-4801-a94e-48dbb004e610", "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.12.8" } }, "nbformat": 4, "nbformat_minor": 5 }
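As a variation on the last two cells, and because python-dotenv is already installed in Cell 1, the Impala credentials could be loaded from a .env file instead of being hard-coded in the notebook. The following is a minimal sketch, not part of the published example: the .env location and the main_from_env name are illustrative, while the environment variable names match the ones the server expects above.

# Hedged sketch: read Impala credentials from a .env file via python-dotenv
# rather than hard-coding them. The .env path below is hypothetical.
import os
from dotenv import load_dotenv
from agents.mcp import MCPServerStdio

load_dotenv("/home/cdsw/iceberg-mcp-server/.env")  # hypothetical location of the .env file

async def main_from_env():
    async with MCPServerStdio(
        name="Cloudera Iceberg MCP Server",
        params={
            "command": "python",
            "args": ["/home/cdsw/iceberg-mcp-server/server.py"],
            "env": {
                "IMPALA_HOST": os.environ["IMPALA_HOST"],
                "IMPALA_USER": os.environ["IMPALA_USER"],
                "IMPALA_PASSWORD": os.environ["IMPALA_PASSWORD"],
            },
        },
    ) as server:
        await run(server)  # reuses the run() helper defined in Cell 6

It would be invoked the same way as the original entry point, e.g. await main_from_env().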

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/cloudera/iceberg-mcp-server'
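The same endpoint can also be queried from a script. The snippet below is a minimal sketch that assumes the requests package is installed; it makes no assumptions about the response schema beyond it being JSON.

import json
import requests

# Fetch the directory entry for the Cloudera Iceberg MCP server.
resp = requests.get(
    "https://glama.ai/api/mcp/v1/servers/cloudera/iceberg-mcp-server",
    timeout=30,
)
resp.raise_for_status()

# Pretty-print whatever metadata the API returns; no specific fields are assumed.
print(json.dumps(resp.json(), indent=2))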

If you have feedback or need assistance with the MCP directory API, please join our Discord server.