Skip to main content
Glama
by McKinsey
example.ipynb (4.07 kB)
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from vizro_ai import VizroAI\n",
    "\n",
    "llm = None\n",
    "# uncomment below to try out different models\n",
    "# llm = \"gpt-4o\"\n",
    "# llm = \"claude-3-5-sonnet-latest\"\n",
    "# llm = \"mistral-large-latest\"\n",
    "\n",
    "# llm = \"grok-beta\" #xAI API is compatible with OpenAI. To use grok-beta,\n",
    "# point `OPENAI_BASE_URL` to the xAI baseurl, use xAI API key for `OPENAI_API_KEY`\n",
    "# when setting up the environment variables\n",
    "# e.g.\n",
    "# OPENAI_BASE_URL=\"https://api.x.ai/v1\"\n",
    "# OPENAI_API_KEY=<xAI API key>\n",
    "# reference: https://docs.x.ai/api/integrations#openai-sdk\n",
    "\n",
    "# from langchain_openai import ChatOpenAI\n",
    "# llm = ChatOpenAI(\n",
    "# model=\"gpt-4o\")\n",
    "\n",
    "\n",
    "# import os\n",
    "# from langchain_anthropic import ChatAnthropic\n",
    "# llm = ChatAnthropic(\n",
    "# model=\"claude-3-5-sonnet-latest\",\n",
    "# # api_key = os.environ.get(\"ANTHROPIC_API_KEY\"),\n",
    "# # base_url= os.environ.get(\"ANTHROPIC_API_BASE\")\n",
    "# )\n",
    "\n",
    "# import os\n",
    "# from langchain_openai import AzureChatOpenAI\n",
    "# llm = AzureChatOpenAI(\n",
    "# azure_deployment=\"gpt-4-1106-preview\",\n",
    "# api_version=\"2024-04-01-preview\",\n",
    "# temperature=0.4,\n",
    "# azure_endpoint=os.environ[\"AZURE_OPENAI_ENDPOINT\"],\n",
    "# api_key=os.environ[\"AZURE_OPENAI_API_KEY\"]\n",
    "# )\n",
    "\n",
    "# import os\n",
    "# from langchain_mistralai import ChatMistralAI\n",
    "# llm = ChatMistralAI(\n",
    "# # name=\"mistral-large-latest\",\n",
    "# name = \"open-mistral-nemo\",\n",
    "# temperature=0,\n",
    "# max_retries=2,\n",
    "# endpoint= os.environ.get(\"MISTRAL_BASE_URL\"),\n",
    "# mistral_api_key = os.environ.get(\"MISTRAL_API_KEY\")\n",
    "# )\n",
    "\n",
    "# import os\n",
    "# from langchain_google_genai import ChatGoogleGenerativeAI\n",
    "# llm = ChatGoogleGenerativeAI(\n",
    "# model=\"gemini-1.5-flash-latest\",\n",
    "# # model=\"gemini-1.5-pro-latest\",\n",
    "# google_api_key=os.environ.get(\"GOOGLE_API_KEY\"),\n",
    "# temperature=0,\n",
    "# )"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "vizro_ai = VizroAI(model=llm)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import vizro.plotly.express as px\n",
    "\n",
    "df = px.data.gapminder()\n",
    "df"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false,
    "jupyter": {
     "outputs_hidden": false
    }
   },
   "outputs": [],
   "source": [
    "vizro_ai.plot(df, \"the trend of gdp over years in the US\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "vizro_ai.plot(df, \"describe the composition of gdp in continent, and add horizontal line for avg gdp\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "vizro_ai.plot(df, \"show me the geo distribution of life expectancy and set year as animation \")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/mckinsey/vizro'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.