Skip to main content
Glama

Weather MCP Server with GitHub OAuth & Location Management

by f
index.js•5.09 kB
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';
import OpenAI from 'openai';
import readline from 'readline';
import dotenv from 'dotenv';

// Load environment variables (expects OPENAI_API_KEY in .env)
dotenv.config();

// Initialize OpenAI client
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});

/**
 * Prompt the user on stdin and resolve with the line they type.
 * @param {string} prompt - Text shown before the cursor.
 * @returns {Promise<string>} The user's answer (without trailing newline).
 */
function getUserInput(prompt) {
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  });
  return new Promise((resolve) => {
    rl.question(prompt, (answer) => {
      rl.close();
      resolve(answer);
    });
  });
}

/**
 * Main flow: spawn the MCP weather server over stdio, advertise its tools
 * to the OpenAI chat-completions API as callable functions, execute any
 * tool calls the model requests via the MCP client, then print the
 * model's final answer.
 */
async function runFunctionCallingExample() {
  // Spawn the MCP server as a separate process, connected over stdio.
  const transport = new StdioClientTransport({
    command: 'node',
    args: ['server.js'],
  });

  // Create and connect the client
  const client = new Client(
    {
      name: 'weather-client',
      version: '1.0.0',
    },
    {
      capabilities: {},
    },
  );
  await client.connect(transport);

  console.log('šŸš€ Starting OpenAI Function Calling with MCP Server\n');

  // Get user input from CLI
  const userMessage = await getUserInput('šŸ’¬ Enter your message: ');

  if (!userMessage.trim()) {
    console.log('āŒ No message provided. Exiting...');
    // Fix: close the MCP connection before exiting so the spawned
    // server child process is not left running.
    await client.close();
    process.exit(0);
  }

  // Translate the MCP tool descriptors into OpenAI function-calling schema.
  const toolsList = await client.listTools();
  const tools = toolsList.tools.map((tool) => ({
    type: 'function',
    function: {
      name: tool.name,
      description: tool.description,
      parameters: tool.inputSchema,
    },
  }));

  const messages = [
    {
      role: 'system',
      // Fix: the original single-quoted literal spanned a physical
      // newline, which is a syntax error; the text is now one legal line.
      content:
        'You are a helpful assistant that can call functions to get information about the weather. The tools always return true information, no doubt.',
    },
    {
      role: 'user',
      content: userMessage,
    },
  ];

  console.log('\nšŸ“ User message:', userMessage);
  console.log('\n---\n');

  // Fix: track the exit status so a caught error is reported as a
  // non-zero exit code instead of the original unconditional exit(0).
  let exitCode = 0;

  try {
    // First API call: let the model decide whether to request tool calls.
    const firstRequest = {
      model: 'gpt-4o',
      messages: messages,
      tools: tools,
      tool_choice: 'auto',
    };
    console.log('šŸ“¤ OpenAI Request #1:');
    console.log(JSON.stringify(firstRequest, null, 2));
    console.log('\n---\n');

    const response = await openai.chat.completions.create(firstRequest);
    const responseMessage = response.choices[0].message;
    console.log('šŸ¤– Model response:', JSON.stringify(responseMessage, null, 2));
    console.log('\n---\n');

    // Add the assistant's response to the conversation
    messages.push(responseMessage);

    // Check if the model wants to call functions
    if (responseMessage.tool_calls) {
      console.log(`šŸ“ž Model requested ${responseMessage.tool_calls.length} function call(s)\n`);

      // Execute each function call via the MCP server, sequentially,
      // appending each tool result to the conversation.
      for (const toolCall of responseMessage.tool_calls) {
        const functionName = toolCall.function.name;
        const functionArgs = JSON.parse(toolCall.function.arguments);

        console.log(`šŸ”§ Executing function via MCP: ${functionName}`);
        console.log(`   Arguments: ${JSON.stringify(functionArgs)}`);

        // Call the MCP server tool
        const result = await client.callTool({
          name: functionName,
          arguments: functionArgs,
        });

        // Extract text content items from the MCP response.
        const functionResponse = result.content
          .filter((item) => item.type === 'text')
          .map((item) => item.text)
          .join('\n');

        console.log(`   Response: ${functionResponse}\n`);

        // Add the function response to the conversation
        messages.push({
          role: 'tool',
          tool_call_id: toolCall.id,
          content: functionResponse,
        });
      }

      console.log('---\n');

      // Second API call: get the final natural-language answer now that
      // the tool results are in the conversation.
      const secondRequest = {
        model: 'gpt-4o',
        messages: messages,
      };
      console.log('šŸ“¤ OpenAI Request #2 (with function results):');
      console.log(JSON.stringify(secondRequest, null, 2));
      console.log('\n---\n');

      const secondResponse = await openai.chat.completions.create(secondRequest);
      const finalMessage = secondResponse.choices[0].message;
      console.log('āœ… Final response from model:');
      console.log(finalMessage.content);
    } else {
      // No function calls were made
      console.log('ā„¹ļø No function calls were requested');
      console.log('Response:', responseMessage.content);
    }
  } catch (error) {
    exitCode = 1;
    console.error('āŒ Error:', error.message);
    if (error.response) {
      console.error('Response data:', error.response.data);
    }
  } finally {
    // Always release the MCP connection (which terminates the spawned
    // server), then exit with the recorded status.
    await client.close();
    process.exit(exitCode);
  }
}

// Run the example
runFunctionCallingExample().catch((error) => {
  console.error('Fatal error:', error);
  process.exit(1);
});

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/f/komunite-mcp-bootcamp-weather-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.