
Axion Planetary MCP

by Dhenenjay
bedrock-client.ts • 2.56 kB
import { BedrockRuntimeClient, InvokeModelCommand, InvokeModelWithResponseStreamCommand } from '@aws-sdk/client-bedrock-runtime';
import modelConfig from '../model-config.json';

export class BedrockClient {
  private client: BedrockRuntimeClient;
  private modelId: string;

  constructor(region: string = 'us-east-1') {
    this.client = new BedrockRuntimeClient({ region });
    this.modelId = modelConfig.models.primary.id;
  }

  // Invokes the primary Anthropic model on Bedrock. Returns the full response
  // text, or an AsyncIterable of text deltas when options.streaming is set.
  async generateText(prompt: string, options?: {
    temperature?: number;
    maxTokens?: number;
    streaming?: boolean;
  }): Promise<string | AsyncIterable<string>> {
    const payload = {
      anthropic_version: "bedrock-2023-05-31",
      max_tokens: options?.maxTokens || modelConfig.models.primary.maxTokens,
      temperature: options?.temperature || modelConfig.models.primary.temperature,
      messages: [
        {
          role: "user",
          content: prompt
        }
      ]
    };

    if (options?.streaming) {
      return this.streamResponse(payload);
    }

    const command = new InvokeModelCommand({
      modelId: this.modelId,
      body: JSON.stringify(payload),
      contentType: 'application/json',
      accept: 'application/json'
    });

    const response = await this.client.send(command);
    const responseBody = JSON.parse(new TextDecoder().decode(response.body));
    return responseBody.content[0].text;
  }

  // Streams the model response, yielding text fragments as they arrive.
  private async *streamResponse(payload: any): AsyncIterable<string> {
    const command = new InvokeModelWithResponseStreamCommand({
      modelId: this.modelId,
      body: JSON.stringify(payload),
      contentType: 'application/json',
      accept: 'application/json'
    });

    const response = await this.client.send(command);

    if (response.body) {
      for await (const event of response.body) {
        if (event.chunk?.bytes) {
          const chunk = JSON.parse(new TextDecoder().decode(event.chunk.bytes));
          if (chunk.type === 'content_block_delta') {
            yield chunk.delta.text;
          }
        }
      }
    }
  }

  // Generates an embedding vector for the given text using the configured
  // embedding model.
  async generateEmbedding(text: string): Promise<number[]> {
    const embeddingModelId = modelConfig.models.embedding.id;

    const command = new InvokeModelCommand({
      modelId: embeddingModelId,
      body: JSON.stringify({ inputText: text }),
      contentType: 'application/json',
      accept: 'application/json'
    });

    const response = await this.client.send(command);
    const responseBody = JSON.parse(new TextDecoder().decode(response.body));
    return responseBody.embedding;
  }
}
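
For reference, a minimal usage sketch of this client. It assumes AWS credentials and region access to Bedrock are already configured in the environment, that ../model-config.json resolves to valid model IDs, and that the class is imported from './bedrock-client'; the prompts shown are placeholders, not part of the project.

// usage-sketch.ts — illustrative only, under the assumptions above.
import { BedrockClient } from './bedrock-client';

async function main() {
  const client = new BedrockClient('us-east-1');

  // Non-streaming call: resolves to a plain string.
  const answer = await client.generateText('Example prompt: summarize this dataset.');
  console.log(answer);

  // Streaming call: the same method returns an AsyncIterable<string>,
  // so the result must be narrowed before iterating.
  const stream = await client.generateText('Example prompt: list output formats.', { streaming: true });
  if (typeof stream !== 'string') {
    for await (const token of stream) {
      process.stdout.write(token);
    }
  }

  // Embedding call: returns the raw vector from the embedding model.
  const vector = await client.generateEmbedding('Example text to embed');
  console.log(`embedding length: ${vector.length}`);
}

main().catch(console.error);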

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Dhenenjay/axion-planetary-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.