Skip to main content
Glama
azure-provider.ts (1.21 kB)
import { httpClient, HttpMethod } from '@activepieces/pieces-common' import { AIProviderModel, AIProviderModelType, AzureProviderConfig } from '@activepieces/shared' import { AIProviderStrategy } from './ai-provider' export const azureProvider: AIProviderStrategy<AzureProviderConfig> = { name: 'Azure OpenAI', async listModels(config: AzureProviderConfig): Promise<AIProviderModel[]> { const endpoint = `https://${config.resourceName}.openai.azure.com` const apiKey = config.apiKey const apiVersion = '2024-10-21' if (!endpoint || !apiKey) { return [] } const res = await httpClient.sendRequest<{ data: AzureModel[] }>({ url: `${endpoint}/openai/deployments?api-version=${apiVersion}`, method: HttpMethod.GET, headers: { 'api-key': config.apiKey, 'Content-Type': 'application/json', }, }) const { data } = res.body return data.map((deployment: AzureModel) => ({ id: deployment.name, name: deployment.name, type: AIProviderModelType.TEXT, })) }, } type AzureModel = { name: string }

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/activepieces/activepieces'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.