Skip to main content
Glama
nrwl

Nx MCP Server

Official
by nrwl
inference-plugins-completion.ts1.46 kB
import { getNxVersion } from '@nx-console/shared-nx-workspace-info';
import { workspaceDependencies } from '@nx-console/shared-npm';
import { existsSync } from 'fs';
import { join } from 'path';
import { CompletionItem } from 'vscode-json-languageservice';

/**
 * Cache of computed completion items, keyed by workspace path.
 *
 * NOTE(fix): the original implementation used a single module-level variable,
 * so results computed for one workspace were returned for calls made with a
 * different `workingPath`. Keying by path prevents cross-workspace staleness.
 */
const inferencePluginsCompletionCache = new Map<string, CompletionItem[]>();

/**
 * Builds JSON completion items for Nx inference plugins by scanning the
 * workspace's dependencies for packages that ship a top-level `plugin.js`.
 *
 * @param workingPath root path of the workspace; when `undefined` no scan is
 *   possible and an empty list is returned
 * @returns completion items labeled `<package>/plugin` for each dependency
 *   whose normalized path contains `'nx'` and contains a `plugin.js` file
 */
export async function inferencePluginsCompletion(
  workingPath: string | undefined,
): Promise<CompletionItem[]> {
  if (!workingPath) {
    return [];
  }

  // Reuse previously computed, non-empty results for this workspace only.
  // Empty results are deliberately not cached so a later call can retry
  // (e.g. after dependencies finish installing).
  const cached = inferencePluginsCompletionCache.get(workingPath);
  if (cached && cached.length > 0) {
    return cached;
  }

  const completions: CompletionItem[] = [];

  const nxVersion = await getNxVersion(workingPath);
  const dependencies = await workspaceDependencies(workingPath, nxVersion);

  for (const dependency of dependencies) {
    // Only packages shipping a top-level plugin.js can act as inference plugins.
    if (!existsSync(join(dependency, 'plugin.js'))) {
      continue;
    }

    // Normalize Windows backslashes, then keep the package path after the
    // last 'node_modules/' segment (handles nested node_modules).
    const dependencyPath = dependency
      .replace(/\\/g, '/')
      .split('node_modules/')
      .pop();

    // Heuristic: only offer nx-related packages as inference plugin candidates.
    if (dependencyPath?.includes('nx')) {
      completions.push({
        label: `${dependencyPath}/plugin`,
      });
    }
  }

  inferencePluginsCompletionCache.set(workingPath, completions);
  return completions;
}

/**
 * Clears all cached completion results; the next call to
 * `inferencePluginsCompletion` will rescan the workspace.
 */
export async function resetInferencePluginsCompletionCache() {
  inferencePluginsCompletionCache.clear();
}

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/nrwl/nx-console'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.