Skip to main content
Glama
targets.bzl (1.1 kB)
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.

load("@prelude//decls:core_rules.bzl", "TargetCpuType")
load("@prelude//os_lookup:defs.bzl", "Os", "OsLookup")

# Enum built from the CPU types declared by the prelude's core rules.
_cpu = enum(*TargetCpuType)

# Lookup table from an (OS, CPU) pair to the corresponding Rust-style
# target triple string.
_OS_TRIPLES = {
    (Os("linux"), _cpu("arm64")): "aarch64-unknown-linux-gnu",
    (Os("linux"), _cpu("x86_64")): "x86_64-unknown-linux-gnu",
    (Os("macos"), _cpu("arm64")): "aarch64-apple-darwin",
    (Os("macos"), _cpu("x86_64")): "x86_64-apple-darwin",
    (Os("windows"), _cpu("arm64")): "aarch64-pc-windows-msvc",
    (Os("windows"), _cpu("x86_64")): "x86_64-pc-windows-msvc",
}

def _exec_triple(ctx: AnalysisContext) -> [str, None]:
    """Return the target triple for the execution platform of `ctx`.

    Reads the `OsLookup` provider from the rule's `_exec_os_type` attribute.
    Returns None when the CPU is unset, or when the (OS, CPU) combination is
    not one of the six known in `_OS_TRIPLES`.
    """
    exec_os = ctx.attrs._exec_os_type[OsLookup]

    # Guard: without a CPU there is nothing to look up.
    if not exec_os.cpu:
        return None

    return _OS_TRIPLES.get((exec_os.os, _cpu(exec_os.cpu)))

# Public API of this module.
targets = struct(
    exec_triple = _exec_triple,
)

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/systeminit/si'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.