We provide all the information about MCP servers via our MCP API. For example:
curl -X GET 'https://glama.ai/api/mcp/v1/servers/1withall/domin8'
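The same endpoint can also be queried programmatically. Below is a minimal sketch in Python using only the standard library; the URL is taken from the curl example above, and no assumptions are made about the shape of the JSON response.

import json
from urllib.request import urlopen

# Fetch server metadata from the Glama MCP API (same endpoint as the curl example).
URL = "https://glama.ai/api/mcp/v1/servers/1withall/domin8"

with urlopen(URL) as resp:
    data = json.load(resp)

# Print the raw JSON; field names are whatever the API returns.
print(json.dumps(data, indent=2))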
If you have feedback or need assistance with the MCP directory API, please join our Discord server.
test_import_graph_parallel.py
from pathlib import Path

from domin8.tools.import_graph import build_import_graph


def test_parallel_indexing(tmp_path):
    # Create a small repo with multiple Python files to exercise parallelism
    repo_root = tmp_path
    (repo_root / "pkg").mkdir()
    for i in range(4):
        (repo_root / "pkg" / f"mod{i}.py").write_text(f"""
def public_{i}():
    return {i}
""")
    (repo_root / "tests").mkdir()
    (repo_root / "tests" / "test_all.py").write_text("""
from pkg.mod0 import public_0
from pkg.mod1 import public_1

def test_combined():
    assert public_0() == 0
    assert public_1() == 1
""")

    # Write a JSON optimization config to set the worker count
    cfg_dir = repo_root / "config"
    cfg_dir.mkdir()
    (cfg_dir / "optimization.json").write_text(
        '{"enable_parallel_indexing": true, "max_workers": 2}'
    )

    g = build_import_graph(repo_root)

    # Ensure some exports were discovered
    assert "public_0" in g.symbol_to_files
    assert "public_1" in g.symbol_to_files

    # Ensure symbol_to_tests was computed
    assert hasattr(g, "symbol_to_tests")

    # At least one of the exported symbols maps to the test file
    assert any(
        "test_all.py" in str(p)
        for pset in g.symbol_to_tests.values()
        for p in pset
    )
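For context, here is a minimal, hypothetical sketch of how a parallel indexer might consume an optimization.json like the one the test writes. The config keys mirror the test above, but the index_repo and _exports_of functions below are illustrative only and are not the domin8 implementation.

import ast
import json
from concurrent.futures import ProcessPoolExecutor
from pathlib import Path


def _exports_of(path: Path) -> tuple[str, list[str]]:
    # Parse one module and return its top-level function names.
    tree = ast.parse(path.read_text())
    names = [n.name for n in tree.body if isinstance(n, ast.FunctionDef)]
    return str(path), names


def index_repo(repo_root: Path) -> dict[str, list[str]]:
    # Read the same optimization config the test writes (illustrative loader, not domin8's).
    cfg_path = repo_root / "config" / "optimization.json"
    cfg = json.loads(cfg_path.read_text()) if cfg_path.exists() else {}

    files = sorted(repo_root.rglob("*.py"))
    if cfg.get("enable_parallel_indexing"):
        # Fan the per-file parsing out over a worker pool sized by max_workers.
        with ProcessPoolExecutor(max_workers=cfg.get("max_workers")) as pool:
            results = list(pool.map(_exports_of, files))
    else:
        results = [_exports_of(f) for f in files]
    return dict(results)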