Skip to main content
Glama

cognee-mcp

temporal_graph_tests.yml (7.82 kB)
---
# Reusable workflow: runs the temporal-graph integration test
# (./cognee/tests/test_temporal_graph.py) against four database matrices:
#   - Kuzu  + LanceDB  + SQLite
#   - Neo4j + LanceDB  + SQLite
#   - Kuzu  + pgvector + Postgres
#   - Neo4j + pgvector + Postgres
# Callers select matrices via the `databases` input ('all' or a
# comma-separated list of tokens such as 'kuzu/lance/sqlite').
name: Temporal Graph Tests

permissions:
  contents: read

on:
  workflow_call:
    inputs:
      databases:
        required: false
        type: string
        default: "all"
        description: "Which vector databases to test (comma-separated list or 'all')"
      # FIX: every job below passes `inputs.python-version` to the setup
      # action, but this input was never declared, so it silently expanded
      # to an empty string. Declared here with a default so the reference
      # resolves; backward-compatible for existing callers.
      # NOTE(review): confirm "3.11" matches the project's default Python.
      python-version:
        required: false
        type: string
        default: "3.11"
        description: "Python version forwarded to the cognee_setup action"

jobs:
  run_temporal_graph_kuzu_lance_sqlite:
    name: Temporal Graph test Kuzu (lancedb + sqlite)
    runs-on: ubuntu-22.04
    if: ${{ inputs.databases == 'all' || contains(inputs.databases, 'kuzu/lance/sqlite') }}
    steps:
      - name: Check out
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Cognee Setup
        uses: ./.github/actions/cognee_setup
        with:
          python-version: ${{ inputs.python-version }}

      - name: Dependencies already installed
        run: echo "Dependencies already installed in setup"

      - name: Run Temporal Graph with Kuzu (lancedb + sqlite)
        env:
          ENV: 'dev'
          LLM_MODEL: ${{ secrets.LLM_MODEL }}
          LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
          LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
          LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
          EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
          EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
          EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
          EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
          GRAPH_DATABASE_PROVIDER: 'kuzu'
          VECTOR_DB_PROVIDER: 'lancedb'
          DB_PROVIDER: 'sqlite'
        run: uv run python ./cognee/tests/test_temporal_graph.py

  run_temporal_graph_neo4j_lance_sqlite:
    name: Temporal Graph test Neo4j (lancedb + sqlite)
    runs-on: ubuntu-22.04
    if: ${{ inputs.databases == 'all' || contains(inputs.databases, 'neo4j/lance/sqlite') }}
    services:
      neo4j:
        image: neo4j:5.11
        env:
          NEO4J_AUTH: neo4j/pleaseletmein
          NEO4J_PLUGINS: '["apoc","graph-data-science"]'
        ports:
          - 7474:7474
          - 7687:7687
        options: >-
          --health-cmd="cypher-shell -u neo4j -p pleaseletmein 'RETURN 1'"
          --health-interval=10s
          --health-timeout=5s
          --health-retries=5
    steps:
      - name: Check out
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Cognee Setup
        uses: ./.github/actions/cognee_setup
        with:
          python-version: ${{ inputs.python-version }}

      - name: Dependencies already installed
        run: echo "Dependencies already installed in setup"

      - name: Run Temporal Graph with Neo4j (lancedb + sqlite)
        env:
          ENV: 'dev'
          LLM_MODEL: ${{ secrets.LLM_MODEL }}
          LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
          LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
          LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
          EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
          EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
          EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
          EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
          GRAPH_DATABASE_PROVIDER: 'neo4j'
          VECTOR_DB_PROVIDER: 'lancedb'
          DB_PROVIDER: 'sqlite'
          # Connection details for the neo4j service container above.
          GRAPH_DATABASE_URL: 'bolt://localhost:7687'
          GRAPH_DATABASE_USERNAME: 'neo4j'
          GRAPH_DATABASE_PASSWORD: 'pleaseletmein'
        run: uv run python ./cognee/tests/test_temporal_graph.py

  run_temporal_graph_kuzu_postgres_pgvector:
    name: Temporal Graph test Kuzu (postgres + pgvector)
    runs-on: ubuntu-22.04
    if: ${{ inputs.databases == 'all' || contains(inputs.databases, 'kuzu/pgvector/postgres') }}
    services:
      postgres:
        image: pgvector/pgvector:pg17
        env:
          POSTGRES_USER: cognee
          POSTGRES_PASSWORD: cognee
          POSTGRES_DB: cognee_db
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
    steps:
      - name: Check out
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Cognee Setup
        uses: ./.github/actions/cognee_setup
        with:
          python-version: ${{ inputs.python-version }}
          extra-dependencies: "postgres"

      - name: Dependencies already installed
        run: echo "Dependencies already installed in setup"

      - name: Run Temporal Graph with Kuzu (postgres + pgvector)
        env:
          ENV: 'dev'
          LLM_MODEL: ${{ secrets.LLM_MODEL }}
          LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
          LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
          LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
          EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
          EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
          EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
          EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
          GRAPH_DATABASE_PROVIDER: 'kuzu'
          VECTOR_DB_PROVIDER: 'pgvector'
          DB_PROVIDER: 'postgres'
          # Connection details for the postgres service container above.
          DB_NAME: 'cognee_db'
          DB_HOST: '127.0.0.1'
          # Quoted: env values are strings; an unquoted 5432 is a YAML int.
          DB_PORT: '5432'
          DB_USERNAME: 'cognee'
          DB_PASSWORD: 'cognee'
        run: uv run python ./cognee/tests/test_temporal_graph.py

  run_temporal_graph_neo4j_postgres_pgvector:
    name: Temporal Graph test Neo4j (postgres + pgvector)
    runs-on: ubuntu-22.04
    if: ${{ inputs.databases == 'all' || contains(inputs.databases, 'neo4j/pgvector/postgres') }}
    services:
      neo4j:
        image: neo4j:5.11
        env:
          NEO4J_AUTH: neo4j/pleaseletmein
          NEO4J_PLUGINS: '["apoc","graph-data-science"]'
        ports:
          - 7474:7474
          - 7687:7687
        options: >-
          --health-cmd="cypher-shell -u neo4j -p pleaseletmein 'RETURN 1'"
          --health-interval=10s
          --health-timeout=5s
          --health-retries=5
      postgres:
        image: pgvector/pgvector:pg17
        env:
          POSTGRES_USER: cognee
          POSTGRES_PASSWORD: cognee
          POSTGRES_DB: cognee_db
        ports:
          - 5432:5432
        # Normalized to the space-separated form used by the other postgres
        # service (was a mix of `--health-retries=5` and space-separated flags).
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - name: Check out
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Cognee Setup
        uses: ./.github/actions/cognee_setup
        with:
          python-version: ${{ inputs.python-version }}
          extra-dependencies: "postgres"

      - name: Dependencies already installed
        run: echo "Dependencies already installed in setup"

      - name: Run Temporal Graph with Neo4j (postgres + pgvector)
        env:
          ENV: 'dev'
          LLM_MODEL: ${{ secrets.LLM_MODEL }}
          LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
          LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
          LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
          EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
          EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
          EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
          EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
          GRAPH_DATABASE_PROVIDER: 'neo4j'
          VECTOR_DB_PROVIDER: 'pgvector'
          DB_PROVIDER: 'postgres'
          # Connection details for the neo4j service container above.
          GRAPH_DATABASE_URL: 'bolt://localhost:7687'
          GRAPH_DATABASE_USERNAME: 'neo4j'
          GRAPH_DATABASE_PASSWORD: 'pleaseletmein'
          # Connection details for the postgres service container above.
          DB_NAME: 'cognee_db'
          DB_HOST: '127.0.0.1'
          DB_PORT: '5432'
          DB_USERNAME: 'cognee'
          DB_PASSWORD: 'cognee'
        run: uv run python ./cognee/tests/test_temporal_graph.py

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/topoteretes/cognee'

If you have feedback or need assistance with the MCP directory API, please join our Discord server