
cognee-mcp

test_different_operating_systems.yml (7.43 kB)
name: Tests to run on different Operating Systems

permissions:
  contents: read

on:
  workflow_call:
    inputs:
      python-versions:
        required: false
        type: string
        default: '["3.10.x", "3.11.x", "3.12.x"]'
    secrets:
      LLM_PROVIDER:
        required: true
      LLM_MODEL:
        required: true
      LLM_ENDPOINT:
        required: true
      LLM_API_KEY:
        required: true
      LLM_API_VERSION:
        required: true
      EMBEDDING_PROVIDER:
        required: true
      EMBEDDING_MODEL:
        required: true
      EMBEDDING_ENDPOINT:
        required: true
      EMBEDDING_API_KEY:
        required: true
      EMBEDDING_API_VERSION:
        required: true

env:
  RUNTIME__LOG_LEVEL: ERROR
  ENV: 'dev'

jobs:
  run-unit-tests:
    name: Unit tests ${{ matrix.python-version }} on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        python-version: ${{ fromJSON(inputs.python-versions) }}
        os: [ubuntu-22.04, macos-13, macos-15, windows-latest]
      fail-fast: false
    steps:
      - name: Check out
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Cognee Setup
        uses: ./.github/actions/cognee_setup
        with:
          python-version: ${{ matrix.python-version }}

      - name: Run unit tests
        shell: bash
        run: uv run pytest cognee/tests/unit/
        env:
          PYTHONUTF8: 1
          LLM_PROVIDER: openai
          LLM_MODEL: ${{ secrets.LLM_MODEL }}
          LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
          LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
          LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
          EMBEDDING_PROVIDER: openai
          EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
          EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
          EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
          EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}

  run-integration-tests:
    name: Integration tests ${{ matrix.python-version }} on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        python-version: ${{ fromJSON(inputs.python-versions) }}
        os: [ ubuntu-22.04, macos-13, macos-15, windows-latest ]
      fail-fast: false
    steps:
      - name: Check out
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Cognee Setup
        uses: ./.github/actions/cognee_setup
        with:
          python-version: ${{ matrix.python-version }}

      - name: Run integration tests
        shell: bash
        run: uv run pytest cognee/tests/integration/
        env:
          PYTHONUTF8: 1
          LLM_PROVIDER: openai
          LLM_MODEL: ${{ secrets.LLM_MODEL }}
          LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
          LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
          LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
          EMBEDDING_PROVIDER: openai
          EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
          EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
          EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
          EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}

  run-library-test:
    name: Library test ${{ matrix.python-version }} on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        python-version: ${{ fromJSON(inputs.python-versions) }}
        os: [ ubuntu-22.04, macos-13, macos-15, windows-latest ]
      fail-fast: false
    steps:
      - name: Check out
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Cognee Setup
        uses: ./.github/actions/cognee_setup
        with:
          python-version: ${{ matrix.python-version }}

      - name: Run default basic pipeline
        shell: bash
        env:
          PYTHONUTF8: 1
          LLM_PROVIDER: openai
          LLM_MODEL: ${{ secrets.LLM_MODEL }}
          LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
          LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
          LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
          EMBEDDING_PROVIDER: openai
          EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
          EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
          EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
          EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
        run: uv run python ./cognee/tests/test_library.py

  run-build-test:
    name: Build test ${{ matrix.python-version }} on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        python-version: ${{ fromJSON(inputs.python-versions) }}
        os: [ ubuntu-22.04, macos-13, macos-15, windows-latest ]
      fail-fast: false
    steps:
      - name: Check out
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Cognee Setup
        uses: ./.github/actions/cognee_setup
        with:
          python-version: ${{ matrix.python-version }}

      - name: Build with uv
        shell: bash
        run: uv build

      - name: Install Package
        if: ${{ !contains(matrix.os, 'windows-latest') }}
        run: |
          cd dist
          pip install *.whl

  run-soft-deletion-test:
    name: Soft Delete test ${{ matrix.python-version }} on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        python-version: ${{ fromJSON(inputs.python-versions) }}
        os: [ ubuntu-22.04, macos-13, macos-15, windows-latest ]
      fail-fast: false
    steps:
      - name: Check out
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Cognee Setup
        uses: ./.github/actions/cognee_setup
        with:
          python-version: ${{ matrix.python-version }}

      - name: Run Soft Deletion Tests
        env:
          ENV: 'dev'
          LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }} # Test needs OpenAI endpoint to handle multimedia
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
          EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
          EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
          EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
        run: uv run python ./cognee/tests/test_delete_soft.py

  run-hard-deletion-test:
    name: Hard Delete test ${{ matrix.python-version }} on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        python-version: ${{ fromJSON(inputs.python-versions) }}
        os: [ ubuntu-22.04, macos-13, macos-15, windows-latest ]
      fail-fast: false
    steps:
      - name: Check out
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Cognee Setup
        uses: ./.github/actions/cognee_setup
        with:
          python-version: ${{ matrix.python-version }}

      - name: Run Hard Deletion Test
        env:
          ENV: 'dev'
          LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }} # Test needs OpenAI endpoint to handle multimedia
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
          EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
          EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
          EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
        run: uv run python ./cognee/tests/test_delete_hard.py
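
Because this file is triggered via workflow_call, it is a reusable workflow meant to be invoked from another workflow in the same repository rather than run directly. A minimal caller might look like the sketch below; the caller file name and the pull_request trigger are assumptions for illustration, and the path assumes the file lives in the standard .github/workflows/ directory. Note that the soft- and hard-deletion jobs also read secrets.OPENAI_API_KEY, which is not declared under workflow_call, so forwarding secrets with "secrets: inherit" is the simplest way to make every referenced secret available to the called jobs.

# Hypothetical caller, e.g. .github/workflows/test_suites.yml (name assumed)
name: Test suites

on:
  pull_request:  # assumed trigger; any caller event works

jobs:
  os-matrix-tests:
    # Invoke the reusable workflow shown above
    uses: ./.github/workflows/test_different_operating_systems.yml
    with:
      # Optional override; defaults to '["3.10.x", "3.11.x", "3.12.x"]'
      python-versions: '["3.11.x", "3.12.x"]'
    # Forward all repository secrets (LLM_*, EMBEDDING_*, OPENAI_API_KEY)
    secrets: inherit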
